author    Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-07 18:45:59 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-07 18:45:59 +0000
commit    19fcec84d8d7d21e796c7624e521b60d28ee21ed (patch)
tree      42d26aa27d1e3f7c0b8bd3fd14e7d7082f5008dc /src/boost/tools/build
parent    Initial commit. (diff)
Adding upstream version 16.2.11+ds. (upstream/16.2.11+ds, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/boost/tools/build')
-rw-r--r--src/boost/tools/build/CONTRIBUTING.adoc179
-rw-r--r--src/boost/tools/build/Jamroot.jam271
-rw-r--r--src/boost/tools/build/LICENSE.txt23
-rw-r--r--src/boost/tools/build/README.adoc31
-rw-r--r--src/boost/tools/build/azure-pipelines.yml727
-rw-r--r--src/boost/tools/build/boost-build.jam8
-rw-r--r--src/boost/tools/build/bootstrap.bat39
-rwxr-xr-xsrc/boost/tools/build/bootstrap.sh28
-rw-r--r--src/boost/tools/build/bootstrap_vms.com48
-rw-r--r--src/boost/tools/build/example/asciidoctor/example.adoc3
-rw-r--r--src/boost/tools/build/example/asciidoctor/example_manpage.adoc38
-rw-r--r--src/boost/tools/build/example/asciidoctor/jamroot.jam11
-rw-r--r--src/boost/tools/build/example/boost-build.jam6
-rw-r--r--src/boost/tools/build/example/built_tool/Jamroot.jam8
-rw-r--r--src/boost/tools/build/example/built_tool/core/Jamfile.jam39
-rw-r--r--src/boost/tools/build/example/built_tool/core/a.td0
-rw-r--r--src/boost/tools/build/example/built_tool/core/core.cpp5
-rw-r--r--src/boost/tools/build/example/built_tool/readme.txt5
-rw-r--r--src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam4
-rw-r--r--src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp9
-rw-r--r--src/boost/tools/build/example/complex-testing/compile-fail.cpp17
-rw-r--r--src/boost/tools/build/example/complex-testing/fail.cpp17
-rw-r--r--src/boost/tools/build/example/complex-testing/jamroot.jam15
-rw-r--r--src/boost/tools/build/example/complex-testing/post.cpp17
-rw-r--r--src/boost/tools/build/example/complex-testing/success.cpp17
-rw-r--r--src/boost/tools/build/example/customization/class.verbatim7
-rw-r--r--src/boost/tools/build/example/customization/codegen.cpp36
-rw-r--r--src/boost/tools/build/example/customization/inline_file.py44
-rw-r--r--src/boost/tools/build/example/customization/jamroot.jam9
-rw-r--r--src/boost/tools/build/example/customization/readme.txt11
-rw-r--r--src/boost/tools/build/example/customization/t1.verbatim2
-rw-r--r--src/boost/tools/build/example/customization/t2.verbatim0
-rw-r--r--src/boost/tools/build/example/customization/usage.verbatim5
-rw-r--r--src/boost/tools/build/example/customization/verbatim.jam61
-rw-r--r--src/boost/tools/build/example/customization/verbatim.py47
-rw-r--r--src/boost/tools/build/example/generate/README.txt11
-rw-r--r--src/boost/tools/build/example/generate/a.cpp10
-rw-r--r--src/boost/tools/build/example/generate/gen.jam26
-rw-r--r--src/boost/tools/build/example/generate/gen.py16
-rw-r--r--src/boost/tools/build/example/generate/jamroot.jam9
-rw-r--r--src/boost/tools/build/example/generator/README.txt6
-rw-r--r--src/boost/tools/build/example/generator/foo.gci10
-rw-r--r--src/boost/tools/build/example/generator/jamroot.jam6
-rw-r--r--src/boost/tools/build/example/generator/soap.jam86
-rw-r--r--src/boost/tools/build/example/gettext/jamfile.jam26
-rw-r--r--src/boost/tools/build/example/gettext/jamroot.jam6
-rw-r--r--src/boost/tools/build/example/gettext/main.cpp28
-rw-r--r--src/boost/tools/build/example/gettext/readme.txt24
-rw-r--r--src/boost/tools/build/example/gettext/russian.po21
-rw-r--r--src/boost/tools/build/example/hello/hello.cpp18
-rw-r--r--src/boost/tools/build/example/hello/jamroot.jam1
-rw-r--r--src/boost/tools/build/example/hello/readme.adoc46
-rw-r--r--src/boost/tools/build/example/libraries/app/app.cpp15
-rw-r--r--src/boost/tools/build/example/libraries/app/jamfile.jam9
-rw-r--r--src/boost/tools/build/example/libraries/jamroot.jam4
-rw-r--r--src/boost/tools/build/example/libraries/util/foo/bar.cpp13
-rw-r--r--src/boost/tools/build/example/libraries/util/foo/include/lib1.h10
-rw-r--r--src/boost/tools/build/example/libraries/util/foo/jamfile.jam9
-rw-r--r--src/boost/tools/build/example/make/foo.py2
-rw-r--r--src/boost/tools/build/example/make/jamroot.jam22
-rw-r--r--src/boost/tools/build/example/make/main_cpp.pro1
-rw-r--r--src/boost/tools/build/example/make/readme.txt7
-rw-r--r--src/boost/tools/build/example/pch-multi/include/extra/meta.hpp17
-rw-r--r--src/boost/tools/build/example/pch-multi/include/pch.hpp19
-rw-r--r--src/boost/tools/build/example/pch-multi/include/std.hpp16
-rw-r--r--src/boost/tools/build/example/pch-multi/jamroot.jam30
-rw-r--r--src/boost/tools/build/example/pch-multi/source/hello_world.cpp17
-rw-r--r--src/boost/tools/build/example/pch/include/pch.hpp19
-rw-r--r--src/boost/tools/build/example/pch/jamroot.jam29
-rw-r--r--src/boost/tools/build/example/pch/source/hello_world.cpp15
-rw-r--r--src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc4
-rw-r--r--src/boost/tools/build/example/pkg-config/jamroot.jam104
-rw-r--r--src/boost/tools/build/example/pkg-config/packages/debugged.pc4
-rw-r--r--src/boost/tools/build/example/pkg-config/packages/foobar.pc4
-rw-r--r--src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc4
-rw-r--r--src/boost/tools/build/example/pkg-config/packages/mangled.pc4
-rw-r--r--src/boost/tools/build/example/pkg-config/packages/versioned.pc3
-rw-r--r--src/boost/tools/build/example/pkg-config/packages/with-var.pc4
-rw-r--r--src/boost/tools/build/example/pkg-config/test1.cpp11
-rw-r--r--src/boost/tools/build/example/pkg-config/test2.cpp12
-rw-r--r--src/boost/tools/build/example/pkg-config/test3.cpp12
-rw-r--r--src/boost/tools/build/example/pkg-config/test4.cpp11
-rw-r--r--src/boost/tools/build/example/pkg-config/test5.cpp12
-rw-r--r--src/boost/tools/build/example/python_modules/jamroot.jam8
-rw-r--r--src/boost/tools/build/example/python_modules/python_helpers.jam15
-rw-r--r--src/boost/tools/build/example/python_modules/python_helpers.py18
-rw-r--r--src/boost/tools/build/example/python_modules/readme.txt16
-rw-r--r--src/boost/tools/build/example/qt/README.txt20
-rw-r--r--src/boost/tools/build/example/qt/qt3/hello/canvas.cpp73
-rw-r--r--src/boost/tools/build/example/qt/qt3/hello/canvas.h35
-rw-r--r--src/boost/tools/build/example/qt/qt3/hello/jamroot.jam13
-rw-r--r--src/boost/tools/build/example/qt/qt3/hello/main.cpp36
-rw-r--r--src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam11
-rw-r--r--src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp41
-rw-r--r--src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui58
-rw-r--r--src/boost/tools/build/example/qt/qt3/uic/jamroot.jam15
-rw-r--r--src/boost/tools/build/example/qt/qt3/uic/main.cpp18
-rw-r--r--src/boost/tools/build/example/qt/qt4/hello/arrow.cpp158
-rw-r--r--src/boost/tools/build/example/qt/qt4/hello/arrow.h30
-rw-r--r--src/boost/tools/build/example/qt/qt4/hello/jamroot.jam14
-rw-r--r--src/boost/tools/build/example/qt/qt4/hello/main.cpp27
-rw-r--r--src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam18
-rw-r--r--src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp39
-rw-r--r--src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui55
-rw-r--r--src/boost/tools/build/example/qt/qt4/uic/jamroot.jam18
-rw-r--r--src/boost/tools/build/example/qt/qt4/uic/main.cpp23
-rw-r--r--src/boost/tools/build/example/sanitizers/jamroot.jam1
-rw-r--r--src/boost/tools/build/example/sanitizers/main.cpp9
-rw-r--r--src/boost/tools/build/example/sanitizers/readme.adoc64
-rw-r--r--src/boost/tools/build/example/sass/importing.scss3
-rw-r--r--src/boost/tools/build/example/sass/include/foobar.scss3
-rw-r--r--src/boost/tools/build/example/sass/jamroot.jam15
-rw-r--r--src/boost/tools/build/example/sass/singleton.sass12
-rw-r--r--src/boost/tools/build/example/sass/singleton.scss11
-rw-r--r--src/boost/tools/build/example/site-config.jam4
-rw-r--r--src/boost/tools/build/example/testing/compile-fail.cpp17
-rw-r--r--src/boost/tools/build/example/testing/fail.cpp17
-rw-r--r--src/boost/tools/build/example/testing/jamroot.jam10
-rw-r--r--src/boost/tools/build/example/testing/success.cpp17
-rw-r--r--src/boost/tools/build/example/time/hello.cpp16
-rw-r--r--src/boost/tools/build/example/time/jamroot.jam16
-rw-r--r--src/boost/tools/build/example/time/readme.qbk47
-rw-r--r--src/boost/tools/build/example/try_compile/Jamroot.jam29
-rw-r--r--src/boost/tools/build/example/try_compile/foo.cpp6
-rw-r--r--src/boost/tools/build/example/try_compile/main.cpp8
-rw-r--r--src/boost/tools/build/example/user-config.jam92
-rw-r--r--src/boost/tools/build/example/variant/a.cpp7
-rw-r--r--src/boost/tools/build/example/variant/jamfile.jam11
-rw-r--r--src/boost/tools/build/example/variant/jamroot.jam12
-rw-r--r--src/boost/tools/build/example/variant/libs/jamfile.jam8
-rw-r--r--src/boost/tools/build/example/variant/libs/l.cpp9
-rw-r--r--src/boost/tools/build/example/variant/readme.qbk94
-rw-r--r--src/boost/tools/build/notes/README.txt8
-rw-r--r--src/boost/tools/build/notes/build_dir_option.txt77
-rw-r--r--src/boost/tools/build/notes/changes.txt317
-rw-r--r--src/boost/tools/build/notes/relative_source_paths.txt76
-rw-r--r--src/boost/tools/build/notes/release_procedure.txt83
-rw-r--r--src/boost/tools/build/src/__init__.py0
-rw-r--r--src/boost/tools/build/src/bootstrap.jam18
-rw-r--r--src/boost/tools/build/src/build-system.jam1079
-rw-r--r--src/boost/tools/build/src/build/__init__.py0
-rw-r--r--src/boost/tools/build/src/build/ac.jam324
-rw-r--r--src/boost/tools/build/src/build/alias.jam78
-rwxr-xr-xsrc/boost/tools/build/src/build/alias.py75
-rw-r--r--src/boost/tools/build/src/build/build-request.jam400
-rw-r--r--src/boost/tools/build/src/build/build_request.py222
-rw-r--r--src/boost/tools/build/src/build/config-cache.jam78
-rw-r--r--src/boost/tools/build/src/build/configure.jam620
-rw-r--r--src/boost/tools/build/src/build/configure.py176
-rw-r--r--src/boost/tools/build/src/build/engine.py246
-rw-r--r--src/boost/tools/build/src/build/errors.py135
-rw-r--r--src/boost/tools/build/src/build/feature.jam1442
-rw-r--r--src/boost/tools/build/src/build/feature.py914
-rw-r--r--src/boost/tools/build/src/build/generators.jam1447
-rw-r--r--src/boost/tools/build/src/build/generators.py1209
-rw-r--r--src/boost/tools/build/src/build/project.jam1357
-rw-r--r--src/boost/tools/build/src/build/project.py1285
-rw-r--r--src/boost/tools/build/src/build/property-set.jam591
-rw-r--r--src/boost/tools/build/src/build/property.jam977
-rw-r--r--src/boost/tools/build/src/build/property.py750
-rw-r--r--src/boost/tools/build/src/build/property_set.py498
-rw-r--r--src/boost/tools/build/src/build/readme.txt11
-rw-r--r--src/boost/tools/build/src/build/scanner.jam163
-rw-r--r--src/boost/tools/build/src/build/scanner.py167
-rw-r--r--src/boost/tools/build/src/build/targets.jam1792
-rw-r--r--src/boost/tools/build/src/build/targets.py1523
-rw-r--r--src/boost/tools/build/src/build/toolset.jam703
-rw-r--r--src/boost/tools/build/src/build/toolset.py417
-rw-r--r--src/boost/tools/build/src/build/type.jam404
-rw-r--r--src/boost/tools/build/src/build/type.py381
-rw-r--r--src/boost/tools/build/src/build/version.jam166
-rw-r--r--src/boost/tools/build/src/build/version.py38
-rw-r--r--src/boost/tools/build/src/build/virtual-target.jam1394
-rw-r--r--src/boost/tools/build/src/build/virtual_target.py1175
-rw-r--r--src/boost/tools/build/src/build_system.py682
-rw-r--r--src/boost/tools/build/src/contrib/__init__.py0
-rw-r--r--src/boost/tools/build/src/contrib/boost.jam308
-rw-r--r--src/boost/tools/build/src/contrib/boost.py280
-rw-r--r--src/boost/tools/build/src/contrib/modular.jam288
-rw-r--r--src/boost/tools/build/src/contrib/tntnet.jam208
-rw-r--r--src/boost/tools/build/src/contrib/wxFormBuilder.jam195
-rw-r--r--src/boost/tools/build/src/engine/Jambase189
-rw-r--r--src/boost/tools/build/src/engine/boost-jam.spec64
-rw-r--r--src/boost/tools/build/src/engine/boost-no-inspect1
-rw-r--r--src/boost/tools/build/src/engine/build.bat194
-rwxr-xr-xsrc/boost/tools/build/src/engine/build.sh496
-rw-r--r--src/boost/tools/build/src/engine/build_vms.com153
-rw-r--r--src/boost/tools/build/src/engine/builtins.cpp2728
-rw-r--r--src/boost/tools/build/src/engine/builtins.h74
-rw-r--r--src/boost/tools/build/src/engine/bump_version.py98
-rw-r--r--src/boost/tools/build/src/engine/check_cxx11.cpp21
-rw-r--r--src/boost/tools/build/src/engine/class.cpp191
-rw-r--r--src/boost/tools/build/src/engine/class.h15
-rw-r--r--src/boost/tools/build/src/engine/command.cpp121
-rw-r--r--src/boost/tools/build/src/engine/command.h101
-rw-r--r--src/boost/tools/build/src/engine/compile.cpp233
-rw-r--r--src/boost/tools/build/src/engine/compile.h60
-rw-r--r--src/boost/tools/build/src/engine/config.h34
-rw-r--r--src/boost/tools/build/src/engine/config_toolset.bat209
-rw-r--r--src/boost/tools/build/src/engine/constants.cpp192
-rw-r--r--src/boost/tools/build/src/engine/constants.h76
-rw-r--r--src/boost/tools/build/src/engine/cwd.cpp88
-rw-r--r--src/boost/tools/build/src/engine/cwd.h36
-rw-r--r--src/boost/tools/build/src/engine/debian/changelog72
-rw-r--r--src/boost/tools/build/src/engine/debian/control16
-rw-r--r--src/boost/tools/build/src/engine/debian/copyright25
-rw-r--r--src/boost/tools/build/src/engine/debian/jam.man.sgml236
-rwxr-xr-xsrc/boost/tools/build/src/engine/debian/rules73
-rw-r--r--src/boost/tools/build/src/engine/debug.cpp158
-rw-r--r--src/boost/tools/build/src/engine/debug.h63
-rw-r--r--src/boost/tools/build/src/engine/debugger.cpp2738
-rw-r--r--src/boost/tools/build/src/engine/debugger.h64
-rw-r--r--src/boost/tools/build/src/engine/execcmd.cpp122
-rw-r--r--src/boost/tools/build/src/engine/execcmd.h115
-rw-r--r--src/boost/tools/build/src/engine/execnt.cpp1370
-rw-r--r--src/boost/tools/build/src/engine/execunix.cpp606
-rw-r--r--src/boost/tools/build/src/engine/execvms.cpp419
-rw-r--r--src/boost/tools/build/src/engine/filent.cpp517
-rw-r--r--src/boost/tools/build/src/engine/filesys.cpp711
-rw-r--r--src/boost/tools/build/src/engine/filesys.h111
-rw-r--r--src/boost/tools/build/src/engine/fileunix.cpp527
-rw-r--r--src/boost/tools/build/src/engine/filevms.cpp440
-rw-r--r--src/boost/tools/build/src/engine/frames.cpp29
-rw-r--r--src/boost/tools/build/src/engine/frames.h46
-rw-r--r--src/boost/tools/build/src/engine/function.cpp5341
-rw-r--r--src/boost/tools/build/src/engine/function.h49
-rw-r--r--src/boost/tools/build/src/engine/glob.cpp152
-rw-r--r--src/boost/tools/build/src/engine/guess_toolset.bat115
-rw-r--r--src/boost/tools/build/src/engine/hash.cpp388
-rw-r--r--src/boost/tools/build/src/engine/hash.h80
-rw-r--r--src/boost/tools/build/src/engine/hcache.cpp522
-rw-r--r--src/boost/tools/build/src/engine/hcache.h20
-rw-r--r--src/boost/tools/build/src/engine/hdrmacro.cpp140
-rw-r--r--src/boost/tools/build/src/engine/hdrmacro.h22
-rw-r--r--src/boost/tools/build/src/engine/headers.cpp198
-rw-r--r--src/boost/tools/build/src/engine/headers.h26
-rw-r--r--src/boost/tools/build/src/engine/jam.cpp794
-rw-r--r--src/boost/tools/build/src/engine/jam.h524
-rw-r--r--src/boost/tools/build/src/engine/jam_strings.cpp240
-rw-r--r--src/boost/tools/build/src/engine/jam_strings.h38
-rw-r--r--src/boost/tools/build/src/engine/jambase.cpp112
-rw-r--r--src/boost/tools/build/src/engine/jambase.h15
-rw-r--r--src/boost/tools/build/src/engine/jamgram.cpp2548
-rw-r--r--src/boost/tools/build/src/engine/jamgram.hpp164
-rw-r--r--src/boost/tools/build/src/engine/jamgram.y386
-rw-r--r--src/boost/tools/build/src/engine/jamgram.yy340
-rw-r--r--src/boost/tools/build/src/engine/jamgramtab.h46
-rw-r--r--src/boost/tools/build/src/engine/lists.cpp474
-rw-r--r--src/boost/tools/build/src/engine/lists.h114
-rw-r--r--src/boost/tools/build/src/engine/make.cpp941
-rw-r--r--src/boost/tools/build/src/engine/make.h45
-rw-r--r--src/boost/tools/build/src/engine/make1.cpp1515
-rw-r--r--src/boost/tools/build/src/engine/md5.cpp381
-rw-r--r--src/boost/tools/build/src/engine/md5.h91
-rw-r--r--src/boost/tools/build/src/engine/mem.cpp8
-rw-r--r--src/boost/tools/build/src/engine/mem.h78
-rw-r--r--src/boost/tools/build/src/engine/mkjambase.cpp123
-rw-r--r--src/boost/tools/build/src/engine/modules.cpp431
-rw-r--r--src/boost/tools/build/src/engine/modules.h53
-rw-r--r--src/boost/tools/build/src/engine/modules/order.cpp159
-rw-r--r--src/boost/tools/build/src/engine/modules/path.cpp25
-rw-r--r--src/boost/tools/build/src/engine/modules/property-set.cpp330
-rw-r--r--src/boost/tools/build/src/engine/modules/readme.txt3
-rw-r--r--src/boost/tools/build/src/engine/modules/regex.cpp233
-rw-r--r--src/boost/tools/build/src/engine/modules/sequence.cpp96
-rw-r--r--src/boost/tools/build/src/engine/modules/set.cpp43
-rw-r--r--src/boost/tools/build/src/engine/native.cpp34
-rw-r--r--src/boost/tools/build/src/engine/native.h35
-rw-r--r--src/boost/tools/build/src/engine/object.cpp397
-rw-r--r--src/boost/tools/build/src/engine/object.h46
-rw-r--r--src/boost/tools/build/src/engine/option.cpp94
-rw-r--r--src/boost/tools/build/src/engine/option.h25
-rw-r--r--src/boost/tools/build/src/engine/output.cpp159
-rw-r--r--src/boost/tools/build/src/engine/output.h42
-rw-r--r--src/boost/tools/build/src/engine/parse.cpp148
-rw-r--r--src/boost/tools/build/src/engine/parse.h81
-rw-r--r--src/boost/tools/build/src/engine/patchlevel.h16
-rw-r--r--src/boost/tools/build/src/engine/pathnt.cpp409
-rw-r--r--src/boost/tools/build/src/engine/pathsys.cpp302
-rw-r--r--src/boost/tools/build/src/engine/pathsys.h87
-rw-r--r--src/boost/tools/build/src/engine/pathunix.cpp86
-rw-r--r--src/boost/tools/build/src/engine/pathvms.cpp254
-rw-r--r--src/boost/tools/build/src/engine/regexp.cpp1330
-rw-r--r--src/boost/tools/build/src/engine/regexp.h36
-rw-r--r--src/boost/tools/build/src/engine/rules.cpp739
-rw-r--r--src/boost/tools/build/src/engine/rules.h274
-rw-r--r--src/boost/tools/build/src/engine/scan.cpp743
-rw-r--r--src/boost/tools/build/src/engine/scan.h71
-rw-r--r--src/boost/tools/build/src/engine/search.cpp275
-rw-r--r--src/boost/tools/build/src/engine/search.h23
-rw-r--r--src/boost/tools/build/src/engine/subst.cpp116
-rw-r--r--src/boost/tools/build/src/engine/subst.h15
-rw-r--r--src/boost/tools/build/src/engine/sysinfo.cpp137
-rw-r--r--src/boost/tools/build/src/engine/sysinfo.h46
-rw-r--r--src/boost/tools/build/src/engine/timestamp.cpp230
-rw-r--r--src/boost/tools/build/src/engine/timestamp.h48
-rw-r--r--src/boost/tools/build/src/engine/variable.cpp393
-rw-r--r--src/boost/tools/build/src/engine/variable.h35
-rw-r--r--src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd59
-rw-r--r--src/boost/tools/build/src/engine/w32_getreg.cpp201
-rw-r--r--src/boost/tools/build/src/engine/yyacc.cpp268
-rw-r--r--src/boost/tools/build/src/exceptions.py55
-rw-r--r--src/boost/tools/build/src/kernel/boost-build.jam5
-rw-r--r--src/boost/tools/build/src/kernel/bootstrap.jam265
-rw-r--r--src/boost/tools/build/src/kernel/bootstrap.py25
-rw-r--r--src/boost/tools/build/src/kernel/class.jam420
-rw-r--r--src/boost/tools/build/src/kernel/errors.jam287
-rw-r--r--src/boost/tools/build/src/kernel/modules.jam365
-rw-r--r--src/boost/tools/build/src/manager.py110
-rw-r--r--src/boost/tools/build/src/options/help.jam222
-rw-r--r--src/boost/tools/build/src/tools/__init__.py0
-rw-r--r--src/boost/tools/build/src/tools/acc.jam160
-rw-r--r--src/boost/tools/build/src/tools/asciidoctor.jam212
-rw-r--r--src/boost/tools/build/src/tools/auto-index.jam204
-rw-r--r--src/boost/tools/build/src/tools/bison.jam26
-rw-r--r--src/boost/tools/build/src/tools/boostbook-config.jam13
-rw-r--r--src/boost/tools/build/src/tools/boostbook.jam740
-rw-r--r--src/boost/tools/build/src/tools/borland.jam270
-rw-r--r--src/boost/tools/build/src/tools/builtin.jam96
-rw-r--r--src/boost/tools/build/src/tools/builtin.py805
-rw-r--r--src/boost/tools/build/src/tools/bzip2.jam279
-rw-r--r--src/boost/tools/build/src/tools/cast.jam91
-rw-r--r--src/boost/tools/build/src/tools/cast.py76
-rw-r--r--src/boost/tools/build/src/tools/clang-darwin.jam189
-rw-r--r--src/boost/tools/build/src/tools/clang-linux.jam223
-rw-r--r--src/boost/tools/build/src/tools/clang-vxworks.jam128
-rw-r--r--src/boost/tools/build/src/tools/clang-win.jam183
-rw-r--r--src/boost/tools/build/src/tools/clang.jam65
-rw-r--r--src/boost/tools/build/src/tools/common.jam1095
-rw-r--r--src/boost/tools/build/src/tools/common.py860
-rw-r--r--src/boost/tools/build/src/tools/como-linux.jam103
-rw-r--r--src/boost/tools/build/src/tools/como-win.jam117
-rw-r--r--src/boost/tools/build/src/tools/como.jam75
-rw-r--r--src/boost/tools/build/src/tools/convert.jam62
-rw-r--r--src/boost/tools/build/src/tools/cray.jam1169
-rw-r--r--src/boost/tools/build/src/tools/cw-config.jam34
-rw-r--r--src/boost/tools/build/src/tools/cw.jam302
-rw-r--r--src/boost/tools/build/src/tools/cygwin.jam12
-rw-r--r--src/boost/tools/build/src/tools/darwin.jam620
-rw-r--r--src/boost/tools/build/src/tools/darwin.py57
-rw-r--r--src/boost/tools/build/src/tools/diab.jam131
-rw-r--r--src/boost/tools/build/src/tools/dmc.jam174
-rw-r--r--src/boost/tools/build/src/tools/docutils.jam125
-rw-r--r--src/boost/tools/build/src/tools/doxproc.py859
-rw-r--r--src/boost/tools/build/src/tools/doxygen-config.jam11
-rw-r--r--src/boost/tools/build/src/tools/doxygen.jam782
-rw-r--r--src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile3
-rw-r--r--src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp0
-rw-r--r--src/boost/tools/build/src/tools/emscripten.jam113
-rw-r--r--src/boost/tools/build/src/tools/features/__init_features__.jam23
-rw-r--r--src/boost/tools/build/src/tools/features/address-model-feature.jam22
-rw-r--r--src/boost/tools/build/src/tools/features/allow-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/architecture-feature.jam55
-rw-r--r--src/boost/tools/build/src/tools/features/archiveflags-feature.jam18
-rw-r--r--src/boost/tools/build/src/tools/features/asmflags-feature.jam17
-rw-r--r--src/boost/tools/build/src/tools/features/build-feature.jam22
-rw-r--r--src/boost/tools/build/src/tools/features/cflags-feature.jam21
-rw-r--r--src/boost/tools/build/src/tools/features/conditional-feature.jam31
-rw-r--r--src/boost/tools/build/src/tools/features/coverage-feature.jam22
-rw-r--r--src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam39
-rw-r--r--src/boost/tools/build/src/tools/features/cxxabi-feature.jam18
-rw-r--r--src/boost/tools/build/src/tools/features/cxxflags-feature.jam17
-rw-r--r--src/boost/tools/build/src/tools/features/cxxstd-feature.jam50
-rw-r--r--src/boost/tools/build/src/tools/features/debug-feature.jam34
-rw-r--r--src/boost/tools/build/src/tools/features/define-feature.jam30
-rw-r--r--src/boost/tools/build/src/tools/features/dependency-feature.jam62
-rw-r--r--src/boost/tools/build/src/tools/features/dll-feature.jam73
-rw-r--r--src/boost/tools/build/src/tools/features/exception-feature.jam47
-rw-r--r--src/boost/tools/build/src/tools/features/fflags-feature.jam18
-rw-r--r--src/boost/tools/build/src/tools/features/file-feature.jam18
-rw-r--r--src/boost/tools/build/src/tools/features/find-lib-feature.jam42
-rw-r--r--src/boost/tools/build/src/tools/features/flags-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/include-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/instruction-set-feature.jam64
-rw-r--r--src/boost/tools/build/src/tools/features/internal-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/library-feature.jam22
-rw-r--r--src/boost/tools/build/src/tools/features/link-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/linkflags-feature.jam17
-rw-r--r--src/boost/tools/build/src/tools/features/local-visibility-feature.jam27
-rw-r--r--src/boost/tools/build/src/tools/features/location-feature.jam18
-rw-r--r--src/boost/tools/build/src/tools/features/location-prefix-feature.jam18
-rw-r--r--src/boost/tools/build/src/tools/features/lto-feature.jam46
-rw-r--r--src/boost/tools/build/src/tools/features/name-feature.jam22
-rw-r--r--src/boost/tools/build/src/tools/features/objcflags-feature.jam32
-rw-r--r--src/boost/tools/build/src/tools/features/optimization-feature.jam46
-rw-r--r--src/boost/tools/build/src/tools/features/os-feature.jam95
-rw-r--r--src/boost/tools/build/src/tools/features/relevant-feature.jam48
-rw-r--r--src/boost/tools/build/src/tools/features/rtti-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/runtime-feature.jam40
-rw-r--r--src/boost/tools/build/src/tools/features/sanitizers-feature.jam63
-rw-r--r--src/boost/tools/build/src/tools/features/search-feature.jam20
-rw-r--r--src/boost/tools/build/src/tools/features/source-feature.jam22
-rw-r--r--src/boost/tools/build/src/tools/features/stdlib-feature.jam29
-rw-r--r--src/boost/tools/build/src/tools/features/strip-feature.jam25
-rw-r--r--src/boost/tools/build/src/tools/features/tag-feature.jam39
-rw-r--r--src/boost/tools/build/src/tools/features/threadapi-feature.jam39
-rw-r--r--src/boost/tools/build/src/tools/features/threading-feature.jam24
-rw-r--r--src/boost/tools/build/src/tools/features/toolset-feature.jam20
-rw-r--r--src/boost/tools/build/src/tools/features/user-interface-feature.jam28
-rw-r--r--src/boost/tools/build/src/tools/features/variant-feature.jam114
-rw-r--r--src/boost/tools/build/src/tools/features/version-feature.jam19
-rw-r--r--src/boost/tools/build/src/tools/features/visibility-feature.jam46
-rw-r--r--src/boost/tools/build/src/tools/features/warnings-feature.jam41
-rw-r--r--src/boost/tools/build/src/tools/flags.jam152
-rw-r--r--src/boost/tools/build/src/tools/fop.jam69
-rw-r--r--src/boost/tools/build/src/tools/fortran.jam55
-rw-r--r--src/boost/tools/build/src/tools/gcc.jam1330
-rw-r--r--src/boost/tools/build/src/tools/gcc.py871
-rw-r--r--src/boost/tools/build/src/tools/generate.jam111
-rw-r--r--src/boost/tools/build/src/tools/generators/__init_generators__.jam23
-rw-r--r--src/boost/tools/build/src/tools/generators/archive-generator.jam74
-rw-r--r--src/boost/tools/build/src/tools/generators/c-compiling-generator.jam70
-rw-r--r--src/boost/tools/build/src/tools/generators/dummy-generator.jam20
-rw-r--r--src/boost/tools/build/src/tools/generators/lib-generator.jam121
-rw-r--r--src/boost/tools/build/src/tools/generators/linking-generator.jam179
-rw-r--r--src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam30
-rw-r--r--src/boost/tools/build/src/tools/generators/searched-lib-generator.jam97
-rw-r--r--src/boost/tools/build/src/tools/gettext.jam230
-rw-r--r--src/boost/tools/build/src/tools/gfortran.jam39
-rw-r--r--src/boost/tools/build/src/tools/hp_cxx.jam222
-rw-r--r--src/boost/tools/build/src/tools/hpfortran.jam35
-rw-r--r--src/boost/tools/build/src/tools/ifort.jam44
-rw-r--r--src/boost/tools/build/src/tools/intel-darwin.jam233
-rw-r--r--src/boost/tools/build/src/tools/intel-linux.jam232
-rw-r--r--src/boost/tools/build/src/tools/intel-vxworks.jam183
-rw-r--r--src/boost/tools/build/src/tools/intel-win.jam514
-rw-r--r--src/boost/tools/build/src/tools/intel.jam84
-rw-r--r--src/boost/tools/build/src/tools/lex.jam25
-rw-r--r--src/boost/tools/build/src/tools/libjpeg.jam234
-rw-r--r--src/boost/tools/build/src/tools/libpng.jam229
-rw-r--r--src/boost/tools/build/src/tools/libtiff.jam227
-rw-r--r--src/boost/tools/build/src/tools/link.jam547
-rw-r--r--src/boost/tools/build/src/tools/lzma.jam134
-rw-r--r--src/boost/tools/build/src/tools/make.jam69
-rw-r--r--src/boost/tools/build/src/tools/make.py59
-rw-r--r--src/boost/tools/build/src/tools/mc.jam44
-rw-r--r--src/boost/tools/build/src/tools/mc.py46
-rw-r--r--src/boost/tools/build/src/tools/message.jam62
-rw-r--r--src/boost/tools/build/src/tools/message.py54
-rw-r--r--src/boost/tools/build/src/tools/midl.jam142
-rw-r--r--src/boost/tools/build/src/tools/midl.py134
-rw-r--r--src/boost/tools/build/src/tools/mipspro.jam145
-rw-r--r--src/boost/tools/build/src/tools/mpi.jam638
-rw-r--r--src/boost/tools/build/src/tools/msvc-config.jam12
-rw-r--r--src/boost/tools/build/src/tools/msvc.jam2092
-rw-r--r--src/boost/tools/build/src/tools/msvc.py1313
-rw-r--r--src/boost/tools/build/src/tools/notfile.jam65
-rw-r--r--src/boost/tools/build/src/tools/notfile.py51
-rw-r--r--src/boost/tools/build/src/tools/openssl.jam140
-rw-r--r--src/boost/tools/build/src/tools/package.jam274
-rw-r--r--src/boost/tools/build/src/tools/package.py168
-rw-r--r--src/boost/tools/build/src/tools/pathscale.jam178
-rw-r--r--src/boost/tools/build/src/tools/pch.jam95
-rw-r--r--src/boost/tools/build/src/tools/pch.py83
-rw-r--r--src/boost/tools/build/src/tools/pgi.jam138
-rw-r--r--src/boost/tools/build/src/tools/pkg-config.jam485
-rw-r--r--src/boost/tools/build/src/tools/python-config.jam27
-rw-r--r--src/boost/tools/build/src/tools/python.jam1333
-rw-r--r--src/boost/tools/build/src/tools/qcc.jam242
-rw-r--r--src/boost/tools/build/src/tools/qt.jam17
-rw-r--r--src/boost/tools/build/src/tools/qt3.jam209
-rw-r--r--src/boost/tools/build/src/tools/qt4.jam755
-rw-r--r--src/boost/tools/build/src/tools/qt5.jam800
-rw-r--r--src/boost/tools/build/src/tools/quickbook-config.jam44
-rw-r--r--src/boost/tools/build/src/tools/quickbook.jam363
-rw-r--r--src/boost/tools/build/src/tools/rc.jam155
-rw-r--r--src/boost/tools/build/src/tools/rc.py197
-rw-r--r--src/boost/tools/build/src/tools/sass.jam193
-rw-r--r--src/boost/tools/build/src/tools/saxonhe.jam53
-rw-r--r--src/boost/tools/build/src/tools/stage.jam519
-rw-r--r--src/boost/tools/build/src/tools/stage.py350
-rw-r--r--src/boost/tools/build/src/tools/stlport.jam312
-rw-r--r--src/boost/tools/build/src/tools/sun.jam224
-rw-r--r--src/boost/tools/build/src/tools/symlink.jam140
-rw-r--r--src/boost/tools/build/src/tools/symlink.py112
-rw-r--r--src/boost/tools/build/src/tools/testing-aux.jam344
-rw-r--r--src/boost/tools/build/src/tools/testing.jam847
-rw-r--r--src/boost/tools/build/src/tools/testing.py359
-rw-r--r--src/boost/tools/build/src/tools/types/__init__.py19
-rw-r--r--src/boost/tools/build/src/tools/types/adoc.jam26
-rw-r--r--src/boost/tools/build/src/tools/types/asm.jam4
-rw-r--r--src/boost/tools/build/src/tools/types/asm.py33
-rw-r--r--src/boost/tools/build/src/tools/types/cpp.jam90
-rw-r--r--src/boost/tools/build/src/tools/types/cpp.py10
-rw-r--r--src/boost/tools/build/src/tools/types/css.jam10
-rw-r--r--src/boost/tools/build/src/tools/types/docbook.jam8
-rw-r--r--src/boost/tools/build/src/tools/types/exe.jam9
-rw-r--r--src/boost/tools/build/src/tools/types/exe.py11
-rw-r--r--src/boost/tools/build/src/tools/types/html.jam4
-rw-r--r--src/boost/tools/build/src/tools/types/html.py10
-rw-r--r--src/boost/tools/build/src/tools/types/lib.jam74
-rw-r--r--src/boost/tools/build/src/tools/types/lib.py77
-rw-r--r--src/boost/tools/build/src/tools/types/man.jam8
-rw-r--r--src/boost/tools/build/src/tools/types/markdown.jam4
-rw-r--r--src/boost/tools/build/src/tools/types/markdown.py10
-rw-r--r--src/boost/tools/build/src/tools/types/obj.jam9
-rw-r--r--src/boost/tools/build/src/tools/types/obj.py11
-rw-r--r--src/boost/tools/build/src/tools/types/objc.jam26
-rw-r--r--src/boost/tools/build/src/tools/types/pdf.jam8
-rw-r--r--src/boost/tools/build/src/tools/types/preprocessed.jam9
-rw-r--r--src/boost/tools/build/src/tools/types/preprocessed.py11
-rw-r--r--src/boost/tools/build/src/tools/types/qt.jam12
-rw-r--r--src/boost/tools/build/src/tools/types/register.jam39
-rw-r--r--src/boost/tools/build/src/tools/types/rsp.jam4
-rw-r--r--src/boost/tools/build/src/tools/types/rsp.py10
-rw-r--r--src/boost/tools/build/src/tools/types/sass-type.jam49
-rw-r--r--src/boost/tools/build/src/tools/types/xml.jam49
-rw-r--r--src/boost/tools/build/src/tools/unix.jam224
-rw-r--r--src/boost/tools/build/src/tools/unix.py155
-rw-r--r--src/boost/tools/build/src/tools/vacpp.jam173
-rw-r--r--src/boost/tools/build/src/tools/vmsdecc.jam578
-rw-r--r--src/boost/tools/build/src/tools/whale.jam116
-rw-r--r--src/boost/tools/build/src/tools/xlcpp.jam164
-rw-r--r--src/boost/tools/build/src/tools/xlf.jam39
-rw-r--r--src/boost/tools/build/src/tools/xsltproc-config.jam36
-rw-r--r--src/boost/tools/build/src/tools/xsltproc.jam232
-rw-r--r--src/boost/tools/build/src/tools/xsltproc/included.xsl11
-rw-r--r--src/boost/tools/build/src/tools/xsltproc/test.xml2
-rw-r--r--src/boost/tools/build/src/tools/xsltproc/test.xsl12
-rw-r--r--src/boost/tools/build/src/tools/zlib.jam235
-rw-r--r--src/boost/tools/build/src/tools/zstd.jam100
-rw-r--r--src/boost/tools/build/src/util/__init__.py321
-rw-r--r--src/boost/tools/build/src/util/assert.jam346
-rw-r--r--src/boost/tools/build/src/util/container.jam339
-rw-r--r--src/boost/tools/build/src/util/doc.jam1076
-rw-r--r--src/boost/tools/build/src/util/indirect.jam124
-rw-r--r--src/boost/tools/build/src/util/indirect.py15
-rw-r--r--src/boost/tools/build/src/util/logger.py46
-rw-r--r--src/boost/tools/build/src/util/numbers.jam218
-rw-r--r--src/boost/tools/build/src/util/option.jam109
-rw-r--r--src/boost/tools/build/src/util/option.py35
-rw-r--r--src/boost/tools/build/src/util/order.jam173
-rw-r--r--src/boost/tools/build/src/util/order.py121
-rw-r--r--src/boost/tools/build/src/util/os.jam208
-rw-r--r--src/boost/tools/build/src/util/os_j.py24
-rw-r--r--src/boost/tools/build/src/util/param.jam54
-rw-r--r--src/boost/tools/build/src/util/path.jam1015
-rw-r--r--src/boost/tools/build/src/util/path.py937
-rw-r--r--src/boost/tools/build/src/util/print.jam508
-rw-r--r--src/boost/tools/build/src/util/regex.jam208
-rw-r--r--src/boost/tools/build/src/util/regex.py63
-rw-r--r--src/boost/tools/build/src/util/sequence.jam378
-rw-r--r--src/boost/tools/build/src/util/sequence.py58
-rw-r--r--src/boost/tools/build/src/util/set.jam93
-rw-r--r--src/boost/tools/build/src/util/set.py48
-rw-r--r--src/boost/tools/build/src/util/string.jam189
-rw-r--r--src/boost/tools/build/src/util/utility.jam235
-rw-r--r--src/boost/tools/build/src/util/utility.py176
-rw-r--r--src/boost/tools/build/test/BoostBuild.py1353
-rw-r--r--src/boost/tools/build/test/Jamfile.jam29
-rwxr-xr-xsrc/boost/tools/build/test/MockToolset.py267
-rw-r--r--src/boost/tools/build/test/TestCmd.py609
-rw-r--r--src/boost/tools/build/test/TestToolset.py121
-rw-r--r--src/boost/tools/build/test/abs_workdir.py26
-rw-r--r--src/boost/tools/build/test/absolute_sources.py73
-rw-r--r--src/boost/tools/build/test/alias.py107
-rw-r--r--src/boost/tools/build/test/alternatives.py129
-rw-r--r--src/boost/tools/build/test/always.py30
-rw-r--r--src/boost/tools/build/test/bad_dirname.py22
-rw-r--r--src/boost/tools/build/test/boost-build.jam14
-rw-r--r--src/boost/tools/build/test/boostbook.py23
-rw-r--r--src/boost/tools/build/test/boostbook/a.hpp16
-rw-r--r--src/boost/tools/build/test/boostbook/docs.xml36
-rw-r--r--src/boost/tools/build/test/boostbook/jamroot.jam3
-rw-r--r--src/boost/tools/build/test/build_dir.py107
-rw-r--r--src/boost/tools/build/test/build_file.py170
-rw-r--r--src/boost/tools/build/test/build_hooks.py39
-rw-r--r--src/boost/tools/build/test/build_no.py23
-rwxr-xr-xsrc/boost/tools/build/test/builtin_echo.py30
-rwxr-xr-xsrc/boost/tools/build/test/builtin_exit.py42
-rwxr-xr-xsrc/boost/tools/build/test/builtin_glob.py87
-rw-r--r--src/boost/tools/build/test/builtin_glob_archive.py217
-rwxr-xr-xsrc/boost/tools/build/test/builtin_readlink.py24
-rwxr-xr-xsrc/boost/tools/build/test/builtin_split_by_characters.py57
-rwxr-xr-xsrc/boost/tools/build/test/bzip2.py119
-rw-r--r--src/boost/tools/build/test/c_file.py36
-rw-r--r--src/boost/tools/build/test/chain.py56
-rw-r--r--src/boost/tools/build/test/clean.py104
-rw-r--r--src/boost/tools/build/test/cli_property_expansion.py41
-rwxr-xr-xsrc/boost/tools/build/test/collect_debug_info.py341
-rw-r--r--src/boost/tools/build/test/command_line_properties.py166
-rw-r--r--src/boost/tools/build/test/composite.py25
-rw-r--r--src/boost/tools/build/test/conditionals.py48
-rw-r--r--src/boost/tools/build/test/conditionals2.py43
-rw-r--r--src/boost/tools/build/test/conditionals3.py30
-rwxr-xr-xsrc/boost/tools/build/test/conditionals_multiple.py312
-rwxr-xr-xsrc/boost/tools/build/test/configuration.py397
-rw-r--r--src/boost/tools/build/test/configure.py267
-rwxr-xr-xsrc/boost/tools/build/test/copy_time.py69
-rw-r--r--src/boost/tools/build/test/core-language/test.jam1563
-rwxr-xr-xsrc/boost/tools/build/test/core_action_output.py62
-rwxr-xr-xsrc/boost/tools/build/test/core_action_status.py27
-rwxr-xr-xsrc/boost/tools/build/test/core_actions_quietly.py61
-rwxr-xr-xsrc/boost/tools/build/test/core_arguments.py103
-rwxr-xr-xsrc/boost/tools/build/test/core_at_file.py63
-rwxr-xr-xsrc/boost/tools/build/test/core_bindrule.py45
-rw-r--r--src/boost/tools/build/test/core_d12.py32
-rw-r--r--src/boost/tools/build/test/core_delete_module.py51
-rw-r--r--src/boost/tools/build/test/core_dependencies.py157
-rw-r--r--src/boost/tools/build/test/core_fail_expected.py139
-rw-r--r--src/boost/tools/build/test/core_import_module.py82
-rw-r--r--src/boost/tools/build/test/core_jamshell.py55
-rwxr-xr-xsrc/boost/tools/build/test/core_language.py12
-rw-r--r--src/boost/tools/build/test/core_modifiers.py51
-rwxr-xr-xsrc/boost/tools/build/test/core_multifile_actions.py202
-rwxr-xr-xsrc/boost/tools/build/test/core_nt_cmd_line.py266
-rwxr-xr-xsrc/boost/tools/build/test/core_option_d2.py55
-rwxr-xr-xsrc/boost/tools/build/test/core_option_l.py44
-rwxr-xr-xsrc/boost/tools/build/test/core_option_n.py51
-rwxr-xr-xsrc/boost/tools/build/test/core_parallel_actions.py103
-rwxr-xr-xsrc/boost/tools/build/test/core_parallel_multifile_actions_1.py78
-rwxr-xr-xsrc/boost/tools/build/test/core_parallel_multifile_actions_2.py71
-rw-r--r--src/boost/tools/build/test/core_scanner.py36
-rwxr-xr-xsrc/boost/tools/build/test/core_source_line_tracking.py74
-rw-r--r--src/boost/tools/build/test/core_syntax_error_exit_status.py23
-rw-r--r--src/boost/tools/build/test/core_typecheck.py47
-rwxr-xr-xsrc/boost/tools/build/test/core_update_now.py377
-rwxr-xr-xsrc/boost/tools/build/test/core_variables_in_actions.py39
-rw-r--r--src/boost/tools/build/test/core_varnames.py38
-rw-r--r--src/boost/tools/build/test/custom_generator.py66
-rw-r--r--src/boost/tools/build/test/debugger-mi.py326
-rw-r--r--src/boost/tools/build/test/debugger.py674
-rw-r--r--src/boost/tools/build/test/default_build.py80
-rw-r--r--src/boost/tools/build/test/default_features.py50
-rwxr-xr-xsrc/boost/tools/build/test/default_toolset.py215
-rw-r--r--src/boost/tools/build/test/dependency_property.py38
-rw-r--r--src/boost/tools/build/test/dependency_test.py239
-rw-r--r--src/boost/tools/build/test/disambiguation.py32
-rw-r--r--src/boost/tools/build/test/dll_path.py163
-rw-r--r--src/boost/tools/build/test/double_loading.py31
-rw-r--r--src/boost/tools/build/test/duplicate.py38
-rw-r--r--src/boost/tools/build/test/example_customization.py21
-rw-r--r--src/boost/tools/build/test/example_gettext.py30
-rw-r--r--src/boost/tools/build/test/example_libraries.py21
-rw-r--r--src/boost/tools/build/test/example_make.py17
-rw-r--r--src/boost/tools/build/test/example_qt4.py26
-rwxr-xr-xsrc/boost/tools/build/test/exit_status.py26
-rw-r--r--src/boost/tools/build/test/expansion.py140
-rw-r--r--src/boost/tools/build/test/explicit.py58
-rwxr-xr-xsrc/boost/tools/build/test/feature_cxxflags.py37
-rw-r--r--src/boost/tools/build/test/feature_implicit_dependency.py113
-rw-r--r--src/boost/tools/build/test/feature_relevant.py142
-rw-r--r--src/boost/tools/build/test/feature_suppress_import_lib.py33
-rw-r--r--src/boost/tools/build/test/file_types.py44
-rw-r--r--src/boost/tools/build/test/flags.py74
-rw-r--r--src/boost/tools/build/test/gcc_runtime.py27
-rwxr-xr-xsrc/boost/tools/build/test/generator_selection.py158
-rw-r--r--src/boost/tools/build/test/generators_test.py433
-rw-r--r--src/boost/tools/build/test/implicit_dependency.py81
-rw-r--r--src/boost/tools/build/test/indirect_conditional.py148
-rw-r--r--src/boost/tools/build/test/inherit_toolset.py100
-rwxr-xr-xsrc/boost/tools/build/test/inherited_dependency.py237
-rw-r--r--src/boost/tools/build/test/inline.py62
-rw-r--r--src/boost/tools/build/test/lib_source_property.py45
-rwxr-xr-xsrc/boost/tools/build/test/lib_zlib.py184
-rwxr-xr-xsrc/boost/tools/build/test/libjpeg.py119
-rwxr-xr-xsrc/boost/tools/build/test/liblzma.py118
-rwxr-xr-xsrc/boost/tools/build/test/libpng.py119
-rw-r--r--src/boost/tools/build/test/library_chain.py152
-rw-r--r--src/boost/tools/build/test/library_order.py94
-rw-r--r--src/boost/tools/build/test/library_property.py56
-rwxr-xr-xsrc/boost/tools/build/test/libtiff.py119
-rwxr-xr-xsrc/boost/tools/build/test/libzstd.py118
-rwxr-xr-xsrc/boost/tools/build/test/link.py350
-rw-r--r--src/boost/tools/build/test/load_dir.py84
-rw-r--r--src/boost/tools/build/test/load_order.py71
-rw-r--r--src/boost/tools/build/test/loop.py24
-rw-r--r--src/boost/tools/build/test/make_rule.py54
-rwxr-xr-xsrc/boost/tools/build/test/message.py38
-rw-r--r--src/boost/tools/build/test/module_actions.py105
-rw-r--r--src/boost/tools/build/test/ndebug.py33
-rw-r--r--src/boost/tools/build/test/no_type.py19
-rw-r--r--src/boost/tools/build/test/notfile.py36
-rw-r--r--src/boost/tools/build/test/ordered_include.py251
-rw-r--r--src/boost/tools/build/test/ordered_properties.py33
-rw-r--r--src/boost/tools/build/test/out_of_tree.py29
-rw-r--r--src/boost/tools/build/test/package.py231
-rw-r--r--src/boost/tools/build/test/param.py61
-rw-r--r--src/boost/tools/build/test/path_features.py163
-rw-r--r--src/boost/tools/build/test/pch.py58
-rw-r--r--src/boost/tools/build/test/prebuilt.py43
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/a.cpp17
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/debug/a.h13
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/jamfile.jam13
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/jamfile2.jam41
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/jamfile3.jam48
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/jamroot.jam5
-rw-r--r--src/boost/tools/build/test/prebuilt/ext/release/a.h13
-rw-r--r--src/boost/tools/build/test/prebuilt/hello.cpp20
-rw-r--r--src/boost/tools/build/test/prebuilt/jamfile.jam13
-rw-r--r--src/boost/tools/build/test/prebuilt/jamroot.jam4
-rwxr-xr-xsrc/boost/tools/build/test/preprocessor.py53
-rw-r--r--src/boost/tools/build/test/print.py48
-rw-r--r--src/boost/tools/build/test/project-test3/a.cpp8
-rw-r--r--src/boost/tools/build/test/project-test3/jamfile.jam13
-rw-r--r--src/boost/tools/build/test/project-test3/jamroot.jam67
-rw-r--r--src/boost/tools/build/test/project-test3/lib/b.cpp8
-rw-r--r--src/boost/tools/build/test/project-test3/lib/jamfile.jam9
-rw-r--r--src/boost/tools/build/test/project-test3/lib2/c.cpp8
-rw-r--r--src/boost/tools/build/test/project-test3/lib2/d.cpp8
-rw-r--r--src/boost/tools/build/test/project-test3/lib2/helper/e.cpp8
-rw-r--r--src/boost/tools/build/test/project-test3/lib2/helper/jamfile.jam9
-rw-r--r--src/boost/tools/build/test/project-test3/lib2/jamfile.jam11
-rw-r--r--src/boost/tools/build/test/project-test3/lib3/f.cpp8
-rw-r--r--src/boost/tools/build/test/project-test3/lib3/jamfile.jam47
-rw-r--r--src/boost/tools/build/test/project-test3/lib3/jamroot.jam5
-rw-r--r--src/boost/tools/build/test/project-test3/readme.txt7
-rw-r--r--src/boost/tools/build/test/project-test4/a.cpp8
-rw-r--r--src/boost/tools/build/test/project-test4/a_gcc.cpp8
-rw-r--r--src/boost/tools/build/test/project-test4/jamfile.jam11
-rw-r--r--src/boost/tools/build/test/project-test4/jamfile3.jam5
-rw-r--r--src/boost/tools/build/test/project-test4/jamfile4.jam4
-rw-r--r--src/boost/tools/build/test/project-test4/jamfile5.jam6
-rw-r--r--src/boost/tools/build/test/project-test4/jamroot.jam68
-rw-r--r--src/boost/tools/build/test/project-test4/lib/b.cpp8
-rw-r--r--src/boost/tools/build/test/project-test4/lib/jamfile.jam6
-rw-r--r--src/boost/tools/build/test/project-test4/lib/jamfile1.jam2
-rw-r--r--src/boost/tools/build/test/project-test4/lib/jamfile2.jam4
-rw-r--r--src/boost/tools/build/test/project-test4/lib/jamfile3.jam2
-rw-r--r--src/boost/tools/build/test/project-test4/lib2/jamfile.jam8
-rw-r--r--src/boost/tools/build/test/project-test4/lib2/jamfile2.jam4
-rw-r--r--src/boost/tools/build/test/project-test4/readme.txt6
-rw-r--r--src/boost/tools/build/test/project_dependencies.py51
-rw-r--r--src/boost/tools/build/test/project_glob.py212
-rwxr-xr-xsrc/boost/tools/build/test/project_id.py414
-rw-r--r--src/boost/tools/build/test/project_root_constants.py62
-rw-r--r--src/boost/tools/build/test/project_root_rule.py34
-rw-r--r--src/boost/tools/build/test/project_test3.py135
-rw-r--r--src/boost/tools/build/test/project_test4.py65
-rw-r--r--src/boost/tools/build/test/property_expansion.py28
-rwxr-xr-xsrc/boost/tools/build/test/qt4.py19
-rw-r--r--src/boost/tools/build/test/qt4/jamroot.jam82
-rw-r--r--src/boost/tools/build/test/qt4/mock.cpp26
-rw-r--r--src/boost/tools/build/test/qt4/mock.h21
-rw-r--r--src/boost/tools/build/test/qt4/phonon.cpp23
-rw-r--r--src/boost/tools/build/test/qt4/qt3support.cpp29
-rw-r--r--src/boost/tools/build/test/qt4/qtassistant.cpp21
-rw-r--r--src/boost/tools/build/test/qt4/qtcore.cpp22
-rw-r--r--src/boost/tools/build/test/qt4/qtcorefail.cpp23
-rw-r--r--src/boost/tools/build/test/qt4/qtdeclarative.cpp27
-rw-r--r--src/boost/tools/build/test/qt4/qtgui.cpp42
-rw-r--r--src/boost/tools/build/test/qt4/qthelp.cpp22
-rw-r--r--src/boost/tools/build/test/qt4/qtmultimedia.cpp25
-rw-r--r--src/boost/tools/build/test/qt4/qtnetwork.cpp33
-rw-r--r--src/boost/tools/build/test/qt4/qtscript.cpp37
-rw-r--r--src/boost/tools/build/test/qt4/qtscripttools.cpp47
-rw-r--r--src/boost/tools/build/test/qt4/qtsql.cpp37
-rw-r--r--src/boost/tools/build/test/qt4/qtsvg.cpp21
-rw-r--r--src/boost/tools/build/test/qt4/qttest.cpp30
-rw-r--r--src/boost/tools/build/test/qt4/qtwebkit.cpp24
-rw-r--r--src/boost/tools/build/test/qt4/qtxml.cpp29
-rw-r--r--src/boost/tools/build/test/qt4/qtxmlpatterns.cpp76
-rw-r--r--src/boost/tools/build/test/qt4/rcc.cpp20
-rw-r--r--src/boost/tools/build/test/qt4/rcc.qrc5
-rwxr-xr-xsrc/boost/tools/build/test/qt5.py19
-rw-r--r--src/boost/tools/build/test/qt5/jamroot.jam104
-rw-r--r--src/boost/tools/build/test/qt5/mock.cpp26
-rw-r--r--src/boost/tools/build/test/qt5/mock.h21
-rw-r--r--src/boost/tools/build/test/qt5/qt3dcore.cpp21
-rw-r--r--src/boost/tools/build/test/qt5/qt3dinput.cpp24
-rw-r--r--src/boost/tools/build/test/qt5/qt3dlogic.cpp20
-rw-r--r--src/boost/tools/build/test/qt5/qt3drender.cpp21
-rw-r--r--src/boost/tools/build/test/qt5/qtassistant.cpp21
-rw-r--r--src/boost/tools/build/test/qt5/qtbluetooth.cpp34
-rw-r--r--src/boost/tools/build/test/qt5/qtcharts.cpp15
-rw-r--r--src/boost/tools/build/test/qt5/qtcore.cpp22
-rw-r--r--src/boost/tools/build/test/qt5/qtcorefail.cpp23
-rw-r--r--src/boost/tools/build/test/qt5/qtdatavisualization.cpp31
-rw-r--r--src/boost/tools/build/test/qt5/qtdeclarative.cpp26
-rw-r--r--src/boost/tools/build/test/qt5/qtgamepad.cpp29
-rw-r--r--src/boost/tools/build/test/qt5/qthelp.cpp22
-rw-r--r--src/boost/tools/build/test/qt5/qtlocation.cpp30
-rw-r--r--src/boost/tools/build/test/qt5/qtmultimedia.cpp25
-rw-r--r--src/boost/tools/build/test/qt5/qtnetwork.cpp33
-rw-r--r--src/boost/tools/build/test/qt5/qtnfc.cpp28
-rw-r--r--src/boost/tools/build/test/qt5/qtpositioning.cpp23
-rw-r--r--src/boost/tools/build/test/qt5/qtpurchasing.cpp44
-rw-r--r--src/boost/tools/build/test/qt5/qtquick.cpp43
-rw-r--r--src/boost/tools/build/test/qt5/qtquick.qml20
-rw-r--r--src/boost/tools/build/test/qt5/qtscript.cpp37
-rw-r--r--src/boost/tools/build/test/qt5/qtscripttools.cpp47
-rw-r--r--src/boost/tools/build/test/qt5/qtscxml.cpp33
-rw-r--r--src/boost/tools/build/test/qt5/qtserialbus.cpp25
-rw-r--r--src/boost/tools/build/test/qt5/qtserialport.cpp22
-rw-r--r--src/boost/tools/build/test/qt5/qtsql.cpp37
-rw-r--r--src/boost/tools/build/test/qt5/qtsvg.cpp21
-rw-r--r--src/boost/tools/build/test/qt5/qttest.cpp30
-rw-r--r--src/boost/tools/build/test/qt5/qtwebchannel.cpp29
-rw-r--r--src/boost/tools/build/test/qt5/qtwebengine.cpp30
-rw-r--r--src/boost/tools/build/test/qt5/qtwebenginewidgets.cpp40
-rw-r--r--src/boost/tools/build/test/qt5/qtwebkit.cpp22
-rw-r--r--src/boost/tools/build/test/qt5/qtwebkitwidgets.cpp23
-rw-r--r--src/boost/tools/build/test/qt5/qtwebsocket.cpp26
-rw-r--r--src/boost/tools/build/test/qt5/qtwebsockets.cpp24
-rw-r--r--src/boost/tools/build/test/qt5/qtwebview.cpp31
-rw-r--r--src/boost/tools/build/test/qt5/qtwidgets.cpp43
-rw-r--r--src/boost/tools/build/test/qt5/qtxml.cpp29
-rw-r--r--src/boost/tools/build/test/qt5/qtxmlpatterns.cpp76
-rw-r--r--src/boost/tools/build/test/qt5/rcc.cpp20
-rw-r--r--src/boost/tools/build/test/qt5/rcc.qrc5
-rw-r--r--src/boost/tools/build/test/railsys.py14
-rw-r--r--src/boost/tools/build/test/railsys/libx/include/test_libx.h25
-rw-r--r--src/boost/tools/build/test/railsys/libx/jamroot.jam13
-rw-r--r--src/boost/tools/build/test/railsys/libx/src/jamfile.jam19
-rw-r--r--src/boost/tools/build/test/railsys/libx/src/test_libx.cpp15
-rw-r--r--src/boost/tools/build/test/railsys/program/include/test_a.h22
-rw-r--r--src/boost/tools/build/test/railsys/program/jamfile.jam45
-rw-r--r--src/boost/tools/build/test/railsys/program/jamroot.jam14
-rw-r--r--src/boost/tools/build/test/railsys/program/liba/jamfile.jam14
-rw-r--r--src/boost/tools/build/test/railsys/program/liba/test_a.cpp17
-rw-r--r--src/boost/tools/build/test/railsys/program/main/jamfile.jam12
-rw-r--r--src/boost/tools/build/test/railsys/program/main/main.cpp19
-rw-r--r--src/boost/tools/build/test/readme.txt6
-rw-r--r--src/boost/tools/build/test/rebuilds.py68
-rw-r--r--src/boost/tools/build/test/relative_sources.py38
-rw-r--r--src/boost/tools/build/test/remove_requirement.py91
-rwxr-xr-xsrc/boost/tools/build/test/rescan_header.py265
-rw-r--r--src/boost/tools/build/test/resolution.py35
-rw-r--r--src/boost/tools/build/test/results-python.txt132
-rw-r--r--src/boost/tools/build/test/rootless.py36
-rw-r--r--src/boost/tools/build/test/rootless/test1/sub_root/a.cpp6
-rw-r--r--src/boost/tools/build/test/rootless/test1/sub_root/jamfile.jam10
-rw-r--r--src/boost/tools/build/test/rootless/test2/sub_root/a.cpp6
-rw-r--r--src/boost/tools/build/test/rootless/test2/sub_root/jamfile.jam13
-rw-r--r--src/boost/tools/build/test/rootless/test3/jamfile.jam6
-rw-r--r--src/boost/tools/build/test/rootless/test3/sub/inner/a.cpp6
-rw-r--r--src/boost/tools/build/test/rootless/test3/sub/inner/jamfile.jam11
-rwxr-xr-xsrc/boost/tools/build/test/scanner_causing_rebuilds.py132
-rw-r--r--src/boost/tools/build/test/searched_lib.py186
-rw-r--r--src/boost/tools/build/test/skipping.py27
-rwxr-xr-xsrc/boost/tools/build/test/sort_rule.py98
-rw-r--r--src/boost/tools/build/test/source_locations.py42
-rwxr-xr-xsrc/boost/tools/build/test/source_order.py84
-rwxr-xr-xsrc/boost/tools/build/test/space_in_path.py51
-rw-r--r--src/boost/tools/build/test/stage.py207
-rw-r--r--src/boost/tools/build/test/standalone.py53
-rw-r--r--src/boost/tools/build/test/startup/boost-root/boost-build.jam7
-rw-r--r--src/boost/tools/build/test/startup/boost-root/build/boost-build.jam6
-rw-r--r--src/boost/tools/build/test/startup/boost-root/build/bootstrap.jam7
-rw-r--r--src/boost/tools/build/test/startup/bootstrap-env/boost-build.jam5
-rw-r--r--src/boost/tools/build/test/startup/bootstrap-explicit/boost-build.jam6
-rw-r--r--src/boost/tools/build/test/startup/bootstrap-implicit/readme.txt5
-rw-r--r--src/boost/tools/build/test/startup/no-bootstrap1/boost-build.jam6
-rw-r--r--src/boost/tools/build/test/startup/no-bootstrap1/subdir/readme.txt5
-rw-r--r--src/boost/tools/build/test/startup/no-bootstrap2/boost-build.jam6
-rw-r--r--src/boost/tools/build/test/startup/no-bootstrap3/boost-build.jam5
-rw-r--r--src/boost/tools/build/test/startup_v2.py94
-rwxr-xr-xsrc/boost/tools/build/test/static_and_shared_library.py36
-rw-r--r--src/boost/tools/build/test/suffix.py78
-rw-r--r--src/boost/tools/build/test/symlink.py43
-rw-r--r--src/boost/tools/build/test/tag.py122
-rw-r--r--src/boost/tools/build/test/template.py42
-rw-r--r--src/boost/tools/build/test/test-config-example.jam19
-rw-r--r--src/boost/tools/build/test/test.jam39
-rw-r--r--src/boost/tools/build/test/test1.py18
-rw-r--r--src/boost/tools/build/test/test2.py25
-rw-r--r--src/boost/tools/build/test/test2/foo.cpp10
-rw-r--r--src/boost/tools/build/test/test2/jamroot.jam5
-rw-r--r--src/boost/tools/build/test/test_all.py347
-rwxr-xr-xsrc/boost/tools/build/test/test_rc.py148
-rw-r--r--src/boost/tools/build/test/test_system.html623
-rwxr-xr-xsrc/boost/tools/build/test/testing.py535
-rw-r--r--src/boost/tools/build/test/timedata.py178
-rw-r--r--src/boost/tools/build/test/toolset-mock/Jamroot.jam8
-rw-r--r--src/boost/tools/build/test/toolset-mock/lib.cpp7
-rw-r--r--src/boost/tools/build/test/toolset-mock/main.cpp7
-rw-r--r--src/boost/tools/build/test/toolset-mock/project-config.jam43
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/Jamroot.jam61
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/MockProgram.py262
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/ar.py24
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/clang-3.9.0-darwin.py49
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/clang-linux-3.9.0.py48
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/clang-vxworks-4.0.1.py42
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/darwin-4.2.1.py38
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/gcc-4.2.1-darwin.py37
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/gcc-4.8.3-linux.py50
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/intel-darwin-10.2.py43
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/ld.py33
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/libtool.py14
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/mock-program.cpp42
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/project-config.jam5
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/ranlib.py22
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/strip.py13
-rw-r--r--src/boost/tools/build/test/toolset-mock/src/verify.py9
-rw-r--r--src/boost/tools/build/test/toolset_clang_darwin.py20
-rw-r--r--src/boost/tools/build/test/toolset_clang_linux.py19
-rw-r--r--src/boost/tools/build/test/toolset_clang_vxworks.py20
-rw-r--r--src/boost/tools/build/test/toolset_darwin.py21
-rw-r--r--src/boost/tools/build/test/toolset_defaults.py60
-rw-r--r--src/boost/tools/build/test/toolset_gcc.py26
-rw-r--r--src/boost/tools/build/test/toolset_intel_darwin.py19
-rw-r--r--src/boost/tools/build/test/toolset_requirements.py44
-rw-r--r--src/boost/tools/build/test/tree.py245
-rw-r--r--src/boost/tools/build/test/unit_test.py36
-rw-r--r--src/boost/tools/build/test/unit_tests.py11
-rw-r--r--src/boost/tools/build/test/unused.py81
-rw-r--r--src/boost/tools/build/test/use_requirements.py283
-rw-r--r--src/boost/tools/build/test/using.py32
-rw-r--r--src/boost/tools/build/test/wrapper.py38
-rw-r--r--src/boost/tools/build/test/wrong_project.py39
900 files changed, 137894 insertions, 0 deletions
diff --git a/src/boost/tools/build/CONTRIBUTING.adoc b/src/boost/tools/build/CONTRIBUTING.adoc
new file mode 100644
index 000000000..2d9d12d8f
--- /dev/null
+++ b/src/boost/tools/build/CONTRIBUTING.adoc
@@ -0,0 +1,179 @@
+// Copyright 2019-2020 Rene Rivera
+// Copyright 2003, 2006 Vladimir Prus
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+= B2 contributor guidelines
+
+B2 is an open-source project. This means that we welcome and appreciate
+all contributions -- be it ideas, bug reports, or patches. This document
+contains guidelines that help ensure that development goes smoothly and that
+changes are made quickly.
+
+The guidelines are not mandatory, and you can decide for yourself which ones to
+follow. Note, however, that the ten minutes you save by not writing a comment,
+for example, may cause a significantly longer delay for everyone else.
+
+Before contributing, make sure you are subscribed to our mailing list
+at boost-build@lists.boost.org.
+
+== Additional resources
+
+=== The issue tracker
+
+https://github.com/boostorg/build/issues
+
+=== Mailing list
+
+boost-build@lists.boost.org
+
+http://lists.boost.org/boost-build/
+
+== BUGS and PATCHES
+
+Both bugs and patches can be submitted to the GitHub tracker.
+
+When reporting a bug, please try to provide the following information:
+
+* What you did.
+** A minimal reproducible test case is very much appreciated.
+** A shell script with some annotations is much better than a verbose
+   description of the problem (see the sketch after this list).
+** A regression test is the best (see test/test_system.html).
+
+* What you got.
+
+* What you expected.
+
+* What version of B2 you used. If possible, please also test against the
+  current state of the develop branch.
+
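+A minimal reproduction might look like the following shell script. This is
+only a sketch: the one-file project, the `exe` target, and the
+`toolset=gcc variant=debug` properties are hypothetical placeholders for
+whatever actually triggers the problem.
+
+----
+#!/bin/sh
+# Create a throw-away one-file project demonstrating the problem.
+mkdir repro && cd repro
+cat > jamroot.jam <<'EOF'
+exe hello : hello.cpp ;  # building this target shows the unexpected behavior
+EOF
+cat > hello.cpp <<'EOF'
+int main() { return 0; }
+EOF
+# Invoke b2 with the exact properties that expose the bug.
+b2 toolset=gcc variant=debug
+----
+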
+When submitting a patch, please:
+
+* Make a single patch for a single logical change
+* Follow the policies and coding conventions below
+* Send patches as pull requests to the develop branch
+* Provide a good PR message together with the patch
+
+The purpose of the message is to communicate what was changed, and *why*.
+Without a good message, you might spend a lot of time later wondering where
+a strange piece of code came from and why it was necessary.
+
+A good message mentions each changed file and each rule/method, saying
+what happened to it, and why. Consider the following log message:
+
+----
+Better direct request handling.
+
+* new/build-request.jam
+ (directly-requested-properties-adjuster): Redo.
+
+* new/targets.jam
+ (main-target.generate-really): Adjust properties here.
+
+* new/virtual-target.jam
+ (register-actual-name): New rule.
+  (virtual-target.actualize-no-scanner): Call the above, to detect bugs
+  where two virtual targets correspond to one Jam target name.
+----
+
+The messages for the last two files are good. They tell what was changed.
+The change to the first file is clearly under-commented.
+
+It's okay to use terse messages for uninteresting changes, like ones induced
+by interface changes elsewhere.
+
+== POLICIES
+
+=== Testing
+
+All serious changes must be tested. New rules must be tested by the module where
+they are declared. The test system (link:test/test_system.html[test/test_system.html])
+should be used to verify user-observable behavior.
+
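+For example, a minimal regression test, sketched here against the
+`BoostBuild.Tester` interface used by the scripts in the `test/` directory
+(exact target paths and options depend on the configured toolset and layout),
+might look like this:
+
+----
+import BoostBuild
+
+# Set up a temporary project, build it, and verify the expected output.
+t = BoostBuild.Tester(use_test_config=False)
+t.write("jamroot.jam", "exe hello : hello.cpp ;")
+t.write("hello.cpp", "int main() {}\n")
+t.run_build_system()
+# The path pattern may need adjusting for the toolset and variant in use.
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+t.cleanup()
+----
+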
+=== Documentation
+
+It turns out that it's hard to have too many comments, but it's easy to have
+too few. Please prepend each rule with a comment saying what the rule does and
+what the arguments mean. Stop for a minute and consider whether the comment
+makes sense to anybody else and completely describes what the rule does.
+Generic phrases like "adjusts properties" are really not enough.
+
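+The same applies to the Python port modules. As a purely illustrative sketch
+(the helper and its name are hypothetical, shown only to indicate the expected
+level of commenting):
+
+----
+# Returns a copy of 'properties' with 'prefix' prepended to the value of every
+# <define> property.  'properties' is a list of '<feature>value' strings; the
+# input list is not modified.
+def prefix_defines(prefix, properties):
+    result = []
+    for p in properties:
+        if p.startswith("<define>"):
+            result.append("<define>" + prefix + p[len("<define>"):])
+        else:
+            result.append(p)
+    return result
+----
+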
+When applicable, make changes to the user documentation as well.
+
+== CODING CONVENTIONS
+
+1. All names of rules and variables are lowercase with "-" to separate
+ words.
++
+----
+rule call-me-ishmael ( ) ...
+----
+
+2. Names with dots in them are "intended globals". Ordinary globals use a
+ dot prefix:
++
+----
+.foobar
+$(.foobar)
+----
+
+3. Pseudofunctions or associations are <parameter>.<property>:
++
+----
+$(argument).name = hello ;
+$($(argument).name)
+----
+
+4. Class attribute names are prefixed with "self.":
++
+----
+self.x
+$(self.x)
+----
+
+5. Builtin rules are called via their ALL_UPPERCASE_NAMES:
++
+----
+DEPENDS $(target) : $(sources) ;
+----
+
+6. Opening and closing braces go on separate lines:
++
+----
+if $(a)
+{
+ #
+}
+else
+{
+ #
+}
+----
+
+== ENGINE
+
+Developing the `b2` engine, the C++ part, effectively requires two builds:
+the "stable" engine and the "in-progress" engine.
+
+What is the "stable" engine is up to you. It only refers to a build of the
+engine you know is at a good working state. When you are at a point the
+source is stable you can run `bootstrap.sh/bat` from the root. That will
+create the `b2` executable at the root. You can then use this version to run
+regular B2 builds as needed both within the B2 tree and in other projects.
+
+The "in-progress" engine is whatever build you happen to be testing at the
+moment. There are two ways to build this be engine. You can either
+(a) run `b2 b2` at the root, or (b) run `build.sh/bat` in `src/engine`.
+
+Using (a) will place, by default, a fully debuggable `b2` in the `.build`
+directories. You can run that one from a debugger with full symbols and
+stepping features. This should be the first choice in developing in the
+engine.
+
+After using (a) to implement functionality you can use (b) to fully test
+that functionality. The engine built from (b) is fully optimized and
+is the one used, by default, by the test system when running in the `test`
+directory. Before submitting patches it's required to build this way and
+run the tests in at least one toolset version (but preferably at least two). \ No newline at end of file
diff --git a/src/boost/tools/build/Jamroot.jam b/src/boost/tools/build/Jamroot.jam
new file mode 100644
index 000000000..be460547e
--- /dev/null
+++ b/src/boost/tools/build/Jamroot.jam
@@ -0,0 +1,271 @@
+# Copyright 2019 Rene Rivera
+# Copyright 2017 Steven Watanabe
+# Copyright 2016 Vladimir Prus
+# Copyright 2017 Edward Diener
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import path ;
+import package ;
+import os ;
+import feature ;
+import set ;
+import toolset ;
+import bison ;
+import indirect ;
+import "class" : new ;
+import type ;
+import virtual-target ;
+import errors ;
+
+path-constant SELF : . ;
+
+project b2
+ : build-dir .build
+ : requirements
+ <cxxstd>11
+ <toolset>msvc:<define>_CRT_SECURE_NO_WARNINGS=1
+ <toolset>msvc:<define>_CRT_NONSTDC_NO_DEPRECATE=1
+ ;
+
+#|
+Build the engine and its dependencies outside of the simple core build scripts.
+This allows us to keep the core build scripts as simple as possible and lets
+us support more functionality for development of the engine core.
+|#
+
+#|
+Define custom yyacc tool.
+|#
+
+feature.feature yyacc : : dependency free ;
+toolset.flags yyacc TOOL <yyacc> ;
+
+exe yyacc
+ : src/engine/yyacc.cpp
+ : ;
+explicit yyacc ;
+
+rule yyacc-gen ( project name : property-set : sources * )
+{
+ local relevant = [ toolset.relevant $(__name__).yyacc ] ;
+ local a = [ new action $(sources[1]) : $(__name__).yyacc : [ $(property-set).add $(relevant) ] ] ;
+ local targets ;
+ for local n in $(name:S=).y $(name:S=)tab.h
+ {
+ targets += [ virtual-target.register
+ [ new file-target $(n) exact : [ type.type $(n) ]
+ : $(project) : $(a)
+ ] ] ;
+ }
+ return $(targets) ;
+}
+
+actions yyacc bind TOOL
+{
+ "$(TOOL)" "$(<)" "$(>)"
+}
+
+generate jamgram.y
+ : src/engine/jamgram.yy
+ : <generating-rule>@yyacc-gen
+ <location>src/engine
+ <yyacc>yyacc <dependency>yyacc
+ ;
+explicit jamgram.y ;
+
+#|
+Define grammar translation with Bison.
+|#
+
+BISON = [ os.environ BISON ] ;
+BISON ?= bison ;
+local BISON_IN_PATH = [ path.glob [ os.executable-path ] : $(BISON[1]) $(BISON[1]).* ] ;
+
+rule grammar ( target : source : properties * )
+{
+ # LOCATE on $(target) = $(source:D) ;
+ BISON on $(target) = $(BISON) ;
+}
+
+actions grammar
+{
+ "$(BISON)" --yacc --defines -o "$(<[1])" "$(>)"
+}
+
+if $(BISON_IN_PATH)
+{
+ make jamgram.cpp
+ : src/engine/jamgram.y
+ : @grammar
+ : <dependency>jamgram.y
+ <location>src/engine ;
+}
+else
+{
+ errors.warning "Bison generator program '$(BISON:J= )' not found. Skipping grammar build." ;
+ alias jamgram.cpp
+ : src/engine/jamgram.cpp ;
+}
+explicit jamgram.cpp ;
+
+#|
+Generate the embedded jambase.
+|#
+
+feature.feature mkjambase : : dependency free ;
+toolset.flags mkjambase TOOL <mkjambase> ;
+
+exe mkjambase
+ : src/engine/mkjambase.cpp
+ ;
+explicit mkjambase ;
+
+actions mkjambase bind TOOL
+{
+ "$(TOOL)" "$(<)" "$(>)"
+}
+
+make jambase.cpp
+ : src/engine/Jambase
+ : @mkjambase
+ : <location>src/engine
+ <mkjambase>mkjambase <dependency>mkjambase ;
+explicit jambase.cpp ;
+
+#|
+Define the b2 executable. Sources are based on platform.
+TODO: Make platform specific source be no-ops when not needed.
+|#
+
+local python-exe = [ MATCH --with-python=(.*) : [ modules.peek : ARGV ] ] ;
+local python-include ;
+local python-ldlib ;
+if $(python-exe)
+{
+ python-include = [ SHELL
+ "$(python-exe) -c \"import sysconfig; print(sysconfig.get_path('include'));\""
+ : strip-eol ] ;
+ python-libdir = [ SHELL
+ "$(python-exe) -c \"import sysconfig; import os.path; print(sysconfig.get_config_var('LIBDIR'));\""
+ : strip-eol ] ;
+ python-ldlib = [ MATCH ".*(python.*)" : [ SHELL
+ "$(python-exe) -c \"import sysconfig; import os.path; print(sysconfig.get_config_var('LIBRARY'));\""
+ : strip-eol ] ] ;
+ python-ldlib = $(python-ldlib:S=) ;
+
+ lib python
+ :
+ : <name>$(python-ldlib) <search>$(python-libdir)
+ :
+ : <include>$(python-include) <define>HAVE_PYTHON ;
+}
+else
+{
+ alias python ;
+}
+
+local b2_src =
+ [ glob src/engine/*.cpp src/engine/modules/*.cpp :
+ src/engine/*nt.cpp src/engine/*unix.cpp src/engine/*vms.cpp
+ src/engine/yyacc.cpp src/engine/mkjambase.cpp
+ src/engine/check_cxx11.cpp
+ ] ;
+local b2_src_nt = [ glob src/engine/*nt.cpp ] ;
+local b2_src_unix = [ glob src/engine/*unix.cpp ] ;
+local b2_src_vms = [ glob src/engine/*vms.cpp ] ;
+local unix_os = [ set.difference [ feature.values <target-os> ] : windows vms ] ;
+
+exe b2
+ : $(b2_src)
+ python
+ : <target-os>windows:<source>$(b2_src_nt)
+ <target-os>vms:<source>$(b2_src_vms)
+ <target-os>$(unix_os):<source>$(b2_src_unix)
+ <dependency>jamgram.cpp
+ <dependency>jambase.cpp
+ <toolset>msvc:<find-static-library>kernel32
+ <toolset>msvc:<find-static-library>advapi32
+ <toolset>msvc:<find-static-library>user32
+ ;
+explicit b2 ;
+
+#|
+Installation of the engine, build, and example files.
+|#
+
+local ext = "" ;
+if [ os.on-windows ] || [ os.on-vms ]
+{
+ ext = ".exe" ;
+}
+
+package.install boost-build-engine boost-build
+ : # properties
+ : # binaries
+ $(SELF)/src/engine/b2$(ext)
+ ;
+explicit boost-build-engine ;
+
+local examples ;
+for local e in [ glob-tree-ex $(SELF)/example : * : . .svn ]
+{
+ if [ CHECK_IF_FILE [ path.native $(e) ] ]
+ {
+ examples += $(e) ;
+ }
+}
+package.install-data boost-build-examples
+ : # Which subdir of $prefix/share
+ boost-build
+ : # What to install
+ $(examples)
+ : # What is the root of the directory
+ <install-source-root>.
+ ;
+explicit boost-build-examples ;
+
+local .core-sources =
+ $(SELF)/boost-build.jam
+ $(SELF)/src/build-system.jam
+ [ path.glob-tree $(SELF)/src/build : *.jam ]
+ [ path.glob-tree $(SELF)/src/contrib : *.jam ]
+ [ path.glob-tree $(SELF)/src/kernel : *.jam ]
+ [ path.glob-tree $(SELF)/src/options : *.jam ]
+ [ path.glob-tree $(SELF)/src/util : *.jam ]
+ [ path.glob-tree $(SELF)/src/tools : *.jam *.xml *.xsl *.doxyfile *.hpp doxproc.py ]
+ ;
+if $(python-exe)
+{
+ .core-sources +=
+ [ path.glob-tree $(SELF)/src/build : *.py ]
+ [ path.glob-tree $(SELF)/src/contrib : *.py ]
+ [ path.glob-tree $(SELF)/src/kernel : *.py ]
+ [ path.glob-tree $(SELF)/src/options : *.py ]
+ [ path.glob-tree $(SELF)/src/util : *.py ]
+ [ path.glob-tree $(SELF)/src/tools : *.py : doxproc.py ]
+ ;
+}
+
+package.install-data boost-build-core
+ : # Which subdir of $prefix/share
+ boost-build
+ : # What to install
+ $(.core-sources)
+ : # What is the root of the directory
+ <install-source-root>.
+ ;
+explicit boost-build-core ;
+
+#|
+Only install example files when requested to avoid bloating install footprint.
+|#
+if --with-examples in [ modules.peek : ARGV ]
+{
+ alias install : boost-build-engine boost-build-core boost-build-examples ;
+}
+else
+{
+ alias install : boost-build-engine boost-build-core ;
+}
+explicit install ;
diff --git a/src/boost/tools/build/LICENSE.txt b/src/boost/tools/build/LICENSE.txt
new file mode 100644
index 000000000..36b7cd93c
--- /dev/null
+++ b/src/boost/tools/build/LICENSE.txt
@@ -0,0 +1,23 @@
+Boost Software License - Version 1.0 - August 17th, 2003
+
+Permission is hereby granted, free of charge, to any person or organization
+obtaining a copy of the software and accompanying documentation covered by
+this license (the "Software") to use, reproduce, display, distribute,
+execute, and transmit the Software, and to prepare derivative works of the
+Software, and to permit third-parties to whom the Software is furnished to
+do so, all subject to the following:
+
+The copyright notices in the Software and this entire statement, including
+the above license grant, this restriction and the following disclaimer,
+must be included in all copies of the Software, in whole or in part, and
+all derivative works of the Software, unless such copies or derivative
+works are solely in the form of machine-executable object code generated by
+a source language processor.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/src/boost/tools/build/README.adoc b/src/boost/tools/build/README.adoc
new file mode 100644
index 000000000..2ca949fcb
--- /dev/null
+++ b/src/boost/tools/build/README.adoc
@@ -0,0 +1,31 @@
+= B2
+
+B2 makes it easy to build C++ projects, everywhere.
+
+image:https://img.shields.io/badge/license-BSL%201.0-blue.svg["Boost Software License 1.0", link="LICENSE.txt"]
+image:https://img.shields.io/github/languages/code-size/boostorg/build.svg["GitHub code size in bytes", link="https://github.com/boostorg/build"]
+
+== License
+
+Distributed under the Boost Software License, Version 1.0. (See accompanying
+file LICENSE.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+== Testing
+
+Continuously tested on:
+
+* Linux Clang 3.5, 3.6, 3.7, 3.8, 3.9, 4, 5, 6, 7, 8
+* Linux GCC 4.7, 4.8, 4.9, 5, 6, 7, 8, 9
+* macOS Xcode 8.3.3, 9.0, 9.0.1, 9.1, 9.2, 9.3, 9.3.1, 9.4, 9.4.1, 10.0, 10.1, 10.2, 10.2.1, 11.0, 11.1, 11.2, 11.3, 11.3.1
+* Windows MinGW 8.1.0
+* Windows VS 2013, 2015, 2017, 2019
+
+image:https://img.shields.io/azure-devops/build/grafikrobot/200cb3a3-3fcf-4a47-85ad-3cc17f2bec9e/3/master.svg?label=master&logo=azuredevops["Linux/Windows/macOS: master", link="https://dev.azure.com/grafikrobot/B2"]
+image:https://img.shields.io/azure-devops/build/grafikrobot/200cb3a3-3fcf-4a47-85ad-3cc17f2bec9e/3/develop.svg?label=develop&logo=azuredevops["Linux/Windows/macOS: develop", link="https://dev.azure.com/grafikrobot/B2"]
+
+== More
+
+See the link:https://boostorg.github.io/build/[website] for more information.
+
+See the link:CONTRIBUTING.adoc[guidelines for contributing] if you would like
+to get involved in the development.
diff --git a/src/boost/tools/build/azure-pipelines.yml b/src/boost/tools/build/azure-pipelines.yml
new file mode 100644
index 000000000..ebb568de8
--- /dev/null
+++ b/src/boost/tools/build/azure-pipelines.yml
@@ -0,0 +1,727 @@
+# Use, modification, and distribution are
+# subject to the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+#
+# Copyright Rene Rivera 2015-2019.
+
+trigger:
+ branches:
+ include:
+ - develop
+ - master
+ - feature/*
+pr:
+ branches:
+ include:
+ - develop
+
+stages:
+
+- stage: Test
+ jobs:
+
+ - job: 'Linux'
+ pool:
+ vmImage: 'ubuntu-16.04'
+ strategy:
+ matrix:
+ GCC 9:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ GCC 8:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-8
+ PACKAGES: g++-8
+ GCC 7:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-7
+ PACKAGES: g++-7
+ GCC 6:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-6
+ PACKAGES: g++-6
+ GCC 5:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-5
+ PACKAGES: g++-5
+ GCC 4.9:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-4.9
+ PACKAGES: g++-4.9
+ GCC 4.8:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-4.8
+ PACKAGES: g++-4.8
+ GCC 4.7:
+ TOOLSET: gcc
+ TEST_TOOLSET: gcc
+ CXX: g++-4.7
+ PACKAGES: g++-4.7
+ Clang 8:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-8
+ PACKAGES: clang-8
+ LLVM_REPO: llvm-toolchain-xenial-8
+ Clang 7:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-7
+ PACKAGES: clang-7
+ LLVM_REPO: llvm-toolchain-xenial-7
+ Clang 6:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-6.0
+ PACKAGES: clang-6.0
+ LLVM_REPO: llvm-toolchain-xenial-6.0
+ Clang 5:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-5.0
+ PACKAGES: clang-5.0
+ LLVM_REPO: llvm-toolchain-xenial-5.0
+ Clang 4:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-4.0
+ PACKAGES: clang-4.0
+ LLVM_REPO: llvm-toolchain-xenial-4.0
+ Clang 3.9:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-3.9
+ PACKAGES: clang-3.9
+ Clang 3.8:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-3.8
+ PACKAGES: clang-3.8
+ Clang 3.7:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-3.7
+ PACKAGES: clang-3.7
+ Clang 3.6:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-3.6
+ PACKAGES: clang-3.6
+ Clang 3.5:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++-3.5
+ PACKAGES: clang-3.5
+ steps:
+ - bash: |
+ set -e
+ uname -a
+ sudo -E apt-add-repository -y "ppa:ubuntu-toolchain-r/test"
+ if test -n "${LLVM_REPO}" ; then
+ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
+ sudo -E apt-add-repository "deb http://apt.llvm.org/xenial/ ${LLVM_REPO} main"
+ fi
+ sudo -E apt-get update
+ sudo -E apt-get -yq --no-install-suggests --no-install-recommends install ${PACKAGES}
+ displayName: Install
+ - bash: |
+ set -e
+ cd src/engine
+ set PATH=${PATH};${CXX_PATH}
+ ./build.sh ${TOOLSET}
+ ./b2 -v
+ cd ../..
+ displayName: Build
+ - bash: |
+ set -e
+ CXX_PATH=`which ${CXX}`
+ cd test
+ echo "using ${TEST_TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ python test_all.py ${TEST_TOOLSET}
+ cd ..
+ displayName: Test
+ - bash: |
+ set -e
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TEST_TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ ./src/engine/b2 b2 warnings-as-errors=on toolset=${TEST_TOOLSET}
+ displayName: "No Warnings"
+ - bash: |
+ set -e
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TEST_TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ ./bootstrap.sh --with-toolset=${TOOLSET}
+ ./b2 --prefix=./.b2 install ${TEST_TOOLSET}
+ displayName: Bootstrap
+
+ - job: 'Windows'
+ strategy:
+ matrix:
+ VS 2019:
+ TOOLSET: vc142
+ TEST_TOOLSET: msvc
+ VM_IMAGE: 'windows-2019'
+ VS 2017:
+ TOOLSET: vc141
+ TEST_TOOLSET: msvc
+ VM_IMAGE: 'vs2017-win2016'
+ VS 2015:
+ TOOLSET: vc14
+ TEST_TOOLSET: msvc
+ VM_IMAGE: 'vs2015-win2012r2'
+ VS 2013:
+ TOOLSET: vc12
+ TEST_TOOLSET: msvc
+ VM_IMAGE: 'vs2015-win2012r2'
+ MinGW 8.1.0:
+ TOOLSET: mingw
+ TEST_TOOLSET: gcc
+ VM_IMAGE: 'vs2017-win2016'
+ pool:
+ vmImage: $(VM_IMAGE)
+ steps:
+ - powershell: |
+ cd src/engine
+ $env:path += ';' + $env:CXX_PATH
+ cmd /c build.bat $env:TOOLSET
+ ./b2.exe -v
+ cd ../..
+ displayName: Build
+ - powershell: |
+ $env:HOME = $env:HOMEDRIVE + $env:HOMEPATH
+ cd test
+ echo "using" $env:TEST_TOOLSET ":" ":" $env:CXX ";" > $env:HOME/user-config.jam
+ python test_all.py $env:TEST_TOOLSET
+ cd ..
+ displayName: Test
+ - powershell: |
+ $env:HOME = $env:HOMEDRIVE + $env:HOMEPATH
+ $env:path += ';' + $env:CXX_PATH
+ echo "using" $env:TEST_TOOLSET ":" ":" $env:CXX ";" > $env:HOME/user-config.jam
+ ./src/engine/b2.exe --debug-configuration b2 warnings-as-errors=on toolset=$env:TEST_TOOLSET
+ displayName: "No Warnings"
+ - powershell: |
+ $env:HOME = $env:HOMEDRIVE + $env:HOMEPATH
+ $env:path += ';' + $env:CXX_PATH
+ echo "using" $env:TEST_TOOLSET ":" ":" $env:CXX ";" > $env:HOME/user-config.jam
+ ./bootstrap.bat
+ ./b2.exe --debug-configuration --prefix=./.b2 install toolset=$env:TEST_TOOLSET
+ displayName: Bootstrap
+
+ - job: 'macOS'
+ strategy:
+ matrix:
+ Xcode 11.3.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ VM_IMAGE: 'macOS-10.15'
+ Xcode 11.3:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.app
+ VM_IMAGE: 'macOS-10.15'
+ Xcode 11.2:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.2.app
+ VM_IMAGE: 'macOS-10.15'
+ Xcode 11.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.1.app
+ VM_IMAGE: 'macOS-10.15'
+ Xcode 11.0:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.app
+ VM_IMAGE: 'macOS-10.15'
+ Xcode 10.2.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_10.2.1.app
+ VM_IMAGE: 'macOS-10.14'
+ Xcode 10.2:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_10.2.app
+ VM_IMAGE: 'macOS-10.14'
+ Xcode 10.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_10.1.app
+ VM_IMAGE: 'macOS-10.14'
+ Xcode 10.0:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_10.app
+ VM_IMAGE: 'macOS-10.14'
+ Xcode 9.4.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.4.1.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.4:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.4.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.3.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.3.1.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.3:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.3.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.2:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.2.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.1.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.0.1:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.0.1.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 9.0:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_9.app
+ VM_IMAGE: 'macOS-10.13'
+ Xcode 8.3.3:
+ TOOLSET: clang
+ TEST_TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_8.3.3.app
+ VM_IMAGE: 'macOS-10.13'
+ pool:
+ vmImage: $(VM_IMAGE)
+ steps:
+ - bash: |
+ set -e
+ uname -a
+ sudo xcode-select -switch ${XCODE_APP}
+ which clang++
+ displayName: Install
+ - bash: |
+ set -e
+ cd src/engine
+ ./build.sh ${TOOLSET}
+ ./b2 -v
+ cd ../..
+ displayName: Build
+ - bash: |
+ set -e
+ CXX_PATH=`which ${CXX}`
+ cd test
+ echo "using ${TEST_TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ python test_all.py ${TEST_TOOLSET}
+ cd ..
+ displayName: Test
+ - bash: |
+ set -e
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TEST_TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ ./src/engine/b2 b2 warnings-as-errors=on toolset=${TEST_TOOLSET}
+ displayName: "No Warnings"
+ - bash: |
+ set -e
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TEST_TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ ./bootstrap.sh --with-toolset=${TOOLSET}
+ ./b2 --prefix=./.b2 install ${TEST_TOOLSET}
+ displayName: Bootstrap
+
+- stage: Boost
+ jobs:
+
+ - job: 'Release_Linux'
+ displayName: 'Release Linux'
+ pool:
+ vmImage: 'ubuntu-latest'
+ strategy:
+ matrix:
+ 1.72.0 .. GCC 9:
+ BOOST_VERSION: 1.72.0
+ BOOST_VERSION_U: 1_72_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ 1.71.0 .. GCC 9:
+ BOOST_VERSION: 1.71.0
+ BOOST_VERSION_U: 1_71_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ 1.70.0 .. GCC 9:
+ BOOST_VERSION: 1.70.0
+ BOOST_VERSION_U: 1_70_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ 1.69.0 .. GCC 9:
+ BOOST_VERSION: 1.69.0
+ BOOST_VERSION_U: 1_69_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ 1.68.0 .. GCC 9:
+ BOOST_VERSION: 1.68.0
+ BOOST_VERSION_U: 1_68_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ 1.67.0 .. GCC 9:
+ BOOST_VERSION: 1.67.0
+ BOOST_VERSION_U: 1_67_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ 1.66.0 .. GCC 9:
+ BOOST_VERSION: 1.66.0
+ BOOST_VERSION_U: 1_66_0
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ steps:
+ - bash: |
+ set -e
+ uname -a
+ sudo -E apt-add-repository -y "ppa:ubuntu-toolchain-r/test"
+ if test -n "${LLVM_REPO}" ; then
+ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
+ sudo -E apt-add-repository "deb http://apt.llvm.org/xenial/ ${LLVM_REPO} main"
+ fi
+ sudo -E apt-get update
+ sudo -E apt-get -yq --no-install-suggests --no-install-recommends install ${PACKAGES}
+ displayName: Install
+ - bash: |
+ set -e
+ cd src/engine
+ ./build.sh ${TOOLSET}
+ ./b2 -v
+ displayName: Build
+ - bash: |
+ set -e
+ pushd ${HOME}
+ git clone -b boost-${BOOST_VERSION} --single-branch --recurse-submodules https://github.com/boostorg/boost.git boost_${BOOST_VERSION_U}
+ cd boost_${BOOST_VERSION_U}
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install
+ popd
+ displayName: Test
+
+ - job: 'Dev_Linux'
+ displayName: 'Dev Linux'
+ pool:
+ vmImage: 'ubuntu-latest'
+ strategy:
+ matrix:
+ Master .. GCC 9:
+ BOOST_BRANCH: master
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ Master .. Clang 8:
+ BOOST_BRANCH: master
+ TOOLSET: clang
+ CXX: clang++-8
+ PACKAGES: clang-8
+ LLVM_REPO: llvm-toolchain-xenial-8
+ Develop .. GCC 9:
+ BOOST_BRANCH: develop
+ TOOLSET: gcc
+ CXX: g++-9
+ PACKAGES: g++-9
+ Develop .. Clang 8:
+ BOOST_BRANCH: develop
+ TOOLSET: clang
+ CXX: clang++-8
+ PACKAGES: clang-8
+ LLVM_REPO: llvm-toolchain-xenial-8
+ steps:
+ - bash: |
+ set -e
+ uname -a
+ sudo -E apt-add-repository -y "ppa:ubuntu-toolchain-r/test"
+ if test -n "${LLVM_REPO}" ; then
+ wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
+ sudo -E apt-add-repository "deb http://apt.llvm.org/xenial/ ${LLVM_REPO} main"
+ fi
+ sudo -E apt-get update
+ sudo -E apt-get -yq --no-install-suggests --no-install-recommends install ${PACKAGES}
+ displayName: Install
+ - bash: |
+ set -e
+ cd src/engine
+ ./build.sh ${TOOLSET}
+ ./b2 -v
+ displayName: Build
+ - bash: |
+ set -e
+ pushd ${HOME}
+ git clone --recursive https://github.com/boostorg/boost.git
+ cd boost
+ git checkout ${BOOST_BRANCH}
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install
+ popd
+ displayName: Test
+
+ - job: 'Release_macOS'
+ displayName: 'Release macOS'
+ pool:
+ vmImage: 'macOS-latest'
+ strategy:
+ matrix:
+ 1.72.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.72.0
+ BOOST_VERSION_U: 1_72_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ 1.71.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.71.0
+ BOOST_VERSION_U: 1_71_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ 1.70.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.70.0
+ BOOST_VERSION_U: 1_70_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ 1.69.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.69.0
+ BOOST_VERSION_U: 1_69_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ 1.68.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.68.0
+ BOOST_VERSION_U: 1_68_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.2.app
+ 1.67.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.67.0
+ BOOST_VERSION_U: 1_67_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ 1.66.0 .. Xcode 11.3.1:
+ BOOST_VERSION: 1.66.0
+ BOOST_VERSION_U: 1_66_0
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ steps:
+ - bash: |
+ set -e
+ uname -a
+ sudo xcode-select -switch ${XCODE_APP}
+ which clang++
+ displayName: Install
+ - bash: |
+ set -e
+ cd src/engine
+ ./build.sh ${TOOLSET}
+ ./b2 -v
+ displayName: Build
+ - bash: |
+ set -e
+ pushd ${HOME}
+ git clone -b boost-${BOOST_VERSION} --single-branch --recurse-submodules https://github.com/boostorg/boost.git boost_${BOOST_VERSION_U}
+ cd boost_${BOOST_VERSION_U}
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install
+ popd
+ displayName: Test
+
+ - job: 'Dev_macOS'
+ displayName: 'Dev macOS'
+ pool:
+ vmImage: 'macOS-latest'
+ strategy:
+ matrix:
+ Master .. Xcode 11.3.1:
+ BOOST_BRANCH: master
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ Develop .. Xcode 11.3.1:
+ BOOST_BRANCH: develop
+ TOOLSET: clang
+ CXX: clang++
+ XCODE_APP: /Applications/Xcode_11.3.1.app
+ steps:
+ - bash: |
+ set -e
+ uname -a
+ sudo xcode-select -switch ${XCODE_APP}
+ which clang++
+ displayName: Install
+ - bash: |
+ set -e
+ cd src/engine
+ ./build.sh ${TOOLSET}
+ ./b2 -v
+ displayName: Build
+ - bash: |
+ set -e
+ pushd ${HOME}
+ git clone --recursive https://github.com/boostorg/boost.git
+ cd boost
+ git checkout ${BOOST_BRANCH}
+ CXX_PATH=`which ${CXX}`
+ echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam
+ "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install
+ popd
+ displayName: Test
+
+ - job: 'Release_Windows'
+ displayName: 'Release Windows'
+ pool:
+ vmImage: 'windows-latest'
+ strategy:
+ matrix:
+ 1.72.0 .. VS 2019:
+ BOOST_VERSION: 1.72.0
+ BOOST_VERSION_U: 1_72_0
+ TOOLSET: vc142
+ 1.71.0 .. VS 2019:
+ BOOST_VERSION: 1.71.0
+ BOOST_VERSION_U: 1_71_0
+ TOOLSET: vc142
+ 1.70.0 .. VS 2019:
+ BOOST_VERSION: 1.70.0
+ BOOST_VERSION_U: 1_70_0
+ TOOLSET: vc142
+ 1.69.0 .. VS 2019:
+ BOOST_VERSION: 1.69.0
+ BOOST_VERSION_U: 1_69_0
+ TOOLSET: vc142
+ 1.68.0 .. VS 2019:
+ BOOST_VERSION: 1.68.0
+ BOOST_VERSION_U: 1_68_0
+ TOOLSET: vc142
+ 1.67.0 .. VS 2019:
+ BOOST_VERSION: 1.67.0
+ BOOST_VERSION_U: 1_67_0
+ TOOLSET: vc142
+ 1.66.0 .. VS 2019:
+ BOOST_VERSION: 1.66.0
+ BOOST_VERSION_U: 1_66_0
+ TOOLSET: vc142
+ steps:
+ - powershell: |
+ cd src/engine
+ $env:path += ';' + ${env:CXX_PATH}
+ cmd /c build.bat ${env:TOOLSET}
+ ./b2.exe -v
+ cd ../..
+ displayName: Build
+ - powershell: |
+ $env:HOME = "$env:HOMEDRIVE" + "$env:HOMEPATH"
+ cd "${env:HOME}"
+ git clone -b boost-${env:BOOST_VERSION} --single-branch --recurse-submodules https://github.com/boostorg/boost.git boost_${env:BOOST_VERSION_U}
+ cd "boost_${env:BOOST_VERSION_U}"
+ echo "using" "msvc" ";" > "${env:HOME}/user-config.jam"
+ & "${env:BUILD_SOURCESDIRECTORY}\src\engine\b2.exe" "--boost-build=${env:BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=msvc install
+ displayName: Test
+
+ - job: 'Dev_Windows'
+ displayName: 'Dev Windows'
+ pool:
+ vmImage: 'windows-latest'
+ strategy:
+ matrix:
+ Master .. VS 2019:
+ BOOST_BRANCH: master
+ TOOLSET: vc142
+ Develop .. VS 2019:
+ BOOST_BRANCH: develop
+ TOOLSET: vc142
+ steps:
+ - powershell: |
+ cd src/engine
+ $env:path += ';' + ${env:CXX_PATH}
+ cmd /c build.bat ${env:TOOLSET}
+ ./b2.exe -v
+ cd ../..
+ displayName: Build
+ - powershell: |
+ $env:HOME = "$env:HOMEDRIVE" + "$env:HOMEPATH"
+ cd "${env:HOME}"
+ git clone --recursive https://github.com/boostorg/boost.git
+ cd boost
+ $OriginalErrorActionPreference = $ErrorActionPreference
+ $ErrorActionPreference= 'silentlycontinue'
+ git checkout "${env:BOOST_BRANCH}"
+ $ErrorActionPreference = $OriginalErrorActionPreference
+ echo "using" "msvc" ";" > "${env:HOME}/user-config.jam"
+ & "${env:BUILD_SOURCESDIRECTORY}\src\engine\b2.exe" "--boost-build=${env:BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=msvc install
+ displayName: Test
+
+- stage: WebsiteUpdate
+ displayName: 'Website Update'
+ condition: in(variables['Build.SourceBranch'], 'refs/heads/master', 'refs/heads/develop')
+ jobs:
+
+ - job: Documentation
+ pool:
+ vmImage: 'ubuntu-16.04'
+ steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '2.x'
+ - task: UseRubyVersion@0
+ - bash: |
+ pip install --user Pygments
+ pip install --user "https://github.com/bfgroup/jam_pygments/archive/master.zip"
+ gem install asciidoctor
+ gem install pygments.rb
+ echo "using asciidoctor ;" >> project-config.jam
+ ./bootstrap.sh
+ pushd doc
+ ../b2 --website-doc-dir=manual/$(Build.SourceBranchName) website
+ displayName: 'Build & Publish'
+ env:
+ GH_TOKEN: $(GitHubToken)
diff --git a/src/boost/tools/build/boost-build.jam b/src/boost/tools/build/boost-build.jam
new file mode 100644
index 000000000..a1240fdf7
--- /dev/null
+++ b/src/boost/tools/build/boost-build.jam
@@ -0,0 +1,8 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+boost-build src/kernel ;
diff --git a/src/boost/tools/build/bootstrap.bat b/src/boost/tools/build/bootstrap.bat
new file mode 100644
index 000000000..25acc0a1c
--- /dev/null
+++ b/src/boost/tools/build/bootstrap.bat
@@ -0,0 +1,39 @@
+@ECHO OFF
+
+REM Copyright (C) 2009 Vladimir Prus
+REM Copyright 2019-2020 Rene Rivera
+REM
+REM Distributed under the Boost Software License, Version 1.0.
+REM (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+:b2_build
+ECHO Building the B2 engine..
+pushd src\engine
+call .\build.bat %*
+@ECHO OFF
+popd
+if exist ".\src\engine\b2.exe" (
+ copy .\src\engine\b2.exe . > nul
+ goto :b2_built)
+goto :b2_failure
+
+
+:b2_built
+ECHO.
+ECHO Building is done. To install, run:
+ECHO.
+ECHO .\b2 --prefix=DIR install
+ECHO.
+goto :end
+
+
+:b2_failure
+ECHO.
+ECHO Failed to build the B2 engine.
+ECHO.
+goto :end
+
+
+:end
+exit /b %ERRORLEVEL%
diff --git a/src/boost/tools/build/bootstrap.sh b/src/boost/tools/build/bootstrap.sh
new file mode 100755
index 000000000..37bc9502f
--- /dev/null
+++ b/src/boost/tools/build/bootstrap.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# Copyright (C) 2005, 2006 Douglas Gregor.
+# Copyright (C) 2006 The Trustees of Indiana University
+# Copyright (C) 2010 Bryce Lelbach
+# Copyright 2018-2020 Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Build b2
+echo "Building the B2 engine.."
+pwd=`pwd`
+( cd "./src/engine" && ./build.sh "$*" )
+if [ $? -ne 0 ]; then
+ echo
+ echo "Failed to build the B2 engine." 1>&2
+ exit 1
+fi
+cd "$pwd"
+cp "./src/engine/b2" .
+
+cat << EOF
+
+Building is done. To install, run:
+
+ ./b2 install --prefix=<DIR>
+
+EOF
diff --git a/src/boost/tools/build/bootstrap_vms.com b/src/boost/tools/build/bootstrap_vms.com
new file mode 100644
index 000000000..3d8afaab9
--- /dev/null
+++ b/src/boost/tools/build/bootstrap_vms.com
@@ -0,0 +1,48 @@
+$! Copyright 2015 Artur Shepilko.
+$!
+$! Distributed under the Boost Software License, Version 1.0.
+$! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+$!
+$ THIS_FACILITY = "BOOSTBUILD"
+$
+$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'")
+$ save_verify = f$verify(verify)
+$ save_default = f$env("DEFAULT")
+$
+$ SAY := WRITE SYS$OUTPUT
+$
+$ ON WARNING THEN CONTINUE
+$ ON ERROR THEN GOTO ERROR
+$
+$ SAY "I|Bootstrapping the build engine..."
+$
+$ set def [.src.engine]
+$ @build_vms /out=[--]bootstrap.log
+$
+$ set def 'save_default'
+$
+$ if f$search("[.src.engine.bin_vms]b2.exe") .eqs. "" then goto ERROR
+$ copy [.src.engine.bin_vms]b2.exe []
+$ copy [.src.engine.bin_vms]bjam.exe []
+$
+$ SAY "I|Bootstrapping is done, B2.EXE created."
+$ type sys$input
+$DECK
+
+ To build and install under ROOT: directory, run:
+ MC []B2 --prefix="/root" install
+
+ Set B2 command:
+ B2 :== $ROOT:[BIN]B2.EXE
+
+$EOD
+$ sts = 1
+$
+$EXIT:
+$ set def 'save_default'
+$ exit 'sts' + (0 * f$verify(save_verify))
+
+$ERROR:
+$ SAY "E|Failed to bootstrap build engine, see BOOTSTRAP.LOG for details."
+$ sts = 4
+$ goto EXIT
diff --git a/src/boost/tools/build/example/asciidoctor/example.adoc b/src/boost/tools/build/example/asciidoctor/example.adoc
new file mode 100644
index 000000000..1a7675c12
--- /dev/null
+++ b/src/boost/tools/build/example/asciidoctor/example.adoc
@@ -0,0 +1,3 @@
+= The Dangerous and Thrilling Documentation Chronicles
+
+This journey begins on a bleary Monday morning.
diff --git a/src/boost/tools/build/example/asciidoctor/example_manpage.adoc b/src/boost/tools/build/example/asciidoctor/example_manpage.adoc
new file mode 100644
index 000000000..ef70113d4
--- /dev/null
+++ b/src/boost/tools/build/example/asciidoctor/example_manpage.adoc
@@ -0,0 +1,38 @@
+= b2(1)
+Rene Rivera
+v0.0.0
+:doctype: manpage
+:manmanual: B2
+:mansource: B2
+:man-linkstyle: pass:[blue R < >]
+
+== NAME
+
+b2 - Boost Build
+
+== SYNOPSIS
+
+*b2* ['OPTION']... 'TARGET'...
+
+== OPTIONS
+
+*-n*::
+ Print out what would get built.
+
+== EXIT STATUS
+
+*0*::
+ Success.
+
+*1*::
+ Failure.
+
+== RESOURCES
+
+*Project web site:* http://boost.org
+
+== COPYING
+
+Copyright \(C) 2017 {author}. +
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) \ No newline at end of file
diff --git a/src/boost/tools/build/example/asciidoctor/jamroot.jam b/src/boost/tools/build/example/asciidoctor/jamroot.jam
new file mode 100644
index 000000000..d03788c5c
--- /dev/null
+++ b/src/boost/tools/build/example/asciidoctor/jamroot.jam
@@ -0,0 +1,11 @@
+#|
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+html example_html : example.adoc ;
+manpage example_1 : example_manpage.adoc ;
+pdf example_pdf : example.adoc ;
+docbook example_docbook : example.adoc ;
diff --git a/src/boost/tools/build/example/boost-build.jam b/src/boost/tools/build/example/boost-build.jam
new file mode 100644
index 000000000..02abe407f
--- /dev/null
+++ b/src/boost/tools/build/example/boost-build.jam
@@ -0,0 +1,6 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+boost-build ../src/kernel ;
diff --git a/src/boost/tools/build/example/built_tool/Jamroot.jam b/src/boost/tools/build/example/built_tool/Jamroot.jam
new file mode 100644
index 000000000..c458650e8
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/Jamroot.jam
@@ -0,0 +1,8 @@
+
+import feature ;
+
+feature.feature tblgen : : dependency free ;
+
+project built_tool ;
+
+build-project core ; \ No newline at end of file
diff --git a/src/boost/tools/build/example/built_tool/core/Jamfile.jam b/src/boost/tools/build/example/built_tool/core/Jamfile.jam
new file mode 100644
index 000000000..d4ec62382
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/core/Jamfile.jam
@@ -0,0 +1,39 @@
+
+import toolset ;
+import os ;
+
+project : requirements <tblgen>../tblgen//tblgen ;
+
+
+# Create a.c using a custom action defined below.
+make a.c : a.td : @tblgen ;
+
+# Use a.c in executable.
+exe core : core.cpp a.c ;
+
+# The action has to invoke the tool built in other
+# parts of the project. The <tblgen> feature is used
+# to specify the location of the tool, and the flags
+# statement below makes the full path to the tool
+# available inside the action.
+toolset.flags tblgen COMMAND <tblgen> ;
+
+# We generally want a.c to be rebuilt when the tool changes.
+rule tblgen ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(COMMAND) ] ;
+}
+
+# The action that invokes the tool
+actions tblgen bind COMMAND
+{
+ $(COMMAND:E=tblgen) > $(<)
+}
+
+if [ os.name ] = VMS
+{
+ actions tblgen bind COMMAND
+ {
+ PIPE MCR $(COMMAND:WE=tblgen) > $(<:W)
+ }
+}
diff --git a/src/boost/tools/build/example/built_tool/core/a.td b/src/boost/tools/build/example/built_tool/core/a.td
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/core/a.td
diff --git a/src/boost/tools/build/example/built_tool/core/core.cpp b/src/boost/tools/build/example/built_tool/core/core.cpp
new file mode 100644
index 000000000..31a133726
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/core/core.cpp
@@ -0,0 +1,5 @@
+
+int main()
+{
+ return 0;
+}
diff --git a/src/boost/tools/build/example/built_tool/readme.txt b/src/boost/tools/build/example/built_tool/readme.txt
new file mode 100644
index 000000000..bbb9f9b3a
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/readme.txt
@@ -0,0 +1,5 @@
+
+This example shows how to build an executable and then use it
+for generating other targets. The 'tblgen' subdirectory builds
+a tool, while the 'core' subdirectory uses that tool. Refer
+to core/Jamfile.jam for detailed comments. \ No newline at end of file
diff --git a/src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam b/src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam
new file mode 100644
index 000000000..af4906278
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam
@@ -0,0 +1,4 @@
+
+project : requirements -<tblgen>tblgen//tblgen ;
+
+exe tblgen : tblgen.cpp ; \ No newline at end of file
diff --git a/src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp b/src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp
new file mode 100644
index 000000000..fbd058133
--- /dev/null
+++ b/src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp
@@ -0,0 +1,9 @@
+
+#include <iostream>
+
+int main()
+{
+ std::cout << "int foo;\n";
+ return 0;
+}
+
diff --git a/src/boost/tools/build/example/complex-testing/compile-fail.cpp b/src/boost/tools/build/example/complex-testing/compile-fail.cpp
new file mode 100644
index 000000000..a219fa5c6
--- /dev/null
+++ b/src/boost/tools/build/example/complex-testing/compile-fail.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main()
+{
+ std::cout << "Bye!\n";
+ return EXIT_FAILURE
+}
diff --git a/src/boost/tools/build/example/complex-testing/fail.cpp b/src/boost/tools/build/example/complex-testing/fail.cpp
new file mode 100644
index 000000000..965661188
--- /dev/null
+++ b/src/boost/tools/build/example/complex-testing/fail.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main()
+{
+ std::cout << "Bye!\n";
+ return EXIT_FAILURE;
+}
diff --git a/src/boost/tools/build/example/complex-testing/jamroot.jam b/src/boost/tools/build/example/complex-testing/jamroot.jam
new file mode 100644
index 000000000..a5942a239
--- /dev/null
+++ b/src/boost/tools/build/example/complex-testing/jamroot.jam
@@ -0,0 +1,15 @@
+# Copyright 2016 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+using testing ;
+import property-set ;
+import path ;
+
+exe success : success.cpp ;
+
+run success : arg1 arg2 : : : success-a ;
+run success : arg3 arg4 : : : success-b ;
+
+run post.cpp : : success-a : : post-a ;
+run post.cpp : : success-b : : post-b ;
diff --git a/src/boost/tools/build/example/complex-testing/post.cpp b/src/boost/tools/build/example/complex-testing/post.cpp
new file mode 100644
index 000000000..6282e8f24
--- /dev/null
+++ b/src/boost/tools/build/example/complex-testing/post.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main(int argc, char *argv[])
+{
+ std::cout << argv[1] << "\n";
+ return EXIT_SUCCESS;
+}
diff --git a/src/boost/tools/build/example/complex-testing/success.cpp b/src/boost/tools/build/example/complex-testing/success.cpp
new file mode 100644
index 000000000..a7e2b6ca0
--- /dev/null
+++ b/src/boost/tools/build/example/complex-testing/success.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main(int argc, char *argv[])
+{
+ std::cout << "Hi!\n";
+ return EXIT_SUCCESS;
+}
diff --git a/src/boost/tools/build/example/customization/class.verbatim b/src/boost/tools/build/example/customization/class.verbatim
new file mode 100644
index 000000000..5c0d7b803
--- /dev/null
+++ b/src/boost/tools/build/example/customization/class.verbatim
@@ -0,0 +1,7 @@
+class_template
+
+class %class_name% {
+public:
+ %class_name%() {}
+ ~%class_name%() {}
+}; \ No newline at end of file
diff --git a/src/boost/tools/build/example/customization/codegen.cpp b/src/boost/tools/build/example/customization/codegen.cpp
new file mode 100644
index 000000000..6cdb45e4d
--- /dev/null
+++ b/src/boost/tools/build/example/customization/codegen.cpp
@@ -0,0 +1,36 @@
+// (C) Copyright Vladimir Prus, 2003
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+// Please see 'usage.verbatim' file for usage notes.
+
+#include <iostream>
+#include <string>
+#include <cstring>
+using std::cout;
+using std::string;
+using std::strlen;
+
+extern const char class_template[];
+extern const char usage[];
+
+int main(int ac, char* av[])
+{
+ if (av[1]) {
+
+ string class_name = av[1];
+ string s = class_template;
+
+ string::size_type n;
+ while((n = s.find("%class_name%")) != string::npos) {
+ s.replace(n, strlen("%class_name%"), class_name);
+ }
+ std::cout << "Output is:\n";
+ std::cout << s << "\n";
+ return 0;
+ } else {
+ std::cout << usage << "\n";
+ return 1;
+ }
+}
diff --git a/src/boost/tools/build/example/customization/inline_file.py b/src/boost/tools/build/example/customization/inline_file.py
new file mode 100644
index 000000000..9f13acd87
--- /dev/null
+++ b/src/boost/tools/build/example/customization/inline_file.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import sys
+from string import strip
+
+def quote_line(line):
+
+ result = ""
+
+ for i in line:
+ if (i == '\\'):
+ result = result + '\\\\'
+ elif (i == '\"'):
+ result = result + '\\\"'
+ elif (i != '\r' and i != '\n'):
+ result = result + i;
+
+ return '\"' + result + '\\n\"'
+
+def quote_file(file):
+ result = ""
+
+ for i in file.readlines():
+ result = result + quote_line(i) + "\n"
+
+ return result
+
+if len(sys.argv) < 3:
+ print "Usage: inline_file.py output_c_file file_to_include"
+else:
+ output_c_file = sys.argv[1]
+ out_file = open(output_c_file, "w");
+
+ file_to_include = sys.argv[2]
+
+ in_file = open(file_to_include, "r");
+ variable_name = strip(in_file.readline())
+ out_file.write("extern const char %s[] = {\n%s};\n\n" % (variable_name, quote_file(in_file)))
+ in_file.close()
+ out_file.close()
diff --git a/src/boost/tools/build/example/customization/jamroot.jam b/src/boost/tools/build/example/customization/jamroot.jam
new file mode 100644
index 000000000..5e986d91c
--- /dev/null
+++ b/src/boost/tools/build/example/customization/jamroot.jam
@@ -0,0 +1,9 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import verbatim ;
+
+exe codegen : codegen.cpp class.verbatim usage.verbatim
+ t1.verbatim ;
+
diff --git a/src/boost/tools/build/example/customization/readme.txt b/src/boost/tools/build/example/customization/readme.txt
new file mode 100644
index 000000000..6a799277a
--- /dev/null
+++ b/src/boost/tools/build/example/customization/readme.txt
@@ -0,0 +1,11 @@
+Copyright 2003 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This example shows how to add a new target type and new tool support to
+B2. Please refer to the extender manual for a complete description of this
+example.
+
+Note that this example requires Python. If Cygwin Python on Windows is to be
+used, please go to "verbatim.jam" and follow the instructions there.
diff --git a/src/boost/tools/build/example/customization/t1.verbatim b/src/boost/tools/build/example/customization/t1.verbatim
new file mode 100644
index 000000000..144540f29
--- /dev/null
+++ b/src/boost/tools/build/example/customization/t1.verbatim
@@ -0,0 +1,2 @@
+t1
+//###include "t2.verbatim" \ No newline at end of file
diff --git a/src/boost/tools/build/example/customization/t2.verbatim b/src/boost/tools/build/example/customization/t2.verbatim
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/example/customization/t2.verbatim
diff --git a/src/boost/tools/build/example/customization/usage.verbatim b/src/boost/tools/build/example/customization/usage.verbatim
new file mode 100644
index 000000000..0fc4b4a37
--- /dev/null
+++ b/src/boost/tools/build/example/customization/usage.verbatim
@@ -0,0 +1,5 @@
+usage
+Usage: codegen class_name
+
+This program takes a template of C++ code and replaces all occurrences of
+%class_name% with the passed 'class_name' parameter.
diff --git a/src/boost/tools/build/example/customization/verbatim.jam b/src/boost/tools/build/example/customization/verbatim.jam
new file mode 100644
index 000000000..7b51604c4
--- /dev/null
+++ b/src/boost/tools/build/example/customization/verbatim.jam
@@ -0,0 +1,61 @@
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file shows some of the primary customization mechanisms in B2 V2
+# and should serve as a basis for your own customization.
+# Each part has a comment describing its purpose, and you can pick the parts
+# which are relevant to your case, remove everything else, and then change names
+# and actions to taste.
+
+import os ;
+
+# Declare a new target type. This allows B2 to do something sensible
+# when targets with the .verbatim extension are found in sources.
+import type ;
+type.register VERBATIM : verbatim ;
+
+# Declare a dependency scanner for the new target type. The
+# 'inline-file.py' script does not handle includes, so this is
+# only for illustration.
+import scanner ;
+# First, define a new class derived from 'common-scanner'.
+# That class has all the interesting logic, and we only need
+# to override the 'pattern' method, which returns the regular
+# expression to use when scanning.
+class verbatim-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "//###include[ ]*\"([^\"]*)\"" ;
+ }
+}
+
+# Register the scanner class. The 'include' is
+# the property which specifies the search path
+# for includes.
+scanner.register verbatim-scanner : include ;
+# Assign the scanner class to the target type.
+# Now, all .verbatim sources will be scanned.
+# To test this, build the project, touch the
+# t2.verbatim file and build again.
+type.set-scanner VERBATIM : verbatim-scanner ;
+
+import generators ;
+generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
+
+# Note: To use Cygwin Python on Windows change the following line
+# to "python inline_file.py $(<) $(>)"
+# Also, make sure that "python" is in PATH.
+actions inline-file
+{
+ "./inline_file.py" $(<) $(>)
+}
+
+if [ os.name ] = VMS
+{
+ actions inline-file
+ {
+ python inline_file.py $(<:W) $(>:W)
+ }
+}
diff --git a/src/boost/tools/build/example/customization/verbatim.py b/src/boost/tools/build/example/customization/verbatim.py
new file mode 100644
index 000000000..089bd3831
--- /dev/null
+++ b/src/boost/tools/build/example/customization/verbatim.py
@@ -0,0 +1,47 @@
+# Copyright 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is only used with Python port of Boost.Build
+
+# This file shows some of the primary customization mechanisms in Boost.Build V2
+# and should serve as a basis for your own customization.
+# Each part has a comment describing its purpose, and you can pick the parts
+# which are relevant to your case, remove everything else, and then change names
+# and actions to taste.
+
+# Declare a new target type. This allows Boost.Build to do something sensible
+# when targets with the .verbatim extension are found in sources.
+import b2.build.type as type
+type.register("VERBATIM", ["verbatim"])
+
+# Declare a dependency scanner for the new target type. The
+# 'inline-file.py' script does not handle includes, so this is
+# only for illustration.
+import b2.build.scanner as scanner;
+# First, define a new class derived from 'common-scanner'.
+# That class has all the interesting logic, and we only need
+# to override the 'pattern' method, which returns the regular
+# expression to use when scanning.
+class VerbatimScanner(scanner.CommonScanner):
+
+ def pattern(self):
+ return "//###include[ ]*\"([^\"]*)\""
+
+scanner.register(VerbatimScanner, ["include"])
+type.set_scanner("VERBATIM", VerbatimScanner)
+
+import b2.build.generators as generators
+
+generators.register_standard("verbatim.inline-file",
+ ["VERBATIM"], ["CPP"])
+
+from b2.manager import get_manager
+
+get_manager().engine().register_action("verbatim.inline-file",
+"""
+./inline_file.py $(<) $(>)
+""")
+
+
+
diff --git a/src/boost/tools/build/example/generate/README.txt b/src/boost/tools/build/example/generate/README.txt
new file mode 100644
index 000000000..018cbb564
--- /dev/null
+++ b/src/boost/tools/build/example/generate/README.txt
@@ -0,0 +1,11 @@
+# Copyright 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+This example shows the 'generate' rule, which allows you to construct targets
+using an arbitrary set of transformations and commands.
+
+The rule is similar to 'make' and 'notfile', but unlike those, it lets you
+operate in terms of B2 'virtual targets', which is more flexible.
+
+Please consult the docs for more explanations.
diff --git a/src/boost/tools/build/example/generate/a.cpp b/src/boost/tools/build/example/generate/a.cpp
new file mode 100644
index 000000000..364975671
--- /dev/null
+++ b/src/boost/tools/build/example/generate/a.cpp
@@ -0,0 +1,10 @@
+
+int main()
+{
+}
+
+/*
+Copyright 2007 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
diff --git a/src/boost/tools/build/example/generate/gen.jam b/src/boost/tools/build/example/generate/gen.jam
new file mode 100644
index 000000000..73232aab5
--- /dev/null
+++ b/src/boost/tools/build/example/generate/gen.jam
@@ -0,0 +1,26 @@
+
+import "class" : new ;
+import common ;
+
+rule generate-example ( project name : property-set : sources * )
+{
+ local result ;
+ for local s in $(sources)
+ {
+ #local source-name = [ $(s).name ] ;
+ #local source-action = [ $(s).action ] ;
+ #local source-properties = [ $(source-action).properties ] ;
+
+ # Create a new action, that takes the source target and runs the
+ # 'common.copy' command on it.
+ local a = [ new non-scanning-action $(s) : common.copy : $(property-set)
+ ] ;
+
+ # Create a target to represent the action result. Uses the target name
+ # passed here via the 'name' parameter and the same type and project as
+ # the source.
+ result += [ new file-target $(name) : [ $(s).type ] : $(project) : $(a)
+ ] ;
+ }
+ return $(result) ;
+} \ No newline at end of file
diff --git a/src/boost/tools/build/example/generate/gen.py b/src/boost/tools/build/example/generate/gen.py
new file mode 100644
index 000000000..09ee15b43
--- /dev/null
+++ b/src/boost/tools/build/example/generate/gen.py
@@ -0,0 +1,16 @@
+
+from b2.build.virtual_target import NonScanningAction, FileTarget
+
+def generate_example(project, name, ps, sources):
+
+ result = []
+ for s in sources:
+
+ a = NonScanningAction([s], "common.copy", ps)
+
+ # Create a target to represent the action result. Uses the target name
+ # passed here via the 'name' parameter and the same type and project as
+ # the source.
+ result.append(FileTarget(name, s.type(), project, a))
+
+ return result
diff --git a/src/boost/tools/build/example/generate/jamroot.jam b/src/boost/tools/build/example/generate/jamroot.jam
new file mode 100644
index 000000000..c48f2207b
--- /dev/null
+++ b/src/boost/tools/build/example/generate/jamroot.jam
@@ -0,0 +1,9 @@
+# Copyright 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import generate ;
+
+import gen ;
+
+generate a2 : a.cpp : <generating-rule>@gen.generate-example ;
diff --git a/src/boost/tools/build/example/generator/README.txt b/src/boost/tools/build/example/generator/README.txt
new file mode 100644
index 000000000..f26a856a5
--- /dev/null
+++ b/src/boost/tools/build/example/generator/README.txt
@@ -0,0 +1,6 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+This example shows how to declare a new generator class. This is necessary when
+a generator's logic is more complex than just running a single tool.
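+
+In outline, the accompanying soap.jam defines a 'soap-generator' class derived
+from the base 'generator' class and registers an instance of it, after which
+jamroot.jam uses it through an ordinary target declaration, roughly:
+
+  generators.register [ new soap-generator soap.soap : GCI : CPP ] ;
+  exe foo : foo.gci : <server>on ;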
diff --git a/src/boost/tools/build/example/generator/foo.gci b/src/boost/tools/build/example/generator/foo.gci
new file mode 100644
index 000000000..2ccc45c6c
--- /dev/null
+++ b/src/boost/tools/build/example/generator/foo.gci
@@ -0,0 +1,10 @@
+
+int main()
+{
+ return 0;
+}
+/*
+Copyright 2006 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
diff --git a/src/boost/tools/build/example/generator/jamroot.jam b/src/boost/tools/build/example/generator/jamroot.jam
new file mode 100644
index 000000000..9703134db
--- /dev/null
+++ b/src/boost/tools/build/example/generator/jamroot.jam
@@ -0,0 +1,6 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import soap ;
+exe foo : foo.gci : <server>on ;
diff --git a/src/boost/tools/build/example/generator/soap.jam b/src/boost/tools/build/example/generator/soap.jam
new file mode 100644
index 000000000..b3d9e7633
--- /dev/null
+++ b/src/boost/tools/build/example/generator/soap.jam
@@ -0,0 +1,86 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This is an example of a fictional code generator tool.
+# It accepts a single input of type '.gci' and produces
+# either one or two outputs of type .cpp, depending
+# on the value of the <server> feature.
+#
+# This example is loosely based on the gSOAP code generator.
+
+import type ;
+import generators ;
+import feature ;
+import common ;
+import "class" : new ;
+import os ;
+
+type.register GCI : gci ;
+
+feature.feature server : off on : incidental ;
+
+class soap-generator : generator
+{
+ import "class" : new ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2])
+ {
+ # Accept only single source.
+ local t = [ $(sources[1]).type ] ;
+ if $(t) = GCI
+ {
+ # The type is correct.
+
+ # If no output name is specified, guess it from sources.
+ if ! $(name)
+ {
+ name = [ generator.determine-output-name $(sources) ] ;
+ }
+
+ # Produce one output, using just copy.
+ local a = [ new action $(sources[1])
+ : common.copy : $(property-set) ] ;
+ local t = [ new file-target $(name) : CPP : $(project)
+ : $(a) ] ;
+
+ # If in server mode, create another output -- an
+ # empty file. If this were a real SOAP generator, we
+ # might have created a single action, and two targets
+ # both using that action.
+ local t2 ;
+ if [ $(property-set).get <server> ] = "on"
+ {
+ local a = [ new action : soap.touch : $(property-set) ] ;
+ t2 = [ new file-target $(name)_server : CPP : $(project)
+ : $(a) ] ;
+ }
+ return [ virtual-target.register $(t) ]
+ [ virtual-target.register $(t2) ] ;
+ }
+ }
+ }
+}
+
+generators.register [ new soap-generator soap.soap : GCI : CPP ] ;
+
+TOUCH = [ common.file-touch-command ] ;
+actions touch
+{
+ $(TOUCH) $(<)
+}
+
+if [ os.name ] = VMS
+{
+ actions touch
+ {
+ $(TOUCH) $(<:W)
+ }
+}
diff --git a/src/boost/tools/build/example/gettext/jamfile.jam b/src/boost/tools/build/example/gettext/jamfile.jam
new file mode 100644
index 000000000..d5096df30
--- /dev/null
+++ b/src/boost/tools/build/example/gettext/jamfile.jam
@@ -0,0 +1,26 @@
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+# Declare a main target.
+exe main : main.cpp ;
+
+# Declare an action for updating translations
+# After changing main.cpp, invocation of
+#
+# bjam update-russian
+#
+# will update translations in russian.po
+gettext.update update-russian : russian.po main ;
+
+# Compiled message catalog.
+gettext.catalog russian : russian.po ;
+
+# A stage rule which installs the message catalog to the
+# location gettext expects.
+stage messages-russian : russian
+ : <location>messages/ru_RU.KOI8-R/LC_MESSAGES
+ <name>main.mo
+ ;
+
diff --git a/src/boost/tools/build/example/gettext/jamroot.jam b/src/boost/tools/build/example/gettext/jamroot.jam
new file mode 100644
index 000000000..862f8930c
--- /dev/null
+++ b/src/boost/tools/build/example/gettext/jamroot.jam
@@ -0,0 +1,6 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+using gettext ;
diff --git a/src/boost/tools/build/example/gettext/main.cpp b/src/boost/tools/build/example/gettext/main.cpp
new file mode 100644
index 000000000..6888e1aba
--- /dev/null
+++ b/src/boost/tools/build/example/gettext/main.cpp
@@ -0,0 +1,28 @@
+// Copyright Vladimir Prus 2003.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <locale.h>
+#include <libintl.h>
+#define i18n(s) gettext(s)
+
+#include <iostream>
+using namespace std;
+
+int main()
+{
+ // Specify that translations are stored in directory
+ // "messages".
+ bindtextdomain("main", "messages");
+ textdomain("main");
+
+ // Switch to the Russian locale.
+ setlocale(LC_MESSAGES, "ru_RU.KOI8-R");
+
+ // Output localized message.
+ std::cout << i18n("hello") << "\n";
+
+ return 0;
+}
diff --git a/src/boost/tools/build/example/gettext/readme.txt b/src/boost/tools/build/example/gettext/readme.txt
new file mode 100644
index 000000000..097e49b69
--- /dev/null
+++ b/src/boost/tools/build/example/gettext/readme.txt
@@ -0,0 +1,24 @@
+Copyright 2003 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This example shows how the GNU gettext utilities can be used with
+B2.
+
+A simple translation file is compiled and installed as a message catalog for
+Russian. The main application explicitly switches to the Russian locale and
+outputs the translation of "hello".
+
+To test:
+
+ bjam
+ bin/gcc/debug/main
+
+To test even more:
+
+ - add more localized strings to "main.cpp"
+ - run "bjam update-russian"
+ - edit "russian.po"
+ - run bjam
+ - run "main"
diff --git a/src/boost/tools/build/example/gettext/russian.po b/src/boost/tools/build/example/gettext/russian.po
new file mode 100644
index 000000000..daa7121c3
--- /dev/null
+++ b/src/boost/tools/build/example/gettext/russian.po
@@ -0,0 +1,21 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the PACKAGE package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2003-07-01 15:45+0400\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=CHARSET\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: main.cpp:16
+msgid "hello"
+msgstr "international hello"
diff --git a/src/boost/tools/build/example/hello/hello.cpp b/src/boost/tools/build/example/hello/hello.cpp
new file mode 100644
index 000000000..97ac7fd6b
--- /dev/null
+++ b/src/boost/tools/build/example/hello/hello.cpp
@@ -0,0 +1,18 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+// tag::source[]
+#include <iostream>
+
+int main()
+{
+ std::cout << "Hello!\n";
+ return 1;
+}
+// end::source[]
diff --git a/src/boost/tools/build/example/hello/jamroot.jam b/src/boost/tools/build/example/hello/jamroot.jam
new file mode 100644
index 000000000..672ec02e9
--- /dev/null
+++ b/src/boost/tools/build/example/hello/jamroot.jam
@@ -0,0 +1 @@
+exe hello : hello.cpp ;
diff --git a/src/boost/tools/build/example/hello/readme.adoc b/src/boost/tools/build/example/hello/readme.adoc
new file mode 100644
index 000000000..78d327751
--- /dev/null
+++ b/src/boost/tools/build/example/hello/readme.adoc
@@ -0,0 +1,46 @@
+////
+Copyright 2008 Jurko Gospodnetic
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+////
+
+= Hello
+
+This example shows a very basic Boost Build project set up so it compiles a
+single executable from a single source file:
+
+.`hello.cpp`
+[source,cpp]
+----
+include::../../example/hello/hello.cpp[tag=source]
+----
+
+Our `jamroot.jam` is minimal and only specifies one `exe` target for the
+program:
+
+.`jamroot.jam`
+[source,jam]
+----
+include::jamroot.jam[]
+----
+
+Building the example yields:
+
+[source,bash]
+----
+> cd /example/hello
+> b2
+...found 8 targets...
+...updating 4 targets...
+common.mkdir bin/clang-darwin-4.2.1
+common.mkdir bin/clang-darwin-4.2.1/debug
+clang-darwin.compile.c++ bin/clang-darwin-4.2.1/debug/hello.o
+clang-darwin.link bin/clang-darwin-4.2.1/debug/hello
+...updated 4 targets...
+> bin/clang-darwin-4.2.1/debug/hello
+Hello!
+----
+
+NOTE: The actual paths in the `bin` sub-directory will depend on your
+toolset.
diff --git a/src/boost/tools/build/example/libraries/app/app.cpp b/src/boost/tools/build/example/libraries/app/app.cpp
new file mode 100644
index 000000000..f62c1c35d
--- /dev/null
+++ b/src/boost/tools/build/example/libraries/app/app.cpp
@@ -0,0 +1,15 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <lib1.h>
+
+int main()
+{
+ foo();
+}
diff --git a/src/boost/tools/build/example/libraries/app/jamfile.jam b/src/boost/tools/build/example/libraries/app/jamfile.jam
new file mode 100644
index 000000000..ed2054e13
--- /dev/null
+++ b/src/boost/tools/build/example/libraries/app/jamfile.jam
@@ -0,0 +1,9 @@
+# Copyright 2002, 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+# Declare an executable that uses a library. Note that the
+# include paths for the library will be used automatically
+# when compiling 'app.cpp'.
+exe app : app.cpp /library-example/foo//bar ;
diff --git a/src/boost/tools/build/example/libraries/jamroot.jam b/src/boost/tools/build/example/libraries/jamroot.jam
new file mode 100644
index 000000000..5e0dc4814
--- /dev/null
+++ b/src/boost/tools/build/example/libraries/jamroot.jam
@@ -0,0 +1,4 @@
+
+use-project /library-example/foo : util/foo ;
+
+build-project app ;
diff --git a/src/boost/tools/build/example/libraries/util/foo/bar.cpp b/src/boost/tools/build/example/libraries/util/foo/bar.cpp
new file mode 100644
index 000000000..e6339ee9b
--- /dev/null
+++ b/src/boost/tools/build/example/libraries/util/foo/bar.cpp
@@ -0,0 +1,13 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
diff --git a/src/boost/tools/build/example/libraries/util/foo/include/lib1.h b/src/boost/tools/build/example/libraries/util/foo/include/lib1.h
new file mode 100644
index 000000000..50f5e19d2
--- /dev/null
+++ b/src/boost/tools/build/example/libraries/util/foo/include/lib1.h
@@ -0,0 +1,10 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+void foo();
diff --git a/src/boost/tools/build/example/libraries/util/foo/jamfile.jam b/src/boost/tools/build/example/libraries/util/foo/jamfile.jam
new file mode 100644
index 000000000..7b6359ea4
--- /dev/null
+++ b/src/boost/tools/build/example/libraries/util/foo/jamfile.jam
@@ -0,0 +1,9 @@
+# Copyright 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project
+ : usage-requirements <include>include ;
+
+lib bar : bar.cpp ;
diff --git a/src/boost/tools/build/example/make/foo.py b/src/boost/tools/build/example/make/foo.py
new file mode 100644
index 000000000..e4c0b824a
--- /dev/null
+++ b/src/boost/tools/build/example/make/foo.py
@@ -0,0 +1,2 @@
+import sys
+open(sys.argv[2], "w").write(open(sys.argv[1]).read())
diff --git a/src/boost/tools/build/example/make/jamroot.jam b/src/boost/tools/build/example/make/jamroot.jam
new file mode 100644
index 000000000..3f5ec5b56
--- /dev/null
+++ b/src/boost/tools/build/example/make/jamroot.jam
@@ -0,0 +1,22 @@
+import feature ;
+import toolset ;
+import os ;
+
+path-constant HERE : . ;
+make main.cpp : main_cpp.pro : @do-something ;
+
+feature.feature example.python.interpreter : : free ;
+
+toolset.flags do-something PYTHON : <example.python.interpreter> ;
+actions do-something
+{
+ "$(PYTHON:E=python)" "$(HERE)/foo.py" "$(>)" "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions do-something
+ {
+ $(PYTHON:E=python) $(HERE:W)foo.py $(>:W) $(<:W)
+ }
+}
diff --git a/src/boost/tools/build/example/make/main_cpp.pro b/src/boost/tools/build/example/make/main_cpp.pro
new file mode 100644
index 000000000..237c8ce18
--- /dev/null
+++ b/src/boost/tools/build/example/make/main_cpp.pro
@@ -0,0 +1 @@
+int main() {}
diff --git a/src/boost/tools/build/example/make/readme.txt b/src/boost/tools/build/example/make/readme.txt
new file mode 100644
index 000000000..333c55a71
--- /dev/null
+++ b/src/boost/tools/build/example/make/readme.txt
@@ -0,0 +1,7 @@
+Copyright 2002, 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+Example of using custom command to create one file from another, using the
+built-in 'make' rule.
diff --git a/src/boost/tools/build/example/pch-multi/include/extra/meta.hpp b/src/boost/tools/build/example/pch-multi/include/extra/meta.hpp
new file mode 100644
index 000000000..6a85b0886
--- /dev/null
+++ b/src/boost/tools/build/example/pch-multi/include/extra/meta.hpp
@@ -0,0 +1,17 @@
+/*
+ Copyright Rene Rivera 2019
+
+ Distributed under the Boost Software License, Version 1.0. (See
+ accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifndef B2_EXAMPLE_PCH_MULTI_EXTRA_META_HPP
+#define B2_EXAMPLE_PCH_MULTI_EXTRA_META_HPP
+
+#include <type_traits>
+#include <functional>
+#include <typeindex>
+#include <tuple>
+
+#endif
diff --git a/src/boost/tools/build/example/pch-multi/include/pch.hpp b/src/boost/tools/build/example/pch-multi/include/pch.hpp
new file mode 100644
index 000000000..8f05cc43d
--- /dev/null
+++ b/src/boost/tools/build/example/pch-multi/include/pch.hpp
@@ -0,0 +1,19 @@
+/* Copyright 2006 Vladimir Prus
+
+ Distributed under the Boost Software License, Version 1.0. (See
+ accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifdef BOOST_BUILD_PCH_ENABLED
+
+#ifdef FOO2
+int bar();
+#endif
+
+class TestClass {
+public:
+ TestClass(int, int) {}
+};
+
+#endif
diff --git a/src/boost/tools/build/example/pch-multi/include/std.hpp b/src/boost/tools/build/example/pch-multi/include/std.hpp
new file mode 100644
index 000000000..89e76bf6a
--- /dev/null
+++ b/src/boost/tools/build/example/pch-multi/include/std.hpp
@@ -0,0 +1,16 @@
+/*
+ Copyright Rene Rivera 2019
+
+ Distributed under the Boost Software License, Version 1.0. (See
+ accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifndef B2_EXAMPLE_PCH_MULTI_STD_HPP
+#define B2_EXAMPLE_PCH_MULTI_STD_HPP
+
+#include <iostream>
+#include <vector>
+#include <algorithm>
+
+#endif
diff --git a/src/boost/tools/build/example/pch-multi/jamroot.jam b/src/boost/tools/build/example/pch-multi/jamroot.jam
new file mode 100644
index 000000000..fd1528510
--- /dev/null
+++ b/src/boost/tools/build/example/pch-multi/jamroot.jam
@@ -0,0 +1,30 @@
+# Copyright 2006 Ilya Sokolov
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# pch ##########################################################################
+
+import pch ;
+
+local pchs ;
+for local hpp in [ glob-tree *.hpp ]
+{
+ cpp-pch $(hpp:B) : $(hpp) : <include>include ;
+ explicit $(hpp:B) ;
+ pchs += $(hpp:B) ;
+}
+alias headers : $(pchs) ;
+
+# exe ##########################################################################
+
+exe hello_world
+ : # sources
+ headers
+ source/hello_world.cpp
+ : # requirements
+ <include>include
+ : # default build
+ : # usage requirements
+ ;
diff --git a/src/boost/tools/build/example/pch-multi/source/hello_world.cpp b/src/boost/tools/build/example/pch-multi/source/hello_world.cpp
new file mode 100644
index 000000000..741bf7025
--- /dev/null
+++ b/src/boost/tools/build/example/pch-multi/source/hello_world.cpp
@@ -0,0 +1,17 @@
+/* Copyright 2006 Ilya Sokolov
+ Copyright 2006 Vladimir Prus
+
+ Distributed under the Boost Software License, Version 1.0. (See
+ accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include <pch.hpp>
+#include <std.hpp>
+#include <extra/meta.hpp>
+
+int main()
+{
+ TestClass c(1, 2);
+ return 0;
+}
diff --git a/src/boost/tools/build/example/pch/include/pch.hpp b/src/boost/tools/build/example/pch/include/pch.hpp
new file mode 100644
index 000000000..8f05cc43d
--- /dev/null
+++ b/src/boost/tools/build/example/pch/include/pch.hpp
@@ -0,0 +1,19 @@
+/* Copyright 2006 Vladimir Prus
+
+ Distributed under the Boost Software License, Version 1.0. (See
+ accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifdef BOOST_BUILD_PCH_ENABLED
+
+#ifdef FOO2
+int bar();
+#endif
+
+class TestClass {
+public:
+ TestClass(int, int) {}
+};
+
+#endif
diff --git a/src/boost/tools/build/example/pch/jamroot.jam b/src/boost/tools/build/example/pch/jamroot.jam
new file mode 100644
index 000000000..115164aae
--- /dev/null
+++ b/src/boost/tools/build/example/pch/jamroot.jam
@@ -0,0 +1,29 @@
+# Copyright 2006 Ilya Sokolov
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# pch ##########################################################################
+
+import pch ;
+
+cpp-pch pch
+ : # sources
+ include/pch.hpp
+ : # requirements
+ <include>include
+ ;
+explicit pch ;
+
+# exe ##########################################################################
+
+exe hello_world
+ : # sources
+ pch
+ source/hello_world.cpp
+ : # requirements
+ <include>include
+ : # default build
+ : # usage requirements
+ ;
diff --git a/src/boost/tools/build/example/pch/source/hello_world.cpp b/src/boost/tools/build/example/pch/source/hello_world.cpp
new file mode 100644
index 000000000..f618056a0
--- /dev/null
+++ b/src/boost/tools/build/example/pch/source/hello_world.cpp
@@ -0,0 +1,15 @@
+/* Copyright 2006 Ilya Sokolov
+ Copyright 2006 Vladimir Prus
+
+ Distributed under the Boost Software License, Version 1.0. (See
+ accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include <pch.hpp>
+
+int main()
+{
+ TestClass c(1, 2);
+ return 0;
+}
diff --git a/src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc b/src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc
new file mode 100644
index 000000000..8b2d744ef
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc
@@ -0,0 +1,4 @@
+Name: debugged
+Version: 0.1
+Description: A package with separate debug version (debug version)
+Cflags: -DVARIANT=\"DEBUG\"
diff --git a/src/boost/tools/build/example/pkg-config/jamroot.jam b/src/boost/tools/build/example/pkg-config/jamroot.jam
new file mode 100644
index 000000000..190948174
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/jamroot.jam
@@ -0,0 +1,104 @@
+#|
+Copyright 2019 Dmitry Arkhipov
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+
+using pkg-config : : : <libdir>packages ;
+using pkg-config : debug : : <libdir>packages <path>debug-packages ;
+
+import common ;
+import pkg-config ;
+import property-set ;
+import testing ;
+import version ;
+
+
+project : requirements <variant>debug:<pkg-config>debug ;
+
+
+pkg-config.import debugged ;
+pkg-config.import foobar : requirements <version>>=0.3 ;
+pkg-config.import mangled : requirements <conditional>@mangle-name ;
+
+versioned =
+ [ pkg-config.import versioned
+ : usage-requirements <conditional>@versioned-api
+ ] ;
+
+with-var =
+ [ pkg-config.import with-var
+ : usage-requirements <conditional>@var-to-define
+ ] ;
+
+
+# test if a package is found at all
+run test1.cpp foobar ;
+
+# test if conditional requirement is applied
+run test2.cpp mangled
+ : target-name test2-1
+ : requirements <threading>single
+ : args SINGLE
+ ;
+
+run test2.cpp mangled
+ : target-name test2-2
+ : requirements <threading>multi
+ : args MULTI
+ ;
+
+# test if pkg-config configuration is properly inferred from property set
+run test3.cpp debugged
+ : target-name test3-1
+ : requirements <variant>release
+ : args RELEASE
+ ;
+
+run test3.cpp debugged
+ : target-name test3-2
+ : requirements <variant>debug
+ : args DEBUG
+ ;
+
+# test use of version method of pkg-config targets
+run test4.cpp versioned ;
+
+# test use of variable method of pkg-config targets
+run test5.cpp with-var ;
+
+
+rule mangle-name ( props * ) {
+ import feature ;
+ local name =
+ [ common.format-name
+ <base> <threading>
+ : mangled
+ : ""
+ : [ property-set.create $(props) ]
+ ] ;
+ return <name>$(name) ;
+}
+
+
+rule versioned-api ( props * ) {
+ local ps = [ property-set.create $(props) ] ;
+ local version = [ $(versioned).version $(ps) ] ;
+ if [ version.version-less $(version) : 2 ]
+ {
+ return <define>VERSIONED_API=1 ;
+ }
+ else
+ {
+ return <define>VERSIONED_API=2 ;
+ }
+}
+
+
+rule var-to-define ( props * ) {
+ local ps = [ property-set.create $(props) ] ;
+ local qwerty = [ $(with-var).variable qwerty : $(ps) ] ;
+ return <define>QWERTY=\\\"$(qwerty)\\\" ;
+}
diff --git a/src/boost/tools/build/example/pkg-config/packages/debugged.pc b/src/boost/tools/build/example/pkg-config/packages/debugged.pc
new file mode 100644
index 000000000..b22e10d8b
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/packages/debugged.pc
@@ -0,0 +1,4 @@
+Name: debugged
+Version: 0.1
+Description: A package with separate debug version (release version)
+Cflags: -DVARIANT=\"RELEASE\"
diff --git a/src/boost/tools/build/example/pkg-config/packages/foobar.pc b/src/boost/tools/build/example/pkg-config/packages/foobar.pc
new file mode 100644
index 000000000..f62cfc820
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/packages/foobar.pc
@@ -0,0 +1,4 @@
+Name: foobar
+Version: 0.3
+Description: The bar for your foo
+Cflags: -DQWERTY=\"uiop\"
diff --git a/src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc b/src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc
new file mode 100644
index 000000000..107b4d3d3
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc
@@ -0,0 +1,4 @@
+Name: mangled
+Version: 0.1
+Description: A package with mangled name (multi-threaded version)
+Cflags: -DTHREADING=\"MULTI\"
diff --git a/src/boost/tools/build/example/pkg-config/packages/mangled.pc b/src/boost/tools/build/example/pkg-config/packages/mangled.pc
new file mode 100644
index 000000000..76976ecc1
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/packages/mangled.pc
@@ -0,0 +1,4 @@
+Name: mangled
+Version: 0.1
+Description: A package with mangled name (single-threaded version)
+Cflags: -DTHREADING=\"SINGLE\"
diff --git a/src/boost/tools/build/example/pkg-config/packages/versioned.pc b/src/boost/tools/build/example/pkg-config/packages/versioned.pc
new file mode 100644
index 000000000..701f35146
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/packages/versioned.pc
@@ -0,0 +1,3 @@
+Name: versioned
+Version: 4.2
+Description: A package with versioned API
diff --git a/src/boost/tools/build/example/pkg-config/packages/with-var.pc b/src/boost/tools/build/example/pkg-config/packages/with-var.pc
new file mode 100644
index 000000000..4b3e2e558
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/packages/with-var.pc
@@ -0,0 +1,4 @@
+qwerty=UIOP
+Name: with-var
+Version: 0.1
+Description: A package that defines a custom variable
diff --git a/src/boost/tools/build/example/pkg-config/test1.cpp b/src/boost/tools/build/example/pkg-config/test1.cpp
new file mode 100644
index 000000000..36f37bfb3
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/test1.cpp
@@ -0,0 +1,11 @@
+// Copyright 2019 Dmitry Arkhipov
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <cstdlib>
+#include <string>
+
+int main() {
+ return QWERTY == std::string("uiop") ? EXIT_SUCCESS : EXIT_FAILURE ;
+}
diff --git a/src/boost/tools/build/example/pkg-config/test2.cpp b/src/boost/tools/build/example/pkg-config/test2.cpp
new file mode 100644
index 000000000..f911d4577
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/test2.cpp
@@ -0,0 +1,12 @@
+// Copyright 2019 Dmitry Arkhipov
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <string>
+#include <iostream>
+
+int main(int, char const** argv) {
+ return THREADING == std::string(argv[1]) ? EXIT_SUCCESS : EXIT_FAILURE;
+}
diff --git a/src/boost/tools/build/example/pkg-config/test3.cpp b/src/boost/tools/build/example/pkg-config/test3.cpp
new file mode 100644
index 000000000..5df7ff0ef
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/test3.cpp
@@ -0,0 +1,12 @@
+// Copyright 2019 Dmitry Arkhipov
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <string>
+#include <iostream>
+
+int main(int, char const** argv) {
+ return VARIANT == std::string(argv[1]) ? EXIT_SUCCESS : EXIT_FAILURE;
+}
diff --git a/src/boost/tools/build/example/pkg-config/test4.cpp b/src/boost/tools/build/example/pkg-config/test4.cpp
new file mode 100644
index 000000000..ca9bcc738
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/test4.cpp
@@ -0,0 +1,11 @@
+// Copyright 2019 Dmitry Arkhipov
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+
+#if VERSIONED_API < 2
+# error "API is too old"
+#endif
+
+int main() {}
diff --git a/src/boost/tools/build/example/pkg-config/test5.cpp b/src/boost/tools/build/example/pkg-config/test5.cpp
new file mode 100644
index 000000000..8d843637b
--- /dev/null
+++ b/src/boost/tools/build/example/pkg-config/test5.cpp
@@ -0,0 +1,12 @@
+// Copyright 2019 Dmitry Arkhipov
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <string>
+#include <iostream>
+
+int main(int, char const** argv) {
+ return QWERTY == std::string("UIOP") ? EXIT_SUCCESS : EXIT_FAILURE;
+}
diff --git a/src/boost/tools/build/example/python_modules/jamroot.jam b/src/boost/tools/build/example/python_modules/jamroot.jam
new file mode 100644
index 000000000..c53e75d58
--- /dev/null
+++ b/src/boost/tools/build/example/python_modules/jamroot.jam
@@ -0,0 +1,8 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import python_helpers ;
+
+ECHO "test1:" [ python_helpers.test1 ] ;
+ECHO "test2:" [ python_helpers.test2 1234 : 5678 ] ;
diff --git a/src/boost/tools/build/example/python_modules/python_helpers.jam b/src/boost/tools/build/example/python_modules/python_helpers.jam
new file mode 100644
index 000000000..bbeb7007c
--- /dev/null
+++ b/src/boost/tools/build/example/python_modules/python_helpers.jam
@@ -0,0 +1,15 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+local here = [ modules.binding $(__name__) ] ;
+here = $(here:D) ;
+modules.poke : EXTRA_PYTHONPATH : $(here) ;
+
+# Import the Python rules to B2
+PYTHON_IMPORT_RULE python_helpers : test1 : python_helpers : test1 ;
+PYTHON_IMPORT_RULE python_helpers : test2 : python_helpers : test2 ;
+
+# Make the new rules accessible to everybody who imports us.
+EXPORT python_helpers : test1 test2 ;
diff --git a/src/boost/tools/build/example/python_modules/python_helpers.py b/src/boost/tools/build/example/python_modules/python_helpers.py
new file mode 100644
index 000000000..303363e91
--- /dev/null
+++ b/src/boost/tools/build/example/python_modules/python_helpers.py
@@ -0,0 +1,18 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Declare a couple of functions called from B2
+#
+# Each function will receive as many arguments as there are ":"-separated
+# arguments in the bjam call. Each argument is a list of strings.
+# As a special exception (a.k.a. a bug), if no arguments are passed from bjam,
+# the Python function will be passed a single empty list.
+#
+# All Python functions must return a list of strings, which may be empty.
+
+def test1(l):
+ return ["foo", "bar"]
+
+def test2(l, l2):
+    return [l[0], l2[0]]
\ No newline at end of file
diff --git a/src/boost/tools/build/example/python_modules/readme.txt b/src/boost/tools/build/example/python_modules/readme.txt
new file mode 100644
index 000000000..76b219117
--- /dev/null
+++ b/src/boost/tools/build/example/python_modules/readme.txt
@@ -0,0 +1,16 @@
+Copyright 2006 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This example shows how you can use Python modules from B2.
+
+In order to do this, you need to build bjam with Python support by running:
+
+ ./build.sh --with-python=/usr
+
+in the jam/src directory (replace /usr with the root of your Python
+installation).
+
+The integration between Python and bjam is very basic now, but enough to be
+useful.
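+
+As a sketch of the mechanism, python_helpers.jam in this directory imports the
+Python functions into a Jam module and re-exports them roughly like this:
+
+  PYTHON_IMPORT_RULE python_helpers : test1 : python_helpers : test1 ;
+  EXPORT python_helpers : test1 ;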
diff --git a/src/boost/tools/build/example/qt/README.txt b/src/boost/tools/build/example/qt/README.txt
new file mode 100644
index 000000000..d6977b584
--- /dev/null
+++ b/src/boost/tools/build/example/qt/README.txt
@@ -0,0 +1,20 @@
+Copyright 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This directory contains B2 examples for the Qt library
+(http://www.trolltech.com/products/qt/index.html).
+
+The current examples are:
+ 1. Basic setup -- application with several sources and moccable header.
+ 2. Using of .ui source file.
+ 2. Use of a .ui source file.
+ 3. Running .cpp files via the moc tool.
+
+For convenience, there are examples for both the 3.* and 4.* versions of Qt;
+they are mostly identical and differ only in source code.
+
+All examples assume that you have just installed B2 and that the QTDIR
+environment variable is set (typical values are /usr/share/qt3 and
+/usr/share/qt4). If you instead configure Qt in your user-config.jam with a
+"using qt ..." statement, you will have to remove the "using qt ;" statements
+from the example Jamroot files.
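+
+For reference, a user-config.jam entry would look something like the following
+(the paths are only illustrative; point them at your own installation):
+
+  using qt : /usr/share/qt3 ;    # for the Qt3 examples
+  using qt4 : /usr/share/qt4 ;   # for the Qt4 examples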
diff --git a/src/boost/tools/build/example/qt/qt3/hello/canvas.cpp b/src/boost/tools/build/example/qt/qt3/hello/canvas.cpp
new file mode 100644
index 000000000..c6d23c9d4
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/hello/canvas.cpp
@@ -0,0 +1,73 @@
+// Copyright Vladimir Prus 2004.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "canvas.h"
+
+#include <qlabel.h>
+#include <qcanvas.h>
+#include <qlayout.h>
+
+Canvas::Canvas(QWidget* parent)
+: QWidget(parent)
+{
+ m_pen = QPen(QColor(255, 128, 128));
+ m_brushes = new QBrush[2];
+ m_brushes[0] = QBrush(QColor(255, 0, 0));
+ m_brushes[1] = QBrush(QColor(0, 255, 0));
+ m_current_brush = 0;
+
+ m_canvas = new QCanvas(this);
+ m_canvas->resize(4*1600, 600);
+
+ redraw();
+
+ QVBoxLayout* l = new QVBoxLayout(this);
+
+ m_canvas_view = new QCanvasView(m_canvas, this);
+ l->addWidget(m_canvas_view);
+ m_canvas_view->resize(rect().size());
+ m_canvas_view->show();
+}
+
+Canvas::~Canvas()
+{
+ delete[] m_brushes; // matches the array allocated with new[] in the constructor
+}
+
+void Canvas::redraw()
+{
+ QCanvasItemList l = m_canvas->allItems();
+ for(QCanvasItemList::iterator i = l.begin(),
+ e = l.end(); i != e; ++i)
+ {
+ delete *i;
+ }
+
+ unsigned count = 0;
+ for (unsigned x = 10; x < 4*1600; x += 20)
+ for (unsigned y = 10; y < 600; y += 20) {
+ QCanvasRectangle* r = new QCanvasRectangle(x, y, 10, 10, m_canvas);
+ r->setPen(m_pen);
+ r->setBrush(m_brushes[m_current_brush]);
+ r->show();
+ ++count;
+ QCanvasText* t = new QCanvasText("D", m_canvas);
+ t->move(x, y);
+ t->show();
+ ++count;
+ }
+
+ (new QCanvasText(QString::number(count), m_canvas))->show();
+ m_canvas->setAllChanged();
+
+}
+
+void Canvas::change_color()
+{
+ m_current_brush = (m_current_brush + 1)%2;
+ redraw();
+ m_canvas->update();
+}
+
diff --git a/src/boost/tools/build/example/qt/qt3/hello/canvas.h b/src/boost/tools/build/example/qt/qt3/hello/canvas.h
new file mode 100644
index 000000000..f9f950267
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/hello/canvas.h
@@ -0,0 +1,35 @@
+// Copyright Vladimir Prus 2004.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+
+#ifndef CANVAS_VP_2004_08_31
+#define CANVAS_VP_2004_08_31
+
+#include <qmainwindow.h>
+#include <qpen.h>
+#include <qbrush.h>
+
+class Canvas : public QWidget
+{
+ Q_OBJECT
+public:
+ Canvas(QWidget* parent);
+
+ virtual ~Canvas();
+
+public slots:
+ void change_color();
+
+private:
+ void redraw();
+ class QCanvas* m_canvas;
+ class QCanvasView* m_canvas_view;
+ class QPen m_pen;
+ class QBrush* m_brushes;
+ int m_current_brush;
+};
+
+#endif
+
diff --git a/src/boost/tools/build/example/qt/qt3/hello/jamroot.jam b/src/boost/tools/build/example/qt/qt3/hello/jamroot.jam
new file mode 100644
index 000000000..03be582e5
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/hello/jamroot.jam
@@ -0,0 +1,13 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+using qt ;
+
+project
+ # Build the MT version, unless asked otherwise.
+ : default-build <threading>multi
+ ;
+
+exe canvas : main.cpp canvas.cpp canvas.h : <library>/qt//qt ;
\ No newline at end of file
diff --git a/src/boost/tools/build/example/qt/qt3/hello/main.cpp b/src/boost/tools/build/example/qt/qt3/hello/main.cpp
new file mode 100644
index 000000000..8f1ffc2fb
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/hello/main.cpp
@@ -0,0 +1,36 @@
+// Copyright Vladimir Prus 2004.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "canvas.h"
+#include <qapplication.h>
+#include <qvbox.h>
+#include <qpushbutton.h>
+
+class Window : public QMainWindow
+{
+public:
+ Window()
+ {
+ setCaption("QCanvas test");
+ QVBox* vb = new QVBox(this);
+ setCentralWidget(vb);
+
+ Canvas* c = new Canvas(vb);
+ QPushButton* b = new QPushButton("Change color", vb);
+ connect(b, SIGNAL(clicked()), c, SLOT(change_color()));
+ }
+};
+
+int main(int argc, char **argv)
+{
+ QApplication app(argc, argv);
+ Window *w = new Window();
+
+ app.setMainWidget(w);
+ w->show();
+
+ return app.exec();
+}
+
diff --git a/src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam b/src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam
new file mode 100644
index 000000000..85778da20
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam
@@ -0,0 +1,11 @@
+
+using qt ;
+import cast ;
+
+project
+ : default-build <threading>multi
+ ;
+
+exe main : main.cpp [ cast _ moccable-cpp : main.cpp ]
+ /qt//qt
+ ;
diff --git a/src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp b/src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp
new file mode 100644
index 000000000..ed36f7469
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp
@@ -0,0 +1,41 @@
+// Copyright Vladimir Prus 2005.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <qwidget.h>
+#include <qpushbutton.h>
+#include <qapplication.h>
+
+#include <iostream>
+
+class My_widget : public QWidget
+{
+ Q_OBJECT
+public:
+ My_widget() : QWidget()
+ {
+ QPushButton* b = new QPushButton("Push me", this);
+
+ connect(b, SIGNAL(clicked()), this, SLOT(theSlot()));
+ }
+
+private slots:
+ void theSlot()
+ {
+ std::cout << "Clicked\n";
+ }
+
+};
+
+int main(int ac, char* av[])
+{
+ QApplication app(ac, av);
+ My_widget mw;
+ mw.show();
+ app.setMainWidget(&mw);
+ app.exec();
+}
+
+#include "main.moc"
diff --git a/src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui b/src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui
new file mode 100644
index 000000000..26cc73487
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui
@@ -0,0 +1,58 @@
+<!DOCTYPE UI><UI version="3.0" stdsetdef="1">
+<class>HelloWorldWidget</class>
+<comment>
+<!--
+ Copyright Felix E. Klee, 2003
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt
+ or copy at http://www.boost.org/LICENSE_1_0.txt)
+-->
+</comment>
+<widget class="QWidget">
+ <property name="name">
+ <cstring>HelloWorldWidget</cstring>
+ </property>
+ <property name="geometry">
+ <rect>
+ <x>0</x>
+ <y>0</y>
+ <width>124</width>
+ <height>63</height>
+ </rect>
+ </property>
+ <property name="caption">
+ <string>Hello World!</string>
+ </property>
+ <vbox>
+ <property name="name">
+ <cstring>unnamed</cstring>
+ </property>
+ <property name="margin">
+ <number>11</number>
+ </property>
+ <property name="spacing">
+ <number>6</number>
+ </property>
+ <widget class="QLabel">
+ <property name="name">
+ <cstring>TextLabel2</cstring>
+ </property>
+ <property name="text">
+ <string>Hello World!</string>
+ </property>
+ <property name="alignment">
+ <set>AlignCenter</set>
+ </property>
+ </widget>
+ <widget class="QPushButton">
+ <property name="name">
+ <cstring>OkButton</cstring>
+ </property>
+ <property name="text">
+ <string>OK</string>
+ </property>
+ </widget>
+ </vbox>
+</widget>
+<layoutdefaults spacing="6" margin="11"/>
+</UI>
diff --git a/src/boost/tools/build/example/qt/qt3/uic/jamroot.jam b/src/boost/tools/build/example/qt/qt3/uic/jamroot.jam
new file mode 100644
index 000000000..d0b806294
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/uic/jamroot.jam
@@ -0,0 +1,15 @@
+# Copyright Felix E. Klee, 2003
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Tell B2 that Qt should be used. QTDIR gives the installation
+# prefix.
+using qt ;
+
+project
+ : default-build <threading>multi
+ ;
+
+exe hello : main.cpp hello_world_widget.ui : <library>/qt//qt ;
+
diff --git a/src/boost/tools/build/example/qt/qt3/uic/main.cpp b/src/boost/tools/build/example/qt/qt3/uic/main.cpp
new file mode 100644
index 000000000..f2a08b5fa
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt3/uic/main.cpp
@@ -0,0 +1,18 @@
+// Copyright Felix E. Klee, 2003
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "hello_world_widget.h"
+#include <qapplication.h>
+
+#include <qpushbutton.h>
+
+int main(int argc, char **argv) {
+ QApplication a(argc, argv);
+ HelloWorldWidget w;
+ QObject::connect(static_cast<QObject*>(w.OkButton), SIGNAL(clicked()), &w, SLOT(close()));
+ a.setMainWidget(&w);
+ w.show();
+ return a.exec();
+}
diff --git a/src/boost/tools/build/example/qt/qt4/hello/arrow.cpp b/src/boost/tools/build/example/qt/qt4/hello/arrow.cpp
new file mode 100644
index 000000000..e821b1690
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/hello/arrow.cpp
@@ -0,0 +1,158 @@
+// Copyright Vladimir Prus 2005.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "arrow.h"
+
+#include <QtGui/qapplication.h>
+
+#include <QtGui/qwidget.h>
+#include <QtGui/qpainter.h>
+#include <QtGui/qpainterpath.h>
+
+#include <stdlib.h>
+#include <math.h>
+
+Arrow_widget::Arrow_widget(QWidget* parent) : QWidget(parent), color_(0)
+{
+ QPalette pal = palette();
+ pal.setBrush(backgroundRole(), QBrush(Qt::white));
+ setPalette(pal);
+}
+
+void Arrow_widget::slotChangeColor()
+{
+ color_ = (color_ + 1) % 3;
+ update();
+}
+
+void
+Arrow_widget::draw_arrow(int x1, int y1, int x2, int y2, QPainter& painter)
+{
+ // The length from the tip of the arrow to the point
+ // where the line starts.
+ const int arrowhead_length = 16;
+
+ QPainterPath arrow;
+ arrow.moveTo(x1, y1);
+
+ // Determine the angle of the straight line.
+ double a1 = (x2-x1);
+ double a2 = (y2-y1);
+ double b1 = 1;
+ double b2 = 0;
+
+ double straight_length = sqrt(a1*a1 + a2*a2);
+
+ double dot_product = a1*b1 + a2*b2;
+ double cosine = dot_product/
+ (sqrt(pow(a1, 2) + pow(a2, 2))*sqrt(b1 + b2));
+ double angle = acos(cosine);
+ if (y1 < y2)
+ {
+ angle = -angle;
+ }
+ double straight_angle = angle*180/M_PI;
+
+ double limit = 10;
+
+ double angle_to_vertical;
+ if (fabs(straight_angle) < 90)
+ angle_to_vertical = fabs(straight_angle);
+ else if (straight_angle > 0)
+ angle_to_vertical = 180-straight_angle;
+ else
+ angle_to_vertical = 180-(-straight_angle);
+
+ double angle_delta = 0;
+ if (angle_to_vertical > limit)
+ angle_delta = 30 * (angle_to_vertical - limit)/90;
+ double start_angle = straight_angle > 0
+ ? straight_angle - angle_delta :
+ straight_angle + angle_delta;
+
+
+ QMatrix m1;
+ m1.translate(x1, y1);
+ m1.rotate(-start_angle);
+
+ double end_angle = straight_angle > 0
+ ? (straight_angle + 180 + angle_delta) :
+ (straight_angle + 180 - angle_delta);
+
+ QMatrix m2;
+ m2.reset();
+ m2.translate(x2, y2);
+ m2.rotate(-end_angle);
+
+ arrow.cubicTo(m1.map(QPointF(straight_length/2, 0)),
+ m2.map(QPointF(straight_length/2, 0)),
+ m2.map(QPointF(arrowhead_length, 0)));
+
+ painter.save();
+ painter.setBrush(Qt::NoBrush);
+ painter.drawPath(arrow);
+ painter.restore();
+
+ painter.save();
+ painter.translate(x2, y2);
+
+ painter.rotate(-90);
+ painter.rotate(-end_angle);
+ painter.rotate(180);
+
+ QPolygon arrowhead(4);
+ arrowhead.setPoint(0, 0, 0);
+ arrowhead.setPoint(1, arrowhead_length/3, -arrowhead_length*5/4);
+ arrowhead.setPoint(2, 0, -arrowhead_length);
+ arrowhead.setPoint(3, -arrowhead_length/3, -arrowhead_length*5/4);
+
+ painter.drawPolygon(arrowhead);
+
+ painter.restore();
+
+}
+
+
+void Arrow_widget::paintEvent(QPaintEvent*)
+{
+ QPainter p(this);
+
+ p.setRenderHint(QPainter::Antialiasing);
+
+ int base_x = 550;
+ int base_y = 200;
+
+ if (color_ == 0)
+ p.setBrush(Qt::black);
+ else if (color_ == 1)
+ p.setBrush(Qt::green);
+ else if (color_ == 2)
+ p.setBrush(Qt::yellow);
+ else
+ p.setBrush(Qt::black);
+
+ for (int x_step = 0; x_step < 6; ++x_step)
+ {
+ for (int y_step = 1; y_step <= 3; ++y_step)
+ {
+ draw_arrow(base_x, base_y, base_x+x_step*100,
+ base_y - y_step*50, p);
+
+ draw_arrow(base_x, base_y, base_x+x_step*100,
+ base_y + y_step*50, p);
+
+ draw_arrow(base_x, base_y, base_x-x_step*100,
+ base_y + y_step*50, p);
+
+ draw_arrow(base_x, base_y, base_x-x_step*100,
+ base_y - y_step*50, p);
+ }
+ }
+
+ draw_arrow(50, 400, 1000, 450, p);
+ draw_arrow(1000, 400, 50, 450, p);
+
+}
+
diff --git a/src/boost/tools/build/example/qt/qt4/hello/arrow.h b/src/boost/tools/build/example/qt/qt4/hello/arrow.h
new file mode 100644
index 000000000..d7743864f
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/hello/arrow.h
@@ -0,0 +1,30 @@
+// Copyright Vladimir Prus 2005.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtGui/qapplication.h>
+
+#include <QtGui/qwidget.h>
+#include <QtGui/qpainter.h>
+#include <QtGui/qpainterpath.h>
+
+#include <stdlib.h>
+#include <math.h>
+
+class Arrow_widget : public QWidget
+{
+ Q_OBJECT
+public:
+ Arrow_widget(QWidget* parent = 0);
+
+public slots:
+ void slotChangeColor();
+
+private:
+ void draw_arrow(int x1, int y1, int x2, int y2, QPainter& painter);
+ void paintEvent(QPaintEvent*);
+
+private:
+ int color_;
+};
diff --git a/src/boost/tools/build/example/qt/qt4/hello/jamroot.jam b/src/boost/tools/build/example/qt/qt4/hello/jamroot.jam
new file mode 100644
index 000000000..83952f17b
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/hello/jamroot.jam
@@ -0,0 +1,14 @@
+
+import qt4 ;
+
+if ! [ qt4.initialized ]
+{
+ ECHO "Warning: Qt4 not initialized in user-config.jam" ;
+ ECHO "Assuming /space/p2/ghost/build/Qt4 as location." ;
+ ECHO "This very likely won't work for you." ;
+ using qt4 : /space/p2/ghost/build/Qt4 ;
+}
+
+project : requirements <threading>multi ;
+
+exe arrow : main.cpp arrow.cpp arrow.h /qt//QtGui ;
\ No newline at end of file
diff --git a/src/boost/tools/build/example/qt/qt4/hello/main.cpp b/src/boost/tools/build/example/qt/qt4/hello/main.cpp
new file mode 100644
index 000000000..df27444bd
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/hello/main.cpp
@@ -0,0 +1,27 @@
+// Copyright Vladimir Prus 2005.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "arrow.h"
+
+#include <QApplication>
+#include <QTimer>
+
+int main(int ac, char* av[])
+{
+ QApplication app(ac, av);
+ Arrow_widget* w = new Arrow_widget;
+ w->resize(1100, 480);
+
+ QTimer timer;
+ QObject::connect(&timer, SIGNAL(timeout()),
+ w, SLOT(slotChangeColor()));
+
+ timer.start(2000);
+
+ w->show();
+ app.exec();
+ return 0;
+}
+
diff --git a/src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam b/src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam
new file mode 100644
index 000000000..d07b9c7d3
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam
@@ -0,0 +1,18 @@
+
+import qt4 ;
+if ! [ qt4.initialized ]
+{
+ ECHO "Warning: Qt4 not initialized in user-config.jam" ;
+ ECHO "Assuming /space/p2/ghost/build/Qt4 as location." ;
+ ECHO "This very likely won't work for you." ;
+ using qt4 : /space/p2/ghost/build/Qt4 ;
+}
+
+import cast ;
+exe main : main.cpp
+ [ cast _ moccable-cpp : main.cpp ]
+ /qt//QtGui
+ : <threading>multi
+ ;
+
+
diff --git a/src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp b/src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp
new file mode 100644
index 000000000..ffc96cc3e
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp
@@ -0,0 +1,39 @@
+// Copyright Vladimir Prus 2005.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include <qwidget.h>
+#include <qpushbutton.h>
+#include <qapplication.h>
+
+#include <iostream>
+
+class My_widget : public QWidget
+{
+ Q_OBJECT
+public:
+ My_widget() : QWidget()
+ {
+ QPushButton* b = new QPushButton("Push me", this);
+
+ connect(b, SIGNAL(clicked()), this, SLOT(theSlot()));
+ }
+
+private slots:
+ void theSlot()
+ {
+ std::cout << "Clicked\n";
+ }
+
+};
+
+int main(int ac, char* av[])
+{
+ QApplication app(ac, av);
+ My_widget mw;
+ mw.show();
+ app.exec();
+}
+
+#include "main.moc"
diff --git a/src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui b/src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui
new file mode 100644
index 000000000..67060b336
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui
@@ -0,0 +1,55 @@
+<ui version="4.0" >
+ <author></author>
+ <comment>
+<!--
+ Copyright Felix E. Klee, 2003
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt
+ or copy at http://www.boost.org/LICENSE_1_0.txt)
+-->
+ </comment>
+ <exportmacro></exportmacro>
+ <class>HelloWorldWidget</class>
+ <widget class="QWidget" name="HelloWorldWidget" >
+ <property name="geometry" >
+ <rect>
+ <x>0</x>
+ <y>0</y>
+ <width>124</width>
+ <height>63</height>
+ </rect>
+ </property>
+ <property name="windowTitle" >
+ <string>Hello World!</string>
+ </property>
+ <layout class="QVBoxLayout" >
+ <property name="margin" >
+ <number>11</number>
+ </property>
+ <property name="spacing" >
+ <number>6</number>
+ </property>
+ <item>
+ <widget class="QLabel" name="TextLabel2" >
+ <property name="text" >
+ <string>Hello World!</string>
+ </property>
+ <property name="alignment" >
+ <set>Qt::AlignCenter</set>
+ </property>
+ </widget>
+ </item>
+ <item>
+ <widget class="QPushButton" name="OkButton" >
+ <property name="text" >
+ <string>OK</string>
+ </property>
+ </widget>
+ </item>
+ </layout>
+ </widget>
+ <layoutdefault spacing="6" margin="11" />
+ <pixmapfunction>qPixmapFromMimeSource</pixmapfunction>
+ <resources/>
+ <connections/>
+</ui>
diff --git a/src/boost/tools/build/example/qt/qt4/uic/jamroot.jam b/src/boost/tools/build/example/qt/qt4/uic/jamroot.jam
new file mode 100644
index 000000000..40675a72e
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/uic/jamroot.jam
@@ -0,0 +1,18 @@
+# Copyright Felix E. Klee, 2003
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import qt4 ;
+if ! [ qt4.initialized ]
+{
+ ECHO "Warning: Qt4 not initialized in user-config.jam" ;
+ ECHO "Assuming /space/p2/ghost/build/Qt4 as location." ;
+ ECHO "This very likely won't work for you." ;
+ using qt4 : /space/p2/ghost/build/Qt4 ;
+}
+
+project : requirements <threading>multi
+ ;
+
+exe hello : main.cpp hello_world_widget.ui : <library>/qt//QtGui ;
diff --git a/src/boost/tools/build/example/qt/qt4/uic/main.cpp b/src/boost/tools/build/example/qt/qt4/uic/main.cpp
new file mode 100644
index 000000000..fc72fd5e6
--- /dev/null
+++ b/src/boost/tools/build/example/qt/qt4/uic/main.cpp
@@ -0,0 +1,23 @@
+// Copyright Felix E. Klee, 2003
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "ui_hello_world_widget.h"
+#include <qapplication.h>
+#include <qwidget.h>
+
+#include <qpushbutton.h>
+
+int main(int argc, char **argv) {
+ QApplication a(argc, argv);
+
+ QWidget w;
+ Ui::HelloWorldWidget wm;
+ wm.setupUi(&w);
+
+ QObject::connect(wm.OkButton, SIGNAL(clicked()), &w, SLOT(close()));
+
+ w.show();
+ return a.exec();
+}
diff --git a/src/boost/tools/build/example/sanitizers/jamroot.jam b/src/boost/tools/build/example/sanitizers/jamroot.jam
new file mode 100644
index 000000000..4b3bda916
--- /dev/null
+++ b/src/boost/tools/build/example/sanitizers/jamroot.jam
@@ -0,0 +1 @@
+exe main : main.cpp ;
diff --git a/src/boost/tools/build/example/sanitizers/main.cpp b/src/boost/tools/build/example/sanitizers/main.cpp
new file mode 100644
index 000000000..62e6f3b60
--- /dev/null
+++ b/src/boost/tools/build/example/sanitizers/main.cpp
@@ -0,0 +1,9 @@
+#include <iostream>
+
+// tag::source[]
+int main()
+{
+ char* c = nullptr;
+ std::cout << "Hello sanitizers\n " << *c;
+}
+// end::source[]
diff --git a/src/boost/tools/build/example/sanitizers/readme.adoc b/src/boost/tools/build/example/sanitizers/readme.adoc
new file mode 100644
index 000000000..b964324ca
--- /dev/null
+++ b/src/boost/tools/build/example/sanitizers/readme.adoc
@@ -0,0 +1,64 @@
+////
+Copyright 2019 Damian Jarek
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+////
+
+= Sanitizers
+
+This example shows how to enable sanitizers when using a clang or gcc toolset.
+
+.`main.cpp`
+[source,cpp]
+----
+include::../../example/sanitizers/main.cpp[tag=source]
+----
+
+Our `jamroot.jam` is minimal and only specifies one `exe` target for the
+program:
+
+.`jamroot.jam`
+[source,jam]
+----
+include::jamroot.jam[]
+----
+
+Sanitizers can be enabled by passing `on` or `norecover` to the appropriate sanitizer feature
+(e.g. `thread-sanitizer=on`). The `norecover` option causes the program to terminate after
+the first sanitizer issue is detected. The following example shows how to enable `address` and `undefined`
+sanitizers in a simple program:
+
+[source,bash]
+----
+> cd /example/sanitizers
+> b2 toolset=gcc address-sanitizer=norecover undefined-sanitizer=on
+...found 10 targets...
+...updating 7 targets...
+gcc.compile.c++ bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main.o
+gcc.link bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main
+...updated 7 targets...
+----
+
+Running the produced program may produce output similar to the following:
+
+[source,bash]
+----
+> ./bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main
+Hello sanitizers
+main.cpp:6:43: runtime error: load of null pointer of type 'char'
+ASAN:DEADLYSIGNAL
+=================================================================
+==29767==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000000 (pc 0x55ba7988af1b bp 0x7ffdf3d76560 sp 0x7ffdf3d76530 T0)
+==29767==The signal is caused by a READ memory access.
+==29767==Hint: address points to the zero page.
+ #0 0x55ba7988af1a in main /home/damian/projects/boost/tools/build/example/sanitizers/main.cpp:6
+ #1 0x7f42f2ba1b96 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x21b96)
+ #2 0x55ba7988adb9 in _start (/home/damian/projects/boost/tools/build/example/sanitizers/bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main+0xdb9)
+
+AddressSanitizer can not provide additional info.
+SUMMARY: AddressSanitizer: SEGV /home/damian/projects/boost/tools/build/example/sanitizers/main.cpp:6 in main
+==29767==ABORTING
+----
+
+NOTE: The actual paths in the `bin` sub-directory will depend on your
+toolset and configuration. The presented output may vary depending on your compiler version.
diff --git a/src/boost/tools/build/example/sass/importing.scss b/src/boost/tools/build/example/sass/importing.scss
new file mode 100644
index 000000000..0c3586afc
--- /dev/null
+++ b/src/boost/tools/build/example/sass/importing.scss
@@ -0,0 +1,3 @@
+@import "foobar";
+
+body { color: red; }
diff --git a/src/boost/tools/build/example/sass/include/foobar.scss b/src/boost/tools/build/example/sass/include/foobar.scss
new file mode 100644
index 000000000..2c77cef1f
--- /dev/null
+++ b/src/boost/tools/build/example/sass/include/foobar.scss
@@ -0,0 +1,3 @@
+body {
+ border: { color: red; }
+}
diff --git a/src/boost/tools/build/example/sass/jamroot.jam b/src/boost/tools/build/example/sass/jamroot.jam
new file mode 100644
index 000000000..8297df876
--- /dev/null
+++ b/src/boost/tools/build/example/sass/jamroot.jam
@@ -0,0 +1,15 @@
+#|
+Copyright 2017 Dmitry Arkhipov
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+css stylesheet1 : singleton.scss : <flags>"--precision 1" ;
+css stylesheet2 : singleton.sass ;
+css stylesheet3 : importing.scss : <include>include ;
+css stylesheet4
+ : singleton.scss
+ : <sass-style>expanded
+ <sass-line-numbers>off
+ ;
diff --git a/src/boost/tools/build/example/sass/singleton.sass b/src/boost/tools/build/example/sass/singleton.sass
new file mode 100644
index 000000000..455fefdd1
--- /dev/null
+++ b/src/boost/tools/build/example/sass/singleton.sass
@@ -0,0 +1,12 @@
+body
+ p
+ line-height: 1.5em
+
+ span
+ font-weight: 700
+ a
+ text-decoration: none
+
+ &:hover
+ text-decoration: underline
+ font-size: (10px/3)
diff --git a/src/boost/tools/build/example/sass/singleton.scss b/src/boost/tools/build/example/sass/singleton.scss
new file mode 100644
index 000000000..afe15e9c4
--- /dev/null
+++ b/src/boost/tools/build/example/sass/singleton.scss
@@ -0,0 +1,11 @@
+body {
+ p { line-height: 1.5em; }
+ span { font-weight: 700; }
+ a {
+ text-decoration: none;
+ &:hover {
+ text-decoration: underline;
+ font-size: (10px/3);
+ }
+ }
+}
diff --git a/src/boost/tools/build/example/site-config.jam b/src/boost/tools/build/example/site-config.jam
new file mode 100644
index 000000000..ad22d6744
--- /dev/null
+++ b/src/boost/tools/build/example/site-config.jam
@@ -0,0 +1,4 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
diff --git a/src/boost/tools/build/example/testing/compile-fail.cpp b/src/boost/tools/build/example/testing/compile-fail.cpp
new file mode 100644
index 000000000..a219fa5c6
--- /dev/null
+++ b/src/boost/tools/build/example/testing/compile-fail.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main()
+{
+ std::cout << "Bye!\n";
+ return EXIT_FAILURE
+}
diff --git a/src/boost/tools/build/example/testing/fail.cpp b/src/boost/tools/build/example/testing/fail.cpp
new file mode 100644
index 000000000..965661188
--- /dev/null
+++ b/src/boost/tools/build/example/testing/fail.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main()
+{
+ std::cout << "Bye!\n";
+ return EXIT_FAILURE;
+}
diff --git a/src/boost/tools/build/example/testing/jamroot.jam b/src/boost/tools/build/example/testing/jamroot.jam
new file mode 100644
index 000000000..047aff39c
--- /dev/null
+++ b/src/boost/tools/build/example/testing/jamroot.jam
@@ -0,0 +1,10 @@
+# Copyright 2014 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+using testing ;
+
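+# The testing rules used below, briefly:
+#   run          - build and run the program, expecting it to succeed
+#   run-fail     - build and run the program, expecting the run to fail
+#   compile      - expect the source to compile
+#   compile-fail - expect the compilation to fail
+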
+run success.cpp : : ;
+run-fail fail.cpp : : ;
+compile success.cpp : : success-compile ;
+compile-fail compile-fail.cpp ;
diff --git a/src/boost/tools/build/example/testing/success.cpp b/src/boost/tools/build/example/testing/success.cpp
new file mode 100644
index 000000000..bf5588062
--- /dev/null
+++ b/src/boost/tools/build/example/testing/success.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+#include <cstdlib>
+
+int main()
+{
+ std::cout << "Hi!\n";
+ return EXIT_SUCCESS;
+}
diff --git a/src/boost/tools/build/example/time/hello.cpp b/src/boost/tools/build/example/time/hello.cpp
new file mode 100644
index 000000000..680802289
--- /dev/null
+++ b/src/boost/tools/build/example/time/hello.cpp
@@ -0,0 +1,16 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+
+int main()
+{
+ std::cout << "Hello!\n";
+ return 1;
+}
diff --git a/src/boost/tools/build/example/time/jamroot.jam b/src/boost/tools/build/example/time/jamroot.jam
new file mode 100644
index 000000000..3e2730f80
--- /dev/null
+++ b/src/boost/tools/build/example/time/jamroot.jam
@@ -0,0 +1,16 @@
+#|
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+#[jamroot
+#<< Import the time rule from the testing module.
+import testing ;
+
+#<< The target we are timing just builds a hello program.
+exe hello : hello.cpp ;
+
+#<< This target records the time to build the `hello` target.
+time hello.time : hello ;
+#]
diff --git a/src/boost/tools/build/example/time/readme.qbk b/src/boost/tools/build/example/time/readme.qbk
new file mode 100644
index 000000000..808a2ceec
--- /dev/null
+++ b/src/boost/tools/build/example/time/readme.qbk
@@ -0,0 +1,47 @@
+[/
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+]
+
+[section Time Action]
+
+This example shows how to use the `testing.time` utility to show time
+information for building a target.
+
+Files:
+
+* [@../../example/time/jamroot.jam jamroot.jam]
+* [@../../example/time/hello.cpp hello.cpp]
+
+Our `jamroot.jam` specifies the target we build and a `time`
+declaration that times building that target:
+
+[import jamroot.jam]
+
+[jamroot]
+
+Building the example yields:
+
+[teletype]
+```
+> cd /example/time
+> b2
+...found 9 targets...
+...updating 6 targets...
+common.mkdir bin
+common.mkdir bin/clang-darwin-4.2.1
+common.mkdir bin/clang-darwin-4.2.1/debug
+clang-darwin.compile.c++ bin/clang-darwin-4.2.1/debug/hello.o
+clang-darwin.link bin/clang-darwin-4.2.1/debug/hello
+testing.time bin/clang-darwin-4.2.1/debug/hello.time
+user: [hello] 0.013509
+system: [hello] 0.045641
+clock: [hello] 0.000000
+...updated 6 targets...
+```
+
+[note The actual paths in the `bin` sub-directory will depend on your
+toolset.]
+
+[endsect]
diff --git a/src/boost/tools/build/example/try_compile/Jamroot.jam b/src/boost/tools/build/example/try_compile/Jamroot.jam
new file mode 100644
index 000000000..3131e722e
--- /dev/null
+++ b/src/boost/tools/build/example/try_compile/Jamroot.jam
@@ -0,0 +1,29 @@
+
+# This example shows performing configure checks in B2,
+# e.g. to check for some system function or compiler quirk.
+
+# First, declare a metatarget that we'll try to build.
+obj foo : foo.cpp ;
+# Make it explicit so that it's only built if used by a configure check
+explicit foo ;
+
+# Declare a target that depends on configure check result.
+exe main
+ : main.cpp
+ # The check-target-builds invocation in requirements section will
+ # - build the specified metatarget
+ # - if it builds OK, add the properties in the second parameter
+ # - otherwise, add the properties in the third parameter
+ : [ check-target-builds foo : <define>FOO=1 : <define>FOO=0 ]
+ ;
+
+# To test this:
+#
+# 1. Build with "b2". You should see a "foo builds: yes" message, and running
+# the produced executable will show that FOO is set to 1.
+# 2. Modify foo.cpp to contain a compile error, rebuild with
+# "b2 -a --reconfigure". You should see a "foo builds: no" message, and running
+# the produced executable should show that FOO is now set to 0.
+#
+# The output from the check is not shown on the console; instead it is
+# redirected to the bin/config.log file.
diff --git a/src/boost/tools/build/example/try_compile/foo.cpp b/src/boost/tools/build/example/try_compile/foo.cpp
new file mode 100644
index 000000000..c9107f937
--- /dev/null
+++ b/src/boost/tools/build/example/try_compile/foo.cpp
@@ -0,0 +1,6 @@
+
+
+int foo()
+{
+ return 0;
+} \ No newline at end of file
diff --git a/src/boost/tools/build/example/try_compile/main.cpp b/src/boost/tools/build/example/try_compile/main.cpp
new file mode 100644
index 000000000..12f64995b
--- /dev/null
+++ b/src/boost/tools/build/example/try_compile/main.cpp
@@ -0,0 +1,8 @@
+
+#include <iostream>
+using namespace std;
+
+int main()
+{
+ std::cout << "Foo: " << FOO << "\n";
+} \ No newline at end of file
diff --git a/src/boost/tools/build/example/user-config.jam b/src/boost/tools/build/example/user-config.jam
new file mode 100644
index 000000000..8b4aed1ba
--- /dev/null
+++ b/src/boost/tools/build/example/user-config.jam
@@ -0,0 +1,92 @@
+# Copyright 2003, 2005 Douglas Gregor
+# Copyright 2004 John Maddock
+# Copyright 2002, 2003, 2004, 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is used to configure your B2 installation. You can modify
+# this file in place, or you can place it in a permanent location so that it
+# does not get overwritten should you get a new version of B2. See:
+#
+# http://www.boost.org/boost-build2/doc/html/bbv2/overview/configuration.html
+#
+# for documentation about possible permanent locations.
+
+# This file specifies which toolsets (C++ compilers), libraries, and other
+# tools are available. Often, you should be able to just uncomment existing
+# example lines and adjust them to taste. The complete list of supported tools,
+# and configuration instructions can be found at:
+#
+# http://boost.org/boost-build2/doc/html/bbv2/reference/tools.html
+#
+
+# This file uses Jam language syntax to describe available tools. Mostly,
+# there are 'using' lines, that contain the name of the used tools, and
+# parameters to pass to those tools -- where parameters are separated by
+# semicolons. Important syntax notes:
+#
+# - Both ':' and ';' must be separated from other tokens by whitespace
+# - The '\' symbol is a quote character, so when specifying Windows paths you
+# should use '/' or '\\' instead.
+#
+# More details about the syntax can be found at:
+#
+# http://boost.org/boost-build2/doc/html/bbv2/advanced.html#bbv2.advanced.jam_language
+#
+
+# ------------------
+# GCC configuration.
+# ------------------
+
+# Configure gcc (default version).
+# using gcc ;
+
+# Configure specific gcc version, giving alternative name to use.
+# using gcc : 3.2 : g++-3.2 ;
+
+
+# -------------------
+# MSVC configuration.
+# -------------------
+
+# Configure msvc (default version, searched for in standard locations and PATH).
+# using msvc ;
+
+# Configure specific msvc version (searched for in standard locations and PATH).
+# using msvc : 8.0 ;
+
+
+# ----------------------
+# Borland configuration.
+# ----------------------
+# using borland ;
+
+
+# ----------------------
+# STLPort configuration.
+# ----------------------
+
+# Configure specifying location of STLPort headers. Libraries must be either
+# not needed or available to the compiler by default.
+# using stlport : : /usr/include/stlport ;
+
+# Configure specifying location of both headers and libraries explicitly.
+# using stlport : : /usr/include/stlport /usr/lib ;
+
+
+# -----------------
+# QT configuration.
+# -----------------
+
+# Configure assuming QTDIR gives the installation prefix.
+# using qt ;
+
+# Configure with an explicit installation prefix.
+# using qt : /usr/opt/qt ;
+
+# ---------------------
+# Python configuration.
+# ---------------------
+
+# Configure specific Python version.
+# using python : 3.1 : /usr/bin/python3 : /usr/include/python3.1 : /usr/lib ;
diff --git a/src/boost/tools/build/example/variant/a.cpp b/src/boost/tools/build/example/variant/a.cpp
new file mode 100644
index 000000000..42b69f335
--- /dev/null
+++ b/src/boost/tools/build/example/variant/a.cpp
@@ -0,0 +1,7 @@
+// Copyright Vladimir Prus 2004.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+void l();
+int main() { l(); return 0; }
diff --git a/src/boost/tools/build/example/variant/jamfile.jam b/src/boost/tools/build/example/variant/jamfile.jam
new file mode 100644
index 000000000..eb81a2fd4
--- /dev/null
+++ b/src/boost/tools/build/example/variant/jamfile.jam
@@ -0,0 +1,11 @@
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#[jamfile
+#<< By default, build the project with the two variants we have defined in jamroot.jam.
+project : default-build crazy super_release ;
+
+#<< We build an `a` exe target that links a built library. The library builds with the propagated properties of the exe.
+exe a : a.cpp libs//l ;
+#] \ No newline at end of file
diff --git a/src/boost/tools/build/example/variant/jamroot.jam b/src/boost/tools/build/example/variant/jamroot.jam
new file mode 100644
index 000000000..52d21e498
--- /dev/null
+++ b/src/boost/tools/build/example/variant/jamroot.jam
@@ -0,0 +1,12 @@
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#[jamroot
+#<< Define a build variant which is just combination of four properties.
+variant crazy : <optimization>speed <inlining>off
+ <debug-symbols>on <profiling>on ;
+
+#<< Define a build variant inherited from 'release'. It adds one new property and gets all other properties from the parent `release` variant.
+variant super_release : release : <define>USE_ASM ;
+#]
diff --git a/src/boost/tools/build/example/variant/libs/jamfile.jam b/src/boost/tools/build/example/variant/libs/jamfile.jam
new file mode 100644
index 000000000..60d8e64bb
--- /dev/null
+++ b/src/boost/tools/build/example/variant/libs/jamfile.jam
@@ -0,0 +1,8 @@
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#[libs_jamfile
+#<< The library `l` just needs the sources. By default it will be a shared library.
+lib l : l.cpp ;
+#] \ No newline at end of file
diff --git a/src/boost/tools/build/example/variant/libs/l.cpp b/src/boost/tools/build/example/variant/libs/l.cpp
new file mode 100644
index 000000000..26cb4b1e6
--- /dev/null
+++ b/src/boost/tools/build/example/variant/libs/l.cpp
@@ -0,0 +1,9 @@
+// Copyright Vladimir Prus 2002-2004.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt
+// or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void l() {}
diff --git a/src/boost/tools/build/example/variant/readme.qbk b/src/boost/tools/build/example/variant/readme.qbk
new file mode 100644
index 000000000..663219e34
--- /dev/null
+++ b/src/boost/tools/build/example/variant/readme.qbk
@@ -0,0 +1,94 @@
+[/
+Copyright 2004 Vladimir Prus
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+/]
+
+[section Build Variants]
+
+This example shows how a user can create their own build variants. Two variants
+are defined: "crazy", which is just an arbitrary combination of properties, and
+"super_release", which is inherited from "release" and differs by a single
+define.
+
+Files:
+
+* [@../../example/variant/a.cpp a.cpp]
+* [@../../example/variant/jamroot.jam jamroot.jam]
+* [@../../example/variant/jamfile.jam jamfile.jam]
+* [@../../example/variant/libs/jamfile.jam libs/jamfile.jam]
+* [@../../example/variant/libs/l.cpp libs/l.cpp]
+
+[import jamroot.jam]
+[import jamfile.jam]
+[import libs/jamfile.jam]
+
+In this project the `jamroot.jam` specifies the custom build variants and the
+targets are specified in the two `jamfile.jam` files.
+
+[jamroot]
+
+The top-level `jamfile.jam`:
+
+[jamfile]
+
+And the library `jamfile.jam` that the top-level `jamfile.jam` refers to:
+
+[libs_jamfile]
+
+Building the example yields:
+
+[teletype]
+```
+> cd /example/variant
+> b2
+...found 20 targets...
+...updating 16 targets...
+common.mkdir bin
+common.mkdir bin/clang-darwin-4.2.1
+common.mkdir bin/clang-darwin-4.2.1/crazy
+clang-darwin.compile.c++ bin/clang-darwin-4.2.1/crazy/a.o
+common.mkdir libs/bin
+common.mkdir libs/bin/clang-darwin-4.2.1
+common.mkdir libs/bin/clang-darwin-4.2.1/crazy
+clang-darwin.compile.c++ libs/bin/clang-darwin-4.2.1/crazy/l.o
+clang-darwin.link.dll libs/bin/clang-darwin-4.2.1/crazy/libl.dylib
+clang-darwin.link bin/clang-darwin-4.2.1/crazy/a
+common.mkdir bin/clang-darwin-4.2.1/super_release
+clang-darwin.compile.c++ bin/clang-darwin-4.2.1/super_release/a.o
+common.mkdir libs/bin/clang-darwin-4.2.1/super_release
+clang-darwin.compile.c++ libs/bin/clang-darwin-4.2.1/super_release/l.o
+clang-darwin.link.dll libs/bin/clang-darwin-4.2.1/super_release/libl.dylib
+clang-darwin.link bin/clang-darwin-4.2.1/super_release/a
+...updated 16 targets...
+```
+
+As specified in the top-level `jamfile.jam`, both custom variants were built
+by default. One can override that by specifying the variant to build directly
+on the command line with `variant=super_release`, or just `super_release`,
+since variants can be referred to by name alone. For example, using that
+argument yields:
+
+```
+> cd /example/variant
+> b2 super_release
+...found 14 targets...
+...updating 10 targets...
+common.mkdir bin
+common.mkdir bin/clang-darwin-4.2.1
+common.mkdir bin/clang-darwin-4.2.1/super_release
+clang-darwin.compile.c++ bin/clang-darwin-4.2.1/super_release/a.o
+common.mkdir libs/bin
+common.mkdir libs/bin/clang-darwin-4.2.1
+common.mkdir libs/bin/clang-darwin-4.2.1/super_release
+clang-darwin.compile.c++ libs/bin/clang-darwin-4.2.1/super_release/l.o
+clang-darwin.link.dll libs/bin/clang-darwin-4.2.1/super_release/libl.dylib
+clang-darwin.link bin/clang-darwin-4.2.1/super_release/a
+...updated 10 targets...
+```
+
+[note The actual paths in the `bin` sub-directory will depend on your
+toolset.]
+
+[endsect]
diff --git a/src/boost/tools/build/notes/README.txt b/src/boost/tools/build/notes/README.txt
new file mode 100644
index 000000000..96ef0c3aa
--- /dev/null
+++ b/src/boost/tools/build/notes/README.txt
@@ -0,0 +1,8 @@
+Copyright 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This directory contains various development notes. Some of them
+may eventually find their way into documentation; others are purely
+implementation comments.
diff --git a/src/boost/tools/build/notes/build_dir_option.txt b/src/boost/tools/build/notes/build_dir_option.txt
new file mode 100644
index 000000000..0ebd3bef7
--- /dev/null
+++ b/src/boost/tools/build/notes/build_dir_option.txt
@@ -0,0 +1,77 @@
+Copyright 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+Summary
+-------
+
+We need a --build-dir option that users building from read-only
+media can use to force building to some other location. Pretty much
+every project needs this functionality, so it's desirable to have it
+out of the box, without explicit setup.
+
+Design
+------
+
+We can achieve the desired effect manually by adding something like this
+to Jamroot:
+
+ project .... : build-dir [ my-rule-to-compute-build-dir ] ;
+
+Where 'my-rule-to-compute-build-dir' would look at the --build-dir option.
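+
+For illustration, such a rule could be sketched roughly as follows (this is only
+a sketch; the option parsing mirrors what build-system.jam does for other
+command-line options):
+
+    import modules ;
+
+    rule my-rule-to-compute-build-dir ( )
+    {
+        # Take the value of the last --build-dir=<path> option, if any.
+        local dir = [ MATCH ^--build-dir=(.*)$ : [ modules.peek : ARGV ] ] ;
+        return $(dir[-1]) ;
+    }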
+
+We need to automate this, but essentially, --build-dir will only affect
+the 'build-dir' attribute of Jamroots.
+
+If Jamroot contains:
+
+ project foo ;
+
+and the --build-dir option's value is /tmp/build, then we'll act as if Jamroot
+contained:
+
+ project foo : build-dir /tmp/build/foo ;
+
+If the 'project' rule has explicit 'build-dir':
+
+ project foo : build-dir bin.v2 ;
+
+then with the same value of --build-dir we'd act as if Jamroot contained:
+
+ project foo : build-dir /tmp/build/foo/bin.v2 ;
+
+We can't drop "bin.v2" because it's quite possible that the name of the build
+dir has a specific meaning. For example, it can be used to separate B2 V1
+and V2 build results.
+
+The --build-dir option has no effect if Jamroot does not define any project id.
+Doing otherwise can lead to nasty problems if we're building two distinct
+projects (that is, with two different Jamroots). They'll get the same build
+directory. Most likely, the user will see the "duplicate target" error, which is
+generally confusing.
+
+It is expected that any non-trivial project will have a top-level "project"
+invocation with a non-empty id, so the above limitation is not so drastic.
+We'll emit a warning if Jamroot does not define a project id and --build-dir
+is specified.
+
+Here's the exact behavior of the --build-dir option. If we're loading a
+Jamfile (either root or non-root) that declares some project id and some
+build-dir attribute, the following table gives the value of build-dir
+that will actually be used.
+
+-------------------------------------------------------------------------------
+Root? Id Build-dir attribute Resulting build dir
+-------------------------------------------------------------------------------
+yes none * --build-dir is ignored, with warning
+yes 'foo' none /tmp/build/foo
+yes 'foo' 'bin.v2' /tmp/build/foo/bin.v2
+yes 'foo' '/tmp/bar' Error [1]
+no * none --build-dir has no effect, inherited
+ build dir is used
+no * non-empty Error [2]
+-------------------------------------------------------------------------------
+[1] -- not clear what to do
+[2] -- can be made to work, but a non-empty build-dir
+attribute in a non-root Jamfile does not make much sense even without --build-dir
diff --git a/src/boost/tools/build/notes/changes.txt b/src/boost/tools/build/notes/changes.txt
new file mode 100644
index 000000000..bb98661f1
--- /dev/null
+++ b/src/boost/tools/build/notes/changes.txt
@@ -0,0 +1,317 @@
+Copyright 2004-2007 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+Milestone 13 (in development)
+
+Changes in this release:
+
+The following bugs were fixed:
+
+ - gcc support did not work on HP-UX systems
+
+Milestone 12 (Oct 1, 2007)
+
+Changes in this release:
+
+ - The Pathscale, PGI and mipspro compilers are now supported.
+ - Support for autoconfiguration of toolset based on command-line
+ toolset=xxxx request, and for default toolset
+ configuration as a fallback.
+ - Support for precompiled headers for gcc toolset,
+ and improvements for msvc.
+ - Mechanism for removing inherited requirements.
+ - The 'make' rule support specifying usage-requirements.
+ - New 'project.extension' rule for declaring standalone
+ projects.
+ - New 'conditional' convenience rule.
+ - New 'glob-tree' rule.
+ - The 'glob' rule accepts patterns to exclude.
+ - Inline targets are now marked explicit automatically.
+ - Toolsets can now implicitly add requirements to
+ all targets.
+ - New 'generate' rule.
+ - The executables produced by the 'run' rule are automatically
+ removed after run.
+ - The gcc toolset uses the version obtained by running
+ the compiler, if no explicit one is provided.
+ - The sun toolset now supports the 'address-model' feature,
+ and uses -KPIC for shared libraries.
+ - Free features on command line affect all targets, not
+ just 'directly requested' ones.
+
+
+Documentation changes:
+
+ - Installation instructions for Linux distributors.
+ - Configuration options for all supported C++ compilers
+ are now documented.
+
+The following bugs were fixed:
+
+ - The 'cflags' and 'linkflags' now work on Darwin.
+ - The intel toolset now works on Windows.
+ - Fix library search options for the CodeWarrior toolset.
+ - The <install-source-root> could cause duplicate
+ mkdir commands.
+ - Numerous fixes in Boost autolink support.
+ - Numerous fixes in Boost.Python support.
+ - Indirect properties not evaluated in usage requirements.
+ - A generator that returns a property set but no target is
+ considered successful.
+ - On Darwin, when several compiler versions
+ are configured, -fcoalesce-templates is applied only to
+ versions that need it.
+
+
+Milestone 11 (July 20, 2006)
+
+Changes in this release:
+
+ - New C++ compilers: IBM xlf, HP aCC, HP CXX, Intel fortran compiler.
+ - New tools: Qt4 support, MS message compiler and IDL compiler.
+ - New main targets: 'notfile' and 'cast'.
+
+ - Core changes:
+
+ - Only one file required at top level of a project, named Jamroot.
+ - Jamfiles can now contain project-specific help messages.
+ - "Indirect conditional requirements" introduced
+ (http://tinyurl.com/mn3jp)
+ - Strip suffix in main target names when computing names of generated
+ files (URL)
+ - The 'source-location' project attribute can contain
+ several directories.
+ - Usage requirements are propagated not only to direct dependents,
+ but to indirect dependents as well.
+
+ - Command line option changes (see http://tinyurl.com/zbycz)
+ - New option --build-dir
+ - The --clean option cleans only targets below the current directory,
+ not globally.
+ - New --clean-all option was added.
+ - New option --debug-building
+ - Running "bjam some_directory" works even if there's no Jamfile
+ in the current directory.
+
+ - Toolset improvements:
+ - Assembling support with gcc, borland and msvc.
+ - Support amd64/ia64 cross-compiling with msvc.
+ - Improved, registry-based autodetection for msvc.
+ - Serialize execution of gcc.link actions
+ - Precompiled headers supported on MSVC
+ (Need documentation)
+
+ - New features <warnings> and <warnings-as-errors>
+ - The 'glob' rule accepts wildcards in directory names.
+ - The 'stage' rule was renamed to 'install'
+ (the old name still available for compatibility)
+ - The <tag> feature can accept user-defined function as value
+ (URL)
+ - The 'install' rule can install a directory hierarchy preserving relative
+ paths.
+ - The 'install' rule no longer allows changing the library
+ name during install.
+ - The Jamfile referred to via 'use-project' may declare a project id different
+ from the one in 'use-project'.
+ - The 'using' rule now searches the directory of the containing Jamfile.
+
+
+The following bugs were fixed:
+
+ - The <library> feature was ignored for static linking
+ - Fix #include scanning for C files.
+ - Child projects were sometimes loaded before parent projects.
+ - Fix project references with absolute paths on Windows.
+ - The <dependency> feature was ignored for 'install' targets.
+ - A generator having the same type in sources and targets was causing a hang.
+ - Use 'icpc' command for Intel, fixing errors with 8.1 and higher.
+ - Generation of PS files with the FOP tool really produces .PS files.
+ - No dependency scanning was done for C files.
+ - The 'constant' and 'path-constant' rules did not accept multi-element
+ value.
+ - Don't pass -fcoalesce-templates to gcc on OSX 10.4
+ - Fix static lib suffix on OSX.
+ - Fix rpath setting on Intel/Linux.
+ - The 'install' rule no longer unnecessarily scans #includes in installed
+ headers.
+
+
+Developer visible changes:
+
+ - Ability to customize type's prefix depending on build properties.
+ - Generator's 'run' method can return usage-requirements.
+ - Main target rule is automatically declared for each new target type.
+ - 'Link incompatible' feature attribute was removed
+ - Generators no longer bypass unhandled sources, they just ignore them.
+ - If there are several applicable generators, immediately report ambiguity.
+ Provide a way to explicitly resolve conflicts between generators.
+ - The 'flags' rule can match absence of feature.
+ - Great improvements in response file handling.
+ - The 'toolset.flags' rules allows value-less feature to signify
+ absence of this feature (fix hack-hack).
+ - Automatically declare main target rule for each declared target type.
+ - When inheriting types, inherit generators for the base type, as opposed
+ to using various hacks to invoke base generators when needed.
+ - Improve diagnostic for "duplicate actual target" and generator ambiguity.
+
+
+Milestone 10 (October 29, 2004)
+
+Changes in this release:
+
+ Many toolsets were added: Intel, Metrowerks, Comeau, aCC, vacpp.
+ Documentation was converted to BoostBook and improved.
+ Performance was improved.
+
+ - Toolsets initialization syntax is much more uniform. Compiler and linker
+ flags can now be specified.
+ - The algorithm for computing build properties was improved. Conditional
+ requirements can be chained, and a number of bugs were fixed.
+ - Specific order of properties can be specified.
+ - The main target rules can be called from everywhere, not necessarily from
+ a Jamfile.
+ - Check for "unused sources" removed.
+ - The <library> feature affects only linking now.
+ - The <file> feature now works only for libraries.
+ - Simpler syntax for "searched" libraries was added.
+ - New <dependency> feature.
+
+
+ Unix:
+ The right order of static libraries on Unix is automatically
+ computed.
+ The <hardcode-dll-paths> feature is the default.
+ gcc:
+ The -fPIC option is passed when creating shared libraries.
+ Problems with distcc were solved.
+ Sun:
+ It's now possible to use the sun linker (as opposed to gnu), and
+ to compile C files.
+ Darwin:
+ Shared libraries are now supported.
+ MSVC: Before resource file compilation, the setup script is invoked.
+ Options deprecated in 8.0 are no longer used.
+
+
+The following bugs were fixed:
+
+ - The <unit-test> rule did not handle the <library> property (!!!!!!)
+ - Don't add "bin" to the build directory explicitly specified by the user.
+ - Allow <include-type> to select staged targets,
+ even with <traverse-dependencies>off.
+ - Includes of the form '# include <whatever>' did not work.
+ - (Qt) Add paths to all dependent libs to uic command
+ line, which helps if the UI files uses plugins.
+ - Using <toolset-msvc:version>xxx in requirements was broken.
+ - The error message printed when a target cannot be found is much clearer.
+ - Inline targets in sources of 'stage' did not work.
+ - Don't produce 'independent target' warnings on Windows
+ - (gcc) The <link-runtime>static did not work.
+ - (gcc) Suppress warnings from the 'ar' tool on some systems.
+ - (gcc) Don't try to set soname on NT.
+
+Developer visible changes:
+
+ - Generator priorities are gone, explicit overrides are used.
+ - 'Active' features were removed
+ - Support for VMS paths was added.
+
+Thanks to Christopher Currie, Pedro Ferreira, Philipp Frauenfelder,
+Andre Hentz, Jurgen Hunold, Toon Knapen, Johan Nilsson, Alexey Pakhunov,
+Brock Peabody, Michael Stevens and Zbynek Winkler who contributed
+code to this release.
+
+
+Milestone 9.1 (Nov 6, 2003)
+
+The following bugs were fixed:
+
+ - The 'unit-test' rule used to ignore <library> properties.
+ - The gcc toolset used to ignore <threading> property.
+
+Milestone 9 (Nov 6, 2003)
+
+Changes in this release
+
+ - Putting a library in the sources of another library now works even for static
+ linking, which makes expressing library->library dependencies much
+ simpler.
+ - Performance was considerably improved.
+ - Regression testing framework now works on windows.
+ - The "alias" rule can have usage requirements and passes on usage
+ requirements of sources.
+ - The "stage" rule can traverse dependencies.
+ - Support for "def files" was implemented.
+ - Target paths are now shorter.
+ - Darwin toolset was improved.
+
+The following bugs were fixed:
+
+ - It was not possible to specify an empty suffix for a target type derived
+ from another type.
+ - The stage rules used to generate an incorrect suffix in some cases.
+ - It was possible to load Jamfile twice.
+ - The 'use-project' rule was broken when referring to a child project.
+ - Use of composite properties in requirements did not work.
+
+Developer visible changes:
+
+ - New CALC builtin, which considerably improves performance.
+ - Source layout was reorganized.
+ - Handling of response files was simplified.
+
+Thanks to Pedro Ferreira, Kirill Lapshin, Andre Hentz, Paul Lin,
+Jurgen Hunold, Christopher Currie, and Brock Peabody, who contributed to
+this release.
+
+Milestone 8 (Oct 15, 2003)
+
+Changes in this release:
+
+ - A regression testing framework was implemented.
+ - New <implicit-dependency> feature was added for better handling
+ of dependencies to generated headers.
+ - The link-compatibility checks no longer cause projects to be skipped,
+ and issue a warning, not an error, for main targets.
+ - Algorithm for selecting main target alternative was improved.
+ - The <dependency> feature was renamed to <use>.
+ - Project root constants were made available in project root itself.
+
+The following bugs were fixed:
+
+ - failure to recognize shared libraries with version as such
+ - the 'path-constant' rule was mishandling absolute paths on Windows.
+
+
+Milestone 7 (Sep 11, 2003)
+
+Changes in this release:
+
+ - Performance was improved.
+ - Support for Sun and Darwin toolsets was added.
+ - The <tag> feature, which changes the name of a target depending on the build
+ variant, was implemented.
+ - Old-style targets-ids are no longer supported.
+ - The new 'glob' rule allows easily performing wildcard matching in Jamfiles.
+ - Improved bison/flex support to understand C++.
+
+The following bugs were fixed:
+
+ - bogus error on use of project default-build attribute with several
+ main target alternatives.
+ - broken toolset inheritance
+ - hard error after skipping a target due to incompatible requirements
+ - incorrect behaviour of a generator when producing several targets of
+ the same type
+ - errors on use of the 'project-root' rule in Jamfile context
+ - inability to require specific compiler version for a main target.
+ - incorrect behaviour of "bjam msvc" when msvc is configured with explicit
+ version.
+
+Thanks to Christopher Currie, Pedro Ferreira and Michael Stevens, who
+contributed to this release.
+
+
+
+
diff --git a/src/boost/tools/build/notes/relative_source_paths.txt b/src/boost/tools/build/notes/relative_source_paths.txt
new file mode 100644
index 000000000..2f0557893
--- /dev/null
+++ b/src/boost/tools/build/notes/relative_source_paths.txt
@@ -0,0 +1,76 @@
+Copyright 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+Hi,
+recently, we had a couple of problems caused by using relative file paths, and
+I'd like to discuss what to do.
+
+Let's use the case from Cédric. The simplified version is:
+
+ exe a : a.cpp dir1/qt_file.h ;
+ exe b : a.cpp dir2/qt_file.h ;
+
+Both exes have the same source cpp file but different *.h files -- which are
+processed by Qt tools. V2 currently strips directory name from all targets,
+so it tries to
+
+ - create "bin/mvsc/debug/moc_qt_file.cpp" from dir1/qt_file.h
+ - create "bin/msvc/debug/moc_qt_file.cpp" from dir2/qt_file.h
+
+There are two solutions that I see:
+
+ 1. Rewrite the code like:
+
+ lib aux : a.cpp ;
+ exe a : aux dir1/qt_file.h : <location-prefix>a ;
+ exe b : aux dir2/qt_file.h : <location-prefix>b ;
+
+ This way, two versions of moc_qt_file.cpp will be generated in different
+ places.
+
+ 2. Rewrite the code like:
+
+ obj a_moc : dir1/qt_file.h : <library>/qt//qt ;
+ exe a : a.cpp a_moc ;
+ obj b_moc : dir2/qt_file.h : <library>/qt//qt ;
+ exe b : a.cpp b_moc ;
+
+ Explicitly changing the names of the problematic files.
+
+ 3. Generally change V2 so that the directory part of the source is preserved.
+ This will generate targets:
+ "bin/msvc/debug/dir1/moc_qt_file.cpp" and
+ "bin/msvc/debug/dir2/moc_qt_file.cpp". No problems.
+
+ However, there are some additional questions:
+
+ - What if source has absolute file name?
+ - What if source is "../../include/qt_file.h"?
+
+ We can ignore directory names in those cases (i.e. use the current
+ behaviour) but that would be a bit inconsistent.
+
+Any opinions?
+
+Pedro Ferreira:
+
+I think this is a corner case and BB should not try to solve everything
+automatically - otherwise it will become really complex.
+I don't see a problem in requiring the user to help the build system by
+using solutions 1 or 2.
+Of course, the better the error reporting, the easier it will be to
+find the cause and the cure of the problem.
+
+TEMPLIE Cedric:
+
+I agree with Pedro. Solution 1 or 2 is the best way to deal with this
+problem. Of course I have a preference for solution 1, but
+solution 2 has the advantage of working without any modification...
+
+Toon Knapen:
+
+I agree.
+
+
diff --git a/src/boost/tools/build/notes/release_procedure.txt b/src/boost/tools/build/notes/release_procedure.txt
new file mode 100644
index 000000000..9ed95f1e6
--- /dev/null
+++ b/src/boost/tools/build/notes/release_procedure.txt
@@ -0,0 +1,83 @@
+Copyright 2003, 2005, 2006 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+ B2 V2 release procedure.
+
+[ Must be done from a Unix shell ]
+
+0. Look through all issues for the current milestone in the tracker. Close the
+ fixed ones, if not already closed. Move the unfixed ones to a later
+ milestone, or fix them.
+
+ Make sure that the "bjam --version" output is correct. Update the version
+ string if needed. Update the bjam version and the version check if necessary.
+ Check the download locations in "index.html". Check that the "last modified"
+ string in index.html is correct.
+
+1. Make sure you don't have any local modifications, and create the SVN directory
+
+ https://svn.boost.org/svn/boost/branches/build/Milestone_X
+
+ Then, copy:
+
+ https://svn.boost.org/svn/boost/trunk/tools/build
+ https://svn.boost.org/svn/boost/trunk/tools/jam
+
+ to that directory.
+
+2. Run
+
+ svn co https://svn.boost.org/svn/boost/branches/build/Milestone_X boost-build
+
+3. Go to "boost-build/build/v2" directory.
+
+4. Run "./roll.sh". This will create "boost-build.zip" and
+ "boost-build.tar.bz2" in parent directory, and also upload
+ new docs to sourceforge.
+
+5. Unpack "boost-build.tar.bz2", and build jam.
+
+6. Go to "test" and copy "test-config-example.jam" to "test-config.jam".
+ If you're not ghost, edit test-config.jam to specify all the right paths.
+ Run gcc tests:
+
+ python test_all.py gcc --extras
+
+7. Build all projects in examples-v2, using the bjam binary created at step 4.
+ Note: "threading=multi" might be needed to build QT examples.
+
+8. Make SF release:
+
+ - Go to
+ https://sourceforge.net/project/admin/editpackages.php?group_id=7586
+
+ - Create new B2 release. Name it 2.0-mXX
+
+ - Upload the changelog. Be sure to turn on the "Preserve my pre-formatted
+ text" checkbox.
+
+ - Rename previously built packages to boost-build-2.0-mXX.tar.bz2
+ and boost-build-2.0-mXX.zip. Upload them to the
+ /incoming directory on ftp://upload.sourceforge.net
+
+ - Add those files to the release, and edit the file properties.
+
+ - In a separate browser, verify the changelog is not damaged.
+
+ - In a separate browser, download the files and verify the checksums.
+
+ - In SF file release interface, send email notice.
+
+9. Announce the release, etc.
+
+10. Login to SF and update the current-release redirects in
+ /home/groups/b/bo/boost/htdocs/boost-build2/.htaccess.
+
+11. If any issues were found during the release in this document or in
+ test-config-example.jam, commit the fixes. The release need
+ not be redone, but the changes must be committed.
+
+12. Set release date in changes.txt and commit.
+
diff --git a/src/boost/tools/build/src/__init__.py b/src/boost/tools/build/src/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/src/__init__.py
diff --git a/src/boost/tools/build/src/bootstrap.jam b/src/boost/tools/build/src/bootstrap.jam
new file mode 100644
index 000000000..04cc023e4
--- /dev/null
+++ b/src/boost/tools/build/src/bootstrap.jam
@@ -0,0 +1,18 @@
+# Copyright (c) 2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file handles the initial phase of B2 loading.
+# Boost.Jam has already figured out where B2 is
+# and loads this file, which is responsible for initialization
+# of basic facilities such as the module system and loading the
+# main B2 module, build-system.jam.
+#
+# The exact operation of this module is not interesting; it makes
+# sense to look at build-system.jam right away.
+
+# Load the kernel/bootstrap.jam, which does all the work.
+.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ;
+include $(.bootstrap-file) ; \ No newline at end of file
diff --git a/src/boost/tools/build/src/build-system.jam b/src/boost/tools/build/src/build-system.jam
new file mode 100644
index 000000000..60425c542
--- /dev/null
+++ b/src/boost/tools/build/src/build-system.jam
@@ -0,0 +1,1079 @@
+# Copyright 2003, 2005, 2007 Dave Abrahams
+# Copyright 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is part of Boost Build version 2. You can think of it as forming the
+# main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
+
+import build-request ;
+import builtin ;
+import "class" : new ;
+import configure ;
+import config-cache ;
+import feature ;
+import generators ;
+import indirect ;
+import make ;
+import modules ;
+import os ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import utility ;
+import version ;
+import virtual-target ;
+
+
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Shortcut used in this module for accessing used command-line parameters.
+.argv = [ modules.peek : ARGV ] ;
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
+
+# Virtual targets obtained when building main targets referenced on the command
+# line. When running 'bjam --clean main_target' we want to clean only files
+# belonging to that main target so we need to record which targets are produced
+# for it.
+.results-of-main-targets = ;
+
+# Was an XML dump requested?
+.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+.default-toolset = ;
+.default-toolset-version = ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+rule command-line-free-features ( )
+{
+ return $(.command-line-free-features) ;
+}
+
+
+# Returns the location of the build system. The primary use case is building
+# Boost where it is sometimes needed to get the location of other components
+# (e.g. BoostBook files) and it is convenient to use locations relative to the
+# Boost Build path.
+#
+rule location ( )
+{
+ local r = [ modules.binding build-system ] ;
+ return $(r:P) ;
+}
+
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+rule set-default-toolset ( toolset : version ? )
+{
+ .default-toolset = $(toolset) ;
+ .default-toolset-version = $(version) ;
+}
+
+rule add-pre-build-hook ( function )
+{
+ .pre-build-hook += [ indirect.make $(function) : [ CALLER_MODULE ] ] ;
+}
+
+rule add-post-build-hook ( function )
+{
+ .post-build-hook += [ indirect.make $(function) : [ CALLER_MODULE ] ] ;
+}
+
+# Old names for backwards compatibility
+IMPORT build-system : add-pre-build-hook : build-system : set-pre-build-hook ;
+IMPORT build-system : add-post-build-hook : build-system : set-post-build-hook ;
+EXPORT build-system : set-pre-build-hook set-post-build-hook ;
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+local rule actual-clean-targets ( )
+{
+    # The cleaning is tricky. Say, if the user says 'bjam --clean foo' where
+    # 'foo' is a directory, then we want to clean targets which are in 'foo' as
+    # well as those in any child Jamfiles under foo but not in any unrelated
+ # Jamfiles. To achieve this we first mark all projects explicitly detected
+ # as targets for this build system run as needing to be cleaned.
+ for local t in $(targets)
+ {
+ if [ class.is-a $(t) : project-target ]
+ {
+ local project = [ $(t).project-module ] ;
+ .should-clean-project.$(project) = true ;
+ }
+ }
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ local targets-to-clean ;
+ for local t in $(.results-of-main-targets)
+ {
+ # Do not include roots or sources.
+ targets-to-clean += [ virtual-target.traverse $(t) ] ;
+ }
+ targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
+
+ local to-clean ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ # Remove only derived targets and only those asked to be cleaned,
+ # whether directly or by belonging to one of the removed projects.
+ local p = [ $(t).project ] ;
+ if [ $(t).action ] && ( $(t) in $(targets-to-clean) ||
+ [ should-clean-project [ $(p).project-module ] ] )
+ {
+ to-clean += $(t) ;
+ }
+ }
+
+ local to-clean-actual ;
+ for local t in $(to-clean)
+ {
+ to-clean-actual += [ $(t).actualize ] ;
+ }
+ return $(to-clean-actual) ;
+}
+
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can not reuse that code without a
+# project-targets instance.
+#
+local rule find-target ( target-id )
+{
+ local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
+
+ local pm ;
+ if $(split)
+ {
+ pm = [ project.find $(split[1]) : "." ] ;
+ }
+ else
+ {
+ pm = [ project.find $(target-id) : "." ] ;
+ }
+
+ local result ;
+ if $(pm)
+ {
+ result = [ project.target $(pm) ] ;
+ }
+
+ if $(split)
+ {
+ result = [ $(result).find $(split[2]) ] ;
+ }
+
+ return $(result) ;
+}
+
+
+# Initializes a new configuration module.
+#
+local rule initialize-config-module ( module-name : location ? )
+{
+ project.initialize $(module-name) : $(location) ;
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+local rule load-config ( module-name : filename : path + : must-find ? )
+{
+ if $(.debug-config)
+ {
+ local path-string = $(path) ;
+ if $(path-string) = "" { path-string = . ; }
+ ECHO "notice:" Searching '$(path-string)' for $(module-name)
+ configuration file '$(filename)'. ;
+ }
+ local where = [ GLOB $(path) : $(filename) ] ;
+ if $(where)
+ {
+ where = [ NORMALIZE_PATH $(where[1]) ] ;
+ if $(.debug-config)
+ {
+ local where-string = $(where:D) ;
+ if $(where-string) = "" { where-string = . ; }
+ where-string = '$(where-string)' ;
+ ECHO "notice:" Loading $(module-name) configuration file '$(filename)'
+ from $(where-string:J=" "). ;
+ }
+
+ # Set source location so that path-constant in config files with
+ # relative paths work. This is of most importance for
+ # project-config.jam, but may be used in other config files as well.
+ local attributes = [ project.attributes $(module-name) ] ;
+ $(attributes).set source-location : $(where:D) : exact ;
+ modules.load $(module-name) : $(filename) : $(path) ;
+ project.load-used-projects $(module-name) ;
+ }
+ else if $(must-find) || $(.debug-config)
+ {
+ local path-string = $(path) ;
+ if $(path-string) = "" { path-string = . ; }
+ path-string = '$(path-string)' ;
+ path-string = $(path-string:J=" ") ;
+ if $(must-find)
+ {
+ import errors ;
+ errors.user-error Configuration file '$(filename)' not found "in"
+ $(path-string). ;
+ }
+ ECHO "notice:" Configuration file '$(filename)' not found "in"
+ $(path-string). ;
+ }
+ return $(where) ;
+}
+
+# Parses options of the form --xxx-config=path/to/config.jam
+# and environmental variables of the form BOOST_BUILD_XXX_CONFIG.
+# If not found, returns an empty list. The option may be
+# explicitly set to the empty string, in which case, handle-config-option
+# will return "".
+#
+local rule handle-config-option ( name : env ? )
+{
+ local result = [ MATCH ^--$(name)=(.*)$ : $(.argv) ] ;
+ if ! $(result)-is-defined && $(env)
+ {
+ result = [ os.environ $(env) ] ;
+ }
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ result = [ utility.unquote $(result[-1]) ] ;
+ if ! $(result)
+ {
+ return $(result) ;
+ }
+ # Treat explicitly entered user paths as native OS path
+ # references and, if non-absolute, root them at the current
+ # working directory.
+ result = [ path.make $(result) ] ;
+ result = [ path.root $(result) [ path.pwd ] ] ;
+ result = [ path.native $(result) ] ;
+ return $(result) ;
+}
+
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if specified.
+# If this configuration file is loaded, regular site and user configuration
+# files will not be. If a relative path is specified, file is searched for in
+# the current folder.
+#
+# -- all-config --
+# Loaded only if specified on the command-line using the --config command
+# line option. If a file name is specified, it must exist and replaces all
+# other configuration files. If an empty file name is passed, no configuration
+# files will be loaded.
+#
+# -- site-config --
+# Named site-config.jam by default or may be named explicitly using the
+# --site-config command-line option. If named explicitly, the file is found
+# relative to the current working directory and must exist. If the default one
+# is used then it is searched for in the system root path (Windows),
+# /etc (non-Windows), user's home folder or the Boost Build path, in that
+# order. Not loaded in case the test-config configuration file is loaded,
+# the file is explicitly set to the empty string or the --ignore-site-config
+# command-line option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded or an empty file name
+# is explicitly specified. If the file name has been given explicitly then the
+# file must exist.
+#
+# -- project-config --
+# Named project-config.jam. Looked up in the current working folder and
+# then upwards through its parents up to the root folder. It may also be
+# named explicitly using the --project-config command-line option. If a file
+# is specified explicitly, it is found relative to the current working
+# directory and must exist. If an empty file name is passed, project-config
+# will not be loaded.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
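+#
+# Illustrative command lines (the paths are hypothetical):
+#
+#   b2 --user-config=/path/to/my-config.jam   # load an explicit user config
+#   b2 --site-config= --user-config=          # skip site and user configuration
+#   b2 --config=                              # load no configuration files at all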
+#
+local rule load-configuration-files
+{
+ # Flag indicating that site configuration should not be loaded.
+ local ignore-site-config =
+ [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
+ local ignore-user-config ;
+ local ignore-project-config ;
+
+ initialize-config-module test-config ;
+ local test-config = [ handle-config-option test-config ] ;
+ if $(test-config)
+ {
+ local where = [ load-config test-config : $(test-config:BS) :
+ $(test-config:D) ] ;
+ if $(where)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice: Regular site and user configuration files will" ;
+ ECHO "notice: be ignored due to the test configuration being"
+ "loaded." ;
+ }
+ ignore-site-config = true ;
+ ignore-user-config = true ;
+ }
+ }
+
+ initialize-config-module all-config ;
+ local all-config = [ handle-config-option config ] ;
+ if $(all-config)
+ {
+ load-config all-config : $(all-config:D=) : $(all-config:D) : required ;
+ if $(.debug-config)
+ {
+ ECHO "notice: Regular configuration files will be ignored due" ;
+ ECHO "notice: to the global configuration being loaded." ;
+ }
+ }
+ if $(all-config)-is-defined
+ {
+ if $(.debug-config) && ! $(all-config)
+ {
+ ECHO "notice: Configuration file loading explicitly disabled." ;
+ }
+ ignore-site-config = true ;
+ ignore-user-config = true ;
+ ignore-project-config = true ;
+ }
+
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local site-path = /etc $(user-path) ;
+ if [ os.name ] in NT CYGWIN
+ {
+ site-path = [ modules.peek : SystemRoot ] $(user-path) ;
+ }
+
+ if $(.debug-config) && $(ignore-site-config) = --ignore-site-config
+ {
+ ECHO "notice: Site configuration files will be ignored due to the" ;
+ ECHO "notice: --ignore-site-config command-line option." ;
+ }
+
+ initialize-config-module site-config ;
+ if ! $(ignore-site-config)
+ {
+ local site-config = [ handle-config-option site-config ] ;
+ if $(site-config)
+ {
+ load-config site-config : $(site-config:D=) : $(site-config:D)
+ : must-exist ;
+ }
+ else if ! $(site-config)-is-defined
+ {
+ load-config site-config : site-config.jam : $(site-path) ;
+ }
+ else if $(.debug-config)
+ {
+ ECHO "notice:" Site configuration file loading explicitly disabled. ;
+ }
+ }
+
+ initialize-config-module user-config ;
+ if ! $(ignore-user-config)
+ {
+ local user-config =
+ [ handle-config-option user-config : BOOST_BUILD_USER_CONFIG ] ;
+
+ if $(user-config)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice:" Loading explicitly specified user configuration
+ "file:" ;
+ ECHO " $(user-config)" ;
+ }
+
+ load-config user-config : $(user-config:D=) : $(user-config:D)
+ : must-exist ;
+ }
+ else if ! $(user-config)-is-defined
+ {
+ load-config user-config : user-config.jam : $(user-path) ;
+ }
+ else if $(.debug-config)
+ {
+ ECHO "notice:" User configuration file loading explicitly disabled. ;
+ }
+ }
+
+    # We look for project-config.jam from "." upward. I am not sure this is
+    # 100% the right decision; we might as well check for it only alongside the
+    # Jamroot file. However:
+    # - We need to load project-config.jam before Jamroot.
+    # - We probably need to load project-config.jam even if there is no
+    #   Jamroot, e.g. to implement automake-style out-of-tree builds.
+ if ! $(ignore-project-config)
+ {
+ local project-config = [ handle-config-option project-config ] ;
+ if $(project-config)
+ {
+ initialize-config-module project-config : $(project-config:D=) ;
+ load-config project-config : $(project-config:D=)
+ : $(project-config:D) : must-exist ;
+ }
+ else if ! $(project-config)-is-defined
+ {
+ local file = [ path.glob "." : project-config.jam ] ;
+ if ! $(file)
+ {
+ file = [ path.glob-in-parents "." : project-config.jam ] ;
+ }
+ if $(file)
+ {
+ initialize-config-module project-config : $(file:D) ;
+ load-config project-config : project-config.jam : $(file:D) ;
+ }
+ }
+ else if $(.debug-config)
+ {
+ ECHO "notice:" Project configuration file loading explicitly
+ disabled. ;
+ }
+ }
+
+ project.end-load ;
+}
+
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
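+# For illustration (hypothetical command lines):
+#
+#   b2 toolset=gcc-9,clang-10 release
+#   b2 --toolset=gcc-9 release
+#
+# Both forms cause the named toolsets to be auto-configured below if they have
+# not been configured already; for the "--toolset=..." form a corresponding
+# toolset=... property is additionally returned so it ends up in the build
+# request.
+#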
+local rule process-explicit-toolset-requests
+{
+ local extra-properties ;
+
+ local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
+ local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
+
+ for local t in $(option-toolsets) $(feature-toolsets)
+ {
+ # Parse toolset-version/properties.
+ local toolset = [ MATCH "([^/]+)/?.*" : $(t) ] ;
+ local properties = [ feature.expand-subfeatures <toolset>$(toolset) : true ] ;
+ local toolset-property = [ property.select <toolset> : $(properties) ] ;
+ local known ;
+ if $(toolset-property:G=) in [ feature.values <toolset> ]
+ {
+ known = true ;
+ }
+
+ # If the toolset is not known, configure it now.
+
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if ! $(known)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice: [cmdline-cfg] toolset $(toolset) not"
+ "previously configured; attempting to auto-configure now" ;
+ }
+ local t,v = [ MATCH "([^-]+)-?(.+)?" : $(toolset) ] ;
+ project.push-current ;
+ toolset.using $(t,v[1]) : $(t,v[2]) ;
+ project.pop-current ;
+ }
+
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if ! $(t) in $(.argv) $(feature-toolsets)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice:" "[cmdline-cfg]" adding toolset=$(t) to the build
+ request. ;
+ }
+ extra-properties += toolset=$(t) ;
+ }
+ }
+
+ return $(extra-properties) ;
+}
+
+
+# Returns whether the given project (identified by its project module) should
+# be cleaned because it or any of its parent projects have already been marked
+# as needing to be cleaned in this build. As an optimization, this will
+# explicitly mark all encountered projects as needing to be cleaned in case
+# they have not already been marked so.
+#
+local rule should-clean-project ( project )
+{
+ if ! $(.should-clean-project.$(project))-is-defined
+ {
+ local r = "" ;
+ if ! [ project.is-jamroot-module $(project) ]
+ {
+ local parent = [ project.attribute $(project) parent-module ] ;
+ if $(parent)
+ {
+ r = [ should-clean-project $(parent) ] ;
+ }
+ }
+ .should-clean-project.$(project) = $(r) ;
+ }
+
+ return $(.should-clean-project.$(project)) ;
+}
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+{
+ if --version in $(.argv)
+ {
+ version.print ;
+ EXIT ;
+ }
+
+ version.verify-engine-version ;
+
+ load-configuration-files ;
+
+ # Load explicitly specified toolset modules.
+ local extra-properties = [ process-explicit-toolset-requests ] ;
+
+    # Load the actual project build script modules. We always load the project
+    # in the current folder so 'use-project' directives have a chance of being
+    # seen. Otherwise, we would not be able to refer to subprojects using target
+    # ids.
+ local current-project ;
+ {
+ local current-module = [ project.find "." : "." ] ;
+ if $(current-module)
+ {
+ current-project = [ project.target $(current-module) ] ;
+ }
+ }
+
+ # Load the default toolset module if no other has already been specified.
+ if ! [ feature.values <toolset> ]
+ {
+ local default-toolset = $(.default-toolset) ;
+ local default-toolset-version = ;
+ if $(default-toolset)
+ {
+ default-toolset-version = $(.default-toolset-version) ;
+ }
+ else
+ {
+ default-toolset = gcc ;
+ if [ os.name ] = NT
+ {
+ default-toolset = msvc ;
+ }
+ else if [ os.name ] = VMS
+ {
+ default-toolset = vmsdecc ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ default-toolset = darwin ;
+ }
+ }
+
+ ECHO "warning: No toolsets are configured." ;
+ ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
+ ECHO "warning: If the default is wrong, your build may not work correctly." ;
+ ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ;
+ ECHO "warning: For more configuration options, please consult" ;
+ ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+
+ toolset.using $(default-toolset) : $(default-toolset-version) ;
+ }
+
+
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ # FIXME: This is not entirely true. Additional project files may be loaded
+ # only later via the project.find() rule when dereferencing encountered
+ # target ids containing explicit project references. See what to do about
+ # those as such 'lazy loading' may cause problems that are then extremely
+ # difficult to debug.
+ local build-request = [ build-request.from-command-line $(.argv)
+ $(extra-properties) ] ;
+ local target-ids = [ $(build-request).get-at 1 ] ;
+ local properties = [ $(build-request).get-at 2 ] ;
+
+
+ # Check that we actually found something to build.
+ if ! $(current-project) && ! $(target-ids)
+ {
+ import errors ;
+ errors.user-error no Jamfile "in" current directory found, and no target
+ references specified. ;
+ }
+
+
+    # Flags indicating that this build system run has been started in order to
+    # clean existing targets instead of creating new ones. Note that these are
+    # not the final flag values as they may get changed later on due to some
+    # special targets being specified on the command line.
+ local clean ; if "--clean" in $(.argv) { clean = true ; }
+ local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
+
+
+    # List of explicitly requested files to build. Any target reference read
+    # from the command line that is not recognized as one of the targets
+    # defined in the loaded Jamfiles will be interpreted as an explicitly
+    # requested file to build. If any such files are explicitly requested then
+    # only those files and the targets they depend on will be built, and they
+    # will be searched for among targets that would have been built had there
+    # been no explicitly requested files.
+    local explicitly-requested-files ;
+
+
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ local targets ;
+ local virtual-targets ;
+ local actual-targets ;
+
+
+ # Process each target specified on the command-line and convert it into
+ # internal Boost Build target objects. Detect special clean target. If no
+ # main Boost Build targets were explicitly requested use the current project
+ # as the target.
+ for local id in $(target-ids)
+ {
+ if $(id) = clean
+ {
+ clean = true ;
+ }
+ else
+ {
+ local t ;
+ if $(current-project)
+ {
+ t = [ $(current-project).find $(id) : no-error ] ;
+ }
+ else
+ {
+ t = [ find-target $(id) ] ;
+ }
+
+ if ! $(t)
+ {
+ ECHO "notice: could not find main target" $(id) ;
+ ECHO "notice: assuming it is a name of file to create." ;
+ explicitly-requested-files += $(id) ;
+ }
+ else
+ {
+ targets += $(t) ;
+ }
+ }
+ }
+ if ! $(targets)
+ {
+ targets += [ project.target [ project.module-name "." ] ] ;
+ }
+
+ if [ option.get dump-generators : : true ]
+ {
+ generators.dump ;
+ }
+
+ # We wish to put config.log in the build directory corresponding to Jamroot,
+ # so that the location does not differ depending on the directory we run the
+ # build from. The amount of indirection necessary here is scary.
+ local first-project = [ $(targets[0]).project ] ;
+ local first-project-root-location = [ $(first-project).get project-root ] ;
+ local first-project-root-module = [ project.load
+ $(first-project-root-location) ] ;
+ local first-project-root = [ project.target $(first-project-root-module) ] ;
+ local first-build-build-dir = [ $(first-project-root).build-dir ] ;
+ configure.set-log-file $(first-build-build-dir)/config.log ;
+ config-cache.load $(first-build-build-dir)/project-cache.jam ;
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ # The expansion is being performed as late as possible so that the feature
+ # validation is performed after all necessary modules (including project targets
+ # on the command line) have been loaded.
+    local expanded ;
+    if $(properties)
+ {
+ expanded += [ build-request.convert-command-line-elements $(properties) ] ;
+ expanded = [ build-request.expand-no-defaults $(expanded) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
+ }
+ else
+ {
+ expanded = [ property-set.empty ] ;
+ }
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
+ for local p in $(expanded)
+ {
+ .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
+ for local t in $(targets)
+ {
+ local g = [ $(t).generate $(p) ] ;
+ if ! [ class.is-a $(t) : project-target ]
+ {
+ .results-of-main-targets += $(g[2-]) ;
+ }
+ virtual-targets += $(g[2-]) ;
+ }
+ }
+
+
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in $(virtual-targets)
+ {
+ actual-targets += [ $(t).actualize ] ;
+ }
+
+ config-cache.save ;
+
+
+    # If XML data output has been requested, prepare additional rules and
+    # targets so we can hook into Jam to collect build data while it is
+    # building and have it trigger the final XML report generation after all
+    # the planned targets have been built.
+ if $(.out-xml)
+ {
+ # Get a qualified virtual target name.
+ rule full-target-name ( target )
+ {
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+ local project-path = [ $(project).get location ] ;
+ return $(project-path)//$(name) ;
+ }
+
+ # Generate an XML file containing build statistics for each constituent.
+ #
+ rule out-xml ( xml-file : constituents * )
+ {
+ # Prepare valid XML header and footer with some basic info.
+ local nl = "
+" ;
+ local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+ local timestamp = [ modules.peek : JAMDATE ] ;
+ local cwd = [ PWD ] ;
+ local command = $(.argv) ;
+ local bb-version = [ version.boost-build ] ;
+ .header on $(xml-file) =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+ "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+ "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+ "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+ "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+ "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+ ;
+ .footer on $(xml-file) =
+ "$(nl)</build>" ;
+
+ # Generate the target dependency graph.
+ .contents on $(xml-file) +=
+ "$(nl) <targets>" ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local action = [ $(t).action ] ;
+ if $(action)
+ # If a target has no action, it has no dependencies.
+ {
+ local name = [ full-target-name $(t) ] ;
+ local sources = [ $(action).sources ] ;
+ local dependencies ;
+ for local s in $(sources)
+ {
+ dependencies += [ full-target-name $(s) ] ;
+ }
+
+ local path = [ $(t).path ] ;
+ local jam-target = [ $(t).actual-name ] ;
+
+ .contents on $(xml-file) +=
+ "$(nl) <target>"
+ "$(nl) <name><![CDATA[$(name)]]></name>"
+ "$(nl) <dependencies>"
+ "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+ "$(nl) </dependencies>"
+ "$(nl) <path><![CDATA[$(path)]]></path>"
+ "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+ "$(nl) </target>"
+ ;
+ }
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </targets>" ;
+
+ # Build $(xml-file) after $(constituents). Do so even if a
+ # constituent action fails and regenerate the xml on every bjam run.
+ INCLUDES $(xml-file) : $(constituents) ;
+ ALWAYS $(xml-file) ;
+ __ACTION_RULE__ on $(xml-file) =
+ build-system.out-xml.generate-action ;
+ out-xml.generate $(xml-file) ;
+ }
+
+ # The actual build actions are here; if we did this work in the actions
+ # clause we would have to form a valid command line containing the
+ # result of @(...) below (the name of the XML file).
+ #
+ rule out-xml.generate-action ( args * : xml-file
+ : command status start end user system : output ? )
+ {
+ local contents =
+ [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+ local f = @($(xml-file):E=$(contents)) ;
+ }
+
+ # Nothing to do here; the *real* actions happen in
+ # out-xml.generate-action.
+ actions quietly out-xml.generate { }
+
+ # Define the out-xml file target, which depends on all the targets so
+ # that it runs the collection after the targets have run.
+ out-xml $(.out-xml) : $(actual-targets) ;
+
+ # Set up a global __ACTION_RULE__ that records all the available
+ # statistics about each actual target in a variable "on" the --out-xml
+ # target.
+ #
+ rule out-xml.collect ( xml-file : target : command status start end user
+ system : output ? )
+ {
+ local nl = "
+" ;
+ # Open the action with some basic info.
+ .contents on $(xml-file) +=
+ "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+ # If we have an action object we can print out more detailed info.
+ local action = [ on $(target) return $(.action) ] ;
+ if $(action)
+ {
+ local action-name = [ $(action).action-name ] ;
+ local action-sources = [ $(action).sources ] ;
+ local action-props = [ $(action).properties ] ;
+
+            # The qualified name of the action with which we created the target.
+ .contents on $(xml-file) +=
+ "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+ # The sources that made up the target.
+ .contents on $(xml-file) +=
+ "$(nl) <sources>" ;
+ for local source in $(action-sources)
+ {
+ local source-actual = [ $(source).actual-name ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </sources>" ;
+
+ # The properties that define the conditions under which the
+ # target was built.
+ .contents on $(xml-file) +=
+ "$(nl) <properties>" ;
+ for local prop in [ $(action-props).raw ]
+ {
+ local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </properties>" ;
+ }
+
+ local locate = [ on $(target) return $(LOCATE) ] ;
+ locate ?= "" ;
+ .contents on $(xml-file) +=
+ "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+ "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+ "$(nl) <command><![CDATA[$(command)]]></command>"
+ "$(nl) <output><![CDATA[$(output)]]></output>" ;
+ .contents on $(xml-file) +=
+ "$(nl) </action>" ;
+ }
+
+ # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+ # the global module.
+ module
+ {
+ __ACTION_RULE__ = build-system.out-xml.collect
+ [ modules.peek build-system : .out-xml ] ;
+ }
+
+ IMPORT
+ build-system :
+ out-xml.collect
+ out-xml.generate-action
+ : :
+ build-system.out-xml.collect
+ build-system.out-xml.generate-action
+ ;
+ }
+
+ local j = [ option.get jobs ] ;
+ if $(j)
+ {
+ modules.poke : PARALLELISM : $(j) ;
+ }
+
+ local k = [ option.get keep-going : true : true ] ;
+ if $(k) in "on" "yes" "true"
+ {
+ modules.poke : KEEP_GOING : 1 ;
+ }
+ else if $(k) in "off" "no" "false"
+ {
+ modules.poke : KEEP_GOING : 0 ;
+ }
+ else
+ {
+ EXIT "error: Invalid value for the --keep-going option" ;
+ }
+
+    # The 'all' pseudo target is not strictly needed except in the case when we
+    # use it below, but people often assume this target is always available and
+    # do not declare it themselves before use, which may cause build failures
+    # with an error message about not being able to build the 'all' target.
+ NOTFILE all ;
+
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared all that is left is to tell Jam to update
+ # those targets.
+ if $(explicitly-requested-files)
+ {
+        # Note that this case cannot be joined with the regular one, when only
+        # exact Boost Build targets are requested, as here we do not build those
+        # requested targets but only use them to construct the dependency tree
+        # needed to build the explicitly requested files.
+ UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ;
+ }
+ else if $(cleanall)
+ {
+ UPDATE clean-all ;
+ }
+ else if $(clean)
+ {
+ common.Clean clean : [ actual-clean-targets ] ;
+ UPDATE clean ;
+ }
+ else
+ {
+ configure.print-configure-checks-summary ;
+
+ for local function in $(.pre-build-hook)
+ {
+ indirect.call $(function) ;
+ }
+
+ DEPENDS all : $(actual-targets) ;
+ if UPDATE_NOW in [ RULENAMES ]
+ {
+ local ok = [ UPDATE_NOW all ] ;
+            # Force sequential updating of the regular targets first and the
+            # xml log output target second, to ensure the output records
+            # everything that was built; otherwise it could execute
+            # out-of-sequence when doing parallel builds.
+ if $(.out-xml)
+ {
+ UPDATE_NOW $(.out-xml) : : ignore-minus-n ;
+ }
+ for local function in $(.post-build-hook)
+ {
+ indirect.call $(function) $(ok) ;
+ }
+ # Prevent automatic update of the 'all' target, now that we have
+ # explicitly updated what we wanted.
+ UPDATE ;
+ }
+ else
+ {
+ UPDATE all $(.out-xml) ;
+ }
+ }
+}
diff --git a/src/boost/tools/build/src/build/__init__.py b/src/boost/tools/build/src/build/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/src/build/__init__.py
diff --git a/src/boost/tools/build/src/build/ac.jam b/src/boost/tools/build/src/build/ac.jam
new file mode 100644
index 000000000..09eb26ea9
--- /dev/null
+++ b/src/boost/tools/build/src/build/ac.jam
@@ -0,0 +1,324 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property-set ;
+import path ;
+import modules ;
+import "class" ;
+import errors ;
+import configure ;
+import feature ;
+import project ;
+import virtual-target ;
+import generators ;
+import property ;
+import print ;
+import regex ;
+
+project.initialize $(__name__) ;
+.project = [ project.current ] ;
+project ac ;
+
+feature.feature ac.print-text : : free ;
+
+rule generate-include ( target : sources * : properties * )
+{
+ print.output $(target) ;
+ local text = [ property.select <ac.print-text> : $(properties) ] ;
+ if $(text)
+ {
+ print.text $(text:G=) : true ;
+ }
+ else
+ {
+ local header = [ property.select <include> : $(properties) ] ;
+ print.text "#include <$(header:G=)>\n" : true ;
+ }
+}
+
+rule generate-main ( target : sources * : properties * )
+{
+ print.output $(target) ;
+ print.text "int main() {}" : true ;
+}
+
+rule find-include-path ( properties : header : provided-path ? : test-source ? )
+{
+ if $(provided-path) && [ path.exists [ path.root $(header) $(provided-path) ] ]
+ {
+ return $(provided-path) ;
+ }
+ else
+ {
+ local a = [ class.new action : ac.generate-include : [ property-set.create <include>$(header) <ac.print-text>$(test-source) ] ] ;
+ # Create a new CPP target named after the header.
+ # Replace dots (".") in target basename for portability.
+ local basename = [ regex.replace $(header:D=) "[.]" "_" ] ;
+ local header-target = $(header:S=:B=$(basename)) ;
+ local cpp = [ class.new file-target $(header-target:S=.cpp) exact : CPP : $(.project) : $(a) ] ;
+ cpp = [ virtual-target.register $(cpp) ] ;
+ $(cpp).root true ;
+ local result = [ generators.construct $(.project) $(header-target) : OBJ : $(properties) : $(cpp) : true ] ;
+ configure.maybe-force-rebuild $(result[2-]) ;
+ local jam-targets ;
+ for local t in $(result[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+ if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
+ : ignore-minus-n ]
+ {
+ return %default ;
+ }
+ }
+}
+
+rule construct-library ( name : property-set : provided-path ? )
+{
+ local lib-props = [ $(property-set).add-raw <name>$(name) <search>$(provided-path) ] ;
+ return [ generators.construct $(.project) lib-$(name)
+ : SEARCHED_LIB : $(lib-props) : : true ] ;
+}
+
+
+rule find-library ( properties : names + : provided-path ? )
+{
+ local result ;
+ if [ $(properties).get <link> ] = shared
+ {
+ link-opts = <link>shared <link>static ;
+ }
+ else
+ {
+ link-opts = <link>static <link>shared ;
+ }
+ while $(link-opts)
+ {
+ local names-iter = $(names) ;
+ properties = [ $(properties).refine [ property-set.create $(link-opts[1]) ] ] ;
+ while $(names-iter)
+ {
+ local name = $(names-iter[1]) ;
+ local lib = [ construct-library $(name) : $(properties) : $(provided-path) ] ;
+ local a = [ class.new action : ac.generate-main :
+ [ property-set.empty ] ] ;
+ local main.cpp = [ virtual-target.register
+ [ class.new file-target main-$(name).cpp exact : CPP : $(.project) : $(a) ] ] ;
+ $(main.cpp).root true ;
+ local test = [ generators.construct $(.project) $(name) : EXE
+ : [ $(properties).add $(lib[1]) ] : $(main.cpp) $(lib[2-])
+ : true ] ;
+ configure.maybe-force-rebuild $(test[2-]) ;
+ local jam-targets ;
+ for t in $(test[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+ if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
+ : ignore-minus-n ]
+ {
+ result = $(name) $(link-opts[1]) ;
+ names-iter = ; link-opts = ; # break
+ }
+ names-iter = $(names-iter[2-]) ;
+ }
+ link-opts = $(link-opts[2-]) ;
+ }
+ return $(result) ;
+}
+
+class ac-library : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+ import ac ;
+ import configure ;
+ import config-cache ;
+ import os ;
+
+ rule __init__ ( name : project : requirements * : include-path ? : library-path ? : library-name ? )
+ {
+ basic-target.__init__ $(name) : $(project) : : $(requirements) ;
+
+ reconfigure $(include-path) : $(library-path) : $(library-name) ;
+ }
+
+ rule set-header ( header )
+ {
+ self.header = $(header) ;
+ }
+
+ rule set-default-names ( names + )
+ {
+ self.default-names = $(names) ;
+ }
+
+ rule set-header-test ( source )
+ {
+ self.header-test = $(source) ;
+ }
+
+ rule reconfigure ( include-path ? : library-path ? : library-name ? )
+ {
+ if $(include-path) || $(library-path) || $(library-name)
+ {
+ check-not-configured ;
+
+ self.include-path = $(include-path) ;
+ self.library-path = $(library-path) ;
+ self.library-name = $(library-name) ;
+ }
+ }
+
+ rule set-target ( target )
+ {
+ check-not-configured ;
+ self.target = $(target) ;
+ }
+
+ rule check-not-configured ( )
+ {
+ if $(self.include-path) || $(self.library-path) || $(self.library-name) || $(self.target)
+ {
+ errors.user-error [ name ] "is already configured" ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ if $(self.target)
+ {
+ return [ $(self.target).generate $(property-set) ] ;
+ }
+ else
+ {
+ local use-environment ;
+ if ! $(self.library-name) && ! $(self.include-path) && ! $(self.library-path)
+ {
+ use-environment = true ;
+ }
+ local libnames = $(self.library-name) ;
+ if ! $(libnames) && $(use-environment)
+ {
+ libnames = [ os.environ $(name:U)_NAME ] ;
+ # Backward compatibility only.
+ libnames ?= [ os.environ $(name:U)_BINARY ] ;
+ }
+ libnames ?= $(self.default-names) ;
+
+ local include-path = $(self.include-path) ;
+ if ! $(include-path) && $(use-environment)
+ {
+ include-path = [ os.environ $(name:U)_INCLUDE ] ;
+ }
+
+ local library-path = $(self.library-path) ;
+ if ! $(library-path) && $(use-environment)
+ {
+ library-path = [ os.environ $(name:U)_LIBRARY_PATH ] ;
+ # Backwards compatibility only
+ library-path ?= [ os.environ $(name:U)_LIBPATH ] ;
+ }
+
+ local relevant = [ property.select [ configure.get-relevant-features ] <link> :
+ [ $(property-set).raw ] ] ;
+
+ local key = ac-library-$(name)-$(relevant:J=-) ;
+ local lookup = [ config-cache.get $(key) ] ;
+
+ if $(lookup)
+ {
+ if $(lookup) = missing
+ {
+ configure.log-library-search-result $(name) : "no (cached)" ;
+ return [ property-set.empty ] ;
+ }
+ else
+ {
+ local includes = $(lookup[1]) ;
+ if $(includes) = %default
+ {
+ includes = ;
+ }
+ local library = [ ac.construct-library $(lookup[2]) :
+ [ $(property-set).refine [ property-set.create $(lookup[3]) ] ] : $(library-path) ] ;
+ configure.log-library-search-result $(name) : "yes (cached)" ;
+ return [ $(library[1]).add-raw <include>$(includes) ] $(library[2-]) ;
+ }
+ }
+ else
+ {
+ local includes = [ ac.find-include-path $(property-set) : $(self.header) : $(include-path) : $(self.header-test) ] ;
+ local library = [ ac.find-library $(property-set) : $(libnames) : $(library-path) ] ;
+ if $(includes) && $(library)
+ {
+ config-cache.set $(key) : $(includes) $(library) ;
+ if $(includes) = %default
+ {
+ includes = ;
+ }
+ library = [ ac.construct-library $(library[1]) :
+ [ $(property-set).refine [ property-set.create $(library[2]) ] ] : $(library-path) ] ;
+ configure.log-library-search-result $(name) : "yes" ;
+ return [ $(library[1]).add-raw <include>$(includes) ] $(library[2-]) ;
+ }
+ else
+ {
+ config-cache.set $(key) : missing ;
+ configure.log-library-search-result $(name) : "no" ;
+ return [ property-set.empty ] ;
+ }
+ }
+ }
+ }
+}
+
+class check-library-worker
+{
+ import property-set ;
+ import targets ;
+ import property ;
+
+ rule __init__ ( target : true-properties * : false-properties * )
+ {
+ self.target = $(target) ;
+ self.true-properties = $(true-properties) ;
+ self.false-properties = $(false-properties) ;
+ }
+
+ rule check ( properties * )
+ {
+ local choosen ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+ local ps = [ property-set.create $(properties) ] ;
+ ps = [ $(ps).propagated ] ;
+ local generated =
+ [ targets.generate-from-reference $(self.target) : $(p) : $(ps) ] ;
+ if $(generated[2])
+ {
+ choosen = $(self.true-properties) ;
+ }
+ else
+ {
+ choosen = $(self.false-properties) ;
+ }
+ return [ property.evaluate-conditionals-in-context $(choosen) :
+ $(properties) ] ;
+ }
+}
+
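+# Returns a <conditional> property that expands to 'true-properties' when the
+# given target reference can be built with the current properties and to
+# 'false-properties' otherwise. A hypothetical usage sketch (target and
+# property names are placeholders only):
+#
+#   exe app : app.cpp :
+#       [ ac.check-library /zlib//zlib
+#           : <library>/zlib//zlib <define>HAVE_ZLIB
+#           : <define>NO_ZLIB ] ;
+#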
+rule check-library ( target : true-properties * : false-properties * )
+{
+ local instance = [ class.new check-library-worker $(target) :
+ $(true-properties) : $(false-properties) ] ;
+ return <conditional>@$(instance).check
+ [ property.evaluate-conditional-relevance
+ $(true-properties) $(false-properties)
+ : [ configure.get-relevant-features ] <link> ] ;
+}
diff --git a/src/boost/tools/build/src/build/alias.jam b/src/boost/tools/build/src/build/alias.jam
new file mode 100644
index 000000000..3a97b7263
--- /dev/null
+++ b/src/boost/tools/build/src/build/alias.jam
@@ -0,0 +1,78 @@
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'alias' rule and the associated target class.
+#
+# Alias is just a main target which returns its source targets without any
+# processing. For example:
+#
+# alias bin : hello test_hello ;
+# alias lib : helpers xml_parser ;
+#
+# Another important use of 'alias' is to conveniently group source files:
+#
+# alias platform-src : win.cpp : <os>NT ;
+# alias platform-src : linux.cpp : <os>LINUX ;
+# exe main : main.cpp platform-src ;
+#
+# Lastly, it is possible to create a local alias for some target, with different
+# properties:
+#
+# alias big_lib : : @/external_project/big_lib/<link>static ;
+#
+
+import "class" : new ;
+import param ;
+import project ;
+import property-set ;
+import targets ;
+
+
+class alias-target-class : basic-target
+{
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ return [ property-set.empty ] $(source-targets) ;
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
+ return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
+ }
+}
+
+
+# Declares the 'alias' target. It will process its sources' virtual targets by
+# returning them unaltered as its own constructed virtual targets.
+#
+rule alias ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new alias-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project)
+ ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ]
+ ] ;
+}
+
+
+IMPORT $(__name__) : alias : : alias ;
diff --git a/src/boost/tools/build/src/build/alias.py b/src/boost/tools/build/src/build/alias.py
new file mode 100755
index 000000000..e9078c746
--- /dev/null
+++ b/src/boost/tools/build/src/build/alias.py
@@ -0,0 +1,75 @@
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Status: ported (danielw)
+# Base revision: 56043
+
+# This module defines the 'alias' rule and associated class.
+#
+# Alias is just a main target which returns its source targets without any
+# processing. For example::
+#
+# alias bin : hello test_hello ;
+# alias lib : helpers xml_parser ;
+#
+# Another important use of 'alias' is to conveniently group source files::
+#
+# alias platform-src : win.cpp : <os>NT ;
+# alias platform-src : linux.cpp : <os>LINUX ;
+# exe main : main.cpp platform-src ;
+#
+# Lastly, it's possible to create a local alias for some target, with different
+# properties::
+#
+# alias big_lib : : @/external_project/big_lib/<link>static ;
+#
+
+import targets
+import property_set
+from b2.manager import get_manager
+
+from b2.util import metatarget, is_iterable_typed
+
+class AliasTarget(targets.BasicTarget):
+
+ def __init__(self, *args):
+ targets.BasicTarget.__init__(self, *args)
+
+ def construct(self, name, source_targets, properties):
+ if __debug__:
+ from .virtual_target import VirtualTarget
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, VirtualTarget)
+ assert isinstance(properties, property_set.PropertySet)
+ return [property_set.empty(), source_targets]
+
+ def compute_usage_requirements(self, subvariant):
+ if __debug__:
+ from .virtual_target import Subvariant
+ assert isinstance(subvariant, Subvariant)
+ base = targets.BasicTarget.compute_usage_requirements(self, subvariant)
+        # Add the sources' usage requirements. If we don't do this, "alias"
+        # does not behave like a 100% alias.
+ return base.add(subvariant.sources_usage_requirements())
+
+@metatarget
+def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
+ project = get_manager().projects().current()
+ targets = get_manager().targets()
+
+ targets.main_target_alternative(AliasTarget(
+ name, project,
+ targets.main_target_sources(sources, name, no_renaming=True),
+ targets.main_target_requirements(requirements or [], project),
+ targets.main_target_default_build(default_build, project),
+ targets.main_target_usage_requirements(usage_requirements or [], project)))
+
+# Declares the 'alias' target. It will build sources, and return them unaltered.
+get_manager().projects().add_rule("alias", alias)
+
diff --git a/src/boost/tools/build/src/build/build-request.jam b/src/boost/tools/build/src/build/build-request.jam
new file mode 100644
index 000000000..4c767c3b2
--- /dev/null
+++ b/src/boost/tools/build/src/build/build-request.jam
@@ -0,0 +1,400 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import sequence ;
+import set ;
+import regex ;
+import feature ;
+import property ;
+import container ;
+import string ;
+
+
+# Transform property-set by applying f to each component property.
+#
+local rule apply-to-property-set ( f property-set )
+{
+ local properties = [ feature.split $(property-set) ] ;
+ return [ string.join [ $(f) $(properties) ] : / ] ;
+}
+
+
+# Expand the given build request by combining all property-sets which do not
+# specify conflicting non-free features. Expects all the project files to
+# already be loaded.
+#
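+# For illustration (assuming the usual implicit features), the request
+#   gcc msvc debug
+# expands to the two non-conflicting combinations
+#   <toolset>gcc/<variant>debug  <toolset>msvc/<variant>debug
+#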
+rule expand-no-defaults ( property-sets * )
+{
+ # First make all features and subfeatures explicit.
+ local expanded-property-sets = [ sequence.transform apply-to-property-set
+ feature.expand-subfeatures : $(property-sets) ] ;
+
+ # Now combine all of the expanded property-sets
+ local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
+
+ return $(product) ;
+}
+
+
+# Update the list of expected conflicts based on the new
+# features.
+#
+local rule remove-conflicts ( conflicts * : features * )
+{
+ local result ;
+ for local c in $(conflicts)
+ {
+ if ! [ set.intersection [ regex.split $(c) "/" ] : $(features) ]
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Implementation of x-product, below. Expects all the project files to already
+# be loaded.
+#
+local rule x-product-aux ( property-sets + )
+{
+ local result ;
+ local p = [ feature.split $(property-sets[1]) ] ;
+ local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
+ local seen ;
+ local extra-conflicts ;
+
+ # No conflict with things used at a higher level?
+ if ! [ set.intersection $(f) : $(x-product-used) ]
+ {
+ local x-product-seen ;
+ local x-product-conflicts =
+ [ remove-conflicts $(x-product-conflicts) : $(f) ] ;
+ {
+ # Do not mix in any conflicting features.
+ local x-product-used = $(x-product-used) $(f) ;
+
+ if $(property-sets[2])
+ {
+ local rest = [ x-product-aux $(property-sets[2-]) ] ;
+ result = $(property-sets[1])/$(rest) ;
+ }
+ if ! $(x-product-conflicts)
+ {
+ result ?= $(property-sets[1]) ;
+ }
+ }
+
+ # If we did not encounter a conflicting feature lower down, do not
+ # recurse again.
+ if ! [ set.intersection $(f) : $(x-product-seen) ]
+ || [ remove-conflicts $(x-product-conflicts) : $(x-product-seen) ]
+ {
+ property-sets = ;
+ }
+ else
+ {
+ # A property is only allowed to be absent if it conflicts
+ # with either a higher or lower layer. We don't need to
+ # bother setting this if we already know that we don't need
+ # to recurse again.
+ extra-conflicts = $(f:J=/) ;
+ }
+
+ seen = $(x-product-seen) ;
+ }
+
+ if $(property-sets[2])
+ {
+ # Lower layers expansion must conflict with this
+ local x-product-conflicts = $(x-product-conflicts) $(extra-conflicts) ;
+
+ result += [ x-product-aux $(property-sets[2-]) ] ;
+ }
+
+ # Note that we have seen these features so that higher levels will recurse
+ # again without them set.
+ x-product-seen += $(f) $(seen) ;
+ return $(result) ;
+}
+
+
+# Return the cross-product of all elements of property-sets, less any that would
+# contain conflicting values for single-valued features. Expects all the project
+# files to already be loaded.
+#
+# Formal definition:
+# Returns all maximum non-conflicting subsets of property-sets.
+# The result is a list of all property-sets p such that
+# 1. p is composed by joining a subset of property-sets without removing
+# duplicates
+# 2. p contains at most one instance of every single-valued feature
+# 3. Adding any additional element of property-sets to p would violate (2).
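+#
+# For illustration, with single-valued features 'a' and 'b':
+#   x-product <a>1 <a>2 <b>1
+# returns
+#   <a>1/<b>1 <a>2/<b>1
+# since <a>1 and <a>2 conflict and so cannot appear together in one result.
+#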
+local rule x-product ( property-sets * )
+{
+ if $(property-sets).non-empty
+ {
+ # Prepare some "scoped globals" that can be used by the implementation
+ # function, x-product-aux.
+ local x-product-seen x-product-used x-product-conflicts ;
+ return [ x-product-aux $(property-sets) : $(feature-space) ] ;
+ }
+ # Otherwise return empty.
+}
+
+
+# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
+# an implicit value. Expects all the project files to already be loaded.
+#
+local rule looks-like-implicit-value ( v )
+{
+ if [ feature.is-implicit-value $(v) ]
+ {
+ return true ;
+ }
+ else
+ {
+ local split = [ regex.split $(v) - ] ;
+ if [ feature.is-implicit-value $(split[1]) ]
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Takes the command line tokens (such as taken from the ARGV rule) and
+# constructs a build request from them. Returns a vector of two vectors (where
+# "vector" means container.jam's "vector"). First is the set of targets
+# specified in the command line, and second is the set of requested build
+# properties. Expects all the project files to already be loaded.
+#
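+# For illustration (a hypothetical command line, assuming the standard features
+# are defined):
+#
+#   from-command-line b2 install toolset=gcc variant=release
+#
+# returns a vector whose first element is a vector holding "install" and whose
+# second element is a vector holding "toolset=gcc" and "variant=release".
+#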
+rule from-command-line ( command-line * )
+{
+ local targets ;
+ local properties ;
+
+ command-line = $(command-line[2-]) ;
+ local skip-next = ;
+ for local e in $(command-line)
+ {
+ if $(skip-next)
+ {
+ skip-next = ;
+ }
+ else if ! [ MATCH ^(-) : $(e) ]
+ {
+ # Build request spec either has "=" in it or completely consists of
+ # implicit feature values.
+ local fs = feature-space ;
+ if [ MATCH "(.*=.*)" : $(e) ]
+ || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
+ {
+ properties += $(e) ;
+ }
+ else if $(e)
+ {
+ targets += $(e) ;
+ }
+ }
+ else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
+ {
+ skip-next = true ;
+ }
+ }
+ return [ new vector
+ [ new vector $(targets) ]
+ [ new vector $(properties) ] ] ;
+}
+
+
+# Converts a list of elements of command line build request specification into internal
+# form. Expects all the project files to already be loaded.
+#
+rule convert-command-line-elements ( elements * )
+{
+ local result ;
+ for local e in $(elements)
+ {
+ result += [ convert-command-line-element $(e) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Converts one element of command line build request specification into internal
+# form.
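+#
+# For illustration, the element "gcc,msvc/variant=debug" converts to
+# "gcc/<variant>debug msvc/<variant>debug" (see also the __test__ rule below).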
+local rule convert-command-line-element ( e )
+{
+ local result ;
+ local parts = [ regex.split $(e) "/" ] ;
+ while $(parts)
+ {
+ local p = $(parts[1]) ;
+ local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
+ local lresult ;
+ local feature ;
+ local values ;
+ if $(m)
+ {
+ feature = $(m[1]) ;
+ values = [ regex.split $(m[2]) "," ] ;
+ lresult = <$(feature)>$(values) ;
+ }
+ else
+ {
+ lresult = [ regex.split $(p) "," ] ;
+ }
+
+ if $(feature) && free in [ feature.attributes <$(feature)> ]
+ {
+            # If we have a free feature, then the value is everything
+            # until the end of the command line token. Slashes in
+            # the following string are not taken to mean separation
+            # of properties. Commas are also not interpreted specially.
+ values = $(values:J=,) ;
+ values = $(values) $(parts[2-]) ;
+ values = $(values:J=/) ;
+ lresult = <$(feature)>$(values) ;
+ parts = ;
+ }
+
+ if ! [ MATCH (.*-.*) : $(p) ]
+ {
+ # property.validate cannot handle subfeatures, so we avoid the check
+ # here.
+ for local p in $(lresult)
+ {
+ property.validate $(p) : $(feature-space) ;
+ }
+ }
+
+ if ! $(result)
+ {
+ result = $(lresult) ;
+ }
+ else
+ {
+ result = $(result)/$(lresult) ;
+ }
+
+ parts = $(parts[2-]) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import feature ;
+
+ feature.prepare-test build-request-test-temp ;
+
+ import build-request ;
+ import build-request : expand-no-defaults : build-request.expand-no-defaults ;
+ import errors : try catch ;
+ import feature : feature subfeature ;
+
+ feature toolset : gcc msvc borland : implicit ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+ 3.0 3.0.1 3.0.2 : optional ;
+
+ feature variant : debug release : implicit composite ;
+ feature inlining : on off ;
+ feature "include" : : free ;
+
+ feature stdlib : native stlport : implicit ;
+
+ feature runtime-link : dynamic static : symmetric ;
+
+ # Empty build requests should expand to empty.
+ assert.result
+ : build-request.expand-no-defaults ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ <variant>debug/<toolset>msvc/<stdlib>stlport
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
+ : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
+
+ assert.result
+ <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
+ : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
+
+ local r ;
+
+ try ;
+ {
+ r = [ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ] ;
+ build-request.convert-command-line-elements [ $(r).get-at 2 ] ;
+ }
+ catch \"static\" is not an implicit feature value ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic ]
+ : debug <runtime-link>dynamic ;
+
+ r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : target ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic ]
+ : debug <runtime-link>dynamic ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic,static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic,static ]
+ : debug <runtime-link>dynamic <runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/runtime-link=dynamic,static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug gcc/runtime-link=dynamic,static ]
+ : debug gcc/<runtime-link>dynamic gcc/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc,borland/runtime-link=static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements msvc gcc,borland/runtime-link=static ]
+ : msvc gcc/<runtime-link>static borland/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam gcc-3.0 ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements gcc-3.0 ]
+ : gcc-3.0 ;
+
+ feature.finish-test build-request-test-temp ;
+}
diff --git a/src/boost/tools/build/src/build/build_request.py b/src/boost/tools/build/src/build/build_request.py
new file mode 100644
index 000000000..4fa54072f
--- /dev/null
+++ b/src/boost/tools/build/src/build/build_request.py
@@ -0,0 +1,222 @@
+# Status: being ported by Vladimir Prus
+# TODO: need to re-compare with mainline of .jam
+# Base revision: 40480
+#
+# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import b2.build.feature
+feature = b2.build.feature
+
+from b2.util.utility import *
+from b2.util import is_iterable_typed
+import b2.build.property_set as property_set
+
+def expand_no_defaults (property_sets):
+ """ Expand the given build request by combining all property_sets which don't
+ specify conflicting non-free features.
+ """
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
+ # First make all features and subfeatures explicit
+ expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
+
+ # Now combine all of the expanded property_sets
+ product = __x_product (expanded_property_sets)
+
+ return [property_set.create(p) for p in product]
+
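+# Illustrative sketch (hypothetical feature setup): given property sets for
+# "<toolset>gcc", "<toolset>msvc" and "<variant>debug", expand_no_defaults
+# returns the two non-conflicting combinations "<toolset>gcc <variant>debug"
+# and "<toolset>msvc <variant>debug".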
+
+def __x_product (property_sets):
+ """ Return the cross-product of all elements of property_sets, less any
+ that would contain conflicting values for single-valued features.
+ """
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
+ x_product_seen = set()
+ return __x_product_aux (property_sets, x_product_seen)[0]
+
+def __x_product_aux (property_sets, seen_features):
+ """Returns non-conflicting combinations of property sets.
+
+ property_sets is a list of PropertySet instances. seen_features is a set of Property
+ instances.
+
+ Returns a tuple of:
+ - list of lists of Property instances, such that within each list, no two Property instance
+ have the same feature, and no Property is for feature in seen_features.
+ - set of features we saw in property_sets
+ """
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
+ assert isinstance(seen_features, set)
+ if not property_sets:
+ return ([], set())
+
+ properties = property_sets[0].all()
+
+ these_features = set()
+ for p in property_sets[0].non_free():
+ these_features.add(p.feature)
+
+    # Note: the algorithm as implemented here, as in the original Jam code, appears
+    # to detect conflicts based on features, not properties. For example, if the
+    # command line build request says:
+    #
+    # <a>1/<b>1 <c>1/<b>1
+    #
+    # it will decide that those two property sets conflict, because they both specify
+    # a value for 'b', and will not try building "<a>1 <c>1 <b>1", but rather two
+    # different property sets. This is a topic for future fixing, maybe.
+ if these_features & seen_features:
+
+ (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
+ return (inner_result, inner_seen | these_features)
+
+ else:
+
+ result = []
+ (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
+ if inner_result:
+ for inner in inner_result:
+ result.append(properties + inner)
+ else:
+ result.append(properties)
+
+ if inner_seen & these_features:
+            # Some of the elements in property_sets[1:] conflict with elements of
+            # property_sets[0]. Try again, this time omitting elements of property_sets[0].
+ (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
+ result.extend(inner_result2)
+
+ return (result, inner_seen | these_features)
+
+
+
+def looks_like_implicit_value(v):
+    """Returns true if 'v' is either an implicit value, or
+    the part before the first '-' symbol is an implicit value."""
+ assert isinstance(v, basestring)
+ if feature.is_implicit_value(v):
+ return 1
+ else:
+ split = v.split("-")
+ if feature.is_implicit_value(split[0]):
+ return 1
+
+ return 0
+
+def from_command_line(command_line):
+    """Takes the command line tokens (such as taken from the ARGV rule)
+    and constructs a build request from them. Returns a list of two
+ lists. First is the set of targets specified in the command line,
+ and second is the set of requested build properties."""
+ assert is_iterable_typed(command_line, basestring)
+ targets = []
+ properties = []
+
+ for e in command_line:
+ if e[:1] != "-":
+ # Build request spec either has "=" in it, or completely
+ # consists of implicit feature values.
+ if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
+ properties.append(e)
+ elif e:
+ targets.append(e)
+
+ return [targets, properties]
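+# Illustrative example (hypothetical tokens):
+#   from_command_line(["toolset=gcc", "variant=release", "install"])
+# returns [["install"], ["toolset=gcc", "variant=release"]].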
+
+# Converts one element of command line build request specification into
+# internal form.
+def convert_command_line_element(e):
+ assert isinstance(e, basestring)
+ result = None
+ parts = e.split("/")
+ for p in parts:
+ m = p.split("=")
+ if len(m) > 1:
+ feature = m[0]
+ values = m[1].split(",")
+ lresult = [("<%s>%s" % (feature, v)) for v in values]
+ else:
+ lresult = p.split(",")
+
+ if p.find('-') == -1:
+ # FIXME: first port property.validate
+ # property.validate cannot handle subfeatures,
+ # so we avoid the check here.
+ #for p in lresult:
+ # property.validate(p)
+ pass
+
+ if not result:
+ result = lresult
+ else:
+ result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
+
+ return [property_set.create(b2.build.feature.split(r)) for r in result]
+
+###
+### rule __test__ ( )
+### {
+### import assert feature ;
+###
+### feature.prepare-test build-request-test-temp ;
+###
+### import build-request ;
+### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
+### import errors : try catch ;
+### import feature : feature subfeature ;
+###
+### feature toolset : gcc msvc borland : implicit ;
+### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+### 3.0 3.0.1 3.0.2 : optional ;
+###
+### feature variant : debug release : implicit composite ;
+### feature inlining : on off ;
+### feature "include" : : free ;
+###
+### feature stdlib : native stlport : implicit ;
+###
+### feature runtime-link : dynamic static : symmetric ;
+###
+###
+### local r ;
+###
+### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+###
+### try ;
+### {
+###
+### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+### }
+### catch \"static\" is not a value of an implicit feature ;
+###
+###
+### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
+### assert.equal [ $(r).get-at 1 ] : target ;
+### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+###
+### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
+###
+### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+### gcc/<runtime-link>static ;
+###
+### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+### borland/<runtime-link>static ;
+###
+### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+###
+### feature.finish-test build-request-test-temp ;
+### }
+###
+###
diff --git a/src/boost/tools/build/src/build/config-cache.jam b/src/boost/tools/build/src/build/config-cache.jam
new file mode 100644
index 000000000..65bf91a8c
--- /dev/null
+++ b/src/boost/tools/build/src/build/config-cache.jam
@@ -0,0 +1,78 @@
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import errors ;
+import regex ;
+import path ;
+import project ;
+import os ;
+
+rule get ( name )
+{
+ return $(.vars.$(name)) ;
+}
+
+rule set ( name : value * )
+{
+ .all-vars += $(name) ;
+ .vars.$(name) = $(value) ;
+}
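+
+# Typical usage sketch (keys and values are arbitrary strings chosen by the
+# caller, e.g. from configure.jam or ac.jam):
+#
+#   local cached = [ config-cache.get my-check-key ] ;
+#   if ! $(cached)
+#   {
+#       config-cache.set my-check-key : yes ;
+#   }
+#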
+
+rule save ( )
+{
+ if $(.cache-file)
+ {
+ local cache-file-native = [ path.native $(.cache-file) ] ;
+ local target = <new-cache-file>$(cache-file-native) ;
+ local contents = "# Automatically generated by B2.\n# Do not edit.\n\nmodule config-cache {\n" ;
+ for local var in $(.all-vars)
+ {
+ local transformed ;
+ for local value in $(.vars.$(var))
+ {
+ transformed += [ regex.escape $(value) : \"\\ : \\ ] ;
+ }
+ local quoted = \"$(transformed)\" ;
+ contents += " set \"$(var)\" : $(quoted:J= ) ;\n" ;
+ }
+ contents += "}\n" ;
+ FILE_CONTENTS on $(target) = $(contents) ;
+ ALWAYS $(target) ;
+ config-cache.write $(target) ;
+ UPDATE_NOW $(target) : [ modules.peek configure : .log-fd ] : ignore-minus-n ;
+ import common ;
+ common.Clean clean-all : $(target) ;
+ }
+}
+
+actions write
+{
+ @($(STDOUT):E=$(FILE_CONTENTS:J=)) > "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions write
+ {
+ @($(STDOUT):E=$(FILE_CONTENTS:J=)) | TYPE SYS$INPUT /OUT=$(<:W)
+ }
+}
+
+rule load ( cache-file )
+{
+ if $(.cache-file)
+ {
+ errors.error duplicate load of cache file ;
+ }
+ cache-file = [ path.native $(cache-file) ] ;
+ if [ path.exists $(cache-file) ] && ! ( --reconfigure in [ modules.peek : ARGV ] )
+ {
+ FILE_CONTENTS on <old-cache-file>$(cache-file) = "" ;
+ config-cache.write <old-cache-file>$(cache-file) ;
+ UPDATE_NOW <old-cache-file>$(cache-file) : [ modules.peek configure : .log-fd ] ;
+ include <old-cache-file>$(cache-file) ;
+ }
+ .cache-file = $(cache-file) ;
+}
diff --git a/src/boost/tools/build/src/build/configure.jam b/src/boost/tools/build/src/build/configure.jam
new file mode 100644
index 000000000..646b05a03
--- /dev/null
+++ b/src/boost/tools/build/src/build/configure.jam
@@ -0,0 +1,620 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright 2017 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines functions to help with two main tasks:
+#
+# - Discovering build-time configuration for the purposes of adjusting the build
+# process.
+# - Reporting what is built, and how it is configured.
+
+import "class" : new ;
+import common ;
+import indirect ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import targets ;
+import config-cache ;
+import feature ;
+import modules ;
+import sequence ;
+import utility ;
+import virtual-target ;
+
+
+# The configure feature allows external definition of what features are
+# relevant for doing configuration builds. One can add additional relevant
+# features by using:
+#
+# import feature ;
+# import configure ;
+# feature.compose <configure> : <threading> ;
+#
+feature.feature configure : : composite optional ;
+
+# This is the initial set of relevant features. Note that subfeatures of all
+# relevant features are also considered relevant.
+#
+feature.compose <configure> :
+ <target-os> <toolset> <address-model> <architecture> <cxxstd> ;
+
+
+rule log-summary ( )
+{
+}
+
+
+.width = 30 ;
+
+rule set-width ( width )
+{
+ .width = $(width) ;
+}
+
+
+# Declare that the components specified by the parameter exist.
+#
+rule register-components ( components * )
+{
+ .components += $(components) ;
+}
+
+
+# Declare that the components specified by the parameters will be built.
+#
+rule components-building ( components * )
+{
+ .built-components += $(components) ;
+}
+
+
+# Report something about component configuration that the user should know
+# about.
+#
+rule log-component-configuration ( component : message )
+{
+ # FIXME: Implement per-property-set logs.
+ .component-logs.$(component) += $(message) ;
+}
+
+
+rule log-check-result ( result )
+{
+ if ! $(.announced-checks)
+ {
+ ECHO "Performing configuration checks\n" ;
+ .announced-checks = 1 ;
+ }
+
+ ECHO $(result) ;
+ # FIXME: Unfinished code. Nothing seems to set .check-results at the moment.
+ #.check-results += $(result) ;
+}
+
+
+rule log-library-search-result ( library : result )
+{
+ local x = [ PAD " - $(library)" : $(.width) ] ;
+ log-check-result "$(x) : $(result)" ;
+}
+
+
+rule print-component-configuration ( )
+{
+ # FIXME: See what was intended with this initial assignment.
+ # local c = [ sequence.unique $(.components) ] ;
+
+ ECHO "\nComponent configuration:\n" ;
+ local c ;
+ for c in $(.components)
+ {
+ local s ;
+ if $(c) in $(.built-components)
+ {
+ s = "building" ;
+ }
+ else
+ {
+ s = "not building" ;
+ }
+ ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
+ for local m in $(.component-logs.$(c))
+ {
+ ECHO " -" $(m) ;
+ }
+ }
+ ECHO ;
+}
+
+
+rule print-configure-checks-summary ( )
+{
+ # FIXME: The problem with this approach is that the user sees the checks
+ # summary when all checks are done, and has no progress reporting while the
+ # checks are being executed.
+ if $(.check-results)
+ {
+ ECHO "Configuration checks summary\n" ;
+ for local r in $(.check-results)
+ {
+ ECHO $(r) ;
+ }
+ ECHO ;
+ }
+}
+
+if --reconfigure in [ modules.peek : ARGV ]
+{
+ .reconfigure = true ;
+}
+
+# Handle the --reconfigure option
+rule maybe-force-rebuild ( targets * )
+{
+ if $(.reconfigure)
+ {
+ local all-targets ;
+ for local t in $(targets)
+ {
+ all-targets += [ virtual-target.traverse $(t) ] ;
+ }
+ for local t in [ sequence.unique $(all-targets) ]
+ {
+ $(t).always ;
+ }
+ }
+}
+
+# Attempts to build a set of virtual targets
+rule try-build ( targets * : ps : what : retry ? )
+{
+ local cache-name = $(what) [ $(ps).raw ] ;
+ cache-name = $(cache-name:J=-) ;
+ local value = [ config-cache.get $(cache-name) ] ;
+
+ local result ;
+ local jam-targets ;
+
+ maybe-force-rebuild $(targets) ;
+
+ for local t in $(targets)
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+
+ if $(value)
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ if $(value) = true
+ {
+ .$(what)-supported.$(ps) = yes ;
+ result = true ;
+ log-check-result "$(x) : yes (cached)" ;
+ }
+ else
+ {
+ log-check-result "$(x) : no (cached)" ;
+ }
+ }
+ else if ! UPDATE_NOW in [ RULENAMES ]
+ {
+ # Cannot determine. Assume existence.
+ }
+ else
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ if [ UPDATE_NOW $(jam-targets) :
+ $(.log-fd) : ignore-minus-n : ignore-minus-q ]
+ {
+ .$(what)-supported.$(ps) = yes ;
+ result = true ;
+ log-check-result "$(x) : yes" ;
+ }
+ else
+ {
+ log-check-result "$(x) : no" ;
+ }
+ }
+ if ! $(value)
+ {
+ if $(result)
+ {
+ config-cache.set $(cache-name) : true ;
+ }
+ else
+ {
+ config-cache.set $(cache-name) : false ;
+ }
+ }
+ return $(result) ;
+}
+
+# Attempts to build several sets of virtual targets. Returns the
+# 1-based index of the first set that builds.
+rule try-find-build ( ps : what : * )
+{
+ local args = 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+ # The outer layer only needs to check $(what), but we
+ # also need to check the individual elements, in case
+ # the set of targets has changed since the last build.
+ local cache-name = $(what) $($(args)[1]) [ $(ps).raw ] ;
+ cache-name = $(cache-name:J=-) ;
+ local value = [ config-cache.get $(cache-name) ] ;
+
+ local result ;
+ local jam-targets ;
+
+ maybe-force-rebuild $($(args)[2-]) ;
+
+ # Make sure that the targets are always actualized,
+ # even if the result is cached. This is needed to
+ # allow clean-all to find them and also to avoid
+ # unintentional behavior changes.
+ for local t in $($(args)[2-])
+ {
+ $(t).actualize ;
+ }
+
+ if $(value)
+ {
+ local none = none ; # What to show when the argument
+ local name = $(value) ;
+ if $(name) != none
+ {
+ name = [ CALC $(name) + 2 ] ;
+ }
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ local y = [ PAD $($(name)[1]) : 3 ] ;
+ result = $(value) ;
+ log-check-result "$(x) : $(y) (cached)" ;
+ }
+ else
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ for local i in $(args)
+ {
+ if ! $($(i)[1])
+ {
+ break ;
+ }
+ local jam-targets ;
+ for local t in $($(i)[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+ if [ UPDATE_NOW $(jam-targets) :
+ $(.log-fd) : ignore-minus-n : ignore-minus-q ]
+ {
+ result = [ CALC $(i) - 2 ] ;
+ log-check-result "$(x) : $($(i)[1])" ;
+ break ;
+ }
+ }
+ if ! $(result)
+ {
+ log-check-result "$(x) : none" ;
+ result = none ;
+ }
+ }
+ if ! $(value)
+ {
+ if $(result)
+ {
+ config-cache.set $(cache-name) : $(result) ;
+ }
+ else
+ {
+ config-cache.set $(cache-name) : $(result) ;
+ }
+ }
+ if $(result) != none
+ {
+ return $(result) ;
+ }
+}
+
+# Attempt to build a metatarget named by 'metatarget-reference'
+# in context of 'project' with properties 'ps'.
+# Returns a non-empty value if the build is OK.
+rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
+{
+ local result ;
+
+ if ! $(retry) && ! $(.$(what)-tested.$(ps))
+ {
+ .$(what)-tested.$(ps) = true ;
+
+ local targets = [ targets.generate-from-reference
+ $(metatarget-reference) : $(project) : $(ps) ] ;
+
+ result = [ try-build $(targets[2-]) : $(ps) : $(what) : $(retry) ] ;
+ .$(what)-supported.$(ps) = $(result) ;
+
+ return $(result) ;
+
+ }
+ else
+ {
+ return $(.$(what)-supported.$(ps)) ;
+ }
+}
+
+# Attempt to build a metatarget named by 'metatarget-reference'
+# in context of 'project' with properties 'ps'.
+# Returns the 1-based index of the first target
+# that builds.
+rule find-builds-raw ( project : ps : what : * )
+{
+ local result ;
+ local args = 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+
+ if ! $(.$(what)-tested.$(ps))
+ {
+ .$(what)-tested.$(ps) = true ;
+ local targets.$(i) what.$(i) ;
+ for local i in $(args)
+ {
+ if ! $($(i))
+ {
+ break ;
+ }
+ targets.$(i) = [ targets.generate-from-reference
+ $($(i)[1]) : $(project) : $(ps) ] ;
+ # ignore usage requirements
+ targets.$(i) = $(targets.$(i)[2-]) ;
+ if $($(i)[2])
+ {
+ what.$(i) = $($(i)[2]) ;
+ }
+ else
+ {
+ local t = [ targets.resolve-reference
+ $($(i)[1]) : $(project) ] ;
+ what.$(i) = [ $(t[1]).name ] ;
+ }
+ }
+
+ result = [ try-find-build $(ps) : $(what)
+ : $(what.4) $(targets.4)
+ : $(what.5) $(targets.5)
+ : $(what.6) $(targets.6)
+ : $(what.7) $(targets.7)
+ : $(what.8) $(targets.8)
+ : $(what.9) $(targets.9)
+ : $(what.10) $(targets.10)
+ : $(what.11) $(targets.11)
+ : $(what.12) $(targets.12)
+ : $(what.13) $(targets.13)
+ : $(what.14) $(targets.14)
+ : $(what.15) $(targets.15)
+ : $(what.16) $(targets.16)
+ : $(what.17) $(targets.17)
+ : $(what.18) $(targets.18)
+ : $(what.19) $(targets.19) ] ;
+ .$(what)-result.$(ps) = $(result) ;
+
+ return $(result) ;
+ }
+ else
+ {
+ return $(.$(what)-result.$(ps)) ;
+ }
+}
+
+rule get-relevant-features ( )
+{
+ local relevant = [ feature.expand <configure> ] ;
+ local result = ;
+ for local f in $(relevant)
+ {
+ if $(f) != <configure>
+ {
+ local sub = [ modules.peek feature : $(f).subfeatures ] ;
+ local name = [ utility.ungrist $(f) ] ;
+ result += $(f) <$(name)-$(sub)> ;
+ }
+ }
+ return $(result) ;
+}
+
+rule builds ( metatarget-reference : properties * : what ? : retry ? )
+{
+ local toolset-subfeatures = [ modules.peek feature : <toolset>.subfeatures ] ;
+ toolset-subfeatures = <toolset-$(toolset-subfeatures)> ;
+ # FIXME: This should not be hardcoded. Other checks might want to consider a
+ # different set of features as relevant.
+ local relevant = [ property.select [ get-relevant-features ] : $(properties) ] ;
+ local ps = [ property-set.create $(relevant) ] ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+
+ if ! $(what)
+ {
+ local resolved = [ targets.resolve-reference $(metatarget-reference) : $(p) ] ;
+ local name = [ $(resolved[1]).name ] ;
+ what = "$(name) builds" ;
+ }
+
+ return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) :
+ $(retry) ] ;
+}
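+
+# A <conditional> rule in a Jamfile might use it along these lines (the
+# metatarget reference, rule name and property below are hypothetical):
+#
+#   rule check-foo ( properties * )
+#   {
+#       if [ configure.builds ../config//has_foo : $(properties) : "has foo" ]
+#       {
+#           return <define>HAS_FOO ;
+#       }
+#   }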
+
+rule find-builds ( what : properties * : * )
+{
+ local relevant = [ property.select [ get-relevant-features ] : $(properties) ] ;
+ local ps = [ property-set.create $(relevant) ] ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+
+ return [ find-builds-raw $(p) : $(ps) : $(what) :
+ $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) :
+ $(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
+ $(16) : $(17) : $(18) : $(19) ] ;
+}
+
+
+# Called by B2 startup code to specify the file to receive the
+# configuration check results. Should never be called by user code.
+#
+rule set-log-file ( log-file )
+{
+ path.makedirs [ path.parent $(log-file) ] ;
+ .log-fd = [ FILE_OPEN [ path.native $(log-file) ] : "w" ] ;
+ if ! $(.log-fd)
+ {
+ ECHO "warning:" failed to open log file $(log-file) for writing ;
+ }
+}
+
+
+# Frontend rules
+
+class check-target-builds-worker
+{
+ import configure ;
+ import property-set ;
+ import targets ;
+ import project ;
+ import property ;
+
+ rule __init__ ( target message ? : true-properties * : false-properties * )
+ {
+ local project = [ project.current ] ;
+ self.target = $(target) ;
+ self.message = $(message) ;
+ self.true-properties =
+ [ configure.translate-properties $(true-properties) : $(project) ] ;
+ self.false-properties =
+ [ configure.translate-properties $(false-properties) : $(project) ] ;
+ }
+
+ rule check ( properties * )
+ {
+ local choosen ;
+ if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
+ {
+ choosen = $(self.true-properties) ;
+ }
+ else
+ {
+ choosen = $(self.false-properties) ;
+ }
+ return [ property.evaluate-conditionals-in-context $(choosen) :
+ $(properties) ] ;
+ }
+}
+
+class configure-choose-worker
+{
+ import configure ;
+ import property ;
+ import project ;
+ rule __init__ ( message : * )
+ {
+ local project = [ project.current ] ;
+ self.message = $(message) ;
+ for i in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ {
+ local name = [ CALC $(i) - 1 ] ;
+ self.targets.$(name) = $($(i)[1]) ;
+ if ! $($(i)[2]:G) # Check whether the second argument is a property
+ {
+ self.what.$(name) = $($(i)[2]) ;
+ self.props.$(name) = $($(i)[3-]) ;
+ }
+ else
+ {
+ self.props.$(name) = $($(i)[2-]) ;
+ }
+ self.props.$(name) = [ configure.translate-properties
+ $(self.props.$(name)) : $(project) ] ;
+ }
+ }
+ rule all-properties ( )
+ {
+ local i = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+ return $(self.props.$(i)) ;
+ }
+ rule check ( properties * )
+ {
+ local i = [ configure.find-builds $(self.message) : $(properties)
+ : $(self.targets.1) $(self.what.1)
+ : $(self.targets.2) $(self.what.2)
+ : $(self.targets.3) $(self.what.3)
+ : $(self.targets.4) $(self.what.4)
+ : $(self.targets.5) $(self.what.5)
+ : $(self.targets.6) $(self.what.6)
+ : $(self.targets.7) $(self.what.7)
+ : $(self.targets.8) $(self.what.8)
+ : $(self.targets.9) $(self.what.9)
+ : $(self.targets.10) $(self.what.10)
+ : $(self.targets.11) $(self.what.11)
+ : $(self.targets.12) $(self.what.12)
+ : $(self.targets.13) $(self.what.13)
+ : $(self.targets.14) $(self.what.14)
+ : $(self.targets.15) $(self.what.15)
+ : $(self.targets.16) $(self.what.16)
+ : $(self.targets.17) $(self.what.17)
+ : $(self.targets.18) $(self.what.18)
+ : $(self.targets.19) $(self.what.19) ] ;
+ if $(self.props.$(i))
+ {
+ return [ property.evaluate-conditionals-in-context $(self.props.$(i)) : $(properties) ] ;
+ }
+ }
+}
+
+rule translate-properties ( properties * : project ? )
+{
+ if $(project) && [ $(project).location ]
+ {
+ local location = [ $(project).location ] ;
+ local m = [ $(project).project-module ] ;
+ local project-id = [ project.attribute $(m) id ] ;
+ project-id ?= [ path.root $(location) [ path.pwd ] ] ;
+ return [ property.translate $(properties)
+ : $(project-id) : $(location) : $(m) ] ;
+ }
+ else
+ {
+ return $(properties) ;
+ }
+}
+
+rule check-target-builds ( target message ? : true-properties * :
+ false-properties * )
+{
+ local instance = [ new check-target-builds-worker $(target) $(message) :
+ $(true-properties) : $(false-properties) ] ;
+ local rulename = [ indirect.make check : $(instance) ] ;
+ return <conditional>@$(rulename)
+ [ property.evaluate-conditional-relevance
+ $(true-properties) $(false-properties)
+ : [ configure.get-relevant-features ] ] ;
+}
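+
+# For example, a Jamfile might write (names are hypothetical; has_sse4 would
+# be a small compile-and-link check metatarget declared elsewhere):
+#
+#   exe app : app.cpp
+#       : [ check-target-builds has_sse4 "SSE4 support" : <define>HAS_SSE4 ] ;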
+
+# Usage:
+# [ configure.choose "architecture"
+# : /config//x86 x86 <architecture>x86
+# : /config//mips mips <architecture>mips
+# ]
+rule choose ( message : * )
+{
+ local instance = [ new configure-choose-worker $(message)
+ : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
+ : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
+ : $(17) : $(18) : $(19) ] ;
+ local rulename = [ indirect.make check : $(instance) ] ;
+ return <conditional>@$(rulename)
+ [ property.evaluate-conditional-relevance
+ [ $(instance).all-properties ]
+ : [ configure.get-relevant-features ] ] ;
+}
+
+
+IMPORT $(__name__) : check-target-builds : : check-target-builds ;
diff --git a/src/boost/tools/build/src/build/configure.py b/src/boost/tools/build/src/build/configure.py
new file mode 100644
index 000000000..9bb043773
--- /dev/null
+++ b/src/boost/tools/build/src/build/configure.py
@@ -0,0 +1,176 @@
+# Status: ported.
+# Base revision: 64488
+#
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines functions to help with two main tasks:
+#
+# - Discovering build-time configuration for the purposes of adjusting
+#   the build process.
+# - Reporting what is built, and how it is configured.
+
+import b2.build.property as property
+import b2.build.property_set as property_set
+
+from b2.build import targets as targets_
+
+from b2.manager import get_manager
+from b2.util.sequence import unique
+from b2.util import bjam_signature, value_to_jam, is_iterable
+
+import bjam
+import os
+
+__width = 30
+
+def set_width(width):
+ global __width
+    __width = width
+
+__components = []
+__built_components = []
+__component_logs = {}
+__announced_checks = False
+
+__log_file = None
+__log_fd = -1
+
+def register_components(components):
+ """Declare that the components specified by the parameter exist."""
+ assert is_iterable(components)
+ __components.extend(components)
+
+def components_building(components):
+    """Declare that the components specified by the parameters will be built."""
+ assert is_iterable(components)
+ __built_components.extend(components)
+
+def log_component_configuration(component, message):
+    """Report something about component configuration that the user should know about."""
+ assert isinstance(component, basestring)
+ assert isinstance(message, basestring)
+ __component_logs.setdefault(component, []).append(message)
+
+def log_check_result(result):
+ assert isinstance(result, basestring)
+ global __announced_checks
+ if not __announced_checks:
+ print "Performing configuration checks"
+ __announced_checks = True
+
+ print result
+
+def log_library_search_result(library, result):
+ assert isinstance(library, basestring)
+ assert isinstance(result, basestring)
+ log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
+
+
+def print_component_configuration():
+
+ print "\nComponent configuration:"
+ for c in __components:
+ if c in __built_components:
+ s = "building"
+ else:
+ s = "not building"
+        message = " - %s" % c
+        message = message.rjust(__width)
+        message += " : " + s
+        print message
+ for m in __component_logs.get(c, []):
+ print " -" + m
+ print ""
+
+__builds_cache = {}
+
+def builds(metatarget_reference, project, ps, what):
+ # Attempt to build a metatarget named by 'metatarget-reference'
+ # in context of 'project' with properties 'ps'.
+    # Returns a non-empty value if the build is OK.
+ assert isinstance(metatarget_reference, basestring)
+ assert isinstance(project, targets_.ProjectTarget)
+ assert isinstance(ps, property_set.PropertySet)
+ assert isinstance(what, basestring)
+
+ result = []
+
+ existing = __builds_cache.get((what, ps), None)
+ if existing is None:
+
+ result = False
+ __builds_cache[(what, ps)] = False
+
+ targets = targets_.generate_from_reference(
+ metatarget_reference, project, ps).targets()
+ jam_targets = []
+ for t in targets:
+ jam_targets.append(t.actualize())
+
+ x = (" - %s" % what).rjust(__width)
+ if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
+ __builds_cache[(what, ps)] = True
+ result = True
+ log_check_result("%s: yes" % x)
+ else:
+ log_check_result("%s: no" % x)
+
+ return result
+ else:
+ return existing
+
+def set_log_file(log_file_name):
+ assert isinstance(log_file_name, basestring)
+    # Called by Boost.Build startup code to specify the name of a file
+    # that will receive the results of configure checks. This
+    # should never be called by user code.
+ global __log_file, __log_fd
+ dirname = os.path.dirname(log_file_name)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ # Make sure to keep the file around, so that it's not
+ # garbage-collected and closed
+ __log_file = open(log_file_name, "w")
+ __log_fd = __log_file.fileno()
+
+# Frontend rules
+
+class CheckTargetBuildsWorker:
+
+ def __init__(self, target, true_properties, false_properties):
+ self.target = target
+ self.true_properties = property.create_from_strings(true_properties, True)
+ self.false_properties = property.create_from_strings(false_properties, True)
+
+ def check(self, ps):
+ assert isinstance(ps, property_set.PropertySet)
+ # FIXME: this should not be hardcoded. Other checks might
+ # want to consider different set of features as relevant.
+ toolset = ps.get('toolset')[0]
+        toolset_version_property = "<toolset-" + toolset + ":version>"
+ relevant = ps.get_properties('target-os') + \
+ ps.get_properties("toolset") + \
+ ps.get_properties(toolset_version_property) + \
+ ps.get_properties("address-model") + \
+ ps.get_properties("architecture")
+ rps = property_set.create(relevant)
+ t = get_manager().targets().current()
+ p = t.project()
+ if builds(self.target, p, rps, "%s builds" % self.target):
+ choosen = self.true_properties
+ else:
+ choosen = self.false_properties
+ return property.evaluate_conditionals_in_context(choosen, ps)
+
+@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"]))
+def check_target_builds(target, true_properties, false_properties):
+ worker = CheckTargetBuildsWorker(target, true_properties, false_properties)
+ value = value_to_jam(worker.check)
+ return "<conditional>" + value
+
+get_manager().projects().add_rule("check-target-builds", check_target_builds)
+
+
diff --git a/src/boost/tools/build/src/build/engine.py b/src/boost/tools/build/src/build/engine.py
new file mode 100644
index 000000000..6e49a8b5e
--- /dev/null
+++ b/src/boost/tools/build/src/build/engine.py
@@ -0,0 +1,246 @@
+# Copyright Pedro Ferreira 2005.
+# Copyright Vladimir Prus 2007.
+# Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+bjam_interface = __import__('bjam')
+
+import operator
+import re
+
+import b2.build.property_set as property_set
+
+from b2.util import set_jam_action, is_iterable
+
+class BjamAction(object):
+ """Class representing bjam action defined from Python."""
+
+ def __init__(self, action_name, function, has_command=False):
+ assert isinstance(action_name, basestring)
+ assert callable(function) or function is None
+ self.action_name = action_name
+ self.function = function
+ self.has_command = has_command
+
+ def __call__(self, targets, sources, property_set_):
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
+ if self.has_command:
+ # Bjam actions defined from Python have only the command
+ # to execute, and no associated jam procedural code. So
+ # passing 'property_set' to it is not necessary.
+ bjam_interface.call("set-update-action", self.action_name,
+ targets, sources, [])
+ if self.function:
+ self.function(targets, sources, property_set_)
+
+class BjamNativeAction(BjamAction):
+ """Class representing bjam action defined by Jam code.
+
+    We still allow associating a Python callable that will
+ be called when this action is installed on any target.
+ """
+
+ def __call__(self, targets, sources, property_set_):
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
+ if self.function:
+ self.function(targets, sources, property_set_)
+
+ p = []
+        if property_set_:
+ p = property_set_.raw()
+
+ set_jam_action(self.action_name, targets, sources, p)
+
+action_modifiers = {"updated": 0x01,
+ "together": 0x02,
+ "ignore": 0x04,
+ "quietly": 0x08,
+ "piecemeal": 0x10,
+ "existing": 0x20}
+
+class Engine:
+ """ The abstract interface to a build engine.
+
+    For now, the naming of targets and the special handling of some
+    target variables like SEARCH and LOCATE make this class coupled
+    to the bjam engine.
+ """
+ def __init__ (self):
+ self.actions = {}
+
+ def add_dependency (self, targets, sources):
+ """Adds a dependency from 'targets' to 'sources'
+
+ Both 'targets' and 'sources' can be either list
+ of target names, or a single target name.
+ """
+ if isinstance (targets, str):
+ targets = [targets]
+ if isinstance (sources, str):
+ sources = [sources]
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+
+ for target in targets:
+ for source in sources:
+ self.do_add_dependency (target, source)
+
+ def get_target_variable(self, targets, variable):
+        """Gets the value of `variable` set on the first target in `targets`.
+
+ Args:
+ targets (str or list): one or more targets to get the variable from.
+ variable (str): the name of the variable
+
+ Returns:
+ the value of `variable` set on `targets` (list)
+
+ Example:
+
+ >>> ENGINE = get_manager().engine()
+ >>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World')
+ >>> ENGINE.get_target_variable(targets, 'MY-VAR')
+ ['Hello World']
+
+ Equivalent Jam code:
+
+ MY-VAR on $(targets) = "Hello World" ;
+ echo [ on $(targets) return $(MY-VAR) ] ;
+ "Hello World"
+ """
+ if isinstance(targets, str):
+ targets = [targets]
+ assert is_iterable(targets)
+ assert isinstance(variable, basestring)
+
+ return bjam_interface.call('get-target-variable', targets, variable)
+
+ def set_target_variable (self, targets, variable, value, append=0):
+ """ Sets a target variable.
+
+ The 'variable' will be available to bjam when it decides
+ where to generate targets, and will also be available to
+        the updating rule for that 'target'.
+ """
+ if isinstance (targets, str):
+ targets = [targets]
+ if isinstance(value, str):
+ value = [value]
+
+ assert is_iterable(targets)
+ assert isinstance(variable, basestring)
+ assert is_iterable(value)
+
+ if targets:
+ if append:
+ bjam_interface.call("set-target-variable", targets, variable, value, "true")
+ else:
+ bjam_interface.call("set-target-variable", targets, variable, value)
+
+ def set_update_action (self, action_name, targets, sources, properties=None):
+ """ Binds a target to the corresponding update action.
+ If target needs to be updated, the action registered
+ with action_name will be used.
+ The 'action_name' must be previously registered by
+ either 'register_action' or 'register_bjam_action'
+ method.
+ """
+ if isinstance(targets, str):
+ targets = [targets]
+ if isinstance(sources, str):
+ sources = [sources]
+ if properties is None:
+ properties = property_set.empty()
+ assert isinstance(action_name, basestring)
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert(isinstance(properties, property_set.PropertySet))
+
+ self.do_set_update_action (action_name, targets, sources, properties)
+
+ def register_action (self, action_name, command='', bound_list = [], flags = [],
+ function = None):
+ """Creates a new build engine action.
+
+        Creates, on the bjam side, an action named 'action_name', with
+        'command' as the command to be executed, 'bound_list'
+        naming the list of variables bound when the command is executed,
+        and the specified flags.
+ If 'function' is not None, it should be a callable taking three
+ parameters:
+ - targets
+ - sources
+ - instance of the property_set class
+ This function will be called by set_update_action, and can
+ set additional target variables.
+ """
+ assert isinstance(action_name, basestring)
+ assert isinstance(command, basestring)
+ assert is_iterable(bound_list)
+ assert is_iterable(flags)
+ assert function is None or callable(function)
+
+ bjam_flags = reduce(operator.or_,
+ (action_modifiers[flag] for flag in flags), 0)
+
+ # We allow command to be empty so that we can define 'action' as pure
+ # python function that would do some conditional logic and then relay
+ # to other actions.
+ assert command or function
+ if command:
+ bjam_interface.define_action(action_name, command, bound_list, bjam_flags)
+
+ self.actions[action_name] = BjamAction(
+ action_name, function, has_command=bool(command))
+
+ def register_bjam_action (self, action_name, function=None):
+ """Informs self that 'action_name' is declared in bjam.
+
+ From this point, 'action_name' is a valid argument to the
+ set_update_action method. The action_name should be callable
+ in the global module of bjam.
+ """
+
+ # We allow duplicate calls to this rule for the same
+ # action name. This way, jamfile rules that take action names
+ # can just register them without specially checking if
+ # action is already registered.
+ assert isinstance(action_name, basestring)
+ assert function is None or callable(function)
+ if action_name not in self.actions:
+ self.actions[action_name] = BjamNativeAction(action_name, function)
+
+ # Overridables
+
+
+ def do_set_update_action (self, action_name, targets, sources, property_set_):
+ assert isinstance(action_name, basestring)
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
+ action = self.actions.get(action_name)
+ if not action:
+ raise Exception("No action %s was registered" % action_name)
+ action(targets, sources, property_set_)
+
+ def do_set_target_variable (self, target, variable, value, append):
+ assert isinstance(target, basestring)
+ assert isinstance(variable, basestring)
+ assert is_iterable(value)
+ assert isinstance(append, int) # matches bools
+ if append:
+ bjam_interface.call("set-target-variable", target, variable, value, "true")
+ else:
+ bjam_interface.call("set-target-variable", target, variable, value)
+
+ def do_add_dependency (self, target, source):
+ assert isinstance(target, basestring)
+ assert isinstance(source, basestring)
+ bjam_interface.call("DEPENDS", target, source)
+
+
diff --git a/src/boost/tools/build/src/build/errors.py b/src/boost/tools/build/src/build/errors.py
new file mode 100644
index 000000000..dd517395f
--- /dev/null
+++ b/src/boost/tools/build/src/build/errors.py
@@ -0,0 +1,135 @@
+# Status: being written afresh by Vladimir Prus
+
+# Copyright 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is supposed to implement error reporting for Boost.Build.
+# Experience with the jam version has shown that printing a full backtrace
+# on each error is baffling. Further, for errors printed after parsing --
+# during target building, the stacktrace does not even mention what
+# target is being built.
+
+# This module implements explicit contexts -- where other code can
+# communicate which projects/targets are being built, and error
+# messages will show those contexts. For programming errors,
+# Python assertions are to be used.
+
+import bjam
+import traceback
+import sys
+
+def format(message, prefix=""):
+ parts = str(message).split("\n")
+ return "\n".join(prefix+p for p in parts)
+
+
+class Context:
+
+ def __init__(self, message, nested=None):
+ self.message_ = message
+ self.nested_ = nested
+
+ def report(self, indent=""):
+ print indent + " -", self.message_
+ if self.nested_:
+ print indent + " declared at:"
+ for n in self.nested_:
+ n.report(indent + " ")
+
+class JamfileContext:
+
+ def __init__(self):
+ raw = bjam.backtrace()
+ self.raw_ = raw
+
+ def report(self, indent=""):
+ for r in self.raw_:
+ print indent + " - %s:%s" % (r[0], r[1])
+
+class ExceptionWithUserContext(Exception):
+
+ def __init__(self, message, context,
+ original_exception=None, original_tb=None, stack=None):
+ Exception.__init__(self, message)
+ self.context_ = context
+ self.original_exception_ = original_exception
+ self.original_tb_ = original_tb
+ self.stack_ = stack
+
+ def report(self):
+ print "error:", self.args[0]
+ if self.original_exception_:
+ print format(str(self.original_exception_), " ")
+ print
+ print " error context (most recent first):"
+ for c in self.context_[::-1]:
+ c.report()
+ print
+ if "--stacktrace" in bjam.variable("ARGV"):
+ if self.original_tb_:
+ traceback.print_tb(self.original_tb_)
+ elif self.stack_:
+ for l in traceback.format_list(self.stack_):
+ print l,
+ else:
+ print " use the '--stacktrace' option to get Python stacktrace"
+ print
+
+def user_error_checkpoint(callable):
+ def wrapper(self, *args):
+ errors = self.manager().errors()
+ try:
+ return callable(self, *args)
+ except ExceptionWithUserContext, e:
+ raise
+ except Exception, e:
+ errors.handle_stray_exception(e)
+ finally:
+ errors.pop_user_context()
+
+ return wrapper
+
+class Errors:
+
+ def __init__(self):
+ self.contexts_ = []
+ self._count = 0
+
+ def count(self):
+ return self._count
+
+ def push_user_context(self, message, nested=None):
+ self.contexts_.append(Context(message, nested))
+
+ def pop_user_context(self):
+ del self.contexts_[-1]
+
+ def push_jamfile_context(self):
+ self.contexts_.append(JamfileContext())
+
+ def pop_jamfile_context(self):
+ del self.contexts_[-1]
+
+ def capture_user_context(self):
+ return self.contexts_[:]
+
+ def handle_stray_exception(self, e):
+ raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
+ e, sys.exc_info()[2])
+ def __call__(self, message):
+ self._count = self._count + 1
+ raise ExceptionWithUserContext(message, self.contexts_[:],
+ stack=traceback.extract_stack())
+
+
+def nearest_user_location():
+ """
+ Returns:
+ tuple: the filename and line number of the nearest user location
+ """
+ bt = bjam.backtrace()
+ if not bt:
+ return None
+ last = bt[-1]
+ return last[0], last[1]
diff --git a/src/boost/tools/build/src/build/feature.jam b/src/boost/tools/build/src/build/feature.jam
new file mode 100644
index 000000000..09d7af1a5
--- /dev/null
+++ b/src/boost/tools/build/src/build/feature.jam
@@ -0,0 +1,1442 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import assert : * ;
+import "class" : * ;
+import indirect ;
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+import utility ;
+
+
+local rule setup ( )
+{
+ .all-attributes =
+ implicit
+ composite
+ optional
+ symmetric
+ free
+ incidental
+ path
+ dependency
+ propagated
+ link-incompatible
+ subfeature
+ order-sensitive
+ hidden
+ ;
+
+ .all-features = ;
+ .all-subfeatures = ;
+ .all-top-features = ; # non-subfeatures
+ .all-implicit-values = ;
+}
+setup ;
+
+
+# Prepare a fresh space to test in by moving all global variable settings into
+# the given temporary module and erasing them here.
+#
+rule prepare-test ( temp-module )
+{
+ DELETE_MODULE $(temp-module) ;
+
+ # Transfer globals to temp-module.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ modules.poke $(temp-module) : $(v) : $($(v)) ;
+ $(v) = ;
+ }
+ }
+ setup ;
+}
+
+
+# Clear out all global variables and recover all variables from the given
+# temporary module.
+#
+rule finish-test ( temp-module )
+{
+ # Clear globals.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ $(v) = ;
+ }
+ }
+
+ for local v in [ VARNAMES $(temp-module) ]
+ {
+ $(v) = [ modules.peek $(temp-module) : $(v) ] ;
+ }
+ DELETE_MODULE $(temp-module) ;
+}
+
+
+# Transform features by bracketing any elements which are not already bracketed
+# by "<>".
+#
+local rule grist ( features * )
+{
+ local empty = "" ;
+ return $(empty:G=$(features)) ;
+}
+
+
+# Declare a new feature with the given name, values, and attributes.
+#
+rule feature (
+ name # Feature name.
+ : values * # Allowable values - may be extended later using feature.extend.
+ : attributes * # Feature attributes (e.g. implicit, free, propagated...).
+)
+{
+ name = [ grist $(name) ] ;
+
+ local error ;
+
+ # Check for any unknown attributes.
+ if ! ( $(attributes) in $(.all-attributes) )
+ {
+ error = unknown "attributes:"
+ [ set.difference $(attributes) : $(.all-attributes) ] ;
+ }
+ else if $(name) in $(.all-features)
+ {
+ error = feature already "defined:" ;
+ }
+ else if implicit in $(attributes) && free in $(attributes)
+ {
+ error = free features cannot also be implicit ;
+ }
+ else if free in $(attributes) && propagated in $(attributes)
+ {
+ error = free features cannot be propagated ;
+ }
+ else
+ {
+ local m = [ MATCH (.*=.*) : $(values) ] ;
+ if $(m[1])
+ {
+ error = "feature value may not contain '='" ;
+ }
+ }
+
+ if $(error)
+ {
+ import errors ;
+ errors.error $(error)
+ : "in" feature "declaration:"
+ : feature [ errors.lol->list $(1) : $(2) : $(3) ] ;
+ }
+
+ $(name).values ?= ;
+ $(name).attributes = $(attributes) ;
+ $(name).subfeatures ?= ;
+ $(attributes).features += $(name) ;
+
+ .all-features += $(name) ;
+ if subfeature in $(attributes)
+ {
+ .all-subfeatures += $(name) ;
+ }
+ else
+ {
+ .all-top-features += $(name) ;
+ }
+ extend $(name) : $(values) ;
+}
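+
+# For example, a project-specific feature might be declared as (the name and
+# values below are hypothetical):
+#
+#   feature.feature my-flavour : vanilla chocolate : composite propagated ;
+#
+# after which <my-flavour>vanilla is a valid property and vanilla, being the
+# first value, becomes the default.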
+
+
+# Sets the default value of the given feature, overriding any previous default.
+#
+rule set-default ( feature : value )
+{
+ local f = [ grist $(feature) ] ;
+ local a = $($(f).attributes) ;
+ local bad-attribute = ;
+ if free in $(a)
+ {
+ bad-attribute = free ;
+ }
+ else if optional in $(a)
+ {
+ bad-attribute = optional ;
+ }
+ if $(bad-attribute)
+ {
+ import errors ;
+ errors.error $(bad-attribute) property $(f) cannot have a default. ;
+ }
+ if ! $(value) in $($(f).values)
+ {
+ import errors ;
+ errors.error The specified default value, '$(value)' is invalid :
+ allowed values "are:" $($(f).values) ;
+ }
+ $(f).default = $(value) ;
+}
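+
+# Continuing the hypothetical feature above:
+#
+#   feature.set-default my-flavour : chocolate ;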
+
+
+# Returns the default property values for the given features.
+#
+rule defaults ( features * )
+{
+ local result ;
+ for local f in $(features)
+ {
+ local gf = $(:E=:G=$(f)) ;
+ local a = $($(gf).attributes) ;
+ if ( free in $(a) ) || ( optional in $(a) )
+ {
+ }
+ else
+ {
+ result += $(gf)$($(gf).default) ;
+ }
+ }
+ return $(result) ;
+}
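+
+# With the hypothetical declarations above, [ feature.defaults <my-flavour> ]
+# would return <my-flavour>chocolate; free and optional features contribute
+# nothing.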
+
+
+# Returns true iff all 'names' elements are valid features.
+#
+rule valid ( names + )
+{
+ if $(names) in $(.all-features)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the attributes of the given feature.
+#
+rule attributes ( feature )
+{
+ return $($(feature).attributes) ;
+}
+
+
+# Returns the values of the given feature.
+#
+rule values ( feature )
+{
+ return $($(:E=:G=$(feature)).values) ;
+}
+
+
+# Returns true iff 'value-string' is a value-string of an implicit feature.
+#
+rule is-implicit-value ( value-string )
+{
+ local v = [ regex.split $(value-string) - ] ;
+ local failed ;
+ if ! $(v[1]) in $(.all-implicit-values)
+ {
+ failed = true ;
+ }
+ else
+ {
+ local feature = $($(v[1]).implicit-feature) ;
+ for local subvalue in $(v[2-])
+ {
+ if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
+ {
+ failed = true ;
+ }
+ }
+ }
+
+ if ! $(failed)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the implicit feature associated with the given implicit value.
+#
+rule implied-feature ( implicit-value )
+{
+ local components = [ regex.split $(implicit-value) "-" ] ;
+ local feature = $($(components[1]).implicit-feature) ;
+ if ! $(feature)
+ {
+ import errors ;
+ errors.error \"$(implicit-value)\" is not an implicit feature value ;
+ feature = "" ; # Keep testing happy; it expects a result.
+ }
+ return $(feature) ;
+}
+
+
+local rule find-implied-subfeature ( feature subvalue : value-string ? )
+{
+ # Feature should be of the form <feature-name>.
+ if $(feature) != $(feature:G)
+ {
+ import errors ;
+ errors.error invalid feature $(feature) ;
+ }
+ value-string += "" ;
+ return $($(feature)$(value-string)<>$(subvalue).subfeature) ;
+}
+
+
+# Given a feature and a value of one of its subfeatures, find the name of the
+# subfeature. If value-string is supplied, looks for implied subfeatures that
+# are specific to that value of the feature.
+#
+rule implied-subfeature (
+ feature # The main feature name.
+ subvalue # The value of one of its subfeatures.
+ : value-string ? # The value of the main feature.
+)
+{
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
+ : $(value-string) ] ;
+ if ! $(subfeature)
+ {
+ value-string ?= "" ;
+ import errors ;
+ errors.error \"$(subvalue)\" is not a known subfeature value of
+ $(feature)$(value-string) ;
+ }
+ return $(subfeature) ;
+}
+
+
+# Generate an error if the feature is unknown.
+#
+local rule validate-feature ( feature )
+{
+ if ! $(feature) in $(.all-features)
+ {
+ import errors ;
+ errors.error unknown feature \"$(feature)\" ;
+ }
+}
+
+
+# Given a feature and its value or just a value corresponding to an implicit
+# feature, returns a property set consisting of all component subfeatures and
+# their values. For example all the following calls:
+#
+# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
+# expand-subfeatures-aux gcc-2.95.2-linux-x86
+#
+# return:
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+local rule expand-subfeatures-aux (
+ feature ? # Feature name or empty if value corresponds to an
+ # implicit property.
+ : value # Feature value.
+ : dont-validate ? # If set, no value string validation will be done.
+)
+{
+ if $(feature)
+ {
+ feature = $(feature) ;
+ }
+
+ if ! $(feature)
+ {
+ feature = [ implied-feature $(value) ] ;
+ }
+ else
+ {
+ validate-feature $(feature) ;
+ }
+ if ! $(dont-validate)
+ {
+ validate-value-string $(feature) $(value) ;
+ }
+
+ local components = [ regex.split $(value) "-" ] ;
+
+ # Get the top-level feature's value.
+ local value = $(components[1]:G=) ;
+
+ local result = $(components[1]:G=$(feature)) ;
+
+ for local subvalue in $(components[2-])
+ {
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
+ $(value) ] ;
+
+ # If no subfeature was found reconstitute the value string and use that.
+ if ! $(subfeature)
+ {
+ result = $(components:J=-) ;
+ result = $(result:G=$(feature)) ;
+ break ;
+ }
+ else
+ {
+ local f = [ MATCH ^<(.*)>$ : $(feature) ] ;
+ result += $(subvalue:G=$(f)-$(subfeature)) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Make all elements of properties corresponding to implicit features explicit,
+# and express all subfeature values as separate properties in their own right.
+# For example, all of the following properties
+#
+# gcc-2.95.2-linux-x86
+# <toolset>gcc-2.95.2-linux-x86
+#
+# might expand to
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+rule expand-subfeatures (
+ properties * # Property set with elements of the form
+ # <feature>value-string or just value-string in the case
+ # of implicit features.
+ : dont-validate ?
+)
+{
+ local result ;
+ for local p in $(properties)
+ {
+ # Don't expand subfeatures in subfeatures
+ if ! [ MATCH "(:)" : $(p:G) ]
+ {
+ result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for extend, below. Handles the feature case.
+#
+local rule extend-feature ( feature : values * )
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if implicit in $($(feature).attributes)
+ {
+ for local v in $(values)
+ {
+ if $($(v).implicit-feature)
+ {
+ import errors ;
+ errors.error $(v) is already associated with the
+ \"$($(v).implicit-feature)\" feature ;
+ }
+ $(v).implicit-feature = $(feature) ;
+ }
+
+ .all-implicit-values += $(values) ;
+ }
+ if ! $($(feature).values)
+ {
+ # This is the first value specified for this feature so make it be the
+ # default.
+ $(feature).default = $(values[1]) ;
+ }
+ $(feature).values += $(values) ;
+}
+
+
+# Checks that value-string is a valid value-string for the given feature.
+#
+rule validate-value-string ( feature value-string )
+{
+ if ! (
+ free in $($(feature).attributes)
+ || ( $(value-string) in $(feature).values )
+ )
+ {
+ local values = $(value-string) ;
+
+ if $($(feature).subfeatures)
+ {
+ if ! $(value-string) in $($(feature).values)
+ $($(feature).subfeatures)
+ {
+ values = [ regex.split $(value-string) - ] ;
+ }
+ }
+
+ if ! ( $(values[1]) in $($(feature).values) ) &&
+
+ # An empty value is allowed for optional features.
+ ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
+ {
+ import errors ;
+ errors.error \"$(values[1])\" is not a known value of feature
+ $(feature) : legal "values:" \"$($(feature).values)\" ;
+ }
+
+ for local v in $(values[2-])
+ {
+ # This will validate any subfeature values in value-string.
+ implied-subfeature $(feature) $(v) : $(values[1]) ;
+ }
+ }
+}
+
+
+# A helper that computes:
+# * name(s) of module-local variable(s) used to record the correspondence
+# between subvalue(s) and a subfeature
+# * value of that variable when such a subfeature/subvalue has been defined and
+# returns a list consisting of the latter followed by the former.
+#
+local rule subvalue-var (
+ feature # Main feature name.
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the subfeature values are valid.
+ : subfeature # Subfeature name.
+ : subvalues * # Subfeature values.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if $(value-string)
+ {
+ validate-value-string $(feature) $(value-string) ;
+ }
+
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ return $(subfeature-name)
+ $(feature)$(value-string:E="")<>$(subvalues).subfeature ;
+}
+
+
+# Extends the given subfeature with the subvalues. If the optional value-string
+# is provided, the subvalues are only valid for the given value of the feature.
+# Thus, you could say that <target-platform>mingw is specific to
+# <toolset>gcc-2.95.2 as follows:
+#
+# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
+#
+rule extend-subfeature (
+ feature # The feature whose subfeature is being extended.
+
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the new subfeature values are valid.
+
+ : subfeature # Subfeature name.
+ : subvalues * # Additional subfeature values.
+)
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalues) ] ;
+
+ local f = [ utility.ungrist [ grist $(feature) ] ] ;
+ extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
+
+ # Provide a way to get from the given feature or property and subfeature
+ # value to the subfeature name.
+ $(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
+}
+
+
+# Returns true iff the subvalues are valid for the feature. When the optional
+# value-string is provided, returns true iff the subvalues are valid for the
+# given value of the feature.
+#
+rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalue) ] ;
+
+ if $($(subfeature-vars[2])) = $(subfeature-vars[1])
+ {
+ return true ;
+ }
+}
+
+
+# Can be called three ways:
+#
+# 1. extend feature : values *
+# 2. extend <feature> subfeature : values *
+# 3. extend <feature>value-string subfeature : values *
+#
+# * Form 1 adds the given values to the given feature.
+# * Forms 2 and 3 add subfeature values to the given feature.
+# * Form 3 adds the subfeature values as specific to the given property
+# value-string.
+#
+rule extend ( feature-or-property subfeature ? : values * )
+{
+ local feature ; # If a property was specified this is its feature.
+ local value-string ; # E.g., the gcc-2.95-2 part of <toolset>gcc-2.95.2.
+
+ # If a property was specified.
+ if $(feature-or-property:G) && $(feature-or-property:G=)
+ {
+ # Extract the feature and value-string, if any.
+ feature = $(feature-or-property:G) ;
+ value-string = $(feature-or-property:G=) ;
+ }
+ else
+ {
+ feature = [ grist $(feature-or-property) ] ;
+ }
+
+ # Dispatch to the appropriate handler.
+ if $(subfeature)
+ {
+ extend-subfeature $(feature) $(value-string) : $(subfeature)
+ : $(values) ;
+ }
+ else
+ {
+ # If no subfeature was specified, we do not expect to see a
+ # value-string.
+ if $(value-string)
+ {
+ import errors ;
+ errors.error can only specify a property as the first argument when
+ extending a subfeature
+ : "usage:"
+ : " extend" feature ":" values...
+ : " | extend" <feature>value-string subfeature ":" values... ;
+ }
+
+ extend-feature $(feature) : $(values) ;
+ }
+}
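+
+# Form 1, for the hypothetical feature declared earlier, would be:
+#
+#   feature.extend my-flavour : strawberry ;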
+
+
+local rule get-subfeature-name ( subfeature value-string ? )
+{
+ local prefix = "$(value-string):" ;
+ return $(prefix:E="")$(subfeature) ;
+}
+
+
+# Declares a subfeature.
+#
+rule subfeature (
+ feature # Root feature that is not a subfeature.
+ value-string ? # A value-string specifying which feature or subfeature
+ # values this subfeature is specific to, if any.
+ : subfeature # The name of the subfeature being declared.
+ : subvalues * # The allowed values of this subfeature.
+ : attributes * # The attributes of the subfeature.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+
+ # Add grist to the subfeature name if a value-string was supplied.
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ if $(subfeature-name) in $($(feature).subfeatures)
+ {
+ import errors ;
+ errors.error \"$(subfeature)\" already declared as a subfeature of
+ \"$(feature)\" "specific to "$(value-string) ;
+ }
+ $(feature).subfeatures += $(subfeature-name) ;
+
+ # First declare the subfeature as a feature in its own right.
+ local f = [ utility.ungrist $(feature) ] ;
+ feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
+
+ # Features and subfeatures are always relevant as a group
+ .feature-dependencies.$(f) += $(f)-$(subfeature-name) ;
+ .feature-dependencies.$(f)-$(subfeature-name) += $(f) ;
+
+ # Now make sure the subfeature values are known.
+ extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
+}
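+
+# For example (hypothetical names), a subfeature of the feature declared
+# earlier could be added with:
+#
+#   feature.subfeature my-flavour : intensity : mild strong ;
+#
+# making <my-flavour-intensity>mild a valid property.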
+
+
+# Set components of the given composite property.
+#
+rule compose ( composite-property : component-properties * )
+{
+ local feature = $(composite-property:G) ;
+ if ! ( composite in [ attributes $(feature) ] )
+ {
+ import errors ;
+ errors.error "$(feature)" is not a composite feature ;
+ }
+
+ $(composite-property).components ?= ;
+ if $($(composite-property).components)
+ {
+ import errors ;
+ errors.error components of "$(composite-property)" already "set:"
+ $($(composite-property).components) ;
+ }
+
+ if $(composite-property) in $(component-properties)
+ {
+ import errors ;
+ errors.error composite property "$(composite-property)" cannot have itself as a component ;
+ }
+ $(composite-property).components = $(component-properties) ;
+
+ # A composite feature is relevant if any composed feature is relevant
+ local component-features = [ sequence.transform utility.ungrist : $(component-properties:G) ] ;
+ .feature-dependencies.$(component-features) += [ utility.ungrist $(feature) ] ;
+}
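+
+# Continuing the hypothetical composite feature above:
+#
+#   feature.compose <my-flavour>chocolate : <define>FLAVOUR_CHOCOLATE ;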
+
+
+local rule expand-composite ( property )
+{
+ return $(property)
+ [ sequence.transform expand-composite : $($(property).components) ] ;
+}
+
+
+# Return all values of the given feature specified by the given property set.
+#
+rule get-values ( feature : properties * )
+{
+ local result ;
+
+ feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ # Use MATCH instead of :G= to get the value, in order to preserve
+ # the value intact instead of having bjam treat it as a decomposable
+ # path.
+ result += [ MATCH ">(.*)" : $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
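+
+# E.g. [ feature.get-values <define> : <define>A <threading>multi <define>B ]
+# returns "A" "B".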
+
+
+rule free-features ( )
+{
+ return $(free.features) ;
+}
+
+
+# Expand all composite properties in the set so that all components are
+# explicitly expressed.
+#
+rule expand-composites ( properties * )
+{
+ local explicit-features = $(properties:G) ;
+ local result ;
+
+ # Now expand composite features.
+ for local p in $(properties)
+ {
+ local expanded = [ expand-composite $(p) ] ;
+
+ for local x in $(expanded)
+ {
+ if ! $(x) in $(result)
+ {
+ local f = $(x:G) ;
+
+ if $(f) in $(free.features)
+ {
+ result += $(x) ;
+ }
+ else if ! $(x) in $(properties) # x is the result of expansion
+ {
+ if ! $(f) in $(explicit-features) # not explicitly-specified
+ {
+ if $(f) in $(result:G)
+ {
+ import errors ;
+ errors.error expansions of composite features result
+ in conflicting values for $(f)
+ : "values:" [ get-values $(f) : $(result) ] $(x:G=)
+ : one contributing composite property was $(p) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ else if $(f) in $(result:G)
+ {
+ import errors ;
+ errors.error explicitly-specified values of non-free feature
+ $(f) conflict :
+ "existing values:" [ get-values $(f) : $(properties) ] :
+ "value from expanding " $(p) ":" $(x:G=) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Return true iff f is an ordinary subfeature of the parent-property's feature,
+# or if f is a subfeature of the parent-property's feature specific to the
+# parent-property's value.
+#
+local rule is-subfeature-of ( parent-property f )
+{
+ if subfeature in $($(f).attributes)
+ {
+ local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
+ if $(specific-subfeature)
+ {
+ # The feature has the form <topfeature-topvalue:subfeature>, e.g.
+ # <toolset-msvc:version>.
+ local feature-value = [ split-top-feature $(specific-subfeature[1])
+ ] ;
+ if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ # The feature has the form <topfeature-subfeature>, e.g.
+ # <toolset-version>
+ local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
+ if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
+ {
+ return true ;
+ }
+ }
+ }
+}
+
+
+# As for is-subfeature-of but for subproperties.
+#
+local rule is-subproperty-of ( parent-property p )
+{
+ return [ is-subfeature-of $(parent-property) $(p:G) ] ;
+}
+
+
+# Given a property, return the subset of features consisting of all ordinary
+# subfeatures of the property's feature, and all specific subfeatures of the
+# property's feature which are conditional on the property's value.
+#
+local rule select-subfeatures ( parent-property : features * )
+{
+ return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
+}
+
+
+# As for select-subfeatures but for subproperties.
+#
+local rule select-subproperties ( parent-property : properties * )
+{
+ return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
+}
+
+
+# Given a property set which may consist of composite and implicit properties
+# and combined subfeature values, returns an expanded, normalized property set
+# with all implicit features expressed explicitly, all subfeature values
+# individually expressed, and all components of composite properties expanded.
+# Non-free features directly expressed in the input properties cause any values
+# of those features due to composite feature expansion to be dropped. If two
+# values of a given non-free feature are directly expressed in the input, an
+# error is issued.
+#
+rule expand ( properties * )
+{
+ local expanded = [ expand-subfeatures $(properties) ] ;
+ return [ expand-composites $(expanded) ] ;
+}
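+
+# With the hypothetical composite above, [ feature.expand <my-flavour>chocolate ]
+# would yield <my-flavour>chocolate <define>FLAVOUR_CHOCOLATE.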
+
+
+# Helper rule for minimize. Returns true iff property's feature is present in
+# the contents of the variable named by feature-set-var.
+#
+local rule in-features ( feature-set-var property )
+{
+ if $(property:G) in $($(feature-set-var))
+ {
+ return true ;
+ }
+}
+
+
+# Helper rule for minimize. Returns the list with the same properties, but with
+# all subfeatures moved to the end of the list.
+#
+local rule move-subfeatures-to-the-end ( properties * )
+{
+ local x1 ;
+ local x2 ;
+ for local p in $(properties)
+ {
+ if subfeature in $($(p:G).attributes)
+ {
+ x2 += $(p) ;
+ }
+ else
+ {
+ x1 += $(p) ;
+ }
+ }
+ return $(x1) $(x2) ;
+}
+
+
+# Given an expanded property set, eliminate all redundancy: properties that are
+# elements of other (composite) properties in the set will be eliminated.
+# Non-symmetric properties equal to default values will be eliminated unless
+# they override a value from some composite property. Implicit properties will
+# be expressed without feature grist, and sub-property values will be expressed
+# as elements joined to the corresponding main property.
+#
+rule minimize ( properties * )
+{
+ # Precondition checking
+ local implicits = [ set.intersection $(p:G=) : $(p:G) ] ;
+ if $(implicits)
+ {
+ import errors ;
+ errors.error minimize requires an expanded property set, but
+ \"$(implicits[1])\" appears to be the value of an un-expanded
+ implicit feature ;
+ }
+
+ # Remove properties implied by composite features.
+ local components = $($(properties).components) ;
+ local x = [ set.difference $(properties) : $(components) ] ;
+
+ # Handle subfeatures and implicit features.
+ x = [ move-subfeatures-to-the-end $(x) ] ;
+ local result ;
+ while $(x)
+ {
+ local p fullp = $(x[1]) ;
+ local f = $(p:G) ;
+ local v = $(p:G=) ;
+
+ # Eliminate features in implicit properties.
+ if implicit in [ attributes $(f) ]
+ {
+ p = $(v) ;
+ }
+
+ # Locate all subproperties of $(x[1]) in the property set.
+ local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
+ if $(subproperties)
+ {
+ # Reconstitute the joined property name.
+ local sorted = [ sequence.insertion-sort $(subproperties) ] ;
+ result += $(p)-$(sorted:G="":J=-) ;
+
+ x = [ set.difference $(x[2-]) : $(subproperties) ] ;
+ }
+ else
+ {
+ # Eliminate properties whose value is equal to feature's default,
+ # which are not symmetric and which do not contradict values implied
+ # by composite properties.
+
+ # Since all component properties of composites in the set have been
+ # eliminated, any remaining property whose feature is the same as a
+ # component of a composite in the set must have a non-redundant
+ # value.
+ if $(fullp) != [ defaults $(f) ]
+ || symmetric in [ attributes $(f) ]
+ || $(fullp:G) in $(components:G)
+ {
+ result += $(p) ;
+ }
+
+ x = $(x[2-]) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Combine all subproperties into their parent properties
+#
+# Requires: for every subproperty, there is a parent property. All features are
+# explicitly expressed.
+#
+# This rule probably should not be needed, but build-request.expand-no-defaults
+# is being abused for unintended purposes and it needs help.
+#
+rule compress-subproperties ( properties * )
+{
+ local all-subs ;
+ local matched-subs ;
+ local result ;
+
+ for local p in $(properties)
+ {
+ if ! $(p:G)
+ {
+ # Expecting fully-gristed properties.
+ assert.variable-not-empty "p:G" ;
+ }
+
+ if ! subfeature in $($(p:G).attributes)
+ {
+ local subs = [ sequence.insertion-sort
+ [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
+
+ matched-subs += $(subs) ;
+
+ local subvalues = -$(subs:G=:J=-) ;
+ subvalues ?= "" ;
+ result += $(p)$(subvalues) ;
+ }
+ else
+ {
+ all-subs += $(p) ;
+ }
+ }
+ assert.result true : set.equal $(all-subs) : $(matched-subs) ;
+ return $(result) ;
+}
+
+
+# Given an ungristed string, finds the longest prefix which is a top-level
+# feature name followed by a dash, and returns a pair consisting of the parts
+# before and after that dash. More interesting than a simple split because
+# feature names may contain dashes.
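+#
+# For example, assuming <target-os> is declared as a top-level feature, splitting
+# "target-os-linux" yields "target-os" and "linux" rather than "target" and
+# "os-linux".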
+#
+local rule split-top-feature ( feature-plus )
+{
+ local e = [ regex.split $(feature-plus) - ] ;
+ local f = $(e[1]) ;
+ local v ;
+ while $(e)
+ {
+ if <$(f)> in $(.all-top-features)
+ {
+ v = $(f) $(e[2-]:J=-) ;
+ }
+ e = $(e[2-]) ;
+ f = $(f)-$(e[1]) ;
+ }
+ return $(v) ;
+}
+
+
+# Given a set of properties, add default values for features not represented in
+# the set.
+#
+# properties must be fully expanded and must not contain conditionals.
+#
+# Note: if there's an ordinary feature F1 and a composite feature F2 which
+# includes some value for F1 and both features have default values, then the
+# default value of F1 will be added (as opposed to the value in F2). This might
+# not be the right idea, e.g. consider:
+#
+# feature variant : debug ... ;
+# <variant>debug : .... <runtime-debugging>on
+# feature <runtime-debugging> : off on ;
+#
+# Here, when adding default for an empty property set, we'll get
+#
+# <variant>debug <runtime_debugging>off
+#
+# and that's kind of strange.
+#
+rule add-defaults ( properties * )
+{
+ for local v in $(properties:G=)
+ {
+ if $(v) in $(properties)
+ {
+ import errors ;
+ errors.error add-defaults requires explicitly specified features,
+ but \"$(v)\" appears to be the value of an un-expanded implicit
+ feature ;
+ }
+ }
+ local missing-top = [ set.difference $(.all-top-features) : $(properties:G) ] ;
+ local more = [ defaults $(missing-top) ] ;
+
+ # This is similar to property.refine, except that it
+ # does not remove subfeatures, because we might be adding
+ # the default value of a subfeature.
+ local to-remove ;
+ for local f in $(properties:G)
+ {
+ if ! free in [ attributes $(f) ]
+ {
+ to-remove += $(f) ;
+ }
+ }
+
+ local worklist = $(properties) $(more) ;
+ local expanded-from-composite ;
+ local to-expand = $(more) ;
+ while $(worklist)
+ {
+ # Add defaults for subfeatures of features which are present.
+ for local p in $(worklist)
+ {
+ local s = $($(p:G).subfeatures) ;
+ local f = [ utility.ungrist $(p:G) ] ;
+ local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
+ local sd = [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
+ to-expand += $(sd) ;
+ }
+ worklist = ;
+
+ # Expand subfeatures of newly added properties
+ for local m in [ sequence.transform expand-composite : $(to-expand) ]
+ {
+ if ! $(m:G) in $(to-remove)
+ {
+ local att = [ attributes $(m:G) ] ;
+ if $(m:G) in $(expanded-from-composite) &&
+ ! free in $(att) &&
+ ! $(m) in $(more)
+ {
+ import errors ;
+ errors.error "default values for $(p:G) conflict" ;
+ }
+ if ! $(m) in $(to-expand)
+ {
+ expanded-from-composite += $(m:G) ;
+ }
+ more += $(m) ;
+ if ! subfeature in $(att) && ! free in $(att)
+ {
+ worklist += $(m) ;
+ }
+ }
+ }
+ to-expand = ;
+ }
+
+ return [ sequence.unique $(properties) $(more) ] ;
+}
+
+
+# Given a property-set of the form
+# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
+#
+# Returns
+# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
+#
+# Note that vN...vM may contain slashes. This needs to be resilient to the
+# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
+# slash direction on NT.
+#
+rule split ( property-set )
+{
+ local pieces = [ regex.split $(property-set) "[\\/]" ] ;
+ local result ;
+
+ for local x in $(pieces)
+ {
+ if ( ! $(x:G) ) && $(result[-1]:G)
+ {
+ result = $(result[1--2]) $(result[-1])/$(x) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+# Returns all the features that must also be relevant when these features are relevant.
+rule expand-relevant ( features * )
+{
+ local conditional ;
+ local result ;
+ for f in $(features)
+ {
+ # This looks like a conditional, even though it isn't really.
+ # (Free features can never be used in conditionals)
+ local split = [ MATCH "^(.*):<relevant>(.*)$" : $(f) ] ;
+ if $(split)
+ {
+ local-dependencies.$(split[1]) += $(split[2]) ;
+ conditional += local-dependencies.$(split[1]) ;
+ }
+ else
+ {
+ result += $(f) ;
+ }
+ }
+ local queue = $(result) ;
+ while $(queue)
+ {
+ local added = [ set.difference
+ $(.feature-dependencies.$(queue))
+ $(local-dependencies.$(queue))
+ : $(result) ] ;
+ result += $(added) ;
+ queue = $(added) ;
+ }
+ # Clean up local map
+ $(conditional) = ;
+ return $(result) ;
+}
+
+
+# Tests of module feature.
+#
+rule __test__ ( )
+{
+ # Use a fresh copy of the feature module.
+ prepare-test feature-test-temp ;
+
+ import assert ;
+ import errors : try catch ;
+
+ # These are local rules and so must be explicitly reimported into the
+ # testing module.
+ import feature : extend-feature validate-feature select-subfeatures ;
+
+ feature toolset : gcc : implicit ;
+ feature define : : free ;
+ feature runtime-link : dynamic static : symmetric ;
+ feature optimization : on off ;
+ feature variant : debug release profile : implicit composite symmetric ;
+ feature stdlib : native stlport ;
+ feature magic : : free ;
+
+ compose <variant>debug : <define>_DEBUG <optimization>off ;
+ compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ assert.result dynamic static : values <runtime-link> ;
+ assert.result dynamic static : values runtime-link ;
+
+ try ;
+ {
+ compose <variant>profile : <variant>profile ;
+ }
+ catch composite property <variant>profile cannot have itself as a component ;
+
+ extend-feature toolset : msvc metrowerks ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
+
+ assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : gcc : version : 1.1 ;
+
+ assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : : version : yabba ;
+
+ feature yabba ;
+ subfeature yabba : version : dabba ;
+ assert.true is-subvalue yabba : : version : dabba ;
+
+ subfeature toolset gcc : platform : linux cygwin : optional ;
+
+ assert.result <toolset-gcc:version>
+ : select-subfeatures <toolset>gcc
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib> ;
+
+ subfeature stdlib : version : 3 4 : optional ;
+
+ assert.result <stdlib-version>
+ : select-subfeatures <stdlib>native
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib-version> ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures <toolset>gcc-3.0.1 ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
+ : expand-subfeatures <toolset>gcc-3.0.1-linux ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
+
+ assert.result <define>foo=x-y
+ : expand-subfeatures <define>foo=x-y ;
+
+ assert.result <define>minus=-
+ : expand-subfeatures <define>minus=- ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures gcc-3.0.1 ;
+
+ assert.result a c e
+ : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ <variant>debug <define>_DEBUG <optimization>on
+ : expand gcc-3.0.1 debug <optimization>on ;
+
+ assert.result <variant>debug <define>_DEBUG <optimization>on
+ : expand debug <optimization>on ;
+
+ assert.result <optimization>on <variant>debug <define>_DEBUG
+ : expand <optimization>on debug ;
+
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults <runtime-link> <define> <optimization> ;
+
+ # Make sure defaults is resilient to missing grist.
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults runtime-link define optimization ;
+
+ feature dummy : dummy1 dummy2 ;
+ subfeature dummy : subdummy : x y z : optional ;
+
+ feature fu : fu1 fu2 : optional ;
+ subfeature fu : subfu : x y z : optional ;
+ subfeature fu : subfu2 : q r s ;
+
+ assert.result optional : attributes <fu> ;
+
+ assert.result [ SORT <define>_DEBUG <runtime-link>static
+ <define>foobar <optimization>on
+ <toolset>gcc <variant>debug <stdlib>native
+ <dummy>dummy1 <toolset-gcc:version>2.95.2 ]
+ : add-defaults <runtime-link>static <define>foobar <optimization>on ;
+
+ assert.result [ SORT <define>_DEBUG <runtime-link>static
+ <define>foobar <optimization>on
+ <fu>fu1 <toolset>gcc <variant>debug
+ <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2 ]
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <fu>fu1 ;
+
+ feature f0 : f0-0 f0-1 ;
+ feature f1 : f1-0 f1-1 ;
+
+ assert.true valid <f0> ;
+ assert.true valid <f1> ;
+ assert.true valid <f0> <f1> ;
+
+ set-default <runtime-link> : static ;
+ assert.result <runtime-link>static : defaults <runtime-link> ;
+
+ assert.result gcc-3.0.1 debug <optimization>on
+ : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
+
+ assert.result gcc-3.0.1 debug <runtime-link>dynamic
+ : minimize
+ [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
+
+ assert.result gcc-3.0.1 debug
+ : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
+
+ assert.result debug <optimization>on
+ : minimize [ expand debug <optimization>on ] ;
+
+ assert.result gcc-3.0
+ : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
+
+ assert.result gcc-3.0
+ : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y/z/<a>b/c/<d>e/f ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a/b/c/<d>e/f/g/<h>i/j/k ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
+
+ # Test error checking.
+
+ try ;
+ {
+ expand release <optimization>off <optimization>on ;
+ }
+ catch explicitly-specified values of non-free feature <optimization> conflict ;
+
+ try ;
+ {
+ validate-feature <foobar> ;
+ }
+ catch unknown feature ;
+
+ validate-value-string <toolset> gcc ;
+ validate-value-string <toolset> gcc-3.0.1 ;
+
+ try ;
+ {
+ validate-value-string <toolset> digital_mars ;
+ }
+ catch \"digital_mars\" is not a known value of <toolset> ;
+
+ try ;
+ {
+ feature foobar : : baz ;
+ }
+ catch unknown "attributes:" baz ;
+
+ feature feature1 ;
+ try ;
+ {
+ feature feature1 ;
+ }
+ catch feature already "defined:" ;
+
+ try ;
+ {
+ feature feature2 : : free implicit ;
+ }
+ catch free features cannot also be implicit ;
+
+ try ;
+ {
+ feature feature3 : : free propagated ;
+ }
+ catch free features cannot be propagated ;
+
+ try ;
+ {
+ implied-feature lackluster ;
+ }
+ catch \"lackluster\" is not an implicit feature value ;
+
+ try ;
+ {
+ implied-subfeature <toolset> 3.0.1 ;
+ }
+ catch \"3.0.1\" is not a known subfeature value of <toolset> ;
+
+ try ;
+ {
+ implied-subfeature <toolset> not-a-version : gcc ;
+ }
+ catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
+
+ # Leave a clean copy of the features module behind.
+ finish-test feature-test-temp ;
+}
diff --git a/src/boost/tools/build/src/build/feature.py b/src/boost/tools/build/src/build/feature.py
new file mode 100644
index 000000000..86e72144d
--- /dev/null
+++ b/src/boost/tools/build/src/build/feature.py
@@ -0,0 +1,914 @@
+# Status: ported, except for unit tests.
+# Base revision: 64488
+#
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import re
+
+from b2.manager import get_manager
+from b2.util import utility, bjam_signature, is_iterable_typed
+import b2.util.set
+from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
+from b2.exceptions import *
+
+__re_split_subfeatures = re.compile ('<(.*):(.*)>')
+__re_no_hyphen = re.compile ('^([^:]+)$')
+__re_slash_or_backslash = re.compile (r'[\\/]')
+
+VALID_ATTRIBUTES = {
+ 'implicit',
+ 'composite',
+ 'optional',
+ 'symmetric',
+ 'free',
+ 'incidental',
+ 'path',
+ 'dependency',
+ 'propagated',
+ 'link-incompatible',
+ 'subfeature',
+ 'order-sensitive'
+}
+
+
+class Feature(object):
+ def __init__(self, name, values, attributes):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(values, basestring)
+ assert is_iterable_typed(attributes, basestring)
+ self.name = name
+ self.values = values
+ self.default = None
+ self.subfeatures = []
+ self.parent = None
+ self.attributes_string_list = []
+ self._hash = hash(self.name)
+
+ for attr in attributes:
+ self.attributes_string_list.append(attr)
+ attr = attr.replace("-", "_")
+ setattr(self, attr, True)
+
+ def add_values(self, values):
+ assert is_iterable_typed(values, basestring)
+ self.values.extend(values)
+
+ def set_default(self, value):
+ assert isinstance(value, basestring)
+ for attr in ('free', 'optional'):
+ if getattr(self, attr):
+ get_manager().errors()('"{}" feature "<{}>" cannot have a default value.'
+ .format(attr, self.name))
+
+ self.default = value
+
+ def add_subfeature(self, name):
+ assert isinstance(name, Feature)
+ self.subfeatures.append(name)
+
+ def set_parent(self, feature, value):
+ assert isinstance(feature, Feature)
+ assert isinstance(value, basestring)
+ self.parent = (feature, value)
+
+ def __hash__(self):
+ return self._hash
+
+ def __str__(self):
+ return self.name
+
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __all_attributes, __all_features, __implicit_features, __composite_properties
+ global __subfeature_from_value, __all_top_features, __free_features
+ global __all_subfeatures
+
+ # sets the default value of False for each valid attribute
+ for attr in VALID_ATTRIBUTES:
+ setattr(Feature, attr.replace("-", "_"), False)
+
+ # A map containing all features. The key is the feature name.
+ # The value is an instance of Feature class.
+ __all_features = {}
+
+ # All non-subfeatures.
+ __all_top_features = []
+
+ # Maps values to the corresponding implicit feature
+ __implicit_features = {}
+
+ # A map containing all composite properties. The key is a Property instance,
+ # and the value is a list of Property instances
+ __composite_properties = {}
+
+ # Maps a value to the corresponding subfeature name.
+ __subfeature_from_value = {}
+
+ # All free features
+ __free_features = []
+
+ __all_subfeatures = []
+
+reset ()
+
+def enumerate ():
+ """ Returns an iterator to the features map.
+ """
+ return __all_features.iteritems ()
+
+def get(name):
+ """Return the Feature instance for the specified name.
+
+ Throws if no feature by such name exists
+ """
+ assert isinstance(name, basestring)
+ return __all_features[name]
+
+# FIXME: prepare-test/finish-test?
+
+@bjam_signature((["name"], ["values", "*"], ["attributes", "*"]))
+def feature (name, values, attributes = []):
+ """ Declares a new feature with the given name, values, and attributes.
+ name: the feature name
+ values: a sequence of the allowable values - may be extended later with feature.extend
+ attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
+ """
+ __validate_feature_attributes (name, attributes)
+
+ feature = Feature(name, [], attributes)
+ __all_features[name] = feature
+ # Temporary measure while we have not fully moved from 'gristed strings'
+ __all_features["<" + name + ">"] = feature
+
+ name = add_grist(name)
+
+ if 'subfeature' in attributes:
+ __all_subfeatures.append(name)
+ else:
+ __all_top_features.append(feature)
+
+ extend (name, values)
+
+ # FIXME: why this is needed.
+ if 'free' in attributes:
+ __free_features.append (name)
+
+ return feature
+
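+# For example, mirroring the Jam declarations in the tests above:
+#   feature('optimization', ['on', 'off'])
+#   feature('define', [], ['free'])
+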
+@bjam_signature((["feature"], ["value"]))
+def set_default (feature, value):
+ """ Sets the default value of the given feature, overriding any previous default.
+ feature: the name of the feature
+ value: the default value to assign
+ """
+ f = __all_features[feature]
+ bad_attribute = None
+
+ if f.free:
+ bad_attribute = "free"
+ elif f.optional:
+ bad_attribute = "optional"
+
+ if bad_attribute:
+ raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, f.name))
+
+ if value not in f.values:
+ raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % f.values)
+
+ f.set_default(value)
+
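+# For example, mirroring the Jam test above, set_default('runtime-link', 'static')
+# makes 'static' the default value of the already declared 'runtime-link' feature.
+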
+def defaults(features):
+ """ Returns the default property values for the given features.
+ """
+ assert is_iterable_typed(features, Feature)
+ # FIXME: should merge feature and property modules.
+ from . import property
+
+ result = []
+ for f in features:
+ if not f.free and not f.optional and f.default:
+ result.append(property.Property(f, f.default))
+
+ return result
+
+def valid (names):
+ """ Returns true iff all elements of names are valid features.
+ """
+ if isinstance(names, str):
+ names = [names]
+ assert is_iterable_typed(names, basestring)
+
+ return all(name in __all_features for name in names)
+
+def attributes (feature):
+ """ Returns the attributes of the given feature.
+ """
+ assert isinstance(feature, basestring)
+ return __all_features[feature].attributes_string_list
+
+def values (feature):
+ """ Return the values of the given feature.
+ """
+ assert isinstance(feature, basestring)
+ validate_feature (feature)
+ return __all_features[feature].values
+
+def is_implicit_value (value_string):
+ """ Returns true iff 'value_string' is a value_string
+ of an implicit feature.
+ """
+ assert isinstance(value_string, basestring)
+ if value_string in __implicit_features:
+ return __implicit_features[value_string]
+
+ v = value_string.split('-')
+
+ if v[0] not in __implicit_features:
+ return False
+
+ feature = __implicit_features[v[0]]
+
+ for subvalue in (v[1:]):
+ if not __find_implied_subfeature(feature, subvalue, v[0]):
+ return False
+
+ return True
+
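+# For example, once the implicit 'toolset' feature and its gcc version subfeature
+# are declared as in the Jam tests above, is_implicit_value('gcc-3.0.1') is True
+# and is_implicit_value('foo-3.0.1') is False.
+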
+def implied_feature (implicit_value):
+ """ Returns the implicit feature associated with the given implicit value.
+ """
+ assert isinstance(implicit_value, basestring)
+ components = implicit_value.split('-')
+
+ if components[0] not in __implicit_features:
+ raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
+
+ return __implicit_features[components[0]]
+
+def __find_implied_subfeature (feature, subvalue, value_string):
+ assert isinstance(feature, Feature)
+ assert isinstance(subvalue, basestring)
+ assert isinstance(value_string, basestring)
+
+ try:
+ return __subfeature_from_value[feature][value_string][subvalue]
+ except KeyError:
+ return None
+
+# Given a feature and a value of one of its subfeatures, find the name
+# of the subfeature. If value-string is supplied, looks for implied
+# subfeatures that are specific to that value of feature
+# feature # The main feature name
+# subvalue # The value of one of its subfeatures
+# value-string # The value of the main feature
+
+def implied_subfeature (feature, subvalue, value_string):
+ assert isinstance(feature, Feature)
+ assert isinstance(subvalue, basestring)
+ assert isinstance(value_string, basestring)
+ result = __find_implied_subfeature (feature, subvalue, value_string)
+ if not result:
+ raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
+
+ return result
+
+def validate_feature (name):
+ """ Checks if all name is a valid feature. Otherwise, raises an exception.
+ """
+ assert isinstance(name, basestring)
+ if name not in __all_features:
+ raise InvalidFeature ("'%s' is not a valid feature name" % name)
+ else:
+ return __all_features[name]
+
+
+# Uses Property
+def __expand_subfeatures_aux (property_, dont_validate = False):
+ """ Helper for expand_subfeatures.
+ Given a feature and value, or just a value corresponding to an
+ implicit feature, returns a property set consisting of all component
+ subfeatures and their values. For example:
+
+ expand_subfeatures <toolset>gcc-2.95.2-linux-x86
+ -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+ equivalent to:
+ expand_subfeatures gcc-2.95.2-linux-x86
+
+ feature: The name of the feature, or empty if value corresponds to an implicit property
+ value: The value of the feature.
+ dont_validate: If True, no validation of value string will be done.
+ """
+ from . import property # no __debug__ since Property is used elsewhere
+ assert isinstance(property_, property.Property)
+ assert isinstance(dont_validate, int) # matches bools
+
+ f = property_.feature
+ v = property_.value
+ if not dont_validate:
+ validate_value_string(f, v)
+
+ components = v.split ("-")
+
+ v = components[0]
+
+ result = [property.Property(f, components[0])]
+
+ subvalues = components[1:]
+
+ while len(subvalues) > 0:
+ subvalue = subvalues [0] # pop the head off of subvalues
+ subvalues = subvalues [1:]
+
+ subfeature = __find_implied_subfeature (f, subvalue, v)
+
+ # If no subfeature was found, reconstitute the value string and use that
+ if not subfeature:
+ return [property.Property(f, '-'.join(components))]
+
+ result.append(property.Property(subfeature, subvalue))
+
+ return result
+
+def expand_subfeatures(properties, dont_validate = False):
+ """
+ Make all elements of properties corresponding to implicit features
+ explicit, and express all subfeature values as separate properties
+ in their own right. For example, the property
+
+ gcc-2.95.2-linux-x86
+
+ might expand to
+
+ <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+
+ properties: A sequence with elements of the form
+ <feature>value-string or just value-string in the
+ case of implicit features.
+ : dont_validate: If True, no validation of value string will be done.
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(dont_validate, int) # matches bools
+ result = []
+ for p in properties:
+ # Don't expand subfeatures in subfeatures
+ if p.feature.subfeature:
+ result.append (p)
+ else:
+ result.extend(__expand_subfeatures_aux (p, dont_validate))
+
+ return result
+
+
+
+# rule extend was defined as below:
+ # Can be called three ways:
+ #
+ # 1. extend feature : values *
+ # 2. extend <feature> subfeature : values *
+ # 3. extend <feature>value-string subfeature : values *
+ #
+ # * Form 1 adds the given values to the given feature
+ # * Forms 2 and 3 add subfeature values to the given feature
+ # * Form 3 adds the subfeature values as specific to the given
+ # property value-string.
+ #
+ #rule extend ( feature-or-property subfeature ? : values * )
+#
+# Now, the specific rule must be called, depending on the desired operation:
+# extend_feature
+# extend_subfeature
+@bjam_signature([['name'], ['values', '*']])
+def extend (name, values):
+ """ Adds the given values to the given feature.
+ """
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(values, basestring)
+ name = add_grist (name)
+ __validate_feature (name)
+ feature = __all_features [name]
+
+ if feature.implicit:
+ for v in values:
+ if v in __implicit_features:
+ raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v]))
+
+ __implicit_features[v] = feature
+
+ if values and not feature.values and not(feature.free or feature.optional):
+ # This is the first value specified for this feature,
+ # take it as default value
+ feature.set_default(values[0])
+
+ feature.add_values(values)
+
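+# For example, mirroring the Jam test above, extend('toolset', ['msvc', 'metrowerks'])
+# adds two more allowed values to the already declared 'toolset' feature.
+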
+def validate_value_string (f, value_string):
+ """ Checks that value-string is a valid value-string for the given feature.
+ """
+ assert isinstance(f, Feature)
+ assert isinstance(value_string, basestring)
+ if f.free or value_string in f.values:
+ return
+
+ values = [value_string]
+
+ if f.subfeatures:
+ if not value_string in f.values and \
+ not value_string in f.subfeatures:
+ values = value_string.split('-')
+
+ # An empty value is allowed for optional features
+ if not values[0] in f.values and \
+ (values[0] or not f.optional):
+ raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], f.name, f.values))
+
+ for v in values [1:]:
+ # this will validate any subfeature values in value-string
+ implied_subfeature(f, v, values[0])
+
+
+""" Extends the given subfeature with the subvalues. If the optional
+ value-string is provided, the subvalues are only valid for the given
+ value of the feature. Thus, you could say that
+ <target-platform>mingw is specific to <toolset>gcc-2.95.2 as follows:
+
+ extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
+
+ feature: The feature whose subfeature is being extended.
+
+ value-string: If supplied, specifies a specific value of the
+ main feature for which the new subfeature values
+ are valid.
+
+ subfeature: The name of the subfeature.
+
+ subvalues: The additional values of the subfeature being defined.
+"""
+def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
+ assert isinstance(feature_name, basestring)
+ assert isinstance(value_string, basestring)
+ assert isinstance(subfeature_name, basestring)
+ assert is_iterable_typed(subvalues, basestring)
+ feature = validate_feature(feature_name)
+
+ if value_string:
+ validate_value_string(feature, value_string)
+
+ subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
+
+ extend(subfeature_name, subvalues)
+ subfeature = __all_features[subfeature_name]
+
+ if value_string == None: value_string = ''
+
+ if feature not in __subfeature_from_value:
+ __subfeature_from_value[feature] = {}
+
+ if value_string not in __subfeature_from_value[feature]:
+ __subfeature_from_value[feature][value_string] = {}
+
+ for subvalue in subvalues:
+ __subfeature_from_value [feature][value_string][subvalue] = subfeature
+
+@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"],
+ ["subvalues", "*"], ["attributes", "*"]))
+def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
+ """ Declares a subfeature.
+ feature_name: Root feature that is not a subfeature.
+ value_string: An optional value-string specifying which feature or
+ subfeature values this subfeature is specific to,
+ if any.
+ subfeature: The name of the subfeature being declared.
+ subvalues: The allowed values of this subfeature.
+ attributes: The attributes of the subfeature.
+ """
+ parent_feature = validate_feature (feature_name)
+
+ # Add grist to the subfeature name if a value-string was supplied
+ subfeature_name = __get_subfeature_name (subfeature, value_string)
+
+ if subfeature_name in __all_features[feature_name].subfeatures:
+ message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
+ message += " specific to '%s'" % value_string
+ raise BaseException (message)
+
+ # First declare the subfeature as a feature in its own right
+ f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
+ f.set_parent(parent_feature, value_string)
+
+ parent_feature.add_subfeature(f)
+
+ # Now make sure the subfeature values are known.
+ extend_subfeature (feature_name, value_string, subfeature, subvalues)
+
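+# For example, mirroring the Jam test above,
+#   subfeature('toolset', 'gcc', 'version', ['2.95.2', '2.95.3'])
+# declares the <toolset-gcc:version> subfeature with the given allowed values.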
+
+@bjam_signature((["composite_property_s"], ["component_properties_s", "*"]))
+def compose (composite_property_s, component_properties_s):
+ """ Sets the components of the given composite property.
+
+ All parameters are <feature>value strings
+ """
+ from . import property
+
+ component_properties_s = to_seq (component_properties_s)
+ composite_property = property.create_from_string(composite_property_s)
+ f = composite_property.feature
+
+ if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property):
+ component_properties = component_properties_s
+ else:
+ component_properties = [property.create_from_string(p) for p in component_properties_s]
+
+ if not f.composite:
+ raise BaseException ("'%s' is not a composite feature" % f)
+
+ if composite_property in __composite_properties:
+ raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property])))
+
+ if composite_property in component_properties:
+ raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)
+
+ __composite_properties[composite_property] = component_properties
+
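+# For example, mirroring the Jam test above:
+#   compose('<variant>debug', ['<define>_DEBUG', '<optimization>off'])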
+
+def expand_composite(property_):
+ if __debug__:
+ from .property import Property
+ assert isinstance(property_, Property)
+ result = [ property_ ]
+ if property_ in __composite_properties:
+ for p in __composite_properties[property_]:
+ result.extend(expand_composite(p))
+ return result
+
+@bjam_signature((['feature'], ['properties', '*']))
+def get_values (feature, properties):
+ """ Returns all values of the given feature specified by the given property set.
+ """
+ if feature[0] != '<':
+ feature = '<' + feature + '>'
+ result = []
+ for p in properties:
+ if get_grist (p) == feature:
+ result.append (replace_grist (p, ''))
+
+ return result
+
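+# For example, get_values('<x>', ['<x>a', '<y>b', '<x>c']) returns ['a', 'c'],
+# matching the corresponding Jam test above.
+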
+def free_features ():
+ """ Returns all free features.
+ """
+ return __free_features
+
+def expand_composites (properties):
+ """ Expand all composite properties in the set so that all components
+ are explicitly expressed.
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ explicit_features = set(p.feature for p in properties)
+
+ result = []
+
+ # now expand composite features
+ for p in properties:
+ expanded = expand_composite(p)
+
+ for x in expanded:
+ if not x in result:
+ f = x.feature
+
+ if f.free:
+ result.append (x)
+ elif not x in properties: # x is the result of expansion
+ if not f in explicit_features: # not explicitly-specified
+ if any(r.feature == f for r in result):
+ raise FeatureConflict(
+ "expansions of composite features result in "
+ "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
+ (f.name, [r.value for r in result if r.feature == f] + [x.value], p))
+ else:
+ result.append (x)
+ elif any(r.feature == f for r in result):
+ raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
+ "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
+ [r.value for r in result if r.feature == f], p, x.value))
+ else:
+ result.append (x)
+
+ return result
+
+# Uses Property
+def is_subfeature_of (parent_property, f):
+ """ Return true iff f is an ordinary subfeature of the parent_property's
+ feature, or if f is a subfeature of the parent_property's feature
+ specific to the parent_property's value.
+ """
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert isinstance(f, Feature)
+
+ if not f.subfeature:
+ return False
+
+ p = f.parent
+ if not p:
+ return False
+
+ parent_feature = p[0]
+ parent_value = p[1]
+
+ if parent_feature != parent_property.feature:
+ return False
+
+ if parent_value and parent_value != parent_property.value:
+ return False
+
+ return True
+
+def __is_subproperty_of (parent_property, p):
+ """ As is_subfeature_of, for subproperties.
+ """
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert isinstance(p, Property)
+ return is_subfeature_of (parent_property, p.feature)
+
+
+# Returns true iff the subvalue is valid for the feature. When the
+# optional value-string is provided, returns true iff the subvalues
+# are valid for the given value of the feature.
+def is_subvalue(feature, value_string, subfeature, subvalue):
+ assert isinstance(feature, basestring)
+ assert isinstance(value_string, basestring)
+ assert isinstance(subfeature, basestring)
+ assert isinstance(subvalue, basestring)
+ if not value_string:
+ value_string = ''
+ try:
+ return __subfeature_from_value[feature][value_string][subvalue] == subfeature
+ except KeyError:
+ return False
+
+
+# Uses Property
+def expand (properties):
+ """ Given a property set which may consist of composite and implicit
+ properties and combined subfeature values, returns an expanded,
+ normalized property set with all implicit features expressed
+ explicitly, all subfeature values individually expressed, and all
+ components of composite properties expanded. Non-free features
+ directly expressed in the input properties cause any values of
+ those features due to composite feature expansion to be dropped. If
+ two values of a given non-free feature are directly expressed in the
+ input, an error is issued.
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ expanded = expand_subfeatures(properties)
+ return expand_composites (expanded)
+
+# Accepts list of Property objects
+def add_defaults (properties):
+ """ Given a set of properties, add default values for features not
+ represented in the set.
+ Note: if there's an ordinary feature F1 and a composite feature
+ F2, which includes some value for F1, and both features have default values,
+ then the default value of F1 will be added, not the value in F2. This might
+ not be the right idea: consider
+
+ feature variant : debug ... ;
+ <variant>debug : .... <runtime-debugging>on
+ feature <runtime-debugging> : off on ;
+
+ Here, when adding default for an empty property set, we'll get
+
+ <variant>debug <runtime_debugging>off
+
+ and that's kind of strange.
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ # create a copy since properties will be modified
+ result = list(properties)
+
+ # We don't add defaults for conditional properties. We don't want
+ # <variant>debug:<define>DEBUG to be taken as a specified value for <variant>.
+ handled_features = set(p.feature for p in properties if not p.condition)
+
+ missing_top = [f for f in __all_top_features if not f in handled_features]
+ more = defaults(missing_top)
+ result.extend(more)
+ handled_features.update(p.feature for p in more)
+
+ # Add defaults for subfeatures of features which are present
+ for p in result[:]:
+ subfeatures = [s for s in p.feature.subfeatures if not s in handled_features]
+ more = defaults(__select_subfeatures(p, subfeatures))
+ handled_features.update(h.feature for h in more)
+ result.extend(more)
+
+ return result
+
+def minimize (properties):
+ """ Given an expanded property set, eliminate all redundancy: properties
+ which are elements of other (composite) properties in the set will
+ be eliminated. Non-symmetric properties equal to default values will be
+ eliminated, unless they override a value from some composite property.
+ Implicit properties will be expressed without feature
+ grist, and sub-property values will be expressed as elements joined
+ to the corresponding main property.
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ # remove properties implied by composite features
+ components = []
+ component_features = set()
+ for property in properties:
+ if property in __composite_properties:
+ cs = __composite_properties[property]
+ components.extend(cs)
+ component_features.update(c.feature for c in cs)
+
+ properties = b2.util.set.difference (properties, components)
+
+ # handle subfeatures and implicit features
+
+ # move subfeatures to the end of the list
+ properties = [p for p in properties if not p.feature.subfeature] +\
+ [p for p in properties if p.feature.subfeature]
+
+ result = []
+ while properties:
+ p = properties[0]
+ f = p.feature
+
+ # locate all subproperties of p in the property set
+ subproperties = [x for x in properties if is_subfeature_of(p, x.feature)]
+
+ if subproperties:
+ # reconstitute the joined property name
+ subproperties.sort ()
+ joined = b2.build.property.Property(p.feature, p.value + '-' + '-'.join ([sp.value for sp in subproperties]))
+ result.append(joined)
+
+ properties = b2.util.set.difference(properties[1:], subproperties)
+
+ else:
+ # eliminate properties whose value is equal to feature's
+ # default and which are not symmetric and which do not
+ # contradict values implied by composite properties.
+
+ # since all component properties of composites in the set
+ # have been eliminated, any remaining property whose
+ # feature is the same as a component of a composite in the
+ # set must have a non-redundant value.
+ if p.value != f.default or f.symmetric or f in component_features:
+ result.append (p)
+
+ properties = properties[1:]
+
+ return result
+
+
+def split (properties):
+ """ Given a property-set of the form
+ v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
+
+ Returns
+ v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
+
+ Note that vN...vM may contain slashes. This is resilient to the
+ substitution of backslashes for slashes, since Jam, unbidden,
+ sometimes swaps slash direction on NT.
+ """
+ assert isinstance(properties, basestring)
+ def split_one (properties):
+ pieces = re.split (__re_slash_or_backslash, properties)
+ result = []
+
+ for x in pieces:
+ if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
+ result = result [0:-1] + [ result [-1] + '/' + x ]
+ else:
+ result.append (x)
+
+ return result
+
+ if isinstance (properties, str):
+ return split_one (properties)
+
+ result = []
+ for p in properties:
+ result += split_one (p)
+ return result
+
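+# For example, split('<x>y/z/<a>b/c') returns ['<x>y/z', '<a>b/c'], matching the
+# corresponding Jam tests above.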
+
+def compress_subproperties (properties):
+ """ Combine all subproperties into their parent properties
+
+ Requires: for every subproperty, there is a parent property. All
+ features are explicitly expressed.
+
+ This rule probably shouldn't be needed, but
+ build-request.expand-no-defaults is being abused for unintended
+ purposes and it needs help.
+ """
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ result = []
+ matched_subs = set()
+ all_subs = set()
+ for p in properties:
+ f = p.feature
+
+ if not f.subfeature:
+ subs = [x for x in properties if is_subfeature_of(p, x.feature)]
+ if subs:
+
+ matched_subs.update(subs)
+
+ subvalues = '-'.join (sub.value for sub in subs)
+ result.append(Property(
+ p.feature, p.value + '-' + subvalues,
+ p.condition))
+ else:
+ result.append(p)
+
+ else:
+ all_subs.add(p)
+
+ # TODO: these variables are used just for debugging. What's the overhead?
+ assert all_subs == matched_subs
+
+ return result
+
+######################################################################################
+# Private methods
+
+def __select_subproperties (parent_property, properties):
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(parent_property, Property)
+ return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
+
+def __get_subfeature_name (subfeature, value_string):
+ assert isinstance(subfeature, basestring)
+ assert isinstance(value_string, basestring) or value_string is None
+ if value_string == None:
+ prefix = ''
+ else:
+ prefix = value_string + ':'
+
+ return prefix + subfeature
+
+
+def __validate_feature_attributes (name, attributes):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(attributes, basestring)
+ for attribute in attributes:
+ if attribute not in VALID_ATTRIBUTES:
+ raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
+
+ if name in __all_features:
+ raise AlreadyDefined ("feature '%s' already defined" % name)
+ elif 'implicit' in attributes and 'free' in attributes:
+ raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name)
+ elif 'free' in attributes and 'propagated' in attributes:
+ raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
+
+
+def __validate_feature (feature):
+ """ Generates an error if the feature is unknown.
+ """
+ assert isinstance(feature, basestring)
+ if feature not in __all_features:
+ raise BaseException ('unknown feature "%s"' % feature)
+
+
+def __select_subfeatures (parent_property, features):
+ """ Given a property, return the subset of features consisting of all
+ ordinary subfeatures of the property's feature, and all specific
+ subfeatures of the property's feature which are conditional on the
+ property's value.
+ """
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert is_iterable_typed(features, Feature)
+ return [f for f in features if is_subfeature_of (parent_property, f)]
+
+# FIXME: copy over tests.
diff --git a/src/boost/tools/build/src/build/generators.jam b/src/boost/tools/build/src/build/generators.jam
new file mode 100644
index 000000000..36d8304bf
--- /dev/null
+++ b/src/boost/tools/build/src/build/generators.jam
@@ -0,0 +1,1447 @@
+# Copyright 2002. Vladimir Prus
+# Copyright 2006. Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Manages 'generators' --- objects which can do transformations between different
+# target types and contain the algorithm for finding a transformation from
+# sources to targets.
+#
+# The main entry point to this module is generators.construct rule. It is given
+# a list of source targets, desired target type and a set of properties. It
+# starts by selecting 'viable generators', which have any chances of producing
+# the desired target type with the required properties. Generators are ranked
+# and a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the
+# properties and list of sources. Each one selects a target which can be
+# directly consumed, and tries to convert the remaining ones to the types it can
+# consume. This is done by recursively calling 'construct' with all consumable
+# types.
+#
+# If the generator has collected all the targets it needs, it creates targets
+# corresponding to the result, and returns it. When all generators have been run,
+# the results of one of them are selected and returned as the result.
+#
+# It is quite possible for 'construct' to return more targets than it was asked
+# for. For example, it may be asked to generate a target of type EXE, but the
+# only generator found produces both EXE and TDS (a file with debug information).
+# The extra target will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it can
+# get more targets than it was asked for. The question is what to do with extra
+# targets. B2 attempts to convert them to the requested types, and attempts
+# that as early as possible. Specifically, this is done after invoking each
+# generator. TODO: An example is needed to document the rationale for trying
+# extra target conversion at that point.
+#
+# In order for the system to be able to use a specific generator instance 'when
+# needed', the instance needs to be registered with the system using
+# generators.register() or one of its related rules. Unregistered generators may
+# only be run explicitly and will not be considered by B2 when
+# converting between given target types.
+
+import "class" : new ;
+import property-set ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+import virtual-target ;
+
+
+if "--debug-generators" in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+
+# Updates cached viable source target type information as needed after a new
+# target type gets defined. This is needed because if a target type is a viable
+# source target type for some generator then all of the target type's derived
+# target types should automatically be considered as viable source target types
+# for the same generator as well. Does nothing if a non-derived target type is
+# passed to it.
+#
+rule update-cached-information-with-a-new-type ( type )
+{
+ local base-type = [ type.base $(type) ] ;
+ if $(base-type)
+ {
+ for local g in $(.vstg-cached-generators)
+ {
+ if $(base-type) in $(.vstg.$(g))
+ {
+ .vstg.$(g) += $(type) ;
+ }
+ }
+
+ for local t in $(.vst-cached-types)
+ {
+ if $(base-type) in $(.vst.$(t))
+ {
+ .vst.$(t) += $(type) ;
+ }
+ }
+ }
+}
+
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+local rule invalidate-extendable-viable-source-target-type-cache ( )
+{
+ local generators-with-cached-source-types = $(.vstg-cached-generators) ;
+ .vstg-cached-generators = ;
+ for local g in $(generators-with-cached-source-types)
+ {
+ if $(.vstg.$(g)) = *
+ {
+ .vstg-cached-generators += $(g) ;
+ }
+ else
+ {
+ .vstg.$(g) = ;
+ }
+ }
+
+ local types-with-cached-source-types = $(.vst-cached-types) ;
+ .vst-cached-types = ;
+ for local t in $(types-with-cached-source-types)
+ {
+ if $(.vst.$(t)) = *
+ {
+ .vst-cached-types += $(t) ;
+ }
+ else
+ {
+ .vst.$(t) = ;
+ }
+ }
+}
+
+
+# Outputs a debug message if generators debugging is on. Each element of
+# 'message' is checked to see if it is a class instance. If so, instead of the
+# value, the result of 'str' call is output.
+#
+local rule generators.dout ( message * )
+{
+ if $(.debug)
+ {
+ ECHO [ sequence.transform utility.str : $(message) ] ;
+ }
+}
+
+
+local rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+local rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+local rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Models a generator.
+#
+class generator
+{
+ import "class" : new ;
+ import feature ;
+ import generators : indent increase-indent decrease-indent generators.dout ;
+ import utility ;
+ import path ;
+ import property ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import toolset ;
+ import type ;
+ import virtual-target ;
+
+ EXPORT class@generator : indent increase-indent decrease-indent
+ generators.dout ;
+
+ rule __init__ (
+ id # Identifies the generator - should be name
+ # of the rule which sets up the build
+ # actions.
+
+ composing ? # Whether generator processes each source
+ # target in turn, converting it to required
+ # types. Ordinary generators pass all
+ # sources together to the recursive
+ # generators.construct-types call.
+
+ : source-types * # Types that this generator can handle. If
+ # empty, the generator can consume anything.
+
+ : target-types-and-names + # Types the generator will create and,
+ # optionally, names for created targets.
+ # Each element should have the form
+ # type["(" name-pattern ")"], for example,
+ # obj(%_x). Generated target name will be
+ # found by replacing % with the name of
+ # source, provided an explicit name was not
+ # specified.
+
+ : requirements *
+ )
+ {
+ self.id = $(id) ;
+ self.rule-name = $(id) ;
+ self.composing = $(composing) ;
+ self.source-types = $(source-types) ;
+ self.target-types-and-names = $(target-types-and-names) ;
+ self.requirements = $(requirements) ;
+
+ for local e in $(target-types-and-names)
+ {
+ # Create three parallel lists: one with the list of target types,
+ # and two others with prefixes and postfixes to be added to the target
+ # name. We use parallel lists for prefix and postfix (as opposed to a
+ # mapping), because a given target type might occur several times, for
+ # example "H H(%_symbols)".
+ local m = [ MATCH "([^\\(]*)(\\((.*)%(.*)\\))?" : $(e) ] ;
+ self.target-types += $(m[1]) ;
+ self.name-prefix += $(m[3]:E="") ;
+ self.name-postfix += $(m[4]:E="") ;
+ }
+
+ for local r in [ requirements ]
+ {
+ if $(r:G=)
+ {
+ self.property-requirements += $(r) ;
+ }
+ else
+ {
+ self.feature-requirements += $(r) ;
+ }
+ }
+
+ # Note that 'transform' here, is the same as 'for_each'.
+ sequence.transform type.validate : $(self.source-types) ;
+ sequence.transform type.validate : $(self.target-types) ;
+
+ local relevant-for-generator =
+ [ sequence.transform utility.ungrist : $(requirements:G) ] ;
+ self.relevant-features = [ property-set.create <relevant>$(relevant-for-generator) ] ;
+ }
+
+ ################# End of constructor #################
+
+ rule id ( )
+ {
+ return $(self.id) ;
+ }
+
+ # Returns the list of target types the generator accepts.
+ #
+ rule source-types ( )
+ {
+ return $(self.source-types) ;
+ }
+
+ # Returns the list of target types that this generator produces. It is
+ # assumed to be always the same -- i.e. it can not change depending on some
+ # provided list of sources.
+ #
+ rule target-types ( )
+ {
+ return $(self.target-types) ;
+ }
+
+ # Returns the required properties for this generator. Properties in the
+ # returned set must be present in the build properties if this generator is
+ # to be used. If the result has a grist-only element, the build properties
+ # must include some value of that feature.
+ #
+ # XXX: remove this method?
+ #
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule set-rule-name ( rule-name )
+ {
+ self.rule-name = $(rule-name) ;
+ }
+
+ rule rule-name ( )
+ {
+ return $(self.rule-name) ;
+ }
+
+ # Returns a true value if the generator can be run with the specified
+ # properties.
+ #
+ rule match-rank ( property-set-to-match )
+ {
+ # See if generator requirements are satisfied by 'properties'. Treat a
+ # feature name in requirements (i.e. grist-only element), as matching
+ # any value of the feature.
+
+ if [ $(property-set-to-match).contains-raw $(self.property-requirements) ] &&
+ [ $(property-set-to-match).contains-features $(self.feature-requirements) ]
+ {
+ return true ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ # Returns another generator which differs from $(self) in
+ # - id
+ # - the value of the <toolset> feature in its properties
+ #
+ rule clone ( new-id : new-toolset-properties + )
+ {
+ local g = [ new $(__class__) $(new-id) $(self.composing) :
+ $(self.source-types) : $(self.target-types-and-names) :
+ # Note: this does not remove any subfeatures of <toolset> which
+ # might cause problems.
+ [ property.change $(self.requirements) : <toolset> ]
+ $(new-toolset-properties) ] ;
+ return $(g) ;
+ }
+
+ # Creates another generator that is the same as $(self), except that if
+ # 'base' is in the target types of $(self), 'type' will be in the target
+ # types of the new generator.
+ #
+ rule clone-and-change-target-type ( base : type )
+ {
+ local target-types ;
+ for local t in $(self.target-types-and-names)
+ {
+ local m = [ MATCH "([^\\(]*)(\\(.*\\))?" : $(t) ] ;
+ if $(m) = $(base)
+ {
+ target-types += $(type)$(m[2]:E="") ;
+ }
+ else
+ {
+ target-types += $(t) ;
+ }
+ }
+
+ local g = [ new $(__class__) $(self.id) $(self.composing) :
+ $(self.source-types) : $(target-types) : $(self.requirements) ] ;
+ if $(self.rule-name)
+ {
+ $(g).set-rule-name $(self.rule-name) ;
+ }
+ return $(g) ;
+ }
+
+ # Tries to invoke this generator on the given sources. Returns a list of
+ # generated targets (instances of 'virtual-target') and optionally a set of
+ # properties to be added to the usage-requirements for all the generated
+ # targets. Returning nothing from run indicates that the generator was
+ # unable to create the target.
+ #
+ rule run
+ (
+ project # Project for which the targets are generated.
+ name ? # Used when determining the 'name' attribute for all
+ # generated targets. See the 'generated-targets' method.
+ : property-set # Desired properties for generated targets.
+ : sources + # Source targets.
+ )
+ {
+ generators.dout [ indent ] " ** generator" $(self.id) ;
+ generators.dout [ indent ] " composing:" $(self.composing) ;
+
+ if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
+ {
+ import errors : error : errors.error ;
+ errors.error "Unsupported source/source-type combination" ;
+ }
+
+ # We do not run composing generators if no name is specified. The reason
+ # is that a composing generator combines several targets, which can have
+ # different names, and it cannot decide which name to give the produced
+ # target. Therefore, the name must be passed.
+ #
+ # This, in effect, means that composing generators are runnable only at
+ # the top level of a transformation graph, or if their name is passed
+ # explicitly. Thus, we disallow composing generators in the middle. For
+ # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
+ # will not be allowed as the OBJ -> STATIC_LIB generator is composing.
+ if ! $(self.composing) || $(name)
+ {
+ run-really $(project) $(name) : $(property-set) : $(sources) ;
+ }
+ }
+
+ rule run-really ( project name ? : property-set : sources + )
+ {
+ # Targets that this generator will consume directly.
+ local consumed = ;
+ # Targets that can not be consumed and will be returned as-is.
+ local bypassed = ;
+
+ if $(self.composing)
+ {
+ consumed = [ convert-multiple-sources-to-consumable-types $(project)
+ : $(property-set) : $(sources) ] ;
+ }
+ else
+ {
+ consumed = [ convert-to-consumable-types $(project) $(name)
+ : $(property-set) : $(sources) ] ;
+ }
+
+ local result ;
+ if $(consumed[2])
+ {
+ result = [ construct-result $(consumed[2-]) : $(project) $(name) :
+ [ $(property-set).add $(consumed[1]) ] ] ;
+ }
+
+ if $(result)
+ {
+ generators.dout [ indent ] " SUCCESS: " $(result) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " FAILURE" ;
+ }
+ generators.dout ;
+ if $(result)
+ {
+ # Make sure that we propagate usage-requirements up the stack.
+ return [ $(result[1]).add $(consumed[1]) ] $(result[2-]) ;
+ }
+ }
+
+ # Constructs the dependency graph to be returned by this generator.
+ #
+ rule construct-result
+ (
+ consumed + # Already prepared list of consumable targets.
+ # Composing generators may receive multiple sources
+ # all of which will have types matching those in
+ # $(self.source-types). Non-composing generators with
+ # multiple $(self.source-types) will receive exactly
+ # len $(self.source-types) sources with types matching
+ # those in $(self.source-types). And non-composing
+ # generators with only a single source type may
+ # receive multiple sources with all of them of the
+ # type listed in $(self.source-types).
+ : project name ?
+ : property-set # Properties to be used for all actions created here.
+ )
+ {
+ local result ;
+
+ local relevant = [ toolset.relevant $(self.rule-name) ] ;
+ relevant = [ $(relevant).add $(self.relevant-features) ] ;
+ property-set = [ $(property-set).add $(relevant) ] ;
+
+ # If this is a 1->1 transformation, apply it to all consumed targets in
+ # order.
+ if ! $(self.source-types[2]) && ! $(self.composing)
+ {
+ for local r in $(consumed)
+ {
+ result += [ generated-targets $(r) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ }
+ else if $(consumed)
+ {
+ result += [ generated-targets $(consumed) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ if $(result)
+ {
+ return $(relevant) $(result) ;
+ }
+ }
+
+ # Determines the target name from fullname (which may include path
+ # components), placing the optional prefix and postfix around the basename.
+ #
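+ # For example, [ determine-target-name lib/foo.cpp : : _x ] yields lib/foo_x,
+ # since the relative directory part of the source name is retained.
+ #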
+ rule determine-target-name ( fullname : prefix ? : postfix ? )
+ {
+ # See if we need to add directory to the target name.
+ local dir = $(fullname:D) ;
+ local name = $(fullname:B) ;
+
+ name = $(prefix:E=)$(name) ;
+ name = $(name)$(postfix:E=) ;
+
+ if $(dir)
+ # Never append '..' to target path.
+ && ! [ MATCH .*(\\.\\.).* : $(dir) ]
+ && ! [ path.is-rooted $(dir) ]
+ {
+ # Relative path is always relative to the source directory. Retain
+ # it, so that users can have files with the same name in two
+ # different subdirectories.
+ name = $(dir)/$(name) ;
+ }
+ return $(name) ;
+ }
+
+ # Determines the name of the produced target from the names of the sources.
+ #
+ rule determine-output-name ( sources + )
+ {
+ # The simple case is when the name of the source has a single dot. Then, we
+ # take the part before the dot. Several dots can be caused by:
+ # - using a source file like a.host.cpp, or
+ # - a type whose suffix has a dot. Say, we can have a type 'host_cpp' with
+ # extension 'host.cpp'.
+ # In the first case, we want to take the part up to the last dot. In the
+ # second case -- not sure, but for now take the part up to the last dot
+ # too.
+ local name = [ utility.basename [ $(sources[1]).name ] ] ;
+ for local s in $(sources[2-])
+ {
+ if [ utility.basename [ $(s).name ] ] != $(name)
+ {
+ import errors : error : errors.error ;
+ errors.error "$(self.id): source targets have different names: cannot determine target name" ;
+ }
+ }
+ return [ determine-target-name [ $(sources[1]).name ] ] ;
+ }
+
+ # Constructs targets that are created after consuming 'sources'. The result
+ # will be the list of virtual-target, which has the same length as the
+ # 'target-types' attribute and with corresponding types.
+ #
+ # When 'name' is empty, all source targets must have the same 'name'
+ # attribute value, which will be used instead of the 'name' argument.
+ #
+ # The 'name' attribute value for each generated target will be equal to the
+ # 'name' parameter if there is no name pattern for this type. Otherwise, the
+ # '%' symbol in the name pattern will be replaced with the 'name' parameter
+ # to obtain the 'name' attribute.
+ #
+ # For example, if targets types are T1 and T2 (with name pattern "%_x"),
+ # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created
+ # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually
+ # determines the basename of a file.
+ #
+ # Note that this pattern mechanism has nothing to do with implicit patterns
+ # in make. It is a way to produce a target whose name is different than the
+ # name of its source.
+ #
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if ! $(name)
+ {
+ name = [ determine-output-name $(sources) ] ;
+ }
+
+ # Assign an action for each target.
+ local action = [ action-class ] ;
+ local a = [ class.new $(action) $(sources) : $(self.rule-name) :
+ $(property-set) ] ;
+
+ # Create generated target for each target type.
+ local targets ;
+ local pre = $(self.name-prefix) ;
+ local post = $(self.name-postfix) ;
+ for local t in $(self.target-types)
+ {
+ local generated-name = $(pre[1])$(name:BS)$(post[1]) ;
+ generated-name = $(generated-name:R=$(name:D)) ;
+ pre = $(pre[2-]) ;
+ post = $(post[2-]) ;
+
+ targets += [ class.new file-target $(generated-name) : $(t) :
+ $(project) : $(a) ] ;
+ }
+
+ return [ sequence.transform virtual-target.register : $(targets) ] ;
+ }
+
+ # Attempts to convert 'sources' to targets of types that this generator can
+ # handle. The intention is to produce the set of targets that can be used
+ # when the generator is run.
+ #
+ rule convert-to-consumable-types
+ (
+ project name ?
+ : property-set
+ : sources +
+ : only-one ? # Convert 'source' to only one of the source types. If
+ # there is more than one possibility, report an error.
+ )
+ {
+ local _consumed ;
+ local missing-types ;
+ local usage-requirements ;
+
+ if $(sources[2])
+ {
+ # Do not know how to handle several sources yet. Just try to pass
+ # the request to another generator.
+ missing-types = $(self.source-types) ;
+ }
+ else
+ {
+ local temp = [ consume-directly $(sources) ] ;
+ if $(temp[1])
+ {
+ usage-requirements = [ property-set.empty ] ;
+ _consumed = $(temp[1]) ;
+ }
+ missing-types = $(temp[2-]) ;
+ }
+
+ # No need to search for transformation if some source type has consumed
+ # source and no more source types are needed.
+ if $(only-one) && $(_consumed)
+ {
+ missing-types = ;
+ }
+
+ # TODO: we should check that only one source type is created if
+ # 'only-one' is true.
+
+ if $(missing-types)
+ {
+ local transformed = [ generators.construct-types $(project) $(name)
+ : $(missing-types) : $(property-set) : $(sources) ] ;
+
+ # Add targets of right type to 'consumed'. Add others to 'bypassed'.
+ # The 'generators.construct' rule has done its best to convert
+ # everything to the required type. There is no need to rerun it on
+ # targets of different types.
+
+ usage-requirements = $(transformed[1]) ;
+ for local t in $(transformed[2-])
+ {
+ if [ $(t).type ] in $(missing-types)
+ {
+ _consumed += $(t) ;
+ }
+ }
+ }
+
+ return $(usage-requirements) [ sequence.unique $(_consumed) ] ;
+ }
+
+ # Converts several files to consumable types. Called for composing
+ # generators only.
+ #
+ rule convert-multiple-sources-to-consumable-types ( project : property-set :
+ sources * )
+ {
+ local result ;
+ # We process each source one-by-one, trying to convert it to a usable
+ # type.
+ if ! $(self.source-types)
+ {
+ # Anything is acceptable
+ return [ property-set.empty ] $(sources) ;
+ }
+ else
+ {
+ local usage-requirements = [ property-set.empty ] ;
+ local acceptible-types = [ sequence.unique
+ [ sequence.transform type.all-derived : $(self.source-types) ] ] ;
+ for local source in $(sources)
+ {
+ if ! [ $(source).type ] in $(acceptible-types)
+ {
+ local transformed = [ generators.construct-types $(project)
+ : $(self.source-types) : $(property-set) : $(source) ] ;
+ for local t in $(transformed[2-])
+ {
+ if [ $(t).type ] in $(self.source-types)
+ {
+ result += $(t) ;
+ }
+ }
+ if ! $(transformed)
+ {
+ generators.dout [ indent ] " failed to convert " $(source) ;
+ }
+ else
+ {
+ usage-requirements = [ $(usage-requirements).add $(transformed[1]) ] ;
+ }
+ }
+ else
+ {
+ result += $(source) ;
+ }
+ }
+ return $(usage-requirements) [ sequence.unique $(result) : stable ] ;
+ }
+ }
+
+ rule consume-directly ( source )
+ {
+ local real-source-type = [ $(source).type ] ;
+
+ # If there are no source types, we can consume anything.
+ local source-types = $(self.source-types) ;
+ source-types ?= $(real-source-type) ;
+
+ local result = "" ;
+ local missing-types ;
+
+ for local st in $(source-types)
+ {
+ # The 'source' is of the right type already.
+ if $(real-source-type) = $(st) || [ type.is-derived
+ $(real-source-type) $(st) ]
+ {
+ result = $(source) ;
+ }
+ else
+ {
+ missing-types += $(st) ;
+ }
+ }
+ return $(result) $(missing-types) ;
+ }
+
+ # Returns the class to be used for actions. The default implementation returns
+ # "action".
+ #
+ rule action-class ( )
+ {
+ return "action" ;
+ }
+}
+
+
+# Registers a new generator instance 'g'.
+#
+rule register ( g )
+{
+ .all-generators += $(g) ;
+
+ # A generator can produce several targets of the same type. We want a unique
+ # occurrence of that generator in .generators.$(t) in that case; otherwise,
+ # it will be tried twice and we will get a false ambiguity.
+ for local t in [ sequence.unique [ $(g).target-types ] ]
+ {
+ .generators.$(t) += $(g) ;
+ }
+
+ # Update the set of generators for toolset.
+
+ # TODO: should we check that a generator with this id is not already
+ # registered? For example, the fop.jam module intentionally declares two
+ # generators with the same id, so such a check would break it.
+ local id = [ $(g).id ] ;
+
+ # Some generators have multiple periods in their name, so a simple $(id:S=)
+ # will not generate the right toolset name. E.g. if id = gcc.compile.c++,
+ # then .generators-for-toolset.$(id:S=) will append to
+ # .generators-for-toolset.gcc.compile, which is a separate value from
+ # .generators-for-toolset.gcc. Correcting this makes generator inheritance
+ # work properly. See also inherit-generators in the toolset module.
+ local base = $(id) ;
+ while $(base:S)
+ {
+ base = $(base:B) ;
+ }
+ .generators-for-toolset.$(base) += $(g) ;
+
+
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077
+ # mailing list thread for an even more advanced idea of how we could convert
+ # Boost Build's Jamfile processing, target selection and generator selection
+ # into separate steps which would prevent these caches from ever being
+ # invalidated.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate-extendable-viable-source-target-type-cache ;
+}
+
+
+# Creates a new non-composing 'generator' class instance and registers it.
+# Returns the created instance. Rationale: the instance is returned so that it
+# is possible to first register a generator and then call its 'run' method,
+# bypassing the whole generator selection process.
+#
+rule register-standard ( id : source-types * : target-types + : requirements * )
+{
+ local g = [ new generator $(id) : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
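+# A hedged usage sketch (the generator id and <toolset> value are hypothetical):
+# a toolset module would typically register a simple one-to-one compiler
+# generator like this, and generator selection then picks it up whenever an OBJ
+# target is requested from a CPP source:
+#
+#   generators.register-standard hypothetical.compile : CPP : OBJ : <toolset>hypothetical ;
+#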
+
+# Creates a new composing 'generator' class instance and registers it.
+#
+rule register-composing ( id : source-types * : target-types + : requirements *
+ )
+{
+ local g = [ new generator $(id) true : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
+
+# Returns all generators belonging to the given 'toolset', i.e. whose ids are
+# '$(toolset).<something>'.
+#
+rule generators-for-toolset ( toolset )
+{
+ return $(.generators-for-toolset.$(toolset)) ;
+}
+
+
+# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when
+# searching for generators that could produce a target of a certain type, both
+# those generators are among viable generators, the overridden generator is
+# immediately discarded.
+#
+# The overridden generators are discarded immediately after computing the list
+# of viable generators but before running any of them.
+#
+rule override ( overrider-id : overridee-id )
+{
+ .override.$(overrider-id) += $(overridee-id) ;
+}
+
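+# For example (with illustrative generator ids), a toolset could make its own
+# linking generator take precedence over the builtin one whenever both are
+# viable for the same target type:
+#
+#   generators.override hypothetical.link : builtin.link ;
+#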
+
+# Returns a list of source types which can possibly be converted to 'target-type'
+# by some chain of generator invocations.
+#
+# More formally, takes all generators for 'target-type' and returns the union of
+# source types for those generators and the result of calling itself recursively
+# on those source types.
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given type.
+#
+local rule viable-source-types-real ( target-type )
+{
+ local result ;
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ local t0 = [ type.all-bases $(target-type) ] ;
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ local t = $(t0) ;
+
+ while $(t)
+ {
+ # Find all generators for the current type. Unlike
+ # 'find-viable-generators' we do not care about the property-set.
+ local generators = $(.generators.$(t[1])) ;
+ t = $(t[2-]) ;
+
+ while $(generators)
+ {
+ local g = $(generators[1]) ;
+ generators = $(generators[2-]) ;
+
+ if ! [ $(g).source-types ]
+ {
+ # Empty source types -- everything can be accepted.
+ result = * ;
+ # This will terminate this loop.
+ generators = ;
+ # This will terminate the outer loop.
+ t = ;
+ }
+
+ for local source-type in [ $(g).source-types ]
+ {
+ if ! $(source-type) in $(result)
+ {
+ # If a generator accepts a 'source-type' it will also
+ # happily accept any type derived from it.
+ for local n in [ type.all-derived $(source-type) ]
+ {
+ if ! $(n) in $(result)
+ {
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if ! $(n) in $(t0)
+ {
+ t += $(n) ;
+ }
+ result += $(n) ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
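+# As an illustration (with hypothetical types): if one registered generator
+# produces OBJ from CPP and another produces EXE from OBJ, then the viable
+# source types for EXE include OBJ and CPP, together with all types derived
+# from them, because the search follows conversion chains transitively.
+#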
+
+# Helper rule, caches the result of 'viable-source-types-real'.
+#
+rule viable-source-types ( target-type )
+{
+ local key = .vst.$(target-type) ;
+ if ! $($(key))
+ {
+ .vst-cached-types += $(target-type) ;
+ local v = [ viable-source-types-real $(target-type) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns the list of source types which, when passed to the 'run' method of
+# 'generator', have some chance of being eventually used (probably after
+# conversion by other generators).
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given generator.
+#
+rule viable-source-types-for-generator-real ( generator )
+{
+ local source-types = [ $(generator).source-types ] ;
+ if ! $(source-types)
+ {
+ # If generator does not specify any source types, it might be a special
+ # generator like builtin.lib-generator which just relays to other
+ # generators. Return '*' to indicate that any source type is possibly
+ # OK, since we do not know for sure.
+ return * ;
+ }
+ else
+ {
+ local result ;
+ while $(source-types)
+ {
+ local s = $(source-types[1]) ;
+ source-types = $(source-types[2-]) ;
+ local viable-sources = [ generators.viable-source-types $(s) ] ;
+ if $(viable-sources) = *
+ {
+ result = * ;
+ source-types = ; # Terminate the loop.
+ }
+ else
+ {
+ result += [ type.all-derived $(s) ] $(viable-sources) ;
+ }
+ }
+ return [ sequence.unique $(result) ] ;
+ }
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-for-generator-real'.
+#
+local rule viable-source-types-for-generator ( generator )
+{
+ local key = .vstg.$(generator) ;
+ if ! $($(key))
+ {
+ .vstg-cached-generators += $(generator) ;
+ local v = [ viable-source-types-for-generator-real $(generator) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns usage requirements + list of created targets.
+#
+local rule try-one-generator-really ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local targets =
+ [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
+
+ local usage-requirements ;
+ local success ;
+
+ generators.dout [ indent ] returned $(targets) ;
+
+ if $(targets)
+ {
+ success = true ;
+
+ if [ class.is-a $(targets[1]) : property-set ]
+ {
+ usage-requirements = $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ }
+
+ generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
+ generators.dout [ indent ] " " $(targets) ;
+ if $(usage-requirements)
+ {
+ generators.dout [ indent ] " with usage requirements:" $(usage-requirements) ;
+ }
+
+ if $(success)
+ {
+ return $(usage-requirements) $(targets) ;
+ }
+}
+
+
+# Checks if generator invocation can be pruned, because it is guaranteed to
+# fail. If so, quickly returns an empty list. Otherwise, calls
+# try-one-generator-really.
+#
+local rule try-one-generator ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local source-types ;
+ for local s in $(sources)
+ {
+ source-types += [ $(s).type ] ;
+ }
+ local viable-source-types = [ viable-source-types-for-generator $(generator)
+ ] ;
+
+ if $(source-types) && $(viable-source-types) != * &&
+ ! [ set.intersection $(source-types) : $(viable-source-types) ]
+ {
+ local id = [ $(generator).id ] ;
+ generators.dout [ indent ] " ** generator '$(id)' pruned" ;
+ #generators.dout [ indent ] "source-types" '$(source-types)' ;
+ #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
+ }
+ else
+ {
+ return [ try-one-generator-really $(project) $(name) : $(generator) :
+ $(target-type) : $(property-set) : $(sources) ] ;
+ }
+}
+
+
+rule construct-types ( project name ? : target-types + : property-set
+ : sources + )
+{
+ local result ;
+ local usage-requirements = [ property-set.empty ] ;
+ for local t in $(target-types)
+ {
+ local r = [ construct $(project) $(name) : $(t) : $(property-set) :
+ $(sources) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ }
+ }
+ # TODO: have to introduce parameter controlling if several types can be
+ # matched and add appropriate checks.
+
+ # TODO: need to review the documentation for 'construct' to see if it should
+ # return $(source) even if nothing can be done with it. Current docs seem
+ # to imply that, contrary to the actual behaviour.
+ if $(result)
+ {
+ return $(usage-requirements) $(result) ;
+ }
+ else
+ {
+ return $(usage-requirements) $(sources) ;
+ }
+}
+
+
+# Ensures all 'targets' have their type. If this is not so, exits with an error.
+#
+local rule ensure-type ( targets * )
+{
+ for local t in $(targets)
+ {
+ if ! [ $(t).type ]
+ {
+ import errors ;
+ errors.error "target" [ $(t).str ] "has no type" ;
+ }
+ }
+}
+
+
+# Returns generators which can be used to construct target of specified type
+# with specified properties. Uses the following algorithm:
+# - iterates over requested target-type and all its bases (in the order returned
+# by type.all-bases).
+# - for each type find all generators that generate that type and whose
+# requirements are satisfied by properties.
+# - if the set of generators is not empty, returns that set.
+#
+# Note: this algorithm explicitly ignores generators for base classes if there
+# is at least one generator for the requested target-type.
+#
+local rule find-viable-generators-aux ( target-type : property-set )
+{
+ # Select generators that can create the required target type.
+ local viable-generators = ;
+
+ import type ;
+ local t = $(target-type) ;
+
+ if $(.debug)
+ {
+ generators.dout [ indent ] find-viable-generators target-type= $(target-type)
+ property-set= [ $(property-set).as-path ] ;
+ generators.dout [ indent ] "trying type" $(target-type) ;
+ }
+
+ local generators = $(.generators.$(target-type)) ;
+ if $(generators)
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] "there are generators for this type" ;
+ }
+ }
+ else
+ {
+ local t = [ type.base $(target-type) ] ;
+
+ # Get the list of generators for the requested type. If no generator is
+ # registered, try base type, and so on.
+ while $(t)
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] "trying type" $(t) ;
+ }
+ if $(.generators.$(t))
+ {
+ generators.dout [ indent ] "there are generators for this type" ;
+ generators = $(.generators.$(t)) ;
+
+ # We are here because there were no generators found for
+ # target-type but there are some generators for its base type.
+ # We will try to use them, but they will produce targets of
+ # base type, not of 'target-type'. So, we clone the generators
+ # and modify the list of target types.
+ local generators2 ;
+ for local g in $(generators)
+ {
+ # generators.register adds a generator to the list of
+ # generators for toolsets, which is a bit strange, but
+ # should work. That list is only used when inheriting a
+ # toolset, which should have been done before running
+ # generators.
+ generators2 += [ $(g).clone-and-change-target-type $(t) :
+ $(target-type) ] ;
+ generators.register $(generators2[-1]) ;
+ }
+ generators = $(generators2) ;
+ t = ;
+ }
+ else
+ {
+ t = [ type.base $(t) ] ;
+ }
+ }
+ }
+
+ for local g in $(generators)
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
+ }
+
+ if [ $(g).match-rank $(property-set) ]
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] " is viable" ;
+ }
+ viable-generators += $(g) ;
+ }
+ }
+
+ return $(viable-generators) ;
+}
+
+
+rule find-viable-generators ( target-type : property-set )
+{
+ local key = $(target-type).$(property-set) ;
+ local l = $(.fv.$(key)) ;
+ if ! $(l)
+ {
+ l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ;
+ if ! $(l)
+ {
+ l = none ;
+ }
+ .fv.$(key) = $(l) ;
+ }
+
+ if $(l) = none
+ {
+ l = ;
+ }
+
+ local viable-generators ;
+ for local g in $(l)
+ {
+ # Avoid trying the same generator twice on different levels.
+ if ! $(g) in $(.active-generators)
+ {
+ viable-generators += $(g) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " generator " [ $(g).id ] "is active, discarding" ;
+ }
+ }
+
+ # Generators which override 'all'.
+ local all-overrides ;
+ # Generators which are overridden.
+ local overriden-ids ;
+ for local g in $(viable-generators)
+ {
+ local id = [ $(g).id ] ;
+ local this-overrides = $(.override.$(id)) ;
+ overriden-ids += $(this-overrides) ;
+ if all in $(this-overrides)
+ {
+ all-overrides += $(g) ;
+ }
+ }
+ if $(all-overrides)
+ {
+ viable-generators = $(all-overrides) ;
+ }
+ local result ;
+ for local g in $(viable-generators)
+ {
+ if ! [ $(g).id ] in $(overriden-ids)
+ {
+ result += $(g) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+.construct-stack = ;
+
+
+# Attempts to construct a target by finding viable generators, running them and
+# selecting the dependency graph.
+#
+local rule construct-really ( project name ? : target-type : property-set :
+ sources * )
+{
+ viable-generators = [ find-viable-generators $(target-type) :
+ $(property-set) ] ;
+
+ generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
+ " viable generators" ;
+
+ local result ;
+ local generators-that-succeeded ;
+ for local g in $(viable-generators)
+ {
+ # This variable will be restored on exit from this scope.
+ local .active-generators = $(g) $(.active-generators) ;
+
+ local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type)
+ : $(property-set) : $(sources) ] ;
+
+ if $(r)
+ {
+ generators-that-succeeded += $(g) ;
+ if $(result)
+ {
+ ECHO "Error: ambiguity found when searching for best transformation" ;
+ ECHO "Trying to produce type '$(target-type)' from: " ;
+ for local s in $(sources)
+ {
+ ECHO " - " [ $(s).str ] ;
+ }
+ ECHO "Generators that succeeded:" ;
+ for local g in $(generators-that-succeeded)
+ {
+ ECHO " - " [ $(g).id ] ;
+ }
+ ECHO "First generator produced: " ;
+ for local t in $(result[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ ECHO "Second generator produced: " ;
+ for local t in $(r[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ EXIT ;
+ }
+ else
+ {
+ result = $(r) ;
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Attempts to create a target of 'target-type' with 'properties' from 'sources'.
+# The 'sources' are treated as a collection of *possible* ingredients, i.e.
+# there is no obligation to consume them all.
+#
+# Returns a list of targets. When this invocation is the first instance of
+# 'construct' in the stack, returns only targets of the requested 'target-type';
+# otherwise, also returns unused sources and additionally generated targets.
+#
+# If 'top-level' is set, does not suppress generators that are already used in
+# the stack. This may be useful in cases where a generator has to build a
+# metatarget -- for example a target corresponding to a built tool.
+#
+rule construct ( project name ? : target-type : property-set * : sources * : top-level ? )
+{
+ local saved-active ;
+ if $(top-level)
+ {
+ saved-active = $(.active-generators) ;
+ .active-generators = ;
+ }
+
+ # FIXME This is probably not intended to be run unconditionally,
+ # but changing it causes no_type to fail.
+ if "(.construct-stack)"
+ {
+ ensure-type $(sources) ;
+ }
+
+ .construct-stack += 1 ;
+
+ increase-indent ;
+
+ if $(.debug)
+ {
+ generators.dout [ indent ] "*** construct" $(target-type) ;
+
+ for local s in $(sources)
+ {
+ generators.dout [ indent ] " from" $(s) ;
+ }
+ generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
+ }
+
+ local result = [ construct-really $(project) $(name) : $(target-type) :
+ $(property-set) : $(sources) ] ;
+
+ decrease-indent ;
+
+ .construct-stack = $(.construct-stack[2-]) ;
+
+ if $(top-level)
+ {
+ .active-generators = $(saved-active) ;
+ }
+
+ return $(result) ;
+}
+
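+# A hedged usage sketch (illustrative names; assumes a project target, a
+# property-set instance and virtual-target sources are already at hand):
+#
+#   local result = [ generators.construct $(project) hello : EXE
+#       : $(property-set) : $(sources) ] ;
+#
+# When construction succeeds, the first element of the returned list is a
+# property-set holding usage requirements and the remaining elements are the
+# created virtual targets.
+#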
+# Given 'result', obtained from some generator or generators.construct, adds
+# 'raw-properties' as usage requirements to it. If result already contains usage
+# requirements -- that is, the first element of result is an instance of the
+# property-set class -- the existing usage requirements and 'raw-properties' are
+# combined.
+#
+rule add-usage-requirements ( result * : raw-properties * )
+{
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+ }
+ else
+ {
+ return [ property-set.create $(raw-properties) ] $(result) ;
+ }
+ }
+}
+
+rule dump ( )
+{
+ for local g in $(.all-generators)
+ {
+ ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
+ }
+}
+
diff --git a/src/boost/tools/build/src/build/generators.py b/src/boost/tools/build/src/build/generators.py
new file mode 100644
index 000000000..4e77276d7
--- /dev/null
+++ b/src/boost/tools/build/src/build/generators.py
@@ -0,0 +1,1209 @@
+# Status: being ported by Vladimir Prus
+# Base revision: 48649
+# TODO: replace the logging with dout
+
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Manages 'generators' --- objects which can do transformations between different
+# target types and contain the algorithm for finding a transformation from
+# sources to targets.
+#
+# The main entry point to this module is the generators.construct rule. It is
+# given a list of source targets, the desired target type and a set of properties.
+# It starts by selecting 'viable generators', which have any chance of producing
+# the desired target type with the required properties. Generators are ranked and
+# a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the properties
+# and the list of sources. Each one selects the targets which can be directly
+# consumed and tries to convert the remaining ones to the types it can consume.
+# This is done by recursively calling 'construct' with all consumable types.
+#
+# If a generator has collected all the targets it needs, it creates targets
+# corresponding to the result and returns them. When all generators have been run,
+# the results of one of them are selected and returned as the final result.
+#
+# It is quite possible that 'construct' returns more targets than it was asked for.
+# For example, it may have been asked for target type EXE, but the only generator
+# found produces both EXE and TDS (a file with debug information). The extra
+# target will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it can
+# get more targets than it was asked for. The question is what to do with the
+# extra targets. Boost.Build attempts to convert them to the requested types, and
+# attempts to do so as early as possible. Specifically, this is done after
+# invoking each generator. (The rationale for trying extra target conversion at
+# that point is to be documented later.)
+#
+# That early conversion is not always desirable. Suppose a generator got a source
+# of type Y and must consume one target of type X_1 and one target of type X_2.
+# When converting Y to X_1, an extra target of type Y_2 is created. We should not
+# try to convert it to type X_1, because if we do so, the generator will get two
+# targets of type X_1 and will be at a loss as to which one to use. Because of
+# that, the 'construct' rule has a parameter telling whether multiple targets can
+# be returned. If the parameter is false, conversion of extra targets is not
+# performed.
+
+
+import re
+import cStringIO
+import os.path
+
+from virtual_target import Subvariant
+from . import virtual_target, type, property_set, property
+from b2.exceptions import BaseBoostBuildException
+from b2.util.logger import *
+from b2.util.utility import *
+from b2.util import set as set_, is_iterable_typed, is_iterable, bjam_signature
+from b2.util.sequence import unique
+import b2.util.sequence as sequence
+from b2.manager import get_manager
+import b2.build.type
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
+ global __overrides, __active_generators
+ global __viable_generators_cache, __viable_source_types_cache
+ global __vstg_cached_generators, __vst_cached_types
+
+ __generators = {}
+ __type_to_generators = {}
+ __generators_for_toolset = {}
+ __overrides = {}
+
+ # TODO: can these be global?
+ __construct_stack = []
+ __viable_generators_cache = {}
+ __viable_source_types_cache = {}
+ __active_generators = []
+
+ __vstg_cached_generators = []
+ __vst_cached_types = []
+
+reset ()
+
+_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
+_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?')
+
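+# For example (illustrative), matching "H(%_symbols)" against the first pattern
+# yields the type "H", the text before '%' ("") as the name prefix, and the text
+# after '%' ("_symbols") as the name postfix; a plain "OBJ" yields just the type
+# with no name pattern.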
+
+__debug = None
+__indent = ""
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-generators" in bjam.variable("ARGV")
+ return __debug
+
+def increase_indent():
+ global __indent
+ __indent += " "
+
+def decrease_indent():
+ global __indent
+ __indent = __indent[0:-4]
+
+
+# Updates cached viable source target type information as needed after a new
+# derived target type gets added. This is needed because if a target type is a
+# viable source target type for some generator then all of the target type's
+# derived target types are automatically viable as source target types for the
+# same generator. Does nothing if a non-derived target type is passed to it.
+#
+def update_cached_information_with_a_new_type(type):
+ assert isinstance(type, basestring)
+ base_type = b2.build.type.base(type)
+
+ if base_type:
+ for g in __vstg_cached_generators:
+ if base_type in __viable_source_types_cache.get(g, []):
+ __viable_source_types_cache[g].append(type)
+
+ for t in __vst_cached_types:
+ if base_type in __viable_source_types_cache.get(t, []):
+ __viable_source_types_cache[t].append(type)
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+def invalidate_extendable_viable_source_target_type_cache():
+
+ global __vstg_cached_generators
+ generators_with_cached_source_types = __vstg_cached_generators
+ __vstg_cached_generators = []
+
+ for g in generators_with_cached_source_types:
+ if g in __viable_source_types_cache:
+ if __viable_source_types_cache[g] == ["*"]:
+ __vstg_cached_generators.append(g)
+ else:
+ del __viable_source_types_cache[g]
+
+ global __vst_cached_types
+ types_with_cached_sources_types = __vst_cached_types
+ __vst_cached_types = []
+ for t in types_with_cached_sources_types:
+ if t in __viable_source_types_cache:
+ if __viable_source_types_cache[t] == ["*"]:
+ __vst_cached_types.append(t)
+ else:
+ del __viable_source_types_cache[t]
+
+def dout(message):
+ if debug():
+ print __indent + message
+
+
+class InvalidTargetSource(BaseBoostBuildException):
+ """
+ Should be raised when a target contains a source that is invalid.
+ """
+
+
+class Generator:
+ """ Creates a generator.
+ manager: the build manager.
+ id: identifies the generator
+
+ rule: the rule which sets up build actions.
+
+ composing: whether the generator processes each source target in
+ turn, converting it to the required types.
+ Ordinary generators pass all sources together to the
+ recursive generators.construct_types call.
+
+ source_types (optional): types that this generator can handle
+
+ target_types_and_names: types the generator will create and, optionally, names for
+ created targets. Each element should have the form
+ type["(" name-pattern ")"]
+ for example, obj(%_x). The name of a generated target will be found
+ by replacing % with the name of the source, provided an explicit name
+ was not specified.
+
+ requirements (optional)
+
+ NOTE: all subclasses must have a similar signature for clone to work!
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
+ assert isinstance(id, basestring)
+ assert isinstance(composing, bool)
+ assert is_iterable_typed(source_types, basestring)
+ assert is_iterable_typed(target_types_and_names, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ self.id_ = id
+ self.composing_ = composing
+ self.source_types_ = source_types
+ self.target_types_and_names_ = target_types_and_names
+ self.requirements_ = requirements
+
+ self.target_types_ = []
+ self.name_prefix_ = []
+ self.name_postfix_ = []
+
+ for e in target_types_and_names:
+ # Create three parallel lists: one with the list of target types,
+ # and two other with prefixes and postfixes to be added to target
+ # name. We use parallel lists for prefix and postfix (as opposed
+ # to a mapping), because a given target type might occur several times,
+ # for example "H H(%_symbols)".
+ m = _re_separate_types_prefix_and_postfix.match (e)
+
+ if not m:
+ raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))
+
+ target_type = m.group (1)
+ if not target_type: target_type = ''
+ prefix = m.group (3)
+ if not prefix: prefix = ''
+ postfix = m.group (4)
+ if not postfix: postfix = ''
+
+ self.target_types_.append (target_type)
+ self.name_prefix_.append (prefix)
+ self.name_postfix_.append (postfix)
+
+ for x in self.source_types_:
+ type.validate (x)
+
+ for x in self.target_types_:
+ type.validate (x)
+
+ def clone (self, new_id, new_toolset_properties):
+ """ Returns another generator which differs from $(self) in:
+ - id
+ - the value of the <toolset> feature in properties
+ """
+ assert isinstance(new_id, basestring)
+ assert is_iterable_typed(new_toolset_properties, basestring)
+ return self.__class__ (new_id,
+ self.composing_,
+ self.source_types_,
+ self.target_types_and_names_,
+ # Note: this does not remove any subfeatures of <toolset>
+ # which might cause problems
+ property.change (self.requirements_, '<toolset>') + new_toolset_properties)
+
+ def clone_and_change_target_type(self, base, type):
+ """Creates another generator that is the same as $(self), except that
+ if 'base' is in the target types of $(self), 'type' will be in the target
+ types of the new generator."""
+ assert isinstance(base, basestring)
+ assert isinstance(type, basestring)
+ target_types = []
+ for t in self.target_types_and_names_:
+ m = _re_match_type.match(t)
+ assert m
+
+ if m.group(1) == base:
+ if m.group(2):
+ target_types.append(type + m.group(2))
+ else:
+ target_types.append(type)
+ else:
+ target_types.append(t)
+
+ return self.__class__(self.id_, self.composing_,
+ self.source_types_,
+ target_types,
+ self.requirements_)
+
+
+ def id(self):
+ return self.id_
+
+ def source_types (self):
+ """ Returns the list of target types the generator accepts.
+ """
+ return self.source_types_
+
+ def target_types (self):
+ """ Returns the list of target types that this generator produces.
+ It is assumed to always be the same -- i.e. it cannot change depending
+ on the list of sources.
+ """
+ return self.target_types_
+
+ def requirements (self):
+ """ Returns the required properties for this generator. Properties
+ in the returned set must be present in the build properties if this
+ generator is to be used. If the result has a grist-only element,
+ the build properties must include some value of that feature.
+ """
+ return self.requirements_
+
+ def match_rank (self, ps):
+ """ Returns true if the generator can be run with the specified
+ properties.
+ """
+ # See if generator's requirements are satisfied by
+ # 'properties'. Treat a feature name in requirements
+ # (i.e. grist-only element), as matching any value of the
+ # feature.
+ assert isinstance(ps, property_set.PropertySet)
+ all_requirements = self.requirements ()
+
+ property_requirements = []
+ feature_requirements = []
+ # This uses strings because generator requirements allow
+ # the '<feature>' syntax without value and regular validation
+ # is not happy about that.
+ for r in all_requirements:
+ if get_value (r):
+ property_requirements.append (r)
+
+ else:
+ feature_requirements.append (r)
+
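+ # For instance (illustrative values): a requirement '<toolset>gcc' is
+ # satisfied only when the property set contains exactly that value, while a
+ # grist-only requirement such as '<toolset>' is satisfied by any value of
+ # the <toolset> feature.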
+ return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
+ and all(ps.get(get_grist(s)) for s in feature_requirements)
+
+ def run (self, project, name, prop_set, sources):
+ """ Tries to invoke this generator on the given sources. Returns a
+ list of generated targets (instances of 'virtual-target').
+
+ project: Project for which the targets are generated.
+
+ name: Determines the value of the 'name' attribute for
+ all generated targets. See 'generated_targets' method.
+
+ prop_set: Desired properties for generated targets.
+
+ sources: Source targets.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ # intermediary targets don't have names, so None is possible
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ if project.manager ().logger ().on ():
+ project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
+ project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
+
+ if not sources:
+ s = 'An empty source list was passed in to the "{}" generator'.format(self.id_)
+ if name:
+ s += ' for target "{}"'.format(name)
+ raise InvalidTargetSource(s)
+
+ if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
+ raise BaseException ("Unsupported source/source_type combination")
+
+ # We don't run composing generators if no name is specified. The reason
+ # is that a composing generator combines several targets, which can have
+ # different names, and it cannot decide which name to give the produced
+ # target. Therefore, the name must be passed.
+ #
+ # This, in effect, means that composing generators are runnable only at
+ # the top level of a transformation graph, or if a name is passed
+ # explicitly. Thus, we disallow composing generators in the middle. For
+ # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't
+ # be allowed (the OBJ -> STATIC_LIB generator is composing).
+ if not self.composing_ or name:
+ return self.run_really (project, name, prop_set, sources)
+ else:
+ return []
+
+ def run_really (self, project, name, prop_set, sources):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ # intermediary targets don't have names, so None is possible
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ # consumed: Targets that this generator will consume directly.
+
+ if self.composing_:
+ consumed = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
+ else:
+ consumed = self.convert_to_consumable_types (project, name, prop_set, sources)
+
+ result = []
+ if consumed:
+ result = self.construct_result (consumed, project, name, prop_set)
+
+ if result:
+ if project.manager ().logger ().on ():
+ project.manager ().logger ().log (__name__, " SUCCESS: ", result)
+
+ else:
+ project.manager ().logger ().log (__name__, " FAILURE")
+
+ return result
+
+ def construct_result (self, consumed, project, name, prop_set):
+ """ Constructs the dependency graph that will be returned by this
+ generator.
+ consumed: Already prepared list of consumable targets.
+ If the generator requires several source files, it will
+ contain exactly len(self.source_types_) targets with
+ matching types. Otherwise, it might contain several
+ targets with the type of self.source_types_[0].
+ project:
+ name:
+ prop_set: Properties to be used for all actions created here
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert is_iterable_typed(consumed, virtual_target.VirtualTarget)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ result = []
+ # If this is a 1->1 transformation, apply it to all consumed targets in order.
+ if len (self.source_types_) < 2 and not self.composing_:
+
+ for r in consumed:
+ result.extend(self.generated_targets([r], prop_set, project, name))
+ elif consumed:
+ result.extend(self.generated_targets(consumed, prop_set, project, name))
+
+ return result
+
+ def determine_target_name(self, fullname):
+ assert isinstance(fullname, basestring)
+ # Determine target name from fullname (maybe including path components)
+ # Place optional prefix and postfix around basename
+
+ dir = os.path.dirname(fullname)
+ name = os.path.basename(fullname)
+ idx = name.find(".")
+ if idx != -1:
+ name = name[:idx]
+
+ if dir and not ".." in dir and not os.path.isabs(dir):
+ # Relative path is always relative to the source
+ # directory. Retain it, so that users can have files
+ # with the same name in two different subdirectories.
+ name = dir + "/" + name
+
+ return name
+
+ def determine_output_name(self, sources):
+ """Determine the name of the produced target from the
+ names of the sources."""
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ # The simple case is when the name
+ # of a source has a single dot. Then we take the part before
+ # the dot. Several dots can be caused by:
+ # - Using a source file like a.host.cpp
+ # - A type whose suffix has a dot. Say, we can have a
+ # type 'host_cpp' with extension 'host.cpp'.
+ # In the first case, we want to take the part up to the last
+ # dot. In the second case it is less clear, but for now we take
+ # the part up to the last dot too.
+ name = os.path.splitext(sources[0].name())[0]
+
+ for s in sources[1:]:
+ n2 = os.path.splitext(s.name())[0]
+ if n2 != name:
+ get_manager().errors()(
+ "%s: source targets have different names: cannot determine target name"
+ % (self.id_))
+
+ # Names of sources might include directory. We should strip it.
+ return self.determine_target_name(sources[0].name())
+
+
+ def generated_targets (self, sources, prop_set, project, name):
+ """ Constructs targets that are created after consuming 'sources'.
+ The result will be a list of virtual-targets, which has the same length
+ as the 'target_types' attribute and with corresponding types.
+
+ When 'name' is empty, all source targets must have the same value of
+ the 'name' attribute, which will be used instead of the 'name' argument.
+
+ The value of 'name' attribute for each generated target will be equal to
+ the 'name' parameter if there's no name pattern for this type. Otherwise,
+ the '%' symbol in the name pattern will be replaced with the 'name' parameter
+ to obtain the 'name' attribute.
+
+ For example, if target types are T1 and T2 (with name pattern "%_x"), suffixes
+ for T1 and T2 are .t1 and .t2, and the source is foo.z, then the created files
+ would be "foo.t1" and "foo_x.t2". The 'name' attribute actually determines the
+ basename of a file.
+
+ Note that this pattern mechanism has nothing to do with implicit patterns
+ in make. It's a way to produce a target whose name is different from the
+ name of its source.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ if not name:
+ name = self.determine_output_name(sources)
+
+ # Assign an action for each target
+ action = self.action_class()
+ a = action(project.manager(), sources, self.id_, prop_set)
+
+ # Create generated target for each target type.
+ targets = []
+ pre = self.name_prefix_
+ post = self.name_postfix_
+ for t in self.target_types_:
+ basename = os.path.basename(name)
+ generated_name = pre[0] + basename + post[0]
+ generated_name = os.path.join(os.path.dirname(name), generated_name)
+ pre = pre[1:]
+ post = post[1:]
+
+ targets.append(virtual_target.FileTarget(generated_name, t, project, a))
+
+ return [ project.manager().virtual_targets().register(t) for t in targets ]
+
+ def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
+ """ Attempts to convert 'source' to the types that this generator can
+ handle. The intention is to produce the set of targets that should be
+ used when the generator is run.
+ only_one: convert 'source' to only one of the source types;
+ if there's more than one possibility, report an
+ error.
+
+ Returns a pair:
+ consumed: all targets that can be consumed.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(only_one, bool)
+ consumed = []
+ missing_types = []
+
+ if len (sources) > 1:
+ # Don't know how to handle several sources yet. Just try
+ # to pass the request to another generator
+ missing_types = self.source_types_
+
+ else:
+ (c, m) = self.consume_directly (sources [0])
+ consumed += c
+ missing_types += m
+
+ # No need to search for transformation if
+ # some source type has consumed source and
+ # no more source types are needed.
+ if only_one and consumed:
+ missing_types = []
+
+ # TODO: we should check that only one source type
+ # is created if 'only_one' is true.
+ # TODO: consider if consumed/bypassed separation should
+ # be done by 'construct_types'.
+
+ if missing_types:
+ transformed = construct_types (project, name, missing_types, prop_set, sources)
+
+ # Add targets of right type to 'consumed'. Add others to
+ # 'bypassed'. The 'generators.construct' rule has done
+ # its best to convert everything to the required type.
+ # There's no need to rerun it on targets of different types.
+
+ # NOTE: ignoring usage requirements
+ for t in transformed[1]:
+ if t.type() in missing_types:
+ consumed.append(t)
+
+ consumed = unique(consumed)
+
+ return consumed
+
+
+ def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
+ """ Converts several files to consumable types.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ if not self.source_types_:
+ return list(sources)
+
+ acceptable_types = set()
+ for t in self.source_types_:
+ acceptable_types.update(type.all_derived(t))
+
+ result = []
+ for source in sources:
+ if source.type() not in acceptable_types:
+ transformed = construct_types(
+ project, None,self.source_types_, prop_set, [source])
+ # construct_types returns [prop_set, [targets]]
+ for t in transformed[1]:
+ if t.type() in self.source_types_:
+ result.append(t)
+ if not transformed:
+ project.manager().logger().log(__name__, " failed to convert ", source)
+ else:
+ result.append(source)
+
+ result = sequence.unique(result, stable=True)
+ return result
+
+
+
+ def consume_directly (self, source):
+ assert isinstance(source, virtual_target.VirtualTarget)
+ real_source_type = source.type ()
+
+ # If there are no source types, we can consume anything
+ source_types = self.source_types()
+ if not source_types:
+ source_types = [real_source_type]
+
+ consumed = []
+ missing_types = []
+ for st in source_types:
+ # The 'source' is of the right type already.
+ if real_source_type == st or type.is_derived (real_source_type, st):
+ consumed = [source]
+
+ else:
+ missing_types.append (st)
+
+ return (consumed, missing_types)
+
+ def action_class (self):
+ """ Returns the class to be used for actions. The default implementation
+ returns "action".
+ """
+ return virtual_target.Action
+
+
+def find (id):
+ """ Finds the generator with id. Returns None if not found.
+ """
+ assert isinstance(id, basestring)
+ return __generators.get (id, None)
+
+def register (g):
+ """ Registers a new generator instance 'g'.
+ """
+ assert isinstance(g, Generator)
+ id = g.id()
+
+ __generators [id] = g
+
+ # A generator can produce several targets of the
+ # same type. We want a unique occurrence of that generator
+ # in .generators.$(t) in that case; otherwise, it will
+ # be tried twice and we'll get a false ambiguity.
+ for t in sequence.unique(g.target_types()):
+ __type_to_generators.setdefault(t, []).append(g)
+
+ # Update the set of generators for toolset
+
+ # TODO: should we check that a generator with this id
+ # is not already registered? For example, the fop.jam
+ # module intentionally declares two generators with the
+ # same id, so such a check would break it.
+
+ # Some generators have multiple periods in their name, so the
+ # normal $(id:S=) won't generate the right toolset name.
+ # e.g. if id = gcc.compile.c++, then
+ # .generators-for-toolset.$(id:S=) will append to
+ # .generators-for-toolset.gcc.compile, which is a separate
+ # value from .generators-for-toolset.gcc. Correcting this
+ # makes generator inheritance work properly.
+ # See also inherit-generators in module toolset
+ base = id.split ('.', 100) [0]
+
+ __generators_for_toolset.setdefault(base, []).append(g)
+
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate_extendable_viable_source_target_type_cache()
+
+
+def check_register_types(fn):
+ def wrapper(id, source_types, target_types, requirements=[]):
+ assert isinstance(id, basestring)
+ assert is_iterable_typed(source_types, basestring)
+ assert is_iterable_typed(target_types, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ return fn(id, source_types, target_types, requirements=requirements)
+ wrapper.__name__ = fn.__name__
+ wrapper.__doc__ = fn.__doc__
+ return wrapper
+
+
+@bjam_signature([['id'], ['source_types', '*'], ['target_types', '*'], ['requirements', '*']])
+@check_register_types
+def register_standard (id, source_types, target_types, requirements = []):
+ """ Creates a new instance of the 'generator' class and registers it.
+ Returns the created instance.
+ Rationale: the instance is returned so that it's possible to first register
+ a generator and then call the 'run' method on that generator, bypassing all
+ generator selection.
+ """
+ g = Generator (id, False, source_types, target_types, requirements)
+ register (g)
+ return g
+
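+# A hedged usage sketch (the generator id, types and requirement are
+# hypothetical); a toolset module might register a simple one-to-one compiler
+# generator like this and rely on the selection machinery from then on:
+#
+#   register_standard('hypothetical.compile', ['CPP'], ['OBJ'],
+#                     ['<toolset>hypothetical'])
+#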
+
+@check_register_types
+def register_composing (id, source_types, target_types, requirements = []):
+ g = Generator (id, True, source_types, target_types, requirements)
+ register (g)
+ return g
+
+def generators_for_toolset (toolset):
+ """ Returns all generators which belong to 'toolset'.
+ """
+ assert isinstance(toolset, basestring)
+ return __generators_for_toolset.get(toolset, [])
+
+def override (overrider_id, overridee_id):
+ """Make generator 'overrider-id' be preferred to
+ 'overridee-id'. If, when searching for generators
+ that could produce a target of a certain type,
+ both those generators are among viable generators,
+ the overridden generator is immediately discarded.
+
+ The overridden generators are discarded immediately
+ after computing the list of viable generators, before
+ running any of them."""
+ assert isinstance(overrider_id, basestring)
+ assert isinstance(overridee_id, basestring)
+
+ __overrides.setdefault(overrider_id, []).append(overridee_id)
+
+def __viable_source_types_real (target_type):
+ """ Returns a list of source types which can possibly be converted
+ to 'target_type' by some chain of generator invocations.
+
+ More formally, takes all generators for 'target_type' and
+ returns the union of source types for those generators and the result
+ of calling itself recursively on those source types.
+ """
+ assert isinstance(target_type, basestring)
+ generators = []
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ t0 = type.all_bases (target_type)
+
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ t = t0
+
+ result = []
+ while t:
+ # Find all generators for current type.
+ # Unlike 'find_viable_generators' we don't care about prop_set.
+ generators = __type_to_generators.get (t [0], [])
+ t = t[1:]
+
+ for g in generators:
+ if not g.source_types():
+ # Empty source types -- everything can be accepted
+ result = "*"
+ # This will terminate outer loop.
+ t = None
+ break
+
+ for source_type in g.source_types ():
+ if not source_type in result:
+ # If generator accepts 'source_type' it
+ # will happily accept any type derived from it
+ all = type.all_derived (source_type)
+ for n in all:
+ if not n in result:
+
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if not n in t0:
+ t.append (n)
+ result.append (n)
+
+ return result
+
+
+def viable_source_types (target_type):
+ """ Helper rule, caches the result of '__viable_source_types_real'.
+ """
+ assert isinstance(target_type, basestring)
+ if target_type not in __viable_source_types_cache:
+ __vst_cached_types.append(target_type)
+ __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
+ return __viable_source_types_cache [target_type]
+
+def viable_source_types_for_generator_real (generator):
+ """ Returns the list of source types, which, when passed to 'run'
+ method of 'generator', have some chance of being eventually used
+ (probably after conversion by other generators).
+ """
+ assert isinstance(generator, Generator)
+ source_types = generator.source_types ()
+
+ if not source_types:
+ # If generator does not specify any source types,
+ # it might be a special generator like builtin.lib-generator
+ # which just relays to other generators. Return '*' to
+ # indicate that any source type is possibly OK, since we don't
+ # know for sure.
+ return ['*']
+
+ else:
+ result = []
+ for s in source_types:
+ viable_sources = viable_source_types(s)
+ if viable_sources == "*":
+ result = ["*"]
+ break
+ else:
+ result.extend(type.all_derived(s) + viable_sources)
+ return unique(result)
+
+def viable_source_types_for_generator (generator):
+ """ Caches the result of 'viable_source_types_for_generator_real'.
+ """
+ assert isinstance(generator, Generator)
+ if generator not in __viable_source_types_cache:
+ __vstg_cached_generators.append(generator)
+ __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
+
+ return __viable_source_types_cache[generator]
+
+def try_one_generator_really (project, name, generator, target_type, properties, sources):
+ """ Returns usage requirements + list of created targets.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(generator, Generator)
+ assert isinstance(target_type, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ targets = generator.run (project, name, properties, sources)
+
+ usage_requirements = []
+ success = False
+
+ dout("returned " + str(targets))
+
+ if targets:
+ success = True;
+
+ if isinstance (targets[0], property_set.PropertySet):
+ usage_requirements = targets [0]
+ targets = targets [1]
+
+ else:
+ usage_requirements = property_set.empty ()
+
+ dout( " generator" + generator.id() + " spawned ")
+ # generators.dout [ indent ] " " $(targets) ;
+# if $(usage-requirements)
+# {
+# generators.dout [ indent ] " with usage requirements:" $(x) ;
+# }
+
+ if success:
+ return (usage_requirements, targets)
+ else:
+ return None
+
+def try_one_generator (project, name, generator, target_type, properties, sources):
+ """ Checks if generator invocation can be pruned, because it's guaranteed
+ to fail. If so, quickly returns an empty list. Otherwise, calls
+ try_one_generator_really.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(generator, Generator)
+ assert isinstance(target_type, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ source_types = []
+
+ for s in sources:
+ source_types.append (s.type ())
+
+ viable_source_types = viable_source_types_for_generator (generator)
+
+ if source_types and viable_source_types != ['*'] and\
+ not set_.intersection (source_types, viable_source_types):
+ if project.manager ().logger ().on ():
+ id = generator.id ()
+ project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
+ project.manager ().logger ().log (__name__, "source_types" '%s' % source_types)
+ project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
+
+ return []
+
+ else:
+ return try_one_generator_really (project, name, generator, target_type, properties, sources)
+
+
+def construct_types (project, name, target_types, prop_set, sources):
+
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert is_iterable_typed(target_types, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ result = []
+ usage_requirements = property_set.empty()
+
+ for t in target_types:
+ r = construct (project, name, t, prop_set, sources)
+
+ if r:
+ (ur, targets) = r
+ usage_requirements = usage_requirements.add(ur)
+ result.extend(targets)
+
+ # TODO: have to introduce parameter controlling if
+ # several types can be matched and add appropriate
+ # checks
+
+ # TODO: need to review the documentation for
+ # 'construct' to see if it should return $(source) even
+        # if nothing can be done with it. Current docs seem to
+ # imply that, contrary to the behaviour.
+ if result:
+ return (usage_requirements, result)
+
+ else:
+ return (usage_requirements, sources)
+
+def __ensure_type (targets):
+ """ Ensures all 'targets' have types. If this is not so, exists with
+ error.
+ """
+ assert is_iterable_typed(targets, virtual_target.VirtualTarget)
+ for t in targets:
+ if not t.type ():
+ get_manager().errors()("target '%s' has no type" % str (t))
+
+def find_viable_generators_aux (target_type, prop_set):
+ """ Returns generators which can be used to construct target of specified type
+ with specified properties. Uses the following algorithm:
+        - iterates over requested target_type and all its bases (in the order returned by
+          type.all_bases).
+        - for each type, finds all generators that generate that type and whose requirements
+          are satisfied by the properties.
+ - if the set of generators is not empty, returns that set.
+
+ Note: this algorithm explicitly ignores generators for base classes if there's
+ at least one generator for requested target_type.
+ """
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ # Select generators that can create the required target type.
+ viable_generators = []
+ initial_generators = []
+
+ from . import type
+
+ # Try all-type generators first. Assume they have
+ # quite specific requirements.
+ all_bases = type.all_bases(target_type)
+
+ for t in all_bases:
+
+ initial_generators = __type_to_generators.get(t, [])
+
+ if initial_generators:
+ dout("there are generators for this type")
+ if t != target_type:
+                # We get here when no generators for target-type are found,
+ # but there are some generators for a base type.
+ # We'll try to use them, but they will produce targets of
+ # base type, not of 'target-type'. So, we clone the generators
+ # and modify the list of target types.
+ generators2 = []
+ for g in initial_generators[:]:
+ # generators.register adds generator to the list of generators
+ # for toolsets, which is a bit strange, but should work.
+                    # That list is only used when inheriting a toolset, which
+                    # should have been done before generators are run.
+ ng = g.clone_and_change_target_type(t, target_type)
+ generators2.append(ng)
+ register(ng)
+
+ initial_generators = generators2
+ break
+
+ for g in initial_generators:
+ dout("trying generator " + g.id()
+ + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
+
+ m = g.match_rank(prop_set)
+ if m:
+ dout(" is viable")
+ viable_generators.append(g)
+
+ return viable_generators
+
+def find_viable_generators (target_type, prop_set):
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ key = target_type + '.' + str (prop_set)
+
+ l = __viable_generators_cache.get (key, None)
+ if not l:
+ l = []
+
+ if not l:
+ l = find_viable_generators_aux (target_type, prop_set)
+
+ __viable_generators_cache [key] = l
+
+ viable_generators = []
+ for g in l:
+ # Avoid trying the same generator twice on different levels.
+ # TODO: is this really used?
+ if not g in __active_generators:
+ viable_generators.append (g)
+ else:
+ dout(" generator %s is active, discarding" % g.id())
+
+ # Generators which override 'all'.
+ all_overrides = []
+
+ # Generators which are overridden
+ overriden_ids = []
+
+ for g in viable_generators:
+ id = g.id ()
+
+ this_overrides = __overrides.get (id, [])
+
+ if this_overrides:
+ overriden_ids.extend (this_overrides)
+ if 'all' in this_overrides:
+ all_overrides.append (g)
+
+ if all_overrides:
+ viable_generators = all_overrides
+
+ return [g for g in viable_generators if not g.id() in overriden_ids]
+
+def __construct_really (project, name, target_type, prop_set, sources):
+ """ Attempts to construct target by finding viable generators, running them
+ and selecting the dependency graph.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ viable_generators = find_viable_generators (target_type, prop_set)
+
+ result = []
+
+ dout(" *** %d viable generators" % len (viable_generators))
+
+ generators_that_succeeded = []
+
+ for g in viable_generators:
+ __active_generators.append(g)
+ r = try_one_generator (project, name, g, target_type, prop_set, sources)
+ del __active_generators[-1]
+
+ if r:
+ generators_that_succeeded.append(g)
+ if result:
+ output = cStringIO.StringIO()
+ print >>output, "ambiguity found when searching for best transformation"
+ print >>output, "Trying to produce type '%s' from: " % (target_type)
+ for s in sources:
+ print >>output, " - " + s.str()
+ print >>output, "Generators that succeeded:"
+ for g in generators_that_succeeded:
+ print >>output, " - " + g.id()
+ print >>output, "First generator produced: "
+ for t in result[1:]:
+ print >>output, " - " + str(t)
+ print >>output, "Second generator produced:"
+ for t in r[1:]:
+ print >>output, " - " + str(t)
+ get_manager().errors()(output.getvalue())
+ else:
+                result = r
+
+    return result
+
+
+def construct (project, name, target_type, prop_set, sources, top_level=False):
+ """ Attempts to create target of 'target-type' with 'properties'
+ from 'sources'. The 'sources' are treated as a collection of
+ *possible* ingridients -- i.e. it is not required to consume
+ them all. If 'multiple' is true, the rule is allowed to return
+ several targets of 'target-type'.
+
+ Returns a list of target. When this invocation is first instance of
+ 'construct' in stack, returns only targets of requested 'target-type',
+ otherwise, returns also unused sources and additionally generated
+ targets.
+
+ If 'top-level' is set, does not suppress generators that are already
+ used in the stack. This may be useful in cases where a generator
+ has to build a metatarget -- for example a target corresponding to
+ built tool.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(top_level, bool)
+ global __active_generators
+ if top_level:
+ saved_active = __active_generators
+ __active_generators = []
+
+ global __construct_stack
+ if not __construct_stack:
+ __ensure_type (sources)
+
+ __construct_stack.append (1)
+
+ increase_indent ()
+
+ if project.manager().logger().on():
+ dout( "*** construct " + target_type)
+
+ for s in sources:
+ dout(" from " + str(s))
+
+ project.manager().logger().log (__name__, " properties: ", prop_set.raw ())
+
+ result = __construct_really(project, name, target_type, prop_set, sources)
+
+ decrease_indent()
+
+ __construct_stack = __construct_stack [1:]
+
+ if top_level:
+ __active_generators = saved_active
+
+ return result
+
+def add_usage_requirements (result, raw_properties):
+ if result:
+ if isinstance (result[0], property_set.PropertySet):
+ return (result[0].add_raw(raw_properties), result[1])
+ else:
+ return (property_set.create(raw_properties), result)
+ #if [ class.is-a $(result[1]) : property-set ]
+ #{
+ # return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+ #}
+ #else
+ #{
+ # return [ property-set.create $(raw-properties) ] $(result) ;
+ #}
diff --git a/src/boost/tools/build/src/build/project.jam b/src/boost/tools/build/src/build/project.jam
new file mode 100644
index 000000000..172315e4f
--- /dev/null
+++ b/src/boost/tools/build/src/build/project.jam
@@ -0,0 +1,1357 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented by:
+# - a module where all the Jamfile content lives.
+# - an instance of 'project-attributes' class.
+# (given a module name, can be obtained using the 'attributes' rule)
+# - an instance of 'project-target' class (from targets.jam)
+# (given a module name, can be obtained using the 'target' rule)
+#
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by rules 'load' and 'initialize', below. First, a module is prepared and
+# a new project-attributes instance is created. Some rules necessary for all
+# projects are added to the module (see the 'project-rules' module). Default
+# project attributes are set (inheriting the parent project's attributes, if
+# one exists). After that the Jamfile is read. It can declare its own
+# attributes using the 'project' rule, which will be combined with any already
+# set.
+#
+# The 'project' rule can also declare a project id which will be associated with
+# the project module.
+#
+# Besides Jamfile projects, we also support 'standalone' projects created by
+# calling 'initialize' in an arbitrary module and not specifying the project's
+# location. After the call, the module can call the 'project' rule, declare main
+# targets and behave as a regular project except that, since it is not
+# associated with any location, it should only declare prebuilt targets.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
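+#
+# For illustration, a minimal Jamroot using these mechanisms might look like
+# the following (the '/example' id and the 'util' directory are hypothetical):
+#
+#   project /example
+#       : requirements <warnings>all
+#       : build-dir bin
+#       ;
+#   use-project /example/util : util ;
+#   build-project util ;
+#
+# Loading such a Jamroot creates the Jamfile module, a project-attributes
+# instance holding the declared attributes, and a project-target instance, as
+# described above.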
+
+import "class" : new ;
+import modules ;
+import path ;
+import print ;
+import property-set ;
+import sequence ;
+
+
+.debug-loading = [ MATCH ^(--debug-loading)$ : [ modules.peek : ARGV ] ] ;
+
+
+# Loads the Jamfile at the given location. After loading, project global file
+# and Jamfiles needed by the requested one will be loaded recursively. If the
+# Jamfile at that location is loaded already, does nothing. Returns the project
+# module for the Jamfile.
+#
+rule load ( jamfile-location : synthesize ? )
+{
+ local module-name = [ module-name $(jamfile-location) ] ;
+ # If Jamfile is already loaded, do not try again.
+ if ! $(module-name) in $(.jamfile-modules)
+ {
+ if $(.debug-loading)
+ {
+ ECHO Loading Jamfile at '$(jamfile-location)' ;
+ }
+
+ load-jamfile $(jamfile-location) : $(module-name) : $(synthesize) ;
+
+        # We want to make sure that child projects are loaded only after their
+        # parent projects, because parent projects define attributes which are
+        # then inherited by children, and we do not want children to be loaded
+        # before the parent has defined everything.
+ #
+ # While "build-project" and "use-project" can potentially refer to child
+ # projects from parent projects, we do not immediately load child
+ # projects when seeing those attributes. Instead, we record the minimal
+ # information to be used only later.
+ load-used-projects $(module-name) ;
+ }
+ return $(module-name) ;
+}
+
+
+rule load-used-projects ( module-name )
+{
+ local used = [ modules.peek $(module-name) : .used-projects ] ;
+ local location = [ attribute $(module-name) location ] ;
+ while $(used)
+ {
+ local id = $(used[1]) ;
+ local where = [ path.make $(used[2]) ] ;
+ register-id $(id) : [ load [ path.root $(where) $(location) ] ] ;
+ used = $(used[3-]) ;
+ }
+}
+
+
+# Note the use of character groups, as opposed to listing 'Jamroot' and
+# 'jamroot'. With the latter, we would get duplicate matches on Windows and
+# would have to eliminate duplicates.
+JAMROOT ?= [ modules.peek : JAMROOT ] ;
+JAMROOT ?= project-root.jam "[Jj]amroot" "[Jj]amroot." "[Jj]amroot.jam" ;
+
+
+# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
+#
+rule load-parent ( location )
+{
+ local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ;
+ if $(found)
+ {
+ return [ load $(found[1]:D) ] ;
+ }
+}
+
+
+# Returns the project module corresponding to the given project-id or plain
+# directory name. Returns nothing if such a project can not be found.
+#
+rule find ( name : current-location )
+{
+ local project-module ;
+
+ # Try interpreting name as project id.
+ if [ path.is-rooted $(name) ]
+ {
+ project-module = $($(name).jamfile-module) ;
+ }
+
+ if ! $(project-module)
+ {
+ local location = [ path.root [ path.make $(name) ] $(current-location) ]
+ ;
+
+ # If no project is registered for the given location, try to load it.
+ # First see if we have a Jamfile. If not, then see if we might have a
+ # project root willing to act as a Jamfile. In that case, project root
+ # must be placed in the directory referred to by id.
+
+ project-module = [ module-name $(location) ] ;
+ if ! $(project-module) in $(.jamfile-modules)
+ {
+ if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
+ {
+ project-module = [ load $(location) ] ;
+ }
+ else
+ {
+ project-module = ;
+ }
+ }
+ }
+
+ return $(project-module) ;
+}
+
+
+# Returns the name of the module corresponding to 'jamfile-location'. If no
+# module corresponds to that location yet, associates the default module name
+# with that location.
+#
+rule module-name ( jamfile-location )
+{
+ if ! $(.module.$(jamfile-location))
+ {
+ # Root the path, so that locations are always unambiguous. Without this,
+ # we can not decide if '../../exe/program1' and '.' are the same paths.
+ local normalized = [ path.root $(jamfile-location) [ path.pwd ] ] ;
+
+ # Quick & dirty fix to get the same module name when we supply two
+ # equivalent location paths, e.g. 'd:\Foo' & 'D:\fOo\bar\..' on Windows.
+ # Note that our current implementation will not work correctly if the
+ # given location references an empty folder, but in that case any later
+ # attempt to load a Jamfile from this location will fail anyway.
+ # FIXME: Implement this cleanly. Support for this type of path
+ # normalization already exists internally in Boost Jam and the current
+ # fix relies on the GLOB builtin rule using that support. Most likely we
+ # just need to add a new builtin rule to do this explicitly.
+ normalized = [ NORMALIZE_PATH $(normalized) ] ;
+ local glob-result = [ GLOB [ path.native $(normalized) ] : * ] ;
+ if $(glob-result)
+ {
+ normalized = $(glob-result[1]:D) ;
+ }
+ .module.$(jamfile-location) = Jamfile<$(normalized)> ;
+ }
+ return $(.module.$(jamfile-location)) ;
+}
+
+
+# Default patterns to search for the Jamfiles to use for build declarations.
+#
+JAMFILE = [ modules.peek : JAMFILE ] ;
+JAMFILE ?= "[Bb]uild.jam" "[Jj]amfile.v2" "[Jj]amfile" "[Jj]amfile." "[Jj]amfile.jam" ;
+
+
+# Find the Jamfile at the given location. This returns the exact names of all
+# the Jamfiles in the given directory. The optional parent-root argument causes
+# this to search not the given directory but the ones above it up to the
+# parent-root directory.
+#
+rule find-jamfile (
+ dir # The directory(s) to look for a Jamfile.
+ parent-root ? # Optional flag indicating to search for the parent Jamfile.
+ : no-errors ?
+ )
+{
+ # Glob for all the possible Jamfiles according to the match pattern.
+ #
+ local jamfile-glob = ;
+ if $(parent-root)
+ {
+ if ! $(.parent-jamfile.$(dir))
+ {
+ .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE)
+ ] ;
+ }
+ jamfile-glob = $(.parent-jamfile.$(dir)) ;
+ }
+ else
+ {
+ if ! $(.jamfile.$(dir))
+ {
+ .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
+ }
+ jamfile-glob = $(.jamfile.$(dir)) ;
+
+ }
+
+ local jamfile-to-load = $(jamfile-glob) ;
+ # Multiple Jamfiles found in the same place. Warn about this and ensure we
+    # use only one of them. As a temporary convenience measure, if there is a
+    # Jamfile.v2 among the found files, suppress the warning and use it.
+ #
+ if $(jamfile-to-load[2-])
+ {
+ local v2-jamfiles = [ MATCH "^(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)$" :
+ $(jamfile-to-load) ] ;
+
+ if $(v2-jamfiles) && ! $(v2-jamfiles[2])
+ {
+ jamfile-to-load = $(v2-jamfiles) ;
+ }
+ else
+ {
+ local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
+ ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
+ "Loading the first one: '$(jamfile)'." ;
+ }
+
+ jamfile-to-load = $(jamfile-to-load[1]) ;
+ }
+
+ # Could not find it, error.
+ #
+ if ! $(no-errors) && ! $(jamfile-to-load)
+ {
+ import errors ;
+ errors.error Unable to load Jamfile.
+ : Could not find a Jamfile in directory '$(dir)'.
+ : Attempted to find it with pattern '$(JAMFILE:J=" ")'.
+ : Please consult the documentation at "'http://www.boost.org'." ;
+ }
+
+ return $(jamfile-to-load) ;
+}
+
+
+# Default patterns to search for auto-include of package manager build declarations.
+#
+PACKAGE_MANAGER_BUILD_INFO(CONAN) = "conanbuildinfo.jam" ;
+
+# Default to using the package manager build info in this priority order:
+# 1. Configuration, user, project, etc.
+# 2. Command line argument "--use-package-manager=<name>".
+# 3. Environment variable "PACKAGE_MANAGER_BUILD_INFO".
+# 4. Conan, others.
+#
+local .use-package-manager = [ MATCH "^--use-package-manager=(.*)$" : [ modules.peek : ARGV ] ] ;
+PACKAGE_MANAGER_BUILD_INFO ?= $(PACKAGE_MANAGER_BUILD_INFO($(.use-package-manager:U))) ;
+PACKAGE_MANAGER_BUILD_INFO ?= [ modules.peek : PACKAGE_MANAGER_BUILD_INFO ] ;
+PACKAGE_MANAGER_BUILD_INFO ?= $(PACKAGE_MANAGER_BUILD_INFO(CONAN)) ;
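+
+# For example, assuming a Conan-generated 'conanbuildinfo.jam' is present, the
+# selection can be made from the command line or the environment (both
+# invocations below are illustrative):
+#
+#   b2 --use-package-manager=conan
+#   PACKAGE_MANAGER_BUILD_INFO=conanbuildinfo.jam b2
+#
+# When nothing is specified, the Conan pattern above is used as the default.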
+
+
+# Load the configured package manager build information file.
+#
+rule load-package-manager-build-info ( )
+{
+ # This first variable is the one from the configuration (user, project, etc).
+ local package-manager-build-info = [ modules.peek [ CALLER_MODULE ] : PACKAGE_MANAGER_BUILD_INFO ] ;
+    # Otherwise, take it from the settings in the "project" module, i.e. the
+    # variable assignments above.
+ package-manager-build-info ?= $(PACKAGE_MANAGER_BUILD_INFO) ;
+ if $(package-manager-build-info)
+ {
+ local pm = [ path.glob $(dir) : $(package-manager-build-info) ] ;
+ pm = $(pm[1]) ;
+ local cm = [ CALLER_MODULE ] ;
+ local pm-tag = "$(cm)<$(pm:B)>" ;
+ if $(pm) && ! ( $(pm-tag) in $(.package-manager-build-info) )
+ {
+ .package-manager-build-info += $(pm-tag) ;
+            # We found a matching build info to load, but we have to be careful
+ # as the loading can affect the current project since it can define
+ # sub-projects. Hence we save and restore the current project.
+ local saved-project = $(.current-project) ;
+ modules.load $(cm) : $(pm) ;
+ .current-project = $(saved-project) ;
+ }
+ }
+}
+
+
+# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
+# the file as indicated by the JAMFILE patterns. Effect of calling this rule
+# twice with the same 'dir' is undefined.
+#
+local rule load-jamfile ( dir : jamfile-module : synthesize ? )
+{
+ # See if the Jamfile is where it should be.
+ #
+ local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ;
+ if ! $(jamfile-to-load)
+ {
+ jamfile-to-load = [ find-jamfile $(dir) : $(synthesize) ] ;
+ }
+
+ if $(jamfile-to-load[2])
+ {
+ import errors ;
+ errors.error "Multiple Jamfiles found at '$(dir)'" :
+ "Filenames are: " $(jamfile-to-load:D=) ;
+ }
+
+ if ! $(jamfile-to-load) && $(synthesize)
+ {
+ jamfile-to-load = $(dir)/@ ;
+ }
+
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load the parent Jamfile, which might contain
+ # a 'use-project' or a 'project.load' call, causing a second attempt to load
+ # the same project we are loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing things up.
+ if ! $(jamfile-module) in $(.jamfile-modules)
+ {
+ local previous-project = $(.current-project) ;
+
+ # Initialize the Jamfile module before loading.
+ initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ] :
+ $(jamfile-to-load:BS) ;
+
+ # Auto-load package manager(s) build information.
+ IMPORT project : load-package-manager-build-info
+ : $(jamfile-module) : project.load-package-manager-build-info ;
+ modules.call-in $(jamfile-module) : project.load-package-manager-build-info ;
+
+ if ! $(jamfile-module) in $(.jamfile-modules)
+ {
+ .jamfile-modules += $(jamfile-module) ;
+
+ local saved-project = $(.current-project) ;
+
+ mark-as-user $(jamfile-module) ;
+ if $(jamfile-to-load:B) = "@"
+ {
+            # Not a real jamfile to load. Synthesize the load.
+ modules.poke $(jamfile-module) : __name__ : $(jamfile-module) ;
+ modules.poke $(jamfile-module) : __file__ : [ path.native $(jamfile-to-load) ] ;
+ modules.poke $(jamfile-module) : __binding__ : [ path.native $(jamfile-to-load) ] ;
+ }
+ else
+ {
+ modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ]
+ : . ;
+ if [ MATCH ^($(JAMROOT))$ : $(jamfile-to-load:BS) ]
+ {
+ jamfile = [ find-jamfile $(dir) : no-errors ] ;
+ if $(jamfile)
+ {
+ load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
+ }
+ }
+ }
+
+ # Now do some checks.
+ if $(.current-project) != $(saved-project)
+ {
+ import errors ;
+ errors.error
+ The value of the .current-project variable has magically
+ : changed after loading a Jamfile. This means some of the
+ : targets might be defined in the wrong project.
+ : after loading $(jamfile-module)
+ : expected value $(saved-project)
+ : actual value $(.current-project) ;
+ }
+
+ end-load $(previous-project) ;
+
+ if $(.global-build-dir)
+ {
+ if [ attribute $(jamfile-module) location ] && ! [ attribute
+ $(jamfile-module) id ]
+ {
+ local project-root = [ attribute $(jamfile-module)
+ project-root ] ;
+ if $(project-root) = $(dir)
+ {
+ ECHO "warning: the --build-dir option was specified" ;
+ ECHO "warning: but Jamroot at '$(dir)'" ;
+ ECHO "warning: specified no project id" ;
+ ECHO "warning: the --build-dir option will be ignored" ;
+ }
+ }
+ }
+ }
+ }
+}
+
+
+# Called when done loading a project module. Restores the current project to its
+# previous value and does some additional checking to make sure our 'currently
+# loaded project' identifier does not get left with an invalid value.
+#
+rule end-load ( previous-project ? )
+{
+ if ! $(.current-project)
+ {
+ import errors ;
+ errors.error Ending project loading requested when there was no project
+ currently being loaded. ;
+ }
+
+ if ! $(previous-project) && $(.saved-current-project)
+ {
+ import errors ;
+ errors.error Ending project loading requested with no 'previous project'
+ when there were other projects still marked as being loaded
+ recursively. ;
+ }
+
+ .current-project = $(previous-project) ;
+}
+
+
+rule mark-as-user ( module-name )
+{
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+rule load-aux ( module-name : file )
+{
+ mark-as-user $(module-name) ;
+
+ module $(module-name)
+ {
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+}
+
+
+.global-build-dir = [ MATCH ^--build-dir=(.*)$ : [ modules.peek : ARGV ] ] ;
+if $(.global-build-dir)
+{
+ # If the option is specified several times, take the last value.
+ .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
+}
+
+
+# Initialize the module for a project.
+#
+rule initialize (
+ module-name # The name of the project module.
+ : location ? # The location (directory) of the project to initialize. If
+ # not specified, a standalone project will be initialized.
+ : basename ?
+ )
+{
+ if $(.debug-loading)
+ {
+ ECHO "Initializing project '$(module-name)'" ;
+ }
+
+ local jamroot ;
+
+ local parent-module ;
+ if $(module-name) in test-config all-config
+ {
+ # No parent.
+ }
+ else if $(module-name) = site-config
+ {
+ parent-module = test-config ;
+ }
+ else if $(module-name) = user-config
+ {
+ parent-module = site-config ;
+ }
+ else if $(module-name) = project-config
+ {
+ parent-module = user-config ;
+ }
+ else if $(location)
+ {
+ if ! [ MATCH ^($(JAMROOT))$ : $(basename) ]
+ {
+ # We search for parent/jamroot only if this is a jamfile project, i.e.
+            # if it is not a standalone or a jamroot project.
+ parent-module = [ load-parent $(location) ] ;
+ }
+ if ! $(parent-module)
+ {
+            # We have a jamroot project, or a jamfile project without a parent
+            # that becomes a jamroot. Inherit from user-config (or
+            # project-config, if it exists).
+ if $(project-config.attributes)
+ {
+ parent-module = project-config ;
+ }
+ else
+ {
+ parent-module = user-config ;
+ }
+ jamroot = true ;
+ }
+ }
+
+ # TODO: need to consider if standalone projects can do anything but define
+ # prebuilt targets. If so, we need to give them a more sensible "location",
+ # so that source paths are correct.
+ location ?= "" ;
+ # Create the module for the Jamfile first.
+ module $(module-name)
+ {
+ }
+
+ # load-parent can end up loading this module again. Make sure this is not
+ # duplicated.
+ if ! $($(module-name).attributes)
+ {
+ $(module-name).attributes = [ new project-attributes $(location)
+ $(module-name) ] ;
+ local attributes = $($(module-name).attributes) ;
+
+ if $(location)
+ {
+ $(attributes).set source-location : [ path.make $(location) ] :
+ exact ;
+ }
+ else
+ {
+ local cfgs = project site test user all ;
+ if ! $(module-name) in $(cfgs)-config
+ {
+ # This is a standalone project with known location. Set its
+ # source location so it can declare targets. This is needed so
+ # you can put a .jam file with your sources and use it via
+ # 'using'. Standard modules (in the 'tools' subdir) may not
+ # assume source dir is set.
+ local s = [ modules.binding $(module-name) ] ;
+ if ! $(s)
+ {
+ import errors ;
+ errors.error Could not determine project location
+ $(module-name) ;
+ }
+ $(attributes).set source-location : $(s:D) : exact ;
+ }
+ }
+
+ $(attributes).set requirements : [ property-set.empty ] : exact ;
+ $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
+
+ # Import rules common to all project modules from project-rules module,
+ # defined at the end of this file.
+ local rules = [ RULENAMES project-rules ] ;
+ IMPORT project-rules : $(rules) : $(module-name) : $(rules) ;
+
+ if $(parent-module)
+ {
+ inherit-attributes $(module-name) : $(parent-module) ;
+ $(attributes).set parent-module : $(parent-module) : exact ;
+ }
+
+ if $(jamroot)
+ {
+ $(attributes).set project-root : $(location) : exact ;
+ if ! $(.first-project-root)
+ {
+ .first-project-root = $(module-name) ;
+ }
+ }
+
+ local parent ;
+ if $(parent-module)
+ {
+ parent = [ target $(parent-module) ] ;
+ }
+
+ if ! $(.target.$(module-name))
+ {
+ local requirements = [ attribute $(module-name) requirements ] ;
+ .target.$(module-name) = [ new project-target $(module-name) :
+ $(module-name) $(parent) : $(requirements) ] ;
+
+ if $(.debug-loading)
+ {
+ ECHO Assigned project target $(.target.$(module-name)) to
+ '$(module-name)' ;
+ }
+ }
+ }
+
+ .current-project = [ target $(module-name) ] ;
+}
+
+
+# Make 'project-module' inherit attributes of project root and parent module.
+#
+rule inherit-attributes ( project-module : parent-module )
+{
+ local attributes = $($(project-module).attributes) ;
+ local pattributes = [ attributes $(parent-module) ] ;
+ # Parent module might be locationless configuration module.
+ if [ modules.binding $(parent-module) ]
+ {
+ $(attributes).set parent :
+ [ path.parent [ path.make [ modules.binding $(parent-module) ] ] ] ;
+ }
+ $(attributes).set project-root :
+ [ $(pattributes).get project-root ] : exact ;
+ $(attributes).set default-build :
+ [ $(pattributes).get default-build ] ;
+ $(attributes).set requirements :
+ [ $(pattributes).get requirements ] : exact ;
+ $(attributes).set usage-requirements :
+ [ $(pattributes).get usage-requirements ] : exact ;
+
+ local parent-build-dir = [ $(pattributes).get build-dir ] ;
+ if $(parent-build-dir)
+ {
+ # Have to compute relative path from parent dir to our dir. Convert both
+ # paths to absolute, since we cannot find relative path from ".." to
+ # ".".
+
+ local location = [ attribute $(project-module) location ] ;
+ local parent-location = [ attribute $(parent-module) location ] ;
+
+ local pwd = [ path.pwd ] ;
+ local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
+ local our-dir = [ path.root $(location) $(pwd) ] ;
+ $(attributes).set build-dir : [ path.join $(parent-build-dir)
+ [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
+ }
+}
+
+
+# Returns whether the given string is a valid registered project id.
+#
+rule is-registered-id ( id )
+{
+ return $($(id).jamfile-module) ;
+}
+
+
+# Associate the given id with the given project module. Returns the possibly
+# corrected project id.
+#
+rule register-id ( id : module )
+{
+ id = [ path.root $(id) / ] ;
+
+ if [ MATCH (//) : $(id) ]
+ {
+ import errors ;
+ errors.user-error Project id may not contain two consecutive slash
+ characters (project "id:" '$(id)'). ;
+ }
+
+ local orig-module = $($(id).jamfile-module) ;
+ if $(orig-module) && $(orig-module) != $(module)
+ {
+ local new-file = [ modules.peek $(module) : __file__ ] ;
+ local new-location = [ project.attribute $(module) location ] ;
+
+ local orig-file = [ modules.peek $(orig-module) : __file__ ] ;
+ local orig-main-id = [ project.attribute $(orig-module) id ] ;
+ local orig-location = [ project.attribute $(orig-module) location ] ;
+ local orig-project = [ target $(orig-module) ] ;
+ local orig-name = [ $(orig-project).name ] ;
+
+ import errors ;
+ errors.user-error Attempt to redeclare already registered project id
+ '$(id)'.
+ : Original "project:"
+ : " " "Name:" $(orig-name:E=---)
+ : " " "Module:" $(orig-module)
+ : " " "Main id: "$(orig-main-id:E=---)
+ : " " "File:" $(orig-file:E=---)
+ : " " "Location:" $(orig-location:E=---)
+ : New "project:"
+ : " " "Module:" $(module)
+ : " " "File:" $(new-file:E=---)
+ : " " "Location:" $(new-location:E=---) ;
+ }
+
+ $(id).jamfile-module = $(module) ;
+ return $(id) ;
+}
+
+
+# Class keeping all the attributes of a project.
+#
+# The standard attributes are "id", "location", "project-root", "parent"
+# "requirements", "default-build", "source-location" and "projects-to-build".
+#
+class project-attributes
+{
+ import path ;
+ import print ;
+ import project ;
+ import property ;
+ import property-set ;
+ import sequence ;
+
+ rule __init__ ( location project-module )
+ {
+ self.location = $(location) ;
+ self.project-module = $(project-module) ;
+ }
+
+ # Set the named attribute from the specification given by the user. The
+ # value actually set may be different.
+ #
+ rule set ( attribute : specification *
+ : exact ? # Sets value from 'specification' without any processing.
+ )
+ {
+ if $(exact)
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ else if $(attribute) = "requirements"
+ {
+ local result = [ property-set.refine-from-user-input
+ $(self.requirements) : $(specification)
+ : $(self.project-module) : $(self.location) ] ;
+
+ if $(result[1]) = "@error"
+ {
+ import errors : error : errors.error ;
+ errors.error Requirements for project at '$(self.location)'
+ conflict with parent's. : "Explanation:" $(result[2-]) ;
+ }
+
+ self.requirements = $(result) ;
+ }
+ else if $(attribute) = "usage-requirements"
+ {
+ local unconditional ;
+ for local p in $(specification)
+ {
+ local split = [ property.split-conditional $(p) ] ;
+ split ?= nothing $(p) ;
+ unconditional += $(split[2]) ;
+ }
+
+ local non-free = [ property.remove free : $(unconditional) ] ;
+ if $(non-free)
+ {
+ import errors : error : errors.error ;
+ errors.error usage-requirements $(specification) have non-free
+ properties $(non-free) ;
+ }
+ local t = [ property.translate-paths $(specification) :
+ $(self.location) ] ;
+ if $(self.usage-requirements)
+ {
+ self.usage-requirements = [ property-set.create
+ [ $(self.usage-requirements).raw ] $(t) ] ;
+ }
+ else
+ {
+ self.usage-requirements = [ property-set.create $(t) ] ;
+ }
+ }
+ else if $(attribute) = "default-build"
+ {
+ self.default-build = [ property.make $(specification) ] ;
+ }
+ else if $(attribute) = "source-location"
+ {
+ self.source-location = ;
+ for local src-path in $(specification)
+ {
+ self.source-location += [ path.root [ path.make $(src-path) ]
+ $(self.location) ] ;
+ }
+ }
+ else if $(attribute) = "build-dir"
+ {
+ self.build-dir = [ path.root [ path.make $(specification) ]
+ $(self.location) ] ;
+ }
+ else if $(attribute) = "id"
+ {
+ self.id = [ project.register-id $(specification) :
+ $(self.project-module) ] ;
+ }
+ else if ! $(attribute) in "default-build" "location" "parent"
+ "projects-to-build" "project-root" "source-location"
+ {
+ import errors : error : errors.error ;
+ errors.error Invalid project attribute '$(attribute)' specified for
+ project at '$(self.location)' ;
+ }
+ else
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ }
+
+ # Returns the value of the given attribute.
+ #
+ rule get ( attribute )
+ {
+ return $(self.$(attribute)) ;
+ }
+
+ # Returns whether these attributes belong to a Jamroot project module.
+ #
+ rule is-jamroot ( )
+ {
+ if $(self.location) && $(self.project-root) = $(self.location)
+ {
+ return true ;
+ }
+ }
+
+ # Prints the project attributes.
+ #
+ rule print ( )
+ {
+ local id = '$(self.id)' ;
+ print.section $(id:E=(none)) ;
+ print.list-start ;
+ print.list-item "Parent project:" $(self.parent:E=(none)) ;
+ print.list-item "Requirements:" [ $(self.requirements).raw ] ;
+ print.list-item "Default build:" $(self.default-build) ;
+ print.list-item "Source location:" $(self.source-location) ;
+ print.list-item "Projects to build:" [ sequence.insertion-sort
+ $(self.projects-to-build) ] ;
+ print.list-end ;
+ }
+}
+
+
+# Returns the build directory for standalone projects
+#
+rule standalone-build-dir ( )
+{
+ project = [ target $(.first-project-root) ] ;
+ return [ path.join [ $(project).build-dir ] standalone ] ;
+}
+
+# Returns the project which is currently being loaded.
+#
+rule current ( )
+{
+ if ! $(.current-project)
+ {
+ import errors ;
+ errors.error Reference to the project currently being loaded requested
+ when there was no project module being loaded. ;
+ }
+ return $(.current-project) ;
+}
+
+
+# Temporarily changes the current project to 'project'. Should be followed by
+# 'pop-current'.
+#
+rule push-current ( project ? )
+{
+ .saved-current-project += $(.current-project) ;
+ .current-project = $(project) ;
+}
+
+
+rule pop-current ( )
+{
+ .current-project = $(.saved-current-project[-1]) ;
+ .saved-current-project = $(.saved-current-project[1--2]) ;
+}
+
+
+# Returns the project-attribute instance for the specified Jamfile module.
+#
+rule attributes ( project )
+{
+ return $($(project).attributes) ;
+}
+
+
+# Returns the value of the specified attribute in the specified Jamfile module.
+#
+rule attribute ( project attribute )
+{
+ return [ $($(project).attributes).get $(attribute) ] ;
+}
+
+
+# Returns whether a project module is one of Boost Build's configuration
+# modules.
+#
+rule is-config-module ( project )
+{
+ local cfgs = project site test user ;
+ if $(project) in $(cfgs)-config
+ {
+ return true ;
+ }
+}
+
+
+# Returns whether a project module is a Jamroot project module.
+#
+rule is-jamroot-module ( project )
+{
+ return [ $($(project).attributes).is-jamroot ] ;
+}
+
+
+# Returns a project's parent jamroot module. Returns nothing if there is no such
+# module, i.e. if this is a standalone project or one of the internal Boost
+# Build configuration projects.
+#
+rule get-jamroot-module ( project )
+{
+ local jamroot-location = [ attribute $(project) project-root ] ;
+ if $(jamroot-location)
+ {
+ return [ module-name $(jamroot-location) ] ;
+ }
+}
+
+
+# Returns the project target corresponding to the 'project-module'.
+#
+rule target ( project-module : allow-missing ? )
+{
+ if ! $(.target.$(project-module)) && ! $(allow-missing)
+ {
+ import errors ;
+ errors.user-error Project target requested but not yet assigned for
+ module '$(project-module)'. ;
+ }
+ return $(.target.$(project-module)) ;
+}
+
+
+# Defines a B2 extension project. Such extensions usually contain
+# library targets and features that can be used by many people. Even though
+# extensions are really projects, they can be initialized as a module would be
+# with the "using" (project.project-rules.using) mechanism.
+#
+rule extension ( id space ? : options * : * )
+{
+ # The caller is a standalone module for the extension.
+ local mod = [ CALLER_MODULE ] ;
+
+ # We need to do the rest within the extension module.
+ module $(mod)
+ {
+ import path ;
+
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+
+ # Default to creating extensions in /ext/.. project space.
+ local id = $(1[1]) ;
+ local space = $(1[2]) ;
+ space ?= ext ;
+
+ # Create the project data, and bring in the project rules into the
+ # module.
+ project.initialize $(__name__) : [ path.join [ project.attribute
+ $(root-project) location ] $(space:L) $(id:L) ] ;
+
+ # Create the project itself, i.e. the attributes.
+ project /$(space:L)/$(id:L) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ;
+ local attributes = [ project.attributes $(__name__) ] ;
+
+        # Inherit from the root project of whoever is defining us.
+ project.inherit-attributes $(__name__) : $(root-project) ;
+ $(attributes).set parent-module : $(root-project) : exact ;
+ }
+}
+
+
+rule glob-internal ( project : wildcards + : excludes * : rule-name )
+{
+ local location = [ $(project).get source-location ] ;
+
+ local result ;
+ local paths = [ path.$(rule-name) $(location) :
+ [ sequence.transform path.make : $(wildcards) ] :
+ [ sequence.transform path.make : $(excludes) ] ] ;
+ if $(wildcards:D) || $(rule-name) != glob
+ {
+ # The paths we have found are relative to the current directory, but the
+ # names specified in the sources list are assumed to be relative to the
+ # source directory of the corresponding project. So, just make the names
+ # absolute.
+ for local p in $(paths)
+ {
+ # If the path is below source location, use relative path.
+ # Otherwise, use full path just to avoid any ambiguities.
+ local rel = [ path.relative $(p) $(location) : no-error ] ;
+ if $(rel) = not-a-child
+ {
+ result += [ path.root $(p) [ path.pwd ] ] ;
+ }
+ else
+ {
+ result += $(rel) ;
+ }
+ }
+ }
+ else
+ {
+ # There were no wildcards in the directory path, so the files are all in
+ # the source directory of the project. Just drop the directory, instead
+ # of making paths absolute.
+ result = $(paths:D="") ;
+ }
+
+ return $(result) ;
+}
+
+
+rule glob-path-root ( root path )
+{
+ return [ path.root $(path) $(root) ] ;
+}
+
+rule glob-internal-ex ( project : paths + : wildcards + : excludes * : rule-name )
+{
+ # Make the paths we search in absolute, if they aren't already absolute.
+ # If the given paths are relative, they will be relative to the source
+ # directory. So that's what we root against.
+ local source-location
+ = [ path.root [ $(project).get source-location ] [ path.pwd ] ] ;
+ local search-paths
+ = [ sequence.transform project.glob-path-root $(source-location) : $(paths) ] ;
+ paths
+ = [ path.$(rule-name) $(search-paths) : $(wildcards) : $(excludes) ] ;
+ # The paths we have found are absolute, but the names specified in the
+ # sources list are assumed to be relative to the source directory of the
+ # corresponding project. Make the results relative to the source again.
+ local result
+ = [ sequence.transform path.relative-to $(source-location) : $(paths) ] ;
+
+ return $(result) ;
+}
+
+
+# This module defines rules common to all projects.
+#
+module project-rules
+{
+ import modules ;
+
+ rule using ( toolset-module : * )
+ {
+ import toolset ;
+
+ local saved-project = [ modules.peek project : .current-project ] ;
+
+ # Temporarily change the search path so the module referred to by
+        # 'using' can be placed in the same directory as the Jamfile. The user
+        # will expect the module to be found even though the directory is not
+        # in BOOST_BUILD_PATH.
+ local x = [ modules.peek : BOOST_BUILD_PATH ] ;
+ local caller = [ CALLER_MODULE ] ;
+ local caller-location = [ modules.binding $(caller) ] ;
+ modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ;
+ toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ;
+ modules.poke : BOOST_BUILD_PATH : $(x) ;
+
+ # The above might have clobbered .current-project in case it caused a
+ # new project instance to be created (which would then automatically
+ # get set as the 'current' project). Restore the correct value so any
+ # main targets declared after this do not get mapped to the loaded
+ # module's project.
+ modules.poke project : .current-project : $(saved-project) ;
+ }
+
+ rule import ( * : * : * )
+ {
+ local caller = [ CALLER_MODULE ] ;
+ local saved-project = [ modules.peek project : .current-project ] ;
+ module $(caller)
+ {
+ modules.import $(1) : $(2) : $(3) ;
+ }
+
+ # The above might have clobbered .current-project in case it caused a
+ # new project instance to be created (which would then automatically
+ # get set as the 'current' project). Restore the correct value so any
+ # main targets declared after this do not get mapped to the loaded
+ # module's project.
+ modules.poke project : .current-project : $(saved-project) ;
+ }
+
+ rule project ( id ? : options * : * )
+ {
+ import path ;
+ import project ;
+
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+ if $(id)
+ {
+ $(attributes).set id : $(id) ;
+ }
+
+ local explicit-build-dir ;
+
+ for n in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ {
+ local option = $($(n)) ;
+ if $(option)
+ {
+ $(attributes).set $(option[1]) : $(option[2-]) ;
+ }
+ if $(option[1]) = "build-dir"
+ {
+ explicit-build-dir = [ path.make $(option[2-]) ] ;
+ }
+ }
+
+ # If '--build-dir' is specified, change the build dir for the project.
+ local global-build-dir = [ modules.peek project : .global-build-dir ] ;
+
+ if $(global-build-dir)
+ {
+ local location = [ $(attributes).get location ] ;
+            # A project with an empty location is a 'standalone' project such
+            # as user-config or qt. It has no build dir. If we tried to set a
+            # build dir for user-config, we would then try to inherit it, with
+            # either weird or wrong consequences.
+ if $(location) && $(location) = [ $(attributes).get project-root ]
+ {
+ # Re-read the project id, since it might have been modified a
+ # bit when setting the project's id attribute, e.g. might have
+ # been prefixed by a slash if it was not already.
+ id = [ $(attributes).get id ] ;
+ # This is Jamroot.
+ if $(id)
+ {
+ if $(explicit-build-dir) &&
+ [ path.is-rooted $(explicit-build-dir) ]
+ {
+ import errors ;
+ errors.user-error Absolute directory specified via
+ 'build-dir' project attribute : Do not know how to
+ combine that with the --build-dir option. ;
+ }
+ # Strip the leading slash from id.
+ local rid = [ MATCH ^/(.*) : $(id) ] ;
+ local p = [ path.join $(global-build-dir) $(rid)
+ $(explicit-build-dir) ] ;
+
+ $(attributes).set build-dir : $(p) : exact ;
+ }
+ }
+ else
+ {
+ # Not Jamroot.
+ if $(explicit-build-dir)
+ {
+ import errors ;
+ errors.user-error When --build-dir is specified, the
+ 'build-dir' project : attribute is allowed only for
+ top-level 'project' invocations ;
+ }
+ }
+ }
+ }
+
+ # Declare and set a project global constant. Project global constants are
+ # normal variables but should not be changed. They are applied to every
+ # child Jamfile.
+ #
+ rule constant ( name : value + )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) ;
+ }
+
+ # Declare and set a project global constant, whose value is a path. The path
+ # is adjusted to be relative to the invocation directory. The given value
+ # path is taken to be either absolute, or relative to this project root.
+ #
+ rule path-constant ( name : value + )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) : path ;
+ }
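+
+    # For example, a Jamroot might declare (the names are illustrative):
+    #
+    #   constant VERSION : 1.2.3 ;
+    #   path-constant TOP : . ;
+    #
+    # Every child Jamfile can then use $(VERSION) and $(TOP), with TOP
+    # referring to the project root directory regardless of the directory
+    # from which the build is invoked.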
+
+ rule use-project ( id : where )
+ {
+ # See comment in 'load' for explanation.
+ local caller = [ CALLER_MODULE ] ;
+ modules.poke $(caller) : .used-projects : [ modules.peek $(caller) :
+ .used-projects ] $(id) $(where) ;
+ }
+
+ rule build-project ( dir )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+ local now = [ $(attributes).get projects-to-build ] ;
+ $(attributes).set projects-to-build : $(now) $(dir) ;
+ }
+
+ rule explicit ( target-names * )
+ {
+ import project ;
+ # If 'explicit' is used in a helper rule defined in Jamroot and
+ # inherited by children, then most of the time we want 'explicit' to
+ # operate on the Jamfile where the helper rule is invoked.
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-explicit $(n) ;
+ }
+ }
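+
+    # For example, 'explicit install ;' after declaring an 'install' target
+    # keeps that target from being built by default; it is only built when
+    # explicitly requested (the target name is illustrative).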
+
+ rule always ( target-names * )
+ {
+ import project ;
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-always $(n) ;
+ }
+ }
+
+ rule glob ( wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob ] ;
+ }
+
+ rule glob-tree ( wildcards + : excludes * )
+ {
+ import project ;
+ if $(wildcards:D) || $(excludes:D)
+ {
+ import errors ;
+ errors.user-error The patterns to 'glob-tree' may not include
+ directory ;
+ }
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob-tree ] ;
+ }
+
+ rule glob-ex ( paths + : wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal-ex [ project.current ]
+ : $(paths) : $(wildcards) : $(excludes) : glob ] ;
+ }
+
+ rule glob-tree-ex ( paths + : wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal-ex [ project.current ]
+ : $(paths) : $(wildcards) : $(excludes) : glob-tree ] ;
+ }
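+
+    # For example, a Jamfile might use (file names are illustrative):
+    #
+    #   exe app : [ glob *.cpp : main_test.cpp ] ;
+    #   lib support : [ glob-tree *.cpp ] ;
+    #
+    # 'glob' matches relative to the project's source directory and the second
+    # argument lists patterns to exclude; 'glob-tree' also descends into
+    # subdirectories, and its patterns may not contain directory components.
+    # The -ex variants additionally take an explicit list of directories to
+    # search.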
+
+ # Calculates conditional requirements for multiple requirements at once.
+ # This is a shorthand to reduce duplication and to keep an inline
+ # declarative syntax. For example:
+ #
+ # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
+ # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
+ #
+ rule conditional ( condition + : requirements * )
+ {
+ local condition = $(condition:J=,) ;
+ if [ MATCH "(:)" : $(condition) ]
+ {
+ return $(condition)$(requirements) ;
+ }
+ else
+ {
+ return "$(condition):$(requirements)" ;
+ }
+ }
+
+ rule option ( name : value )
+ {
+ local m = [ CALLER_MODULE ] ;
+ local cfgs = project site test user ;
+ if ! $(m) in $(cfgs)-config
+ {
+ import errors ;
+ errors.error The 'option' rule may only be used "in" Boost Build
+ configuration files. ;
+ }
+ import option ;
+ option.set $(name) : $(value) ;
+ }
+
+ # This allows one to manually import a package manager build information file.
+ # The argument can be either a symbolic name of a supported package manager or
+    # a glob pattern to load a b2 jam file.
+ #
+ rule use-packages ( name-or-glob-pattern ? )
+ {
+ local m = [ CALLER_MODULE ] ;
+ local glob-pattern = $(name-or-glob-pattern) ;
+ local glob-for-name = [ modules.peek project : PACKAGE_MANAGER_BUILD_INFO($(name-or-glob-pattern:U)) ] ;
+ if $(glob-for-name)
+ {
+ glob-pattern = $(glob-for-name) ;
+ }
+ modules.call-in $(m) : constant PACKAGE_MANAGER_BUILD_INFO : $(glob-pattern) ;
+ IMPORT project : load-package-manager-build-info : $(m) : project.load-package-manager-build-info ;
+ modules.call-in $(m) : project.load-package-manager-build-info ;
+ }
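+
+    # For example, a Jamfile can request Conan build information with
+    #
+    #   use-packages conan ;
+    #
+    # or pass a glob pattern naming a specific .jam file instead of the
+    # symbolic name.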
+}
diff --git a/src/boost/tools/build/src/build/project.py b/src/boost/tools/build/src/build/project.py
new file mode 100644
index 000000000..29e99a2c2
--- /dev/null
+++ b/src/boost/tools/build/src/build/project.py
@@ -0,0 +1,1285 @@
+# Status: ported.
+# Base revision: 64488
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented
+# by:
+#  - a module where all the Jamfile content lives.
+# - an instance of 'project-attributes' class.
+# (given a module name, can be obtained using the 'attributes' rule)
+# - an instance of 'project-target' class (from targets.jam)
+# (given a module name, can be obtained using the 'target' rule)
+#
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by rules 'load' and 'initialize', below. First, a module for the
+# Jamfile is loaded and a new project-attributes instance is created. Some
+# rules necessary for the project are added to the module (see the
+# 'project-rules' module) at the bottom of this file. Default project
+# attributes are set (inheriting attributes of the parent project, if one
+# exists). After that the Jamfile is read. It can declare its own attributes
+# using the 'project' rule, which will be combined with any already set
+# attributes.
+#
+# The 'project' rule can also declare a project id which will be associated
+# with the project module.
+#
+# There can also be 'standalone' projects. They are created by calling
+# 'initialize' on an arbitrary module and not specifying their location. After
+# the call, the module can call the 'project' rule, declare main targets and
+# behave as a regular project except that, since it is not associated with any
+# location, it should only declare prebuilt targets.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
+
+import b2.util.path
+import b2.build.targets
+from b2.build import property_set, property
+from b2.build.errors import ExceptionWithUserContext
+from b2.manager import get_manager
+
+import bjam
+import b2
+
+import re
+import sys
+import pkgutil
+import os
+import string
+import imp
+import traceback
+import b2.util.option as option
+
+from b2.util import (
+ record_jam_to_value_mapping, qualify_jam_action, is_iterable_typed, bjam_signature,
+ is_iterable)
+
+
+class ProjectRegistry:
+
+ def __init__(self, manager, global_build_dir):
+ self.manager = manager
+ self.global_build_dir = global_build_dir
+ self.project_rules_ = ProjectRules(self)
+
+ # The target corresponding to the project being loaded now
+ self.current_project = None
+
+ # The set of names of loaded project modules
+ self.jamfile_modules = {}
+
+ # Mapping from location to module name
+ self.location2module = {}
+
+ # Mapping from project id to project module
+ self.id2module = {}
+
+ # Map from Jamfile directory to parent Jamfile/Jamroot
+ # location.
+ self.dir2parent_jamfile = {}
+
+ # Map from directory to the name of Jamfile in
+ # that directory (or None).
+ self.dir2jamfile = {}
+
+ # Map from project module to attributes object.
+ self.module2attributes = {}
+
+ # Map from project module to target for the project
+ self.module2target = {}
+
+ # Map from names to Python modules, for modules loaded
+ # via 'using' and 'import' rules in Jamfiles.
+ self.loaded_tool_modules_ = {}
+
+ self.loaded_tool_module_path_ = {}
+
+ # Map from project target to the list of
+ # (id,location) pairs corresponding to all 'use-project'
+ # invocations.
+ # TODO: should not have a global map, keep this
+ # in ProjectTarget.
+ self.used_projects = {}
+
+ self.saved_current_project = []
+
+ self.JAMROOT = self.manager.getenv("JAMROOT");
+
+ # Note the use of character groups, as opposed to listing
+ # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
+ # matches on windows and would have to eliminate duplicates.
+ if not self.JAMROOT:
+ self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]
+
+ # Default patterns to search for the Jamfiles to use for build
+ # declarations.
+ self.JAMFILE = self.manager.getenv("JAMFILE")
+
+ if not self.JAMFILE:
+ self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
+ "[Jj]amfile.jam"]
+
+ self.__python_module_cache = {}
+
+
+ def load (self, jamfile_location):
+ """Loads jamfile at the given location. After loading, project global
+ file and jamfile needed by the loaded one will be loaded recursively.
+ If the jamfile at that location is loaded already, does nothing.
+ Returns the project module for the Jamfile."""
+ assert isinstance(jamfile_location, basestring)
+
+ absolute = os.path.join(os.getcwd(), jamfile_location)
+ absolute = os.path.normpath(absolute)
+ jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)
+
+ mname = self.module_name(jamfile_location)
+ # If Jamfile is already loaded, do not try again.
+ if not mname in self.jamfile_modules:
+
+ if "--debug-loading" in self.manager.argv():
+ print "Loading Jamfile at '%s'" % jamfile_location
+
+ self.load_jamfile(jamfile_location, mname)
+
+            # We want to make sure that child projects are loaded only
+            # after their parent projects, because parent projects define
+            # attributes which are inherited by children, and we do not want
+            # children to be loaded before the parents have defined everything.
+ #
+ # While "build-project" and "use-project" can potentially refer
+ # to child projects from parent projects, we do not immediately
+ # load child projects when seeing those attributes. Instead,
+ # we record the minimal information that will be used only later.
+
+ self.load_used_projects(mname)
+
+ return mname
+
+ def load_used_projects(self, module_name):
+ assert isinstance(module_name, basestring)
+ # local used = [ modules.peek $(module-name) : .used-projects ] ;
+ used = self.used_projects[module_name]
+
+ location = self.attribute(module_name, "location")
+ for u in used:
+ id = u[0]
+ where = u[1]
+
+ self.use(id, os.path.join(location, where))
+
+ def load_parent(self, location):
+ """Loads parent of Jamfile at 'location'.
+ Issues an error if nothing is found."""
+ assert isinstance(location, basestring)
+ found = b2.util.path.glob_in_parents(
+ location, self.JAMROOT + self.JAMFILE)
+
+ if not found:
+ print "error: Could not find parent for project at '%s'" % location
+ print "error: Did not find Jamfile.jam or Jamroot.jam in any parent directory."
+ sys.exit(1)
+
+ return self.load(os.path.dirname(found[0]))
+
+ def find(self, name, current_location):
+ """Given 'name' which can be project-id or plain directory name,
+ return project module corresponding to that id or directory.
+ Returns nothing of project is not found."""
+ assert isinstance(name, basestring)
+ assert isinstance(current_location, basestring)
+
+ project_module = None
+
+ # Try interpreting name as project id.
+ if name[0] == '/':
+ project_module = self.id2module.get(name)
+
+ if not project_module:
+ location = os.path.join(current_location, name)
+            # If no project is registered for the given location, try to
+            # load it. First see if we have a Jamfile. If not, we might have a
+            # project root willing to act as a Jamfile. In that case, the
+            # project root must be placed in the directory referred to by the id.
+
+ project_module = self.module_name(location)
+ if not project_module in self.jamfile_modules:
+ if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
+ project_module = self.load(location)
+ else:
+ project_module = None
+
+ return project_module
+
+ def module_name(self, jamfile_location):
+ """Returns the name of module corresponding to 'jamfile-location'.
+ If no module corresponds to location yet, associates default
+ module name with that location."""
+ assert isinstance(jamfile_location, basestring)
+ module = self.location2module.get(jamfile_location)
+ if not module:
+            # Root the path, so that locations are always unambiguous.
+            # Without this, we cannot decide whether '../../exe/program1' and
+            # '.' are the same path or not.
+ jamfile_location = os.path.realpath(
+ os.path.join(os.getcwd(), jamfile_location))
+ module = "Jamfile<%s>" % jamfile_location
+ self.location2module[jamfile_location] = module
+ return module
+
+ def find_jamfile (self, dir, parent_root=0, no_errors=0):
+ """Find the Jamfile at the given location. This returns the
+ exact names of all the Jamfiles in the given directory. The optional
+ parent-root argument causes this to search not the given directory
+ but the ones above it up to the directory given in it."""
+ assert isinstance(dir, basestring)
+ assert isinstance(parent_root, (int, bool))
+ assert isinstance(no_errors, (int, bool))
+
+ # Glob for all the possible Jamfiles according to the match pattern.
+ #
+ jamfile_glob = None
+ if parent_root:
+ parent = self.dir2parent_jamfile.get(dir)
+ if not parent:
+ parent = b2.util.path.glob_in_parents(dir,
+ self.JAMFILE)
+ self.dir2parent_jamfile[dir] = parent
+ jamfile_glob = parent
+ else:
+ jamfile = self.dir2jamfile.get(dir)
+ if not jamfile:
+ jamfile = b2.util.path.glob([dir], self.JAMFILE)
+ self.dir2jamfile[dir] = jamfile
+ jamfile_glob = jamfile
+
+ if len(jamfile_glob) > 1:
+ # Multiple Jamfiles found in the same place. Warn about this.
+ # And ensure we use only one of them.
+            # As a temporary convenience measure, if there is a Jamfile.v2 among
+            # the found files, suppress the warning and use it.
+ #
+ pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
+ v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
+ if len(v2_jamfiles) == 1:
+ jamfile_glob = v2_jamfiles
+ else:
+ print """warning: Found multiple Jamfiles at '%s'!""" % (dir)
+ for j in jamfile_glob:
+ print " -", j
+ print "Loading the first one"
+
+ # Could not find it, error.
+ if not no_errors and not jamfile_glob:
+ self.manager.errors()(
+ """Unable to load Jamfile.
+Could not find a Jamfile in directory '%s'
+Attempted to find it with pattern '%s'.
+Please consult the documentation at 'http://boost.org/boost-build2'."""
+ % (dir, string.join(self.JAMFILE)))
+
+ if jamfile_glob:
+ return jamfile_glob[0]
+
+ def load_jamfile(self, dir, jamfile_module):
+ """Load a Jamfile at the given directory. Returns nothing.
+ Will attempt to load the file as indicated by the JAMFILE patterns.
+ Effect of calling this rule twice with the same 'dir' is underfined."""
+ assert isinstance(dir, basestring)
+ assert isinstance(jamfile_module, basestring)
+
+ # See if the Jamfile is where it should be.
+ is_jamroot = False
+ jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
+ if jamfile_to_load:
+ if len(jamfile_to_load) > 1:
+ get_manager().errors()(
+ "Multiple Jamfiles found at '{}'\n"
+ "Filenames are: {}"
+ .format(dir, ' '.join(os.path.basename(j) for j in jamfile_to_load))
+ )
+ is_jamroot = True
+ jamfile_to_load = jamfile_to_load[0]
+ else:
+ jamfile_to_load = self.find_jamfile(dir)
+
+ dir = os.path.dirname(jamfile_to_load)
+ if not dir:
+ dir = "."
+
+ self.used_projects[jamfile_module] = []
+
+        # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load parent Jamfile, which might have
+ # 'use-project' statement that causes a second attempt to load the
+ # same project we're loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing up.
+ if not jamfile_module in self.jamfile_modules:
+ previous_project = self.current_project
+ # Initialize the jamfile module before loading.
+ self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
+
+ if not jamfile_module in self.jamfile_modules:
+ saved_project = self.current_project
+ self.jamfile_modules[jamfile_module] = True
+
+ bjam.call("load", jamfile_module, jamfile_to_load)
+
+ if is_jamroot:
+ jamfile = self.find_jamfile(dir, no_errors=True)
+ if jamfile:
+ bjam.call("load", jamfile_module, jamfile)
+
+ # Now do some checks
+ if self.current_project != saved_project:
+ from textwrap import dedent
+ self.manager.errors()(dedent(
+ """
+ The value of the .current-project variable has magically changed
+ after loading a Jamfile. This means some of the targets might be
+                    defined in the wrong project.
+ after loading %s
+ expected value %s
+ actual value %s
+ """
+ % (jamfile_module, saved_project, self.current_project)
+ ))
+
+ self.end_load(previous_project)
+
+ if self.global_build_dir:
+ id = self.attributeDefault(jamfile_module, "id", None)
+ project_root = self.attribute(jamfile_module, "project-root")
+ location = self.attribute(jamfile_module, "location")
+
+ if location and project_root == dir:
+ # This is Jamroot
+ if not id:
+ # FIXME: go via errors module, so that contexts are
+ # shown?
+ print "warning: the --build-dir option was specified"
+ print "warning: but Jamroot at '%s'" % dir
+ print "warning: specified no project id"
+ print "warning: the --build-dir option will be ignored"
+
+ def end_load(self, previous_project=None):
+ if not self.current_project:
+ self.manager.errors()(
+ 'Ending project loading requested when there was no project currently '
+ 'being loaded.'
+ )
+
+ if not previous_project and self.saved_current_project:
+ self.manager.errors()(
+ 'Ending project loading requested with no "previous project" when there '
+                'are other projects still being loaded recursively.'
+ )
+
+ self.current_project = previous_project
+
+ def load_standalone(self, jamfile_module, file):
+ """Loads 'file' as standalone project that has no location
+ associated with it. This is mostly useful for user-config.jam,
+ which should be able to define targets, but although it has
+ some location in filesystem, we do not want any build to
+ happen in user's HOME, for example.
+
+ The caller is required to never call this method twice on
+ the same file.
+ """
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(file, basestring)
+
+ self.used_projects[jamfile_module] = []
+ bjam.call("load", jamfile_module, file)
+ self.load_used_projects(jamfile_module)
+
+ def is_jamroot(self, basename):
+ assert isinstance(basename, basestring)
+ match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
+ if match:
+ return 1
+ else:
+ return 0
+
+ def initialize(self, module_name, location=None, basename=None, standalone_path=''):
+ """Initialize the module for a project.
+
+ module-name is the name of the project module.
+ location is the location (directory) of the project to initialize.
+ If not specified, standalone project will be initialized
+ standalone_path is the path to the source-location.
+ this should only be called from the python side.
+ """
+ assert isinstance(module_name, basestring)
+ assert isinstance(location, basestring) or location is None
+ assert isinstance(basename, basestring) or basename is None
+ jamroot = False
+ parent_module = None
+ if module_name == "test-config":
+ # No parent
+ pass
+ elif module_name == "site-config":
+ parent_module = "test-config"
+ elif module_name == "user-config":
+ parent_module = "site-config"
+ elif module_name == "project-config":
+ parent_module = "user-config"
+ elif location and not self.is_jamroot(basename):
+            # We search for a parent/project-root only if a Jamfile was
+            # specified, i.e. if the project is not standalone.
+ parent_module = self.load_parent(location)
+ elif location:
+ # It's either jamroot, or standalone project.
+ # If it's jamroot, inherit from user-config.
+            # If the project-config module exists, inherit from it.
+ parent_module = 'user-config'
+ if 'project-config' in self.module2attributes:
+ parent_module = 'project-config'
+ jamroot = True
+
+        # TODO: need to consider if standalone projects can do anything but define
+        # prebuilt targets. If so, we need to give a more sensible "location", so that
+        # source paths are correct.
+ if not location:
+ location = ""
+
+ # the call to load_parent() above can end up loading this module again
+ # make sure we don't reinitialize the module's attributes
+ if module_name not in self.module2attributes:
+ if "--debug-loading" in self.manager.argv():
+ print "Initializing project '%s'" % module_name
+ attributes = ProjectAttributes(self.manager, location, module_name)
+ self.module2attributes[module_name] = attributes
+
+ python_standalone = False
+ if location:
+ attributes.set("source-location", [location], exact=1)
+ elif not module_name in ["test-config", "site-config", "user-config", "project-config"]:
+ # This is a standalone project with known location. Set source location
+ # so that it can declare targets. This is intended so that you can put
+ # a .jam file in your sources and use it via 'using'. Standard modules
+ # (in 'tools' subdir) may not assume source dir is set.
+ source_location = standalone_path
+ if not source_location:
+ source_location = self.loaded_tool_module_path_.get(module_name)
+ if not source_location:
+ self.manager.errors()('Standalone module path not found for "{}"'
+ .format(module_name))
+ attributes.set("source-location", [source_location], exact=1)
+ python_standalone = True
+
+ attributes.set("requirements", property_set.empty(), exact=True)
+ attributes.set("usage-requirements", property_set.empty(), exact=True)
+ attributes.set("default-build", property_set.empty(), exact=True)
+ attributes.set("projects-to-build", [], exact=True)
+ attributes.set("project-root", None, exact=True)
+ attributes.set("build-dir", None, exact=True)
+
+ self.project_rules_.init_project(module_name, python_standalone)
+
+ if parent_module:
+ self.inherit_attributes(module_name, parent_module)
+ attributes.set("parent-module", parent_module, exact=1)
+
+ if jamroot:
+ attributes.set("project-root", location, exact=1)
+
+ parent = None
+ if parent_module:
+ parent = self.target(parent_module)
+
+ if module_name not in self.module2target:
+ target = b2.build.targets.ProjectTarget(self.manager,
+ module_name, module_name, parent,
+ self.attribute(module_name, "requirements"),
+ # FIXME: why we need to pass this? It's not
+ # passed in jam code.
+ self.attribute(module_name, "default-build"))
+ self.module2target[module_name] = target
+
+ self.current_project = self.target(module_name)
+
+ def inherit_attributes(self, project_module, parent_module):
+ """Make 'project-module' inherit attributes of project
+ root and parent module."""
+ assert isinstance(project_module, basestring)
+ assert isinstance(parent_module, basestring)
+
+ attributes = self.module2attributes[project_module]
+ pattributes = self.module2attributes[parent_module]
+
+ # Parent module might be locationless user-config.
+ # FIXME:
+ #if [ modules.binding $(parent-module) ]
+ #{
+ # $(attributes).set parent : [ path.parent
+ # [ path.make [ modules.binding $(parent-module) ] ] ] ;
+ # }
+
+ attributes.set("project-root", pattributes.get("project-root"), exact=True)
+ attributes.set("default-build", pattributes.get("default-build"), exact=True)
+ attributes.set("requirements", pattributes.get("requirements"), exact=True)
+ attributes.set("usage-requirements",
+ pattributes.get("usage-requirements"), exact=1)
+
+ parent_build_dir = pattributes.get("build-dir")
+
+ if parent_build_dir:
+ # Have to compute relative path from parent dir to our dir
+ # Convert both paths to absolute, since we cannot
+ # find relative path from ".." to "."
+
+ location = attributes.get("location")
+ parent_location = pattributes.get("location")
+
+ our_dir = os.path.join(os.getcwd(), location)
+ parent_dir = os.path.join(os.getcwd(), parent_location)
+
+ build_dir = os.path.join(parent_build_dir,
+ os.path.relpath(our_dir, parent_dir))
+ attributes.set("build-dir", build_dir, exact=True)
+
+ def register_id(self, id, module):
+ """Associate the given id with the given project module."""
+ assert isinstance(id, basestring)
+ assert isinstance(module, basestring)
+ self.id2module[id] = module
+
+ def current(self):
+ """Returns the project which is currently being loaded."""
+ if not self.current_project:
+ get_manager().errors()(
+ 'Reference to the project currently being loaded requested '
+ 'when there was no project module being loaded.'
+ )
+ return self.current_project
+
+ def set_current(self, c):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(c, ProjectTarget)
+ self.current_project = c
+
+ def push_current(self, project):
+ """Temporary changes the current project to 'project'. Should
+ be followed by 'pop-current'."""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ self.saved_current_project.append(self.current_project)
+ self.current_project = project
+
+ def pop_current(self):
+ if self.saved_current_project:
+ self.current_project = self.saved_current_project.pop()
+ else:
+ self.current_project = None
+
+ def attributes(self, project):
+ """Returns the project-attribute instance for the
+ specified jamfile module."""
+ assert isinstance(project, basestring)
+ return self.module2attributes[project]
+
+ def attribute(self, project, attribute):
+ """Returns the value of the specified attribute in the
+ specified jamfile module."""
+ assert isinstance(project, basestring)
+ assert isinstance(attribute, basestring)
+ try:
+ return self.module2attributes[project].get(attribute)
+ except:
+ raise BaseException("No attribute '%s' for project %s" % (attribute, project))
+
+ def attributeDefault(self, project, attribute, default):
+ """Returns the value of the specified attribute in the
+ specified jamfile module."""
+ assert isinstance(project, basestring)
+ assert isinstance(attribute, basestring)
+ assert isinstance(default, basestring) or default is None
+ return self.module2attributes[project].getDefault(attribute, default)
+
+ def target(self, project_module):
+ """Returns the project target corresponding to the 'project-module'."""
+ assert isinstance(project_module, basestring)
+ if project_module not in self.module2target:
+ self.module2target[project_module] = \
+ b2.build.targets.ProjectTarget(project_module, project_module,
+ self.attribute(project_module, "requirements"))
+
+ return self.module2target[project_module]
+
+ def use(self, id, location):
+ # Use/load a project.
+ assert isinstance(id, basestring)
+ assert isinstance(location, basestring)
+ saved_project = self.current_project
+ project_module = self.load(location)
+ declared_id = self.attributeDefault(project_module, "id", "")
+
+ if not declared_id or declared_id != id:
+            # The project at 'location' either has no id or
+            # its id is not equal to the 'id' parameter.
+ if id in self.id2module and self.id2module[id] != project_module:
+ self.manager.errors()(
+"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location))
+ self.id2module[id] = project_module
+
+ self.current_project = saved_project
+
+ def add_rule(self, name, callable_):
+ """Makes rule 'name' available to all subsequently loaded Jamfiles.
+
+        Calls to that rule are relayed to 'callable_'."""
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ self.project_rules_.add_rule(name, callable_)
+
+ def project_rules(self):
+ return self.project_rules_
+
+ def glob_internal(self, project, wildcards, excludes, rule_name):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring) or excludes is None
+ assert isinstance(rule_name, basestring)
+ location = project.get("source-location")[0]
+
+ result = []
+ callable = b2.util.path.__dict__[rule_name]
+
+ paths = callable([location], wildcards, excludes)
+ has_dir = 0
+ for w in wildcards:
+ if os.path.dirname(w):
+ has_dir = 1
+ break
+
+ if has_dir or rule_name != "glob":
+ result = []
+            # The paths we have found are relative to the current directory,
+            # but the names specified in the sources list are assumed to
+            # be relative to the source directory of the corresponding
+            # project. Either translate them or make them absolute.
+
+ for p in paths:
+ rel = os.path.relpath(p, location)
+ # If the path is below source location, use relative path.
+ if not ".." in rel:
+ result.append(rel)
+ else:
+ # Otherwise, use full path just to avoid any ambiguities.
+ result.append(os.path.abspath(p))
+
+ else:
+            # There was no directory in the wildcards, so the files are all
+            # in the source directory of the project. Just drop the
+            # directory, instead of making paths absolute.
+ result = [os.path.basename(p) for p in paths]
+
+ return result
+
+ def __build_python_module_cache(self):
+ """Recursively walks through the b2/src subdirectories and
+ creates an index of base module name to package name. The
+ index is stored within self.__python_module_cache and allows
+ for an O(1) module lookup.
+
+ For example, given the base module name `toolset`,
+ self.__python_module_cache['toolset'] will return
+ 'b2.build.toolset'
+
+ pkgutil.walk_packages() will find any python package
+ provided a directory contains an __init__.py. This has the
+ added benefit of allowing libraries to be installed and
+ automatically available within the contrib directory.
+
+        *Note*: pkgutil.walk_packages() will import any subpackage
+        in order to access its __path__ variable. This means that
+        any initialization code will be run if the package hasn't
+        already been imported.
+ """
+ cache = {}
+ for importer, mname, ispkg in pkgutil.walk_packages(b2.__path__, prefix='b2.'):
+ basename = mname.split('.')[-1]
+ # since the jam code is only going to have "import toolset ;"
+ # it doesn't matter if there are separately named "b2.build.toolset" and
+ # "b2.contrib.toolset" as it is impossible to know which the user is
+ # referring to.
+ if basename in cache:
+ self.manager.errors()('duplicate module name "{0}" '
+ 'found in boost-build path'.format(basename))
+ cache[basename] = mname
+ self.__python_module_cache = cache
+
+ def load_module(self, name, extra_path=None):
+ """Load a Python module that should be usable from Jamfiles.
+
+ There are generally two types of modules Jamfiles might want to
+ use:
+        - Core Boost.Build modules. Those are imported using plain names,
+        e.g. 'toolset', so this function checks whether a matching module
+        already exists within the b2 package.
+        - Python modules in the same directory as the Jamfile. We do not
+        want to even temporarily add the Jamfile's directory to sys.path,
+        since then we might get naming conflicts between standard
+        Python modules and those.
+ """
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(extra_path, basestring) or extra_path is None
+ # See if we loaded module of this name already
+ existing = self.loaded_tool_modules_.get(name)
+ if existing:
+ return existing
+
+ # check the extra path as well as any paths outside
+ # of the b2 package and import the module if it exists
+ b2_path = os.path.normpath(b2.__path__[0])
+ # normalize the pathing in the BOOST_BUILD_PATH.
+ # this allows for using startswith() to determine
+ # if a path is a subdirectory of the b2 root_path
+ paths = [os.path.normpath(p) for p in self.manager.boost_build_path()]
+ # remove all paths that start with b2's root_path
+ paths = [p for p in paths if not p.startswith(b2_path)]
+ # add any extra paths
+ paths.extend(extra_path)
+
+ try:
+ # find_module is used so that the pyc's can be used.
+ # an ImportError is raised if not found
+ f, location, description = imp.find_module(name, paths)
+ except ImportError:
+ # if the module is not found in the b2 package,
+ # this error will be handled later
+ pass
+ else:
+ # we've found the module, now let's try loading it.
+ # it's possible that the module itself contains an ImportError
+ # which is why we're loading it in this else clause so that the
+ # proper error message is shown to the end user.
+ # TODO: does this module name really need to be mangled like this?
+ mname = name + "__for_jamfile"
+ self.loaded_tool_module_path_[mname] = location
+ module = imp.load_module(mname, f, location, description)
+ self.loaded_tool_modules_[name] = module
+ return module
+
+ # the cache is created here due to possibly importing packages
+ # that end up calling get_manager() which might fail
+ if not self.__python_module_cache:
+ self.__build_python_module_cache()
+
+ underscore_name = name.replace('-', '_')
+ # check to see if the module is within the b2 package
+ # and already loaded
+ mname = self.__python_module_cache.get(underscore_name)
+ if mname in sys.modules:
+ return sys.modules[mname]
+ # otherwise, if the module name is within the cache,
+ # the module exists within the BOOST_BUILD_PATH,
+ # load it.
+ elif mname:
+ # in some cases, self.loaded_tool_module_path_ needs to
+ # have the path to the file during the import
+ # (project.initialize() for example),
+ # so the path needs to be set *before* importing the module.
+ path = os.path.join(b2.__path__[0], *mname.split('.')[1:])
+ self.loaded_tool_module_path_[mname] = path
+ # mname is guaranteed to be importable since it was
+ # found within the cache
+ __import__(mname)
+ module = sys.modules[mname]
+ self.loaded_tool_modules_[name] = module
+ return module
+
+ self.manager.errors()("Cannot find module '%s'" % name)
+
+
+
+# FIXME:
+# Defines a Boost.Build extension project. Such extensions usually
+# contain library targets and features that can be used by many people.
+# Even though extensions are really projects, they can be initialize as
+# a module would be with the "using" (project.project-rules.using)
+# mechanism.
+#rule extension ( id : options * : * )
+#{
+# # The caller is a standalone module for the extension.
+# local mod = [ CALLER_MODULE ] ;
+#
+# # We need to do the rest within the extension module.
+# module $(mod)
+# {
+# import path ;
+#
+# # Find the root project.
+# local root-project = [ project.current ] ;
+# root-project = [ $(root-project).project-module ] ;
+# while
+# [ project.attribute $(root-project) parent-module ] &&
+# [ project.attribute $(root-project) parent-module ] != user-config
+# {
+# root-project = [ project.attribute $(root-project) parent-module ] ;
+# }
+#
+# # Create the project data, and bring in the project rules
+# # into the module.
+# project.initialize $(__name__) :
+# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
+#
+# # Create the project itself, i.e. the attributes.
+# # All extensions are created in the "/ext" project space.
+# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+# local attributes = [ project.attributes $(__name__) ] ;
+#
+# # Inherit from the root project of whomever is defining us.
+# project.inherit-attributes $(__name__) : $(root-project) ;
+# $(attributes).set parent-module : $(root-project) : exact ;
+# }
+#}
+
+
+class ProjectAttributes:
+ """Class keeping all the attributes of a project.
+
+    The standard attributes are "id", "location", "project-root", "parent",
+    "requirements", "default-build", "source-location" and "projects-to-build".
+ """
+
+ def __init__(self, manager, location, project_module):
+ self.manager = manager
+ self.location = location
+ self.project_module = project_module
+ self.attributes = {}
+ self.usage_requirements = None
+
+ def set(self, attribute, specification, exact=False):
+ """Set the named attribute from the specification given by the user.
+ The value actually set may be different."""
+ assert isinstance(attribute, basestring)
+ assert isinstance(exact, (int, bool))
+ if __debug__ and not exact:
+ if attribute == 'requirements':
+ assert (isinstance(specification, property_set.PropertySet)
+ or all(isinstance(s, basestring) for s in specification))
+ elif attribute in (
+ 'usage-requirements', 'default-build', 'source-location', 'build-dir', 'id'):
+ assert is_iterable_typed(specification, basestring)
+ elif __debug__:
+ assert (
+ isinstance(specification, (property_set.PropertySet, type(None), basestring))
+ or all(isinstance(s, basestring) for s in specification)
+ )
+ if exact:
+ self.__dict__[attribute] = specification
+
+ elif attribute == "requirements":
+ self.requirements = property_set.refine_from_user_input(
+ self.requirements, specification,
+ self.project_module, self.location)
+
+ elif attribute == "usage-requirements":
+ unconditional = []
+ for p in specification:
+ split = property.split_conditional(p)
+ if split:
+ unconditional.append(split[1])
+ else:
+ unconditional.append(p)
+
+ non_free = property.remove("free", unconditional)
+ if non_free:
+ get_manager().errors()("usage-requirements %s have non-free properties %s" \
+ % (specification, non_free))
+
+ t = property.translate_paths(
+ property.create_from_strings(specification, allow_condition=True),
+ self.location)
+
+ existing = self.__dict__.get("usage-requirements")
+ if existing:
+ new = property_set.create(existing.all() + t)
+ else:
+ new = property_set.create(t)
+ self.__dict__["usage-requirements"] = new
+
+
+ elif attribute == "default-build":
+ self.__dict__["default-build"] = property_set.create(specification)
+
+ elif attribute == "source-location":
+ source_location = []
+ for path in specification:
+ source_location.append(os.path.join(self.location, path))
+ self.__dict__["source-location"] = source_location
+
+ elif attribute == "build-dir":
+ self.__dict__["build-dir"] = os.path.join(self.location, specification[0])
+
+ elif attribute == "id":
+ id = specification[0]
+ if id[0] != '/':
+ id = "/" + id
+ self.manager.projects().register_id(id, self.project_module)
+ self.__dict__["id"] = id
+
+ elif not attribute in ["default-build", "location",
+ "source-location", "parent",
+ "projects-to-build", "project-root"]:
+ self.manager.errors()(
+"""Invalid project attribute '%s' specified
+for project at '%s'""" % (attribute, self.location))
+ else:
+ self.__dict__[attribute] = specification
+
+ def get(self, attribute):
+ assert isinstance(attribute, basestring)
+ return self.__dict__[attribute]
+
+ def getDefault(self, attribute, default):
+ assert isinstance(attribute, basestring)
+ return self.__dict__.get(attribute, default)
+
+ def dump(self):
+ """Prints the project attributes."""
+ id = self.get("id")
+ if not id:
+ id = "(none)"
+ else:
+ id = id[0]
+
+ parent = self.get("parent")
+ if not parent:
+ parent = "(none)"
+ else:
+ parent = parent[0]
+
+ print "'%s'" % id
+ print "Parent project:%s", parent
+ print "Requirements:%s", self.get("requirements")
+ print "Default build:%s", string.join(self.get("debuild-build"))
+ print "Source location:%s", string.join(self.get("source-location"))
+ print "Projects to build:%s", string.join(self.get("projects-to-build").sort());
+
+class ProjectRules:
+ """Class keeping all rules that are made available to Jamfile."""
+
+ def __init__(self, registry):
+ self.registry = registry
+ self.manager_ = registry.manager
+ self.rules = {}
+ self.local_names = [x for x in self.__class__.__dict__
+ if x not in ["__init__", "init_project", "add_rule",
+ "error_reporting_wrapper", "add_rule_for_type", "reverse"]]
+ self.all_names_ = [x for x in self.local_names]
+
+ def _import_rule(self, bjam_module, name, callable_):
+ assert isinstance(bjam_module, basestring)
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ if hasattr(callable_, "bjam_signature"):
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable_), callable_.bjam_signature)
+ else:
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable_))
+
+
+ def add_rule_for_type(self, type):
+ assert isinstance(type, basestring)
+ rule_name = type.lower().replace("_", "-")
+
+ @bjam_signature([['name'], ['sources', '*'], ['requirements', '*'],
+ ['default_build', '*'], ['usage_requirements', '*']])
+ def xpto (name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+
+ return self.manager_.targets().create_typed_target(
+ type, self.registry.current(), name, sources,
+ requirements, default_build, usage_requirements)
+
+ self.add_rule(rule_name, xpto)
+
+ def add_rule(self, name, callable_):
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ self.rules[name] = callable_
+ self.all_names_.append(name)
+
+        # Add the new rule at global bjam scope. This might not be ideal;
+        # it is done because if a Jamroot does 'import foo' where foo calls
+        # add_rule, we need to import the new rule into the Jamroot scope,
+        # and I'm too lazy to do that properly now.
+ self._import_rule("", name, callable_)
+
+ def all_names(self):
+ return self.all_names_
+
+ def call_and_report_errors(self, callable_, *args, **kw):
+ assert callable(callable_)
+ result = None
+ try:
+ self.manager_.errors().push_jamfile_context()
+ result = callable_(*args, **kw)
+ except ExceptionWithUserContext, e:
+ e.report()
+ except Exception, e:
+ try:
+ self.manager_.errors().handle_stray_exception (e)
+ except ExceptionWithUserContext, e:
+ e.report()
+ finally:
+ self.manager_.errors().pop_jamfile_context()
+
+ return result
+
+ def make_wrapper(self, callable_):
+ """Given a free-standing function 'callable', return a new
+ callable that will call 'callable' and report all exceptins,
+ using 'call_and_report_errors'."""
+ assert callable(callable_)
+ def wrapper(*args, **kw):
+ return self.call_and_report_errors(callable_, *args, **kw)
+ return wrapper
+
+ def init_project(self, project_module, python_standalone=False):
+ assert isinstance(project_module, basestring)
+ assert isinstance(python_standalone, bool)
+ if python_standalone:
+ m = sys.modules[project_module]
+
+ for n in self.local_names:
+ if n != "import_":
+ setattr(m, n, getattr(self, n))
+
+ for n in self.rules:
+ setattr(m, n, self.rules[n])
+
+ return
+
+ for n in self.local_names:
+ # Using 'getattr' here gives us a bound method,
+ # while using self.__dict__[r] would give unbound one.
+ v = getattr(self, n)
+ if callable(v):
+ if n == "import_":
+ n = "import"
+ else:
+ n = string.replace(n, "_", "-")
+
+ self._import_rule(project_module, n, v)
+
+ for n in self.rules:
+ self._import_rule(project_module, n, self.rules[n])
+
+ def project(self, *args):
+ assert is_iterable(args) and all(is_iterable(arg) for arg in args)
+ jamfile_module = self.registry.current().project_module()
+ attributes = self.registry.attributes(jamfile_module)
+
+ id = None
+ if args and args[0]:
+ id = args[0][0]
+ args = args[1:]
+
+ if id:
+ attributes.set('id', [id])
+
+ explicit_build_dir = None
+ for a in args:
+ if a:
+ attributes.set(a[0], a[1:], exact=0)
+ if a[0] == "build-dir":
+ explicit_build_dir = a[1]
+
+ # If '--build-dir' is specified, change the build dir for the project.
+ if self.registry.global_build_dir:
+
+ location = attributes.get("location")
+            # A project with an empty location is a 'standalone' project, like
+            # user-config or qt. It has no build dir.
+            # If we tried to set a build dir for user-config, we would then
+            # try to inherit it, with either weird or wrong consequences.
+ if location and location == attributes.get("project-root"):
+ # Re-read the project id, since it might have been changed in
+ # the project's attributes.
+ id = attributes.get('id')
+
+ # This is Jamroot.
+ if id:
+ if explicit_build_dir and os.path.isabs(explicit_build_dir):
+ self.registry.manager.errors()(
+"""Absolute directory specified via 'build-dir' project attribute
+Don't know how to combine that with the --build-dir option.""")
+
+ rid = id
+ if rid[0] == '/':
+ rid = rid[1:]
+
+ p = os.path.join(self.registry.global_build_dir, rid)
+ if explicit_build_dir:
+ p = os.path.join(p, explicit_build_dir)
+ attributes.set("build-dir", p, exact=1)
+ elif explicit_build_dir:
+ self.registry.manager.errors()(
+"""When --build-dir is specified, the 'build-dir'
+attribute is allowed only for top-level 'project' invocations""")
+
+ def constant(self, name, value):
+ """Declare and set a project global constant.
+ Project global constants are normal variables but should
+ not be changed. They are applied to every child Jamfile."""
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(value, basestring)
+ self.registry.current().add_constant(name[0], value)
+
+ def path_constant(self, name, value):
+ """Declare and set a project global constant, whose value is a path. The
+ path is adjusted to be relative to the invocation directory. The given
+ value path is taken to be either absolute, or relative to this project
+ root."""
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(value, basestring)
+ if len(value) > 1:
+ self.registry.manager.errors()("path constant should have one element")
+ self.registry.current().add_constant(name[0], value, path=1)
+
+ def use_project(self, id, where):
+ # See comment in 'load' for explanation why we record the
+ # parameters as opposed to loading the project now.
+ assert is_iterable_typed(id, basestring)
+ assert is_iterable_typed(where, basestring)
+ m = self.registry.current().project_module()
+ self.registry.used_projects[m].append((id[0], where[0]))
+
+ def build_project(self, dir):
+ assert is_iterable_typed(dir, basestring)
+ jamfile_module = self.registry.current().project_module()
+ attributes = self.registry.attributes(jamfile_module)
+ now = attributes.get("projects-to-build")
+ attributes.set("projects-to-build", now + dir, exact=True)
+
+ def explicit(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
+ self.registry.current().mark_targets_as_explicit(target_names)
+
+ def always(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
+ self.registry.current().mark_targets_as_always(target_names)
+
+ def glob(self, wildcards, excludes=None):
+ assert is_iterable_typed(wildcards, basestring)
+        assert is_iterable_typed(excludes, basestring) or excludes is None
+ return self.registry.glob_internal(self.registry.current(),
+ wildcards, excludes, "glob")
+
+ def glob_tree(self, wildcards, excludes=None):
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring) or excludes is None
+ bad = 0
+ for p in wildcards:
+ if os.path.dirname(p):
+ bad = 1
+
+ if excludes:
+ for p in excludes:
+ if os.path.dirname(p):
+ bad = 1
+
+ if bad:
+ self.registry.manager.errors()(
+"The patterns to 'glob-tree' may not include directory")
+ return self.registry.glob_internal(self.registry.current(),
+ wildcards, excludes, "glob_tree")
+
+
+ def using(self, toolset, *args):
+        # The module referred to by 'using' can be placed in
+        # the same directory as the Jamfile, and the user
+        # will expect the module to be found even though
+        # the directory is not in BOOST_BUILD_PATH.
+        # So temporarily change the search path.
+ assert is_iterable_typed(toolset, basestring)
+ current = self.registry.current()
+ location = current.get('location')
+
+ m = self.registry.load_module(toolset[0], [location])
+ if "init" not in m.__dict__:
+ self.registry.manager.errors()(
+ "Tool module '%s' does not define the 'init' method" % toolset[0])
+ m.init(*args)
+
+ # The above might have clobbered .current-project. Restore the correct
+ # value.
+ self.registry.set_current(current)
+
+ def import_(self, name, names_to_import=None, local_names=None):
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(names_to_import, basestring) or names_to_import is None
+        assert is_iterable_typed(local_names, basestring) or local_names is None
+ name = name[0]
+ py_name = name
+ if py_name == "os":
+ py_name = "os_j"
+ jamfile_module = self.registry.current().project_module()
+ attributes = self.registry.attributes(jamfile_module)
+ location = attributes.get("location")
+
+ saved = self.registry.current()
+
+ m = self.registry.load_module(py_name, [location])
+
+ for f in m.__dict__:
+ v = m.__dict__[f]
+ f = f.replace("_", "-")
+ if callable(v):
+ qn = name + "." + f
+ self._import_rule(jamfile_module, qn, v)
+ record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v)
+
+
+ if names_to_import:
+ if not local_names:
+ local_names = names_to_import
+
+ if len(names_to_import) != len(local_names):
+ self.registry.manager.errors()(
+"""The number of names to import and local names do not match.""")
+
+ for n, l in zip(names_to_import, local_names):
+ self._import_rule(jamfile_module, l, m.__dict__[n])
+
+ self.registry.set_current(saved)
+
+ def conditional(self, condition, requirements):
+ """Calculates conditional requirements for multiple requirements
+ at once. This is a shorthand to be reduce duplication and to
+ keep an inline declarative syntax. For example:
+
+ lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
+ <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
+ """
+ assert is_iterable_typed(condition, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ c = string.join(condition, ",")
+ if c.find(":") != -1:
+ return [c + r for r in requirements]
+ else:
+ return [c + ":" + r for r in requirements]
+
+ def option(self, name, value):
+ assert is_iterable(name) and isinstance(name[0], basestring)
+ assert is_iterable(value) and isinstance(value[0], basestring)
+ name = name[0]
+ if not name in ["site-config", "user-config", "project-config"]:
+ get_manager().errors()("The 'option' rule may be used only in site-config or user-config")
+
+ option.set(name, value[0])
diff --git a/src/boost/tools/build/src/build/property-set.jam b/src/boost/tools/build/src/build/property-set.jam
new file mode 100644
index 000000000..ae6dd2ca4
--- /dev/null
+++ b/src/boost/tools/build/src/build/property-set.jam
@@ -0,0 +1,591 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import feature ;
+import path ;
+import project ;
+import property ;
+import sequence ;
+import set ;
+import option ;
+
+# Class for storing a set of properties.
+#
+# There is a 1<->1 correspondence between identity and value. No two instances
+# of the class are equal. To maintain this property, the 'property-set.create'
+# rule should be used to create new instances. Instances are immutable.
+#
+# Each property is classified with regard to its effect on build results.
+# Incidental properties have no effect on build results, from B2's
+# point of view. Others are either free or non-free; we refer to non-free
+# ones as 'base'. Each property belongs to exactly one of those categories.
+#
+# It is possible to get a list of properties belonging to each category as
+# well as a list of properties with a specific attribute.
+#
+# Several operations, like 'refine' and 'as-path', are provided. They all use
+# caching whenever possible.
+#
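+# A minimal usage sketch (assuming the standard <variant> and <threading>
+# features are defined):
+#
+#   local ps   = [ property-set.create <variant>debug <threading>multi ] ;
+#   local same = [ property-set.create <threading>multi <variant>debug ] ;
+#   # 'create' sorts and caches raw properties, so both calls return the
+#   # same instance and plain identity comparison works:
+#   if $(ps) = $(same) { ECHO "same property-set instance" ; }
+#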
+class property-set
+{
+ import errors ;
+ import feature ;
+ import modules ;
+ import path ;
+ import property ;
+ import property-set ;
+ import set ;
+
+ rule __init__ ( raw-properties * )
+ {
+ self.raw = $(raw-properties) ;
+
+ for local p in $(raw-properties)
+ {
+ if ! $(p:G)
+ {
+ errors.error "Invalid property: '$(p)'" ;
+ }
+ }
+ }
+
+ # Returns Jam list of stored properties.
+ #
+ rule raw ( )
+ {
+ return $(self.raw) ;
+ }
+
+ rule str ( )
+ {
+ return "[" $(self.raw) "]" ;
+ }
+
+ # Returns properties that are neither incidental nor free.
+ #
+ rule base ( )
+ {
+ if ! $(self.base-initialized)
+ {
+ init-base ;
+ }
+ return $(self.base) ;
+ }
+
+ # Returns free properties which are not incidental.
+ #
+ rule free ( )
+ {
+ if ! $(self.base-initialized)
+ {
+ init-base ;
+ }
+ return $(self.free) ;
+ }
+
+ # Returns relevant base properties. This is used for computing
+ # target paths, so it must return the expanded set of relevant
+ # properties.
+ #
+ rule base-relevant ( )
+ {
+ if ! $(self.relevant-initialized)
+ {
+ init-relevant ;
+ }
+ return $(self.base-relevant) ;
+ }
+
+ # Returns all properties marked as relevant by features-ps
+ # Does not attempt to expand features-ps in any way, as
+ # this matches what virtual-target.register needs.
+ #
+ rule relevant ( features-ps )
+ {
+ if ! $(self.relevant.$(features-ps))
+ {
+ local result ;
+ local features = [ $(features-ps).get <relevant> ] ;
+ features = <$(features)> ;
+ local ignore-relevance = [ modules.peek
+ property-set : .ignore-relevance ] ;
+ for local p in $(self.raw)
+ {
+ if $(ignore-relevance) || $(p:G) in $(features)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ if ! ( incidental in $(att) )
+ {
+ result += $(p) ;
+ }
+ }
+ }
+ self.relevant.$(features-ps) = [ property-set.create $(result) ] ;
+ }
+ return $(self.relevant.$(features-ps)) ;
+ }
+
+ # Returns dependency properties.
+ #
+ rule dependency ( )
+ {
+ if ! $(self.dependency-initialized)
+ {
+ init-dependency ;
+ }
+ return $(self.dependency) ;
+ }
+
+ rule non-dependency ( )
+ {
+ if ! $(self.dependency-initialized)
+ {
+ init-dependency ;
+ }
+ return $(self.non-dependency) ;
+ }
+
+ rule conditional ( )
+ {
+ if ! $(self.conditional-initialized)
+ {
+ init-conditional ;
+ }
+ return $(self.conditional) ;
+ }
+
+ rule non-conditional ( )
+ {
+ if ! $(self.conditional-initialized)
+ {
+ init-conditional ;
+ }
+ return $(self.non-conditional) ;
+ }
+
+ # Returns incidental properties.
+ #
+ rule incidental ( )
+ {
+ if ! $(self.base-initialized)
+ {
+ init-base ;
+ }
+ return $(self.incidental) ;
+ }
+
+ rule refine ( ps )
+ {
+ if ! $(self.refined.$(ps))
+ {
+ local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
+ if $(r[1]) != "@error"
+ {
+ self.refined.$(ps) = [ property-set.create $(r) ] ;
+ }
+ else
+ {
+ self.refined.$(ps) = $(r) ;
+ }
+ }
+ return $(self.refined.$(ps)) ;
+ }
+
+ rule expand ( )
+ {
+ if ! $(self.expanded)
+ {
+ self.expanded = [ property-set.create [ feature.expand $(self.raw) ]
+ ] ;
+ }
+ return $(self.expanded) ;
+ }
+
+ rule expand-composites ( )
+ {
+ if ! $(self.composites)
+ {
+ self.composites = [ property-set.create
+ [ feature.expand-composites $(self.raw) ] ] ;
+ }
+ return $(self.composites) ;
+ }
+
+ rule evaluate-conditionals ( context ? )
+ {
+ context ?= $(__name__) ;
+ if ! $(self.evaluated.$(context))
+ {
+ self.evaluated.$(context) = [ property-set.create
+ [ property.evaluate-conditionals-in-context $(self.raw) : [
+ $(context).raw ] ] ] ;
+ }
+ return $(self.evaluated.$(context)) ;
+ }
+
+ rule propagated ( )
+ {
+ if ! $(self.propagated-ps)
+ {
+ local result ;
+ for local p in $(self.raw)
+ {
+ if propagated in [ feature.attributes $(p:G) ]
+ {
+ result += $(p) ;
+ }
+ }
+ self.propagated-ps = [ property-set.create $(result) ] ;
+ }
+ return $(self.propagated-ps) ;
+ }
+
+ rule add-defaults ( )
+ {
+ if ! $(self.defaults)
+ {
+ self.defaults = [ property-set.create
+ [ feature.add-defaults $(self.raw) ] ] ;
+ }
+ return $(self.defaults) ;
+ }
+
+ rule as-path ( )
+ {
+ if ! $(self.as-path)
+ {
+ self.as-path = [ property.as-path [ base-relevant ] ] ;
+ }
+ return $(self.as-path) ;
+ }
+
+ # Computes the path to be used for a target with the given properties.
+ # Returns a list of
+ # - the computed path
+ # - if the path is relative to the build directory, a value of 'true'.
+ #
+ rule target-path ( )
+ {
+ if ! $(self.target-path)
+ {
+ # The <location> feature can be used to explicitly change the
+ # location of generated targets.
+ local l = [ get <location> ] ;
+ if $(l)
+ {
+ self.target-path = $(l) ;
+ }
+ else
+ {
+ local p = [ property-set.hash-maybe [ as-path ] ] ;
+
+                # A really ugly hack. The Boost regression test system requires
+                # specific target paths, and it seems that changing it to handle
+                # another directory layout is really hard. For that reason, we
+                # teach V2 to do the things the regression system requires. The
+                # value of '<location-prefix>' is prepended to the path.
+ local prefix = [ get <location-prefix> ] ;
+ if $(prefix)
+ {
+ self.target-path = [ path.join $(prefix) $(p) ] ;
+ }
+ else
+ {
+ self.target-path = $(p) ;
+ }
+ if ! $(self.target-path)
+ {
+ self.target-path = . ;
+ }
+ # The path is relative to build dir.
+ self.target-path += true ;
+ }
+ }
+ return $(self.target-path) ;
+ }
+
+ rule add ( ps )
+ {
+ if ! $(self.added.$(ps))
+ {
+ self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ]
+ ;
+ }
+ return $(self.added.$(ps)) ;
+ }
+
+ rule add-raw ( properties * )
+ {
+ return [ add [ property-set.create $(properties) ] ] ;
+ }
+
+ # Returns all values of 'feature'.
+ #
+ rule get ( feature )
+ {
+ if ! $(self.map-built)
+ {
+ # For each feature, create a member var and assign all values to it.
+ # Since all regular member vars start with 'self', there will be no
+ # conflicts between names.
+ self.map-built = true ;
+ for local v in $(self.raw)
+ {
+ $(v:G) += $(v:G=) ;
+ }
+ }
+ return $($(feature)) ;
+ }
+
+ # Returns true if the property-set contains all the
+ # specified properties.
+ #
+ rule contains-raw ( properties * )
+ {
+ if $(properties) in $(self.raw)
+ {
+ return true ;
+ }
+ }
+
+ # Returns true if the property-set has values for
+ # all the specified features
+ #
+ rule contains-features ( features * )
+ {
+ if $(features) in $(self.raw:G)
+ {
+ return true ;
+ }
+ }
+
+ # private
+
+ rule init-base ( )
+ {
+ for local p in $(self.raw)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ # A feature can be both incidental and free, in which case we add it
+ # to incidental.
+ if incidental in $(att)
+ {
+ self.incidental += $(p) ;
+ }
+ else if free in $(att)
+ {
+ self.free += $(p) ;
+ }
+ else
+ {
+ self.base += $(p) ;
+ }
+ }
+ self.base-initialized = true ;
+ }
+
+ rule init-relevant ( )
+ {
+ local relevant-features = [ get <relevant> ] ;
+ relevant-features = [ feature.expand-relevant $(relevant-features) ] ;
+ relevant-features = <$(relevant-features)> ;
+ ignore-relevance = [ modules.peek property-set : .ignore-relevance ] ;
+ for local p in $(self.raw)
+ {
+ if $(ignore-relevance) || $(p:G) in $(relevant-features)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ if ! ( incidental in $(att) )
+ {
+ self.relevant += $(p) ;
+ if ! ( free in $(att) )
+ {
+ self.base-relevant += $(p) ;
+ }
+ }
+ }
+ }
+ self.relevant-initialized = true ;
+ }
+
+ rule init-dependency ( )
+ {
+ for local p in $(self.raw)
+ {
+ if dependency in [ feature.attributes $(p:G) ]
+ {
+ self.dependency += $(p) ;
+ }
+ else
+ {
+ self.non-dependency += $(p) ;
+ }
+ }
+ self.dependency-initialized = true ;
+ }
+
+ rule init-conditional ( )
+ {
+ for local p in $(self.raw)
+ {
+ # TODO: Note that non-conditional properties may contain colon (':')
+ # characters as well, e.g. free or indirect properties. Indirect
+ # properties for example contain a full Jamfile path in their value
+ # which on Windows file systems contains ':' as the drive separator.
+ if ( [ MATCH "(:)" : $(p:G=) ] && ! ( free in [ feature.attributes $(p:G) ] ) ) || $(p:G) = <conditional>
+ {
+ self.conditional += $(p) ;
+ }
+ else
+ {
+ self.non-conditional += $(p) ;
+ }
+ }
+ self.conditional-initialized = true ;
+ }
+}
+
+# This is a temporary measure to help users work around
+# any problems. Remove it once we've verified that
+# everything works.
+if --ignore-relevance in [ modules.peek : ARGV ]
+{
+ .ignore-relevance = true ;
+}
+
+# Creates a new 'property-set' instance for the given raw properties or returns
+# an already existing one.
+#
+rule create ( raw-properties * )
+{
+ raw-properties = [ sequence.unique
+ [ sequence.insertion-sort $(raw-properties) ] ] ;
+
+ local key = $(raw-properties:J=-:E=) ;
+
+ if ! $(.ps.$(key))
+ {
+ .ps.$(key) = [ new property-set $(raw-properties) ] ;
+ }
+ return $(.ps.$(key)) ;
+}
+NATIVE_RULE property-set : create ;
+
+if [ HAS_NATIVE_RULE class@property-set : get : 1 ]
+{
+ NATIVE_RULE class@property-set : get ;
+}
+
+if [ HAS_NATIVE_RULE class@property-set : contains-features : 1 ]
+{
+ NATIVE_RULE class@property-set : contains-features ;
+}
+
+# Creates a new 'property-set' instance after checking that all properties are
+# valid and converting implicit properties into gristed form.
+#
+rule create-with-validation ( raw-properties * )
+{
+ property.validate $(raw-properties) ;
+ return [ create [ property.make $(raw-properties) ] ] ;
+}
+
+
+# Creates a property-set from the input given by the user, in the context of
+# 'jamfile-module' at 'location'.
+#
+rule create-from-user-input ( raw-properties * : jamfile-module location )
+{
+ local project-id = [ project.attribute $(jamfile-module) id ] ;
+ project-id ?= [ path.root $(location) [ path.pwd ] ] ;
+ return [ property-set.create [ property.translate $(raw-properties)
+ : $(project-id) : $(location) : $(jamfile-module) ] ] ;
+}
+
+
+# Refines requirements with requirements provided by the user. The
+# "-<property>value" syntax in the specification is handled specially: it
+# removes the given requirements.
+# - parent-requirements -- property-set object with requirements to refine.
+# - specification -- string list of requirements provided by the user.
+# - project-module -- module to which context indirect features will be
+# bound.
+# - location -- path to which path features are relative.
+#
+rule refine-from-user-input ( parent-requirements : specification * :
+ project-module : location )
+{
+ if ! $(specification)
+ {
+ return $(parent-requirements) ;
+ }
+ else
+ {
+ local add-requirements ;
+ local remove-requirements ;
+
+ for local r in $(specification)
+ {
+ local m = [ MATCH "^-(.*)" : $(r) ] ;
+ if $(m)
+ {
+ remove-requirements += $(m) ;
+ }
+ else
+ {
+ add-requirements += $(r) ;
+ }
+ }
+
+ if $(remove-requirements)
+ {
+ # Need to create a property set, so that path features and indirect
+ # features are translated just like they are in project
+ # requirements.
+ local ps = [ property-set.create-from-user-input
+ $(remove-requirements) : $(project-module) $(location) ] ;
+
+ parent-requirements = [ property-set.create
+ [ set.difference [ $(parent-requirements).raw ]
+ : [ $(ps).raw ] ] ] ;
+ specification = $(add-requirements) ;
+ }
+
+ local requirements = [ property-set.create-from-user-input
+ $(specification) : $(project-module) $(location) ] ;
+
+ return [ $(parent-requirements).refine $(requirements) ] ;
+ }
+}
+
+
+# Returns a property-set with an empty set of properties.
+#
+rule empty ( )
+{
+ if ! $(.empty)
+ {
+ .empty = [ create ] ;
+ }
+ return $(.empty) ;
+}
+
+
+if [ option.get hash : : yes ] = yes
+{
+ rule hash-maybe ( path ? )
+ {
+ path ?= "" ;
+ return [ MD5 $(path) ] ;
+ }
+}
+else
+{
+ rule hash-maybe ( path ? )
+ {
+ return $(path) ;
+ }
+}
diff --git a/src/boost/tools/build/src/build/property.jam b/src/boost/tools/build/src/build/property.jam
new file mode 100644
index 000000000..35bf96281
--- /dev/null
+++ b/src/boost/tools/build/src/build/property.jam
@@ -0,0 +1,977 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+import indirect ;
+import path ;
+import regex ;
+import string ;
+import sequence ;
+import set ;
+import utility ;
+
+
+# Refines 'properties' by overriding any non-free and non-conditional properties
+# for which a different value is specified in 'requirements'. Returns the
+# resulting list of properties.
+#
+rule refine ( properties * : requirements * )
+{
+ local result ;
+ local unset ;
+
+ # Collect all non-free features in requirements
+ for local r in $(requirements)
+ {
+ # Do not consider conditional requirements.
+ if ! [ MATCH "(:<)" : $(r:G=) ] && ! free in [ feature.attributes $(r:G) ]
+ {
+ if ! $(r) in $(properties)
+ {
+ # Kill subfeatures of properties that we're changing
+ local sub = [ modules.peek feature : $(r:G).subfeatures ] ;
+ if $(sub)
+ {
+ # non-specific subfeatures are still valid
+ sub = [ MATCH "(.*:.*)" : $(sub) ] ;
+ local name = [ utility.ungrist $(r:G) ] ;
+ unset += <$(name)-$(sub)> ;
+ }
+ }
+ unset += $(r:G) ;
+ }
+ }
+
+ # Remove properties that are overridden by requirements
+ for local p in $(properties)
+ {
+ if [ MATCH "(:<)" : $(p:G=) ] || ! $(p:G) in $(unset)
+ {
+ result += $(p) ;
+ }
+ }
+
+ return [ sequence.unique $(result) $(requirements) ] ;
+}
+
+
+# Removes all conditional properties whose conditions are not met. For those
+# with met conditions, removes the condition. Properties in conditions are
+# looked up in 'context'.
+#
+rule evaluate-conditionals-in-context ( properties * : context * )
+{
+ local base ;
+ local conditionals ;
+ local indirect ;
+ for local p in $(properties)
+ {
+ if [ MATCH "(:<)" : $(p) ] && ! free in [ feature.attributes $(p:G) ]
+ {
+ conditionals += $(p) ;
+ }
+ else if $(p:G) = <conditional>
+ {
+ indirect += $(p) ;
+ }
+ else
+ {
+ base += $(p) ;
+ }
+ }
+
+ local result = $(base) ;
+ for local p in $(conditionals)
+ {
+ # Separate condition and property.
+ local s = [ MATCH "^(.*):(<.*)" : $(p) ] ;
+ # Split condition into individual properties.
+ local condition = [ regex.split $(s[1]) "," ] ;
+ # Evaluate condition.
+ if ! [ MATCH ^(!).* : $(condition:G=) ]
+ {
+ # Only positive checks
+ if $(condition) in $(context)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ else
+ {
+ # Have negative checks
+ local fail ;
+ while $(condition)
+ {
+ local c = $(condition[1]) ;
+ local m = [ MATCH ^!(.*) : $(c) ] ;
+ if $(m)
+ {
+ local p = $(m:G=$(c:G)) ;
+ if $(p) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ else
+ {
+ if ! $(c) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ condition = $(condition[2-]) ;
+ }
+ if ! $(fail)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ }
+ # Import here to avoid cyclic dependency
+ import project ;
+ for local i in [ MATCH "^@(.*)" : $(indirect:G=) ]
+ {
+ # If the rule was set in a project module, translate paths
+ # relative to that project's location.
+ local m = [ indirect.get-module $(i) ] ;
+ local p = [ project.target $(m) : allow-missing ] ;
+ local new = [ indirect.call $(i) $(context) ] ;
+ if $(p) && [ $(p).location ]
+ {
+ local location = [ $(p).location ] ;
+ local project-id = [ project.attribute $(m) id ] ;
+ project-id ?= [ path.root $(location) [ path.pwd ] ] ;
+ result +=
+ [ translate $(new) : $(project-id) : $(location) : $(m) ] ;
+ }
+ else
+ {
+ result += $(new) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns <relevant> properties indicating how the conditionals in
+# properties affect feature relevance. If the optional argument cond
+# is passed, it is treated as extra conditions for all properties.
+#
+rule evaluate-conditional-relevance ( properties * : cond * )
+{
+ cond = [ sequence.transform utility.ungrist : $(cond:G) ] ;
+ local result ;
+ for local p in $(properties)
+ {
+ # Separate condition and property.
+ local s = [ MATCH "^(.*):(<.*)" : $(p) ] ;
+ if ! $(s) || free in [ feature.attributes $(p:G) ]
+ {
+ local value = [ utility.ungrist $(p:G) ] ;
+ result += <relevant>$(value):<relevant>$(cond) ;
+ }
+ else
+ {
+ local condition = [ regex.split $(s[1]) "," ] ;
+ condition = [ MATCH "^!?(.*)" : $(condition) ] ;
+ condition = [ sequence.transform utility.ungrist : $(condition:G) ] $(cond) ;
+ local value = [ utility.ungrist $(s[2]:G) ] ;
+ result += <relevant>$(value):<relevant>$(condition) ;
+ }
+ }
+ return [ sequence.unique $(result) ] ;
+}
+
+
+rule expand-subfeatures-in-conditions ( properties * )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local s = [ MATCH "^(.*):(<.*)" : $(p) ] ;
+ if ! $(s)
+ {
+ result += $(p) ;
+ }
+ else
+ {
+ local condition = $(s[1]) ;
+ local value = $(s[2]) ;
+ # Condition might include several elements.
+ condition = [ regex.split $(condition) "," ] ;
+ local e ;
+ for local c in $(condition)
+ {
+ # It is common for a condition to include a toolset or
+ # subfeatures that have not been defined. In that case we want
+ # the condition to simply 'never be satisfied' and validation
+ # would only produce a spurious error so we prevent it by
+ # passing 'true' as the second parameter.
+ e += [ feature.expand-subfeatures $(c) : true ] ;
+ }
+ if $(e) = $(condition)
+ {
+ # This is just an optimization and possibly a premature one at
+ # that. (todo) (12.07.2008.) (Jurko)
+ result += $(p) ;
+ }
+ else
+ {
+ result += "$(e:J=,):$(value)" ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for as-path, below. Orders properties with the implicit ones first, and
+# within the two sections in alphabetical order of feature name.
+#
+local rule path-order ( x y )
+{
+ if $(y:G) && ! $(x:G)
+ {
+ return true ;
+ }
+ else if $(x:G) && ! $(y:G)
+ {
+ return ;
+ }
+ else
+ {
+ if ! $(x:G)
+ {
+ x = [ feature.expand-subfeatures $(x) ] ;
+ y = [ feature.expand-subfeatures $(y) ] ;
+ }
+
+ if $(x[1]) < $(y[1])
+ {
+ return true ;
+ }
+ }
+}
+
+
+local rule abbreviate-dashed ( string )
+{
+ local r ;
+ for local part in [ regex.split $(string) - ]
+ {
+ r += [ string.abbreviate $(part) ] ;
+ }
+ return $(r:J=-) ;
+}
+
+
+local rule identity ( string )
+{
+ return $(string) ;
+}
+
+
+if --abbreviate-paths in [ modules.peek : ARGV ]
+{
+ .abbrev = abbreviate-dashed ;
+}
+else
+{
+ .abbrev = identity ;
+}
+
+
+# Returns a path representing the given expanded property set.
+#
+rule as-path ( properties * )
+{
+ local entry = .result.$(properties:J=-) ;
+
+ if ! $($(entry))
+ {
+ # Trim redundancy.
+ properties = [ feature.minimize $(properties) ] ;
+
+ # Sort according to path-order.
+ properties = [ sequence.insertion-sort $(properties) : path-order ] ;
+
+ local components ;
+ for local p in $(properties)
+ {
+ if ! hidden in [ feature.attributes $(p:G) ]
+ {
+ if $(p:G)
+ {
+ local f = [ utility.ungrist $(p:G) ] ;
+ p = $(f)-$(p:G=) ;
+ }
+ components += [ $(.abbrev) $(p) ] ;
+ }
+ }
+
+ $(entry) = $(components:J=/) ;
+ }
+
+ return $($(entry)) ;
+}
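+# Usage sketch (illustrative; mirrors the checks in __test__ below):
+#
+#   as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
+#
+# yields "gcc/debug/rtti-off": implicit values come first, properties implied
+# by the variant are minimized away, and the remaining features are rendered
+# as <feature name>-<value> path components.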
+
+
+# Exit with error if property is not valid.
+#
+local rule validate1 ( property )
+{
+ local msg ;
+ if $(property:G)
+ {
+ local feature = $(property:G) ;
+ local value = $(property:G=) ;
+
+ if ! [ feature.valid $(feature) ]
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "unknown feature '$(feature)'" ;
+ }
+ else if $(value) && ! free in [ feature.attributes $(feature) ]
+ {
+ feature.validate-value-string $(feature) $(value) ;
+ }
+ else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "No value specified for feature '$(feature)'" ;
+ }
+ }
+ else
+ {
+ local feature = [ feature.implied-feature $(property) ] ;
+ feature.validate-value-string $(feature) $(property) ;
+ }
+ if $(msg)
+ {
+ import errors ;
+ errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
+ }
+}
+
+
+rule validate ( properties * )
+{
+ for local p in $(properties)
+ {
+ validate1 $(p) ;
+ }
+}
+
+
+rule validate-property-sets ( property-sets * )
+{
+ for local s in $(property-sets)
+ {
+ validate [ feature.split $(s) ] ;
+ }
+}
+
+
+# Expands any implicit property values in the given property 'specification' so
+# they explicitly state their feature.
+#
+rule make ( specification * )
+{
+ local result ;
+ for local e in $(specification)
+ {
+ if $(e:G)
+ {
+ result += $(e) ;
+ }
+ else if [ feature.is-implicit-value $(e) ]
+ {
+ local feature = [ feature.implied-feature $(e) ] ;
+ result += $(feature)$(e) ;
+ }
+ else
+ {
+ import errors ;
+ errors.error "'$(e)' is not a valid property specification" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that do not
+# have their attributes listed in 'attributes'.
+#
+rule remove ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that have
+# their attributes listed in 'attributes'.
+#
+rule take ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Selects properties corresponding to any of the given features.
+#
+rule select ( features * : properties * )
+{
+ local result ;
+
+ # Add any missing angle brackets.
+ local empty = "" ;
+ features = $(empty:G=$(features)) ;
+
+ for local p in $(properties)
+ {
+ if $(p:G) in $(features)
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a modified version of properties with all values of the given feature
+# replaced by the given value. If 'value' is empty the feature will be removed.
+#
+rule change ( properties * : feature value ? )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ result += $(value:G=$(feature)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'property' is a conditional property, returns the condition and the
+# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
+# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
+# string.
+#
+rule split-conditional ( property )
+{
+ return [ MATCH "^(.+):(<.+)" : $(property) ] ;
+}
+
+
+rule translate-path-value ( value : path )
+{
+ local t ;
+ for local v in [ regex.split $(value) "&&" ]
+ {
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
+ }
+ return $(t:TJ="&&") ;
+}
+
+rule translate-dependency-value ( value : project-id : project-location )
+{
+ local split-target = [ regex.match ^(.*)//(.*) : $(value) ] ;
+ if $(split-target)
+ {
+ local rooted = [ path.root [ path.make $(split-target[1]) ]
+ [ path.root $(project-location) [ path.pwd ] ] ] ;
+ return $(rooted)//$(split-target[2]) ;
+ }
+ else if [ path.is-rooted $(value) ]
+ {
+ return $(value) ;
+ }
+ else
+ {
+ return $(project-id)//$(value) ;
+ }
+}
+
+rule translate-indirect-value ( rulename : context-module )
+{
+ if [ MATCH "^([^%]*)%([^%]+)$" : $(rulename) ]
+ {
+ # Rule is already in the 'indirect-rule' format.
+ return @$(rulename) ;
+ }
+ else
+ {
+ local v ;
+ if ! [ MATCH "([.])" : $(rulename) ]
+ {
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
+ rulename = $(context-module).$(rulename) ;
+ }
+ v = [ indirect.make $(rulename) : $(context-module) ] ;
+ return @$(v) ;
+ }
+
+}
+
+# Equivalent to calling all of:
+#   translate-paths
+#   translate-indirect
+#   translate-dependencies
+#   expand-subfeatures-in-conditions
+#   make
+#
+rule translate ( properties * : project-id : project-location : context-module )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition property ;
+
+ if $(split)
+ {
+ condition = $(split[1]) ;
+ property = $(split[2]) ;
+
+ local e ;
+ for local c in [ regex.split $(condition) "," ]
+ {
+ e += [ feature.expand-subfeatures $(c) : true ] ;
+ }
+
+ condition = "$(e:J=,):" ;
+ }
+ else
+ {
+ property = $(p) ;
+ }
+
+ local feature = $(property:G) ;
+ if ! $(feature)
+ {
+ if [ feature.is-implicit-value $(property) ]
+ {
+ feature = [ feature.implied-feature $(property) ] ;
+ result += $(condition:E=)$(feature)$(property) ;
+ }
+ else
+ {
+ import errors ;
+ errors.error "'$(property)' is not a valid property specification" ;
+ }
+ }
+ else
+ {
+ local attributes = [ feature.attributes $(feature) ] ;
+ local value ;
+ # Only free features should be translated
+ if free in $(attributes)
+ {
+ if path in $(attributes)
+ {
+ value = [ translate-path-value $(property:G=) : $(project-location) ] ;
+ result += $(condition:E=)$(feature)$(value) ;
+ }
+ else if dependency in $(attributes)
+ {
+ value = [ translate-dependency-value $(property:G=) : $(project-id) : $(project-location) ] ;
+ result += $(condition:E=)$(feature)$(value) ;
+ }
+ else
+ {
+ local m = [ MATCH ^@(.+) : $(property:G=) ] ;
+ if $(m)
+ {
+ value = [ translate-indirect-value $(m) : $(context-module) ] ;
+ result += $(condition:E=)$(feature)$(value) ;
+ }
+ else
+ {
+ result += $(condition:E=)$(property) ;
+ }
+ }
+ }
+ else
+ {
+ result += $(condition:E=)$(property) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+# Interpret all path properties in 'properties' as relative to 'path'. The
+# property values are assumed to be in system-specific form, and will be
+# translated into normalized form.
+#
+rule translate-paths ( properties * : path )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = "$(split[1]):" ;
+ p = $(split[2]) ;
+ }
+
+ if path in [ feature.attributes $(p:G) ]
+ {
+ local values = [ regex.split $(p:TG=) "&&" ] ;
+ local t ;
+ for local v in $(values)
+ {
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
+ }
+ t = $(t:J="&&") ;
+ result += $(condition)$(t:TG=$(p:G)) ;
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Assumes that all feature values that start with '@' are names of rules, used
+# in 'context-module'. Such rules can be either local to the module or global.
+# Converts such values into 'indirect-rule' format (see indirect.jam), so they
+# can be called from other modules. Does nothing for such values that are
+# already in the 'indirect-rule' format.
+#
+rule translate-indirect ( specification * : context-module )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local m = [ MATCH ^@(.+) : $(p:G=) ] ;
+ if $(m)
+ {
+ local v ;
+ if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
+ {
+ # Rule is already in the 'indirect-rule' format.
+ v = $(m) ;
+ }
+ else
+ {
+ if ! [ MATCH "([.])" : $(m) ]
+ {
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
+ m = $(context-module).$(m) ;
+ }
+ v = [ indirect.make $(m) : $(context-module) ] ;
+ }
+
+ v = @$(v) ;
+ result += $(v:G=$(p:G)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Binds all dependency properties in a list relative to the given project.
+# Targets with absolute paths will be left unchanged and targets which have a
+# project specified will have the path to the project interpreted relative to
+# the specified location.
+#
+rule translate-dependencies ( specification * : project-id : location )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = "$(split[1]):" ;
+ p = $(split[2]) ;
+ }
+ if dependency in [ feature.attributes $(p:G) ]
+ {
+ local split-target = [ regex.match ^(.*)//(.*) : $(p:G=) ] ;
+ if $(split-target)
+ {
+ local rooted = [ path.root [ path.make $(split-target[1]) ]
+ [ path.root $(location) [ path.pwd ] ] ] ;
+ result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
+ }
+ else if [ path.is-rooted $(p:G=) ]
+ {
+ result += $(condition)$(p) ;
+ }
+ else
+ {
+ result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
+ }
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Class maintaining a property set -> string mapping.
+#
+class property-map
+{
+ import numbers ;
+ import sequence ;
+
+ rule __init__ ( )
+ {
+ self.next-flag = 1 ;
+ }
+
+ # Associate 'value' with 'properties'.
+ #
+ rule insert ( properties * : value )
+ {
+ self.all-flags += self.$(self.next-flag) ;
+ self.$(self.next-flag) = $(value) $(properties) ;
+
+ self.next-flag = [ numbers.increment $(self.next-flag) ] ;
+ }
+
+ # Returns the value associated with 'property-set' or any of its subsets. If
+ # more than one stored subset has a value assigned to it, returns the value
+ # for the longest such subset, if it is unique.
+ #
+ rule find ( property-set )
+ {
+ # First find all matches.
+ local matches ;
+ local match-ranks ;
+ for local i in $(self.all-flags)
+ {
+ local list = $($(i)) ;
+ if [ $(property-set).contains-raw $(list[2-]) ]
+ {
+ matches += $(list[1]) ;
+ match-ranks += [ sequence.length $(list) ] ;
+ }
+ }
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
+ if $(best[2])
+ {
+ import errors : error : errors.error ;
+ errors.error "Ambiguous key $(properties:J= :E=)" ;
+ }
+ return $(best) ;
+ }
+
+ # Returns the value associated with 'properties'. If 'value' parameter is
+ # given, replaces the found value.
+ #
+ rule find-replace ( properties * : value ? )
+ {
+ # First find all matches.
+ local matches ;
+ local match-ranks ;
+ for local i in $(self.all-flags)
+ {
+ if $($(i)[2-]) in $(properties)
+ {
+ matches += $(i) ;
+ match-ranks += [ sequence.length $($(i)) ] ;
+ }
+ }
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
+ if $(best[2])
+ {
+ import errors : error : errors.error ;
+ errors.error "Ambiguous key $(properties:J= :E=)" ;
+ }
+ local original = $($(best)[1]) ;
+ if $(value)-is-set
+ {
+ $(best) = $(value) $($(best)[2-]) ;
+ }
+ return $(original) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+ import feature ;
+
+ # Local rules must be explicitly re-imported.
+ import property : path-order abbreviate-dashed ;
+
+ feature.prepare-test property-test-temp ;
+
+ feature.feature toolset : gcc : implicit symmetric ;
+ feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
+ 3.0.2 : optional ;
+ feature.feature define : : free ;
+ feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
+ feature.feature optimization : on off ;
+ feature.feature variant : debug release : implicit composite symmetric ;
+ feature.feature rtti : on off : link-incompatible ;
+
+ feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
+ feature.compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+
+ assert.true path-order $(test-space) debug <define>foo ;
+ assert.false path-order $(test-space) <define>foo debug ;
+ assert.true path-order $(test-space) gcc debug ;
+ assert.false path-order $(test-space) debug gcc ;
+ assert.true path-order $(test-space) <optimization>on <rtti>on ;
+ assert.false path-order $(test-space) <rtti>on <optimization>on ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
+ : refine <toolset>gcc <rtti>off
+ : <define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <optimization>on
+ : refine <toolset>gcc <optimization>off
+ : <optimization>on
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off
+ : refine <toolset>gcc : <rtti>off : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
+ : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+ : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+ : $(test-space) ;
+
+ assert.result <define>MY_RELEASE
+ : evaluate-conditionals-in-context
+ <variant>release,<rtti>off:<define>MY_RELEASE
+ : <toolset>gcc <variant>release <rtti>off ;
+
+ assert.result debug
+ : as-path <optimization>off <variant>debug
+ : $(test-space) ;
+
+ assert.result gcc/debug/rtti-off
+ : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
+ : $(test-space) ;
+
+ assert.result optmz-off : abbreviate-dashed optimization-off ;
+ assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
+
+ try ;
+ validate <feature>value : $(test-space) ;
+ catch "Invalid property '<feature>value': unknown feature 'feature'." ;
+
+ try ;
+ validate <rtti>default : $(test-space) ;
+ catch \"default\" is not a known value of feature <rtti> ;
+
+ validate <define>WHATEVER : $(test-space) ;
+
+ try ;
+ validate <rtti> : $(test-space) ;
+ catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
+
+ try ;
+ validate value : $(test-space) ;
+ catch \"value\" is not an implicit feature value ;
+
+ assert.result-set-equal <rtti>on
+ : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
+
+ assert.result-set-equal <include>a
+ : select include : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a
+ : select include bar : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a <toolset>gcc
+ : select include <bar> <toolset> : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <toolset>kylix <include>a
+ : change <toolset>gcc <include>a : <toolset> kylix ;
+
+ pm = [ new property-map ] ;
+ $(pm).insert <toolset>gcc : o ;
+ $(pm).insert <toolset>gcc <os>NT : obj ;
+ $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
+
+ assert.equal o : [ $(pm).find-replace <toolset>gcc ] ;
+
+ assert.equal obj : [ $(pm).find-replace <toolset>gcc <os>NT ] ;
+
+ try ;
+ $(pm).find-replace <toolset>gcc <os>NT <os>CYGWIN ;
+ catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
+
+ # Test ordinary properties.
+ assert.result : split-conditional <toolset>gcc ;
+
+ # Test properties with ":".
+ assert.result : split-conditional <define>"FOO=A::B" ;
+
+ # Test conditional feature.
+ assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+ : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
+
+ feature.finish-test property-test-temp ;
+}
diff --git a/src/boost/tools/build/src/build/property.py b/src/boost/tools/build/src/build/property.py
new file mode 100644
index 000000000..e7f5c982d
--- /dev/null
+++ b/src/boost/tools/build/src/build/property.py
@@ -0,0 +1,750 @@
+# Status: ported, except for tests.
+# Base revision: 64070
+#
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import re
+import sys
+from functools import total_ordering
+
+from b2.util.utility import *
+from b2.build import feature
+from b2.util import sequence, qualify_jam_action, is_iterable_typed
+import b2.util.set
+from b2.manager import get_manager
+
+
+__re_two_ampersands = re.compile ('&&')
+__re_comma = re.compile (',')
+__re_split_condition = re.compile ('(.*):(<.*)')
+__re_split_conditional = re.compile (r'(.+):<(.+)')
+__re_colon = re.compile (':')
+__re_has_condition = re.compile (r':<')
+__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
+
+_not_applicable_feature='not-applicable-in-this-context'
+feature.feature(_not_applicable_feature, [], ['free'])
+
+__abbreviated_paths = False
+
+
+class PropertyMeta(type):
+ """
+ This class exists to implement the isinstance() and issubclass()
+ hooks for the Property class. Since we've introduced the concept of
+ a LazyProperty, isinstance(p, Property) would otherwise fail when p is a
+ LazyProperty. Implementing both __instancecheck__ and __subclasscheck__
+ allows LazyProperty instances to pass the isinstance() and issubclass()
+ checks for the Property class.
+
+ Additionally, the __call__ method intercepts the call to the Property
+ constructor to ensure that calling Property with the same arguments
+ will always return the same Property instance.
+ """
+ _registry = {}
+ current_id = 1
+
+ def __call__(mcs, f, value, condition=None):
+ """
+ This intercepts the call to the Property() constructor.
+
+ This exists so that the same arguments will always return the same Property
+ instance. This allows us to give each instance a unique ID.
+ """
+ from b2.build.feature import Feature
+ if not isinstance(f, Feature):
+ f = feature.get(f)
+ if condition is None:
+ condition = []
+ key = (f, value) + tuple(sorted(condition))
+ if key not in mcs._registry:
+ instance = super(PropertyMeta, mcs).__call__(f, value, condition)
+ mcs._registry[key] = instance
+ return mcs._registry[key]
+
+ @staticmethod
+ def check(obj):
+ return (hasattr(obj, 'feature') and
+ hasattr(obj, 'value') and
+ hasattr(obj, 'condition'))
+
+ def __instancecheck__(self, instance):
+ return self.check(instance)
+
+ def __subclasscheck__(self, subclass):
+ return self.check(subclass)
+
+
+@total_ordering
+class Property(object):
+
+ __slots__ = ('feature', 'value', 'condition', '_to_raw', '_hash', 'id')
+ __metaclass__ = PropertyMeta
+
+ def __init__(self, f, value, condition=None):
+ assert(f.free or ':' not in value)
+ if condition is None:
+ condition = []
+
+ self.feature = f
+ self.value = value
+ self.condition = condition
+ self._hash = hash((self.feature, self.value) + tuple(sorted(self.condition)))
+ self.id = PropertyMeta.current_id
+ # increment the id counter.
+ # this allows us to take a list of Property
+ # instances and use their unique integer ID
+ # to create a key for PropertySet caching. This is
+ # much faster than string comparison.
+ PropertyMeta.current_id += 1
+
+ condition_str = ''
+ if condition:
+ condition_str = ",".join(str(p) for p in self.condition) + ':'
+
+ self._to_raw = '{}<{}>{}'.format(condition_str, f.name, value)
+
+ def to_raw(self):
+ return self._to_raw
+
+ def __str__(self):
+
+ return self._to_raw
+
+ def __hash__(self):
+ return self._hash
+
+ def __eq__(self, other):
+ return self._hash == other._hash
+
+ def __lt__(self, other):
+ return (self.feature.name, self.value) < (other.feature.name, other.value)
+
+
+@total_ordering
+class LazyProperty(object):
+ def __init__(self, feature_name, value, condition=None):
+ if condition is None:
+ condition = []
+
+ self.__property = Property(
+ feature.get(_not_applicable_feature), feature_name + value, condition=condition)
+ self.__name = feature_name
+ self.__value = value
+ self.__condition = condition
+ self.__feature = None
+
+ def __getattr__(self, item):
+ if self.__feature is None:
+ try:
+ self.__feature = feature.get(self.__name)
+ self.__property = Property(self.__feature, self.__value, self.__condition)
+ except KeyError:
+ pass
+ return getattr(self.__property, item)
+
+ def __hash__(self):
+ return hash(self.__property)
+
+ def __str__(self):
+ return self.__property._to_raw
+
+ def __eq__(self, other):
+ return self.__property == other
+
+ def __lt__(self, other):
+ return (self.feature.name, self.value) < (other.feature.name, other.value)
+
+
+def create_from_string(s, allow_condition=False,allow_missing_value=False):
+ assert isinstance(s, basestring)
+ assert isinstance(allow_condition, bool)
+ assert isinstance(allow_missing_value, bool)
+ condition = []
+ import types
+ if not isinstance(s, types.StringType):
+ print type(s)
+ if __re_has_condition.search(s):
+
+ if not allow_condition:
+ raise BaseException("Conditional property is not allowed in this context")
+
+ m = __re_separate_condition_and_property.match(s)
+ condition = m.group(1)
+ s = m.group(2)
+
+ # FIXME: break dependency cycle
+ from b2.manager import get_manager
+
+ if condition:
+ condition = [create_from_string(x) for x in condition.split(',')]
+
+ feature_name = get_grist(s)
+ if not feature_name:
+ if feature.is_implicit_value(s):
+ f = feature.implied_feature(s)
+ value = s
+ p = Property(f, value, condition=condition)
+ else:
+ raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
+ else:
+ value = get_value(s)
+ if not value and not allow_missing_value:
+ get_manager().errors()("Invalid property '%s' -- no value specified" % s)
+
+ if feature.valid(feature_name):
+ p = Property(feature.get(feature_name), value, condition=condition)
+ else:
+ # In case feature name is not known, it is wrong to do a hard error.
+ # Feature sets change depending on the toolset. So e.g.
+ # <toolset-X:version> is an unknown feature when using toolset Y.
+ #
+ # Ideally we would like to ignore this value, but most of
+ # Boost.Build code expects that we return a valid Property. For this
+ # reason we use a sentinel <not-applicable-in-this-context> feature.
+ #
+ # The underlying cause for this problem is that python port Property
+ # is more strict than its Jam counterpart and must always reference
+ # a valid feature.
+ p = LazyProperty(feature_name, value, condition=condition)
+
+ return p
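+# Usage sketch (illustrative; assumes the referenced features have already
+# been declared via b2.build.feature):
+#
+#   p = create_from_string('<define>FOO')
+#   c = create_from_string('<toolset>gcc:<define>BAR', allow_condition=True)
+#   # p.feature.name == 'define', p.value == 'FOO', p.condition == []
+#   # c carries <define>BAR with a condition list holding <toolset>gcc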
+
+def create_from_strings(string_list, allow_condition=False):
+ assert is_iterable_typed(string_list, basestring)
+ return [create_from_string(s, allow_condition) for s in string_list]
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __results
+
+ # A cache of results from as_path
+ __results = {}
+
+reset ()
+
+
+def set_abbreviated_paths(on=True):
+ global __abbreviated_paths
+ if on == 'off':
+ on = False
+ on = bool(on)
+ __abbreviated_paths = on
+
+
+def get_abbreviated_paths():
+ return __abbreviated_paths or '--abbreviated-paths' in sys.argv
+
+
+def path_order (x, y):
+ """ Helper for as_path, below. Orders properties with the implicit ones
+ first, and within the two sections in alphabetical order of feature
+ name.
+ """
+ if x == y:
+ return 0
+
+ xg = get_grist (x)
+ yg = get_grist (y)
+
+ if yg and not xg:
+ return -1
+
+ elif xg and not yg:
+ return 1
+
+ else:
+ if not xg:
+ x = feature.expand_subfeatures([x])
+ y = feature.expand_subfeatures([y])
+
+ if x < y:
+ return -1
+ elif x > y:
+ return 1
+ else:
+ return 0
+
+def identify(string):
+ return string
+
+# Uses Property
+def refine (properties, requirements):
+ """ Refines 'properties' by overriding any non-free properties
+ for which a different value is specified in 'requirements'.
+ Conditional requirements are just added without modification.
+ Returns the resulting list of properties.
+ """
+ assert is_iterable_typed(properties, Property)
+ assert is_iterable_typed(requirements, Property)
+ # The result has no duplicates, so we store it in a set
+ result = set()
+
+ # Records all requirements.
+ required = {}
+
+ # All the elements of requirements should be present in the result
+ # Record them so that we can handle 'properties'.
+ for r in requirements:
+ # Don't consider conditional requirements.
+ if not r.condition:
+ required[r.feature] = r
+
+ for p in properties:
+ # Skip conditional properties
+ if p.condition:
+ result.add(p)
+ # No processing for free properties
+ elif p.feature.free:
+ result.add(p)
+ else:
+ if p.feature in required:
+ result.add(required[p.feature])
+ else:
+ result.add(p)
+
+ return sequence.unique(list(result) + requirements)
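+# Usage sketch (illustrative; assumes the referenced features are declared):
+#
+#   props = create_from_strings(['<toolset>gcc', '<optimization>off'])
+#   reqs  = create_from_strings(['<optimization>on'])
+#   refined = refine(props, reqs)
+#   # 'refined' contains <toolset>gcc and <optimization>on: the non-free
+#   # 'optimization' value from the requirements overrides the original one.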
+
+def translate_paths (properties, path):
+ """ Interpret all path properties in 'properties' as relative to 'path'
+ The property values are assumed to be in system-specific form, and
+ will be translated into normalized form.
+ """
+ assert is_iterable_typed(properties, Property)
+ result = []
+
+ for p in properties:
+
+ if p.feature.path:
+ values = __re_two_ampersands.split(p.value)
+
+ new_value = "&&".join(os.path.normpath(os.path.join(path, v)) for v in values)
+
+ if new_value != p.value:
+ result.append(Property(p.feature, new_value, p.condition))
+ else:
+ result.append(p)
+
+ else:
+ result.append (p)
+
+ return result
+
+def translate_indirect(properties, context_module):
+ """Assumes that all feature values that start with '@' are
+ names of rules, used in 'context-module'. Such rules can be
+ either local to the module or global. Qualifies local rules
+ with the name of the module."""
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(context_module, basestring)
+ result = []
+ for p in properties:
+ if p.value[0] == '@':
+ q = qualify_jam_action(p.value[1:], context_module)
+ get_manager().engine().register_bjam_action(q)
+ result.append(Property(p.feature, '@' + q, p.condition))
+ else:
+ result.append(p)
+
+ return result
+
+def validate (properties):
+ """ Exit with error if any of the properties is not valid.
+ properties may be a single property or a sequence of properties.
+ """
+ if isinstance(properties, Property):
+ properties = [properties]
+ assert is_iterable_typed(properties, Property)
+ for p in properties:
+ __validate1(p)
+
+def expand_subfeatures_in_conditions (properties):
+ assert is_iterable_typed(properties, Property)
+ result = []
+ for p in properties:
+
+ if not p.condition:
+ result.append(p)
+ else:
+ expanded = []
+ for c in p.condition:
+ # It is common that the condition includes a toolset which
+ # was never defined, or mentions subfeatures which
+ # were never defined. In that case, validation would
+ # only produce a spurious error, so don't validate.
+ expanded.extend(feature.expand_subfeatures ([c], True))
+
+ # we need to keep LazyProperties lazy
+ if isinstance(p, LazyProperty):
+ value = p.value
+ feature_name = get_grist(value)
+ value = value.replace(feature_name, '')
+ result.append(LazyProperty(feature_name, value, condition=expanded))
+ else:
+ result.append(Property(p.feature, p.value, expanded))
+
+ return result
+
+# FIXME: this should go
+def split_conditional (property):
+ """ If 'property' is conditional property, returns
+ condition and the property, e.g
+ <variant>debug,<toolset>gcc:<inlining>full will become
+ <variant>debug,<toolset>gcc <inlining>full.
+ Otherwise, returns empty string.
+ """
+ assert isinstance(property, basestring)
+ m = __re_split_conditional.match (property)
+
+ if m:
+ return (m.group (1), '<' + m.group (2))
+
+ return None
+
+
+def select (features, properties):
+ """ Selects properties which correspond to any of the given features.
+ """
+ assert is_iterable_typed(properties, basestring)
+ result = []
+
+ # add any missing angle brackets
+ features = add_grist (features)
+
+ return [p for p in properties if get_grist(p) in features]
+
+def validate_property_sets (sets):
+ if __debug__:
+ from .property_set import PropertySet
+ assert is_iterable_typed(sets, PropertySet)
+ for s in sets:
+ validate(s.all())
+
+def evaluate_conditionals_in_context (properties, context):
+ """ Removes all conditional properties which conditions are not met
+ For those with met conditions, removes the condition. Properties
+ in conditions are looked up in 'context'
+ """
+ if __debug__:
+ from .property_set import PropertySet
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(context, PropertySet)
+ base = []
+ conditional = []
+
+ for p in properties:
+ if p.condition:
+ conditional.append (p)
+ else:
+ base.append (p)
+
+ result = base[:]
+ for p in conditional:
+
+ # Evaluate condition
+ # FIXME: probably inefficient
+ if all(x in context for x in p.condition):
+ result.append(Property(p.feature, p.value))
+
+ return result
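+# Usage sketch (illustrative; assumes the referenced features are declared and
+# uses property_set.create to build the context):
+#
+#   from b2.build import property_set
+#   ctx = property_set.create(
+#       create_from_strings(['<toolset>gcc', '<variant>release']))
+#   conditional = create_from_strings(
+#       ['<variant>release:<define>NDEBUG_APP'], True)
+#   evaluate_conditionals_in_context(conditional, ctx)
+#   # -> [<define>NDEBUG_APP], since the condition holds in 'ctx'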
+
+
+def change (properties, feature, value = None):
+ """ Returns a modified version of properties with all values of the
+ given feature replaced by the given value.
+ If 'value' is None the feature will be removed.
+ """
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(feature, basestring)
+ assert isinstance(value, (basestring, type(None)))
+ result = []
+
+ feature = add_grist (feature)
+
+ for p in properties:
+ if get_grist (p) == feature:
+ if value:
+ result.append (replace_grist (value, feature))
+
+ else:
+ result.append (p)
+
+ return result
+
+
+################################################################
+# Private functions
+
+def __validate1 (property):
+ """ Exit with error if property is not valid.
+ """
+ assert isinstance(property, Property)
+ msg = None
+
+ if not property.feature.free:
+ feature.validate_value_string (property.feature, property.value)
+
+
+###################################################################
+# Still to port.
+# Original lines are prefixed with "# "
+#
+#
+# import utility : ungrist ;
+# import sequence : unique ;
+# import errors : error ;
+# import feature ;
+# import regex ;
+# import sequence ;
+# import set ;
+# import path ;
+# import assert ;
+#
+#
+
+
+# rule validate-property-sets ( property-sets * )
+# {
+# for local s in $(property-sets)
+# {
+# validate [ feature.split $(s) ] ;
+# }
+# }
+#
+
+def remove(attributes, properties):
+ """Returns a property sets which include all the elements
+ in 'properties' that do not have attributes listed in 'attributes'."""
+ if isinstance(attributes, basestring):
+ attributes = [attributes]
+ assert is_iterable_typed(attributes, basestring)
+ assert is_iterable_typed(properties, basestring)
+ result = []
+ for e in properties:
+ attributes_new = feature.attributes(get_grist(e))
+ has_common_features = 0
+ for a in attributes_new:
+ if a in attributes:
+ has_common_features = 1
+ break
+
+ if not has_common_features:
+ result.append(e)
+
+ return result
+
+
+def take(attributes, properties):
+ """Returns a property set which include all
+ properties in 'properties' that have any of 'attributes'."""
+ assert is_iterable_typed(attributes, basestring)
+ assert is_iterable_typed(properties, basestring)
+ result = []
+ for e in properties:
+ if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
+ result.append(e)
+ return result
+
+def translate_dependencies(properties, project_id, location):
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(project_id, basestring)
+ assert isinstance(location, basestring)
+ result = []
+ for p in properties:
+
+ if not p.feature.dependency:
+ result.append(p)
+ else:
+ v = p.value
+ m = re.match("(.*)//(.*)", v)
+ if m:
+ rooted = m.group(1)
+ if rooted[0] == '/':
+ # Either project id or absolute Linux path, do nothing.
+ pass
+ else:
+ rooted = os.path.join(os.getcwd(), location, rooted)
+
+ result.append(Property(p.feature, rooted + "//" + m.group(2), p.condition))
+
+ elif os.path.isabs(v):
+ result.append(p)
+ else:
+ result.append(Property(p.feature, project_id + "//" + v, p.condition))
+
+ return result
+
+
+class PropertyMap:
+ """ Class which maintains a property set -> string mapping.
+ """
+ def __init__ (self):
+ self.__properties = []
+ self.__values = []
+
+ def insert (self, properties, value):
+ """ Associate value with properties.
+ """
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(value, basestring)
+ self.__properties.append(properties)
+ self.__values.append(value)
+
+ def find (self, properties):
+ """ Return the value associated with properties
+ or any subset of it. If more than one
+ subset has value assigned to it, return the
+ value for the longest subset, if it's unique.
+ """
+ assert is_iterable_typed(properties, basestring)
+ return self.find_replace (properties)
+
+ def find_replace(self, properties, value=None):
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(value, (basestring, type(None)))
+ matches = []
+ match_ranks = []
+
+ for i in range(0, len(self.__properties)):
+ p = self.__properties[i]
+
+ if b2.util.set.contains (p, properties):
+ matches.append (i)
+ match_ranks.append(len(p))
+
+ best = sequence.select_highest_ranked (matches, match_ranks)
+
+ if not best:
+ return None
+
+ if len (best) > 1:
+ raise NoBestMatchingAlternative ()
+
+ best = best [0]
+
+ original = self.__values[best]
+
+ if value:
+ self.__values[best] = value
+
+ return original
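+# Usage sketch for PropertyMap (illustrative):
+#
+#   pm = PropertyMap()
+#   pm.insert(['<toolset>gcc'], 'o')
+#   pm.insert(['<toolset>gcc', '<os>NT'], 'obj')
+#   pm.find(['<toolset>gcc', '<os>NT', '<define>FOO'])   # -> 'obj'
+#   # The longest stored subset of the query wins; an ambiguous tie raises
+#   # NoBestMatchingAlternative.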
+
+# local rule __test__ ( )
+# {
+# import errors : try catch ;
+# import feature ;
+# import feature : feature subfeature compose ;
+#
+# # local rules must be explicitly re-imported
+# import property : path-order ;
+#
+# feature.prepare-test property-test-temp ;
+#
+# feature toolset : gcc : implicit symmetric ;
+# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+# 3.0 3.0.1 3.0.2 : optional ;
+# feature define : : free ;
+# feature runtime-link : dynamic static : symmetric link-incompatible ;
+# feature optimization : on off ;
+# feature variant : debug release : implicit composite symmetric ;
+# feature rtti : on off : link-incompatible ;
+#
+# compose <variant>debug : <define>_DEBUG <optimization>off ;
+# compose <variant>release : <define>NDEBUG <optimization>on ;
+#
+# import assert ;
+# import "class" : new ;
+#
+# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+#
+# assert.result <toolset>gcc <rtti>off <define>FOO
+# : refine <toolset>gcc <rtti>off
+# : <define>FOO
+# : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc <optimization>on
+# : refine <toolset>gcc <optimization>off
+# : <optimization>on
+# : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc <rtti>off
+# : refine <toolset>gcc : <rtti>off : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
+# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+# : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+# : $(test-space)
+# ;
+#
+# assert.result <define>MY_RELEASE
+# : evaluate-conditionals-in-context
+# <variant>release,<rtti>off:<define>MY_RELEASE
+# : <toolset>gcc <variant>release <rtti>off
+#
+# ;
+#
+# try ;
+# validate <feature>value : $(test-space) ;
+# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
+#
+# try ;
+# validate <rtti>default : $(test-space) ;
+# catch \"default\" is not a known value of feature <rtti> ;
+#
+# validate <define>WHATEVER : $(test-space) ;
+#
+# try ;
+# validate <rtti> : $(test-space) ;
+# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
+#
+# try ;
+# validate value : $(test-space) ;
+# catch "value" is not a value of an implicit feature ;
+#
+#
+# assert.result <rtti>on
+# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
+#
+# assert.result <include>a
+# : select include : <include>a <toolset>gcc ;
+#
+# assert.result <include>a
+# : select include bar : <include>a <toolset>gcc ;
+#
+# assert.result <include>a <toolset>gcc
+# : select include <bar> <toolset> : <include>a <toolset>gcc ;
+#
+# assert.result <toolset>kylix <include>a
+# : change <toolset>gcc <include>a : <toolset> kylix ;
+#
+# # Test ordinary properties
+# assert.result
+# : split-conditional <toolset>gcc
+# ;
+#
+# # Test properties with ":"
+# assert.result
+# : split-conditional <define>FOO=A::B
+# ;
+#
+# # Test conditional feature
+# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
+# ;
+#
+# feature.finish-test property-test-temp ;
+# }
+#
+
diff --git a/src/boost/tools/build/src/build/property_set.py b/src/boost/tools/build/src/build/property_set.py
new file mode 100644
index 000000000..3fc86de27
--- /dev/null
+++ b/src/boost/tools/build/src/build/property_set.py
@@ -0,0 +1,498 @@
+# Status: ported.
+# Base revision: 40480
+
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import hashlib
+
+import bjam
+from b2.util.utility import *
+import property, feature
+import b2.build.feature
+from b2.exceptions import *
+from b2.build.property import get_abbreviated_paths
+from b2.util.sequence import unique
+from b2.util.set import difference
+from b2.util import cached, abbreviate_dashed, is_iterable_typed
+
+from b2.manager import get_manager
+
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __cache
+
+ # A cache of property sets
+ # TODO: use a map of weak refs?
+ __cache = {}
+
+reset ()
+
+
+def create (raw_properties = []):
+ """ Creates a new 'PropertySet' instance for the given raw properties,
+ or returns an already existing one.
+ """
+ assert (is_iterable_typed(raw_properties, property.Property)
+ or is_iterable_typed(raw_properties, basestring))
+ # FIXME: propagate to callers.
+ if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
+ x = raw_properties
+ else:
+ x = [property.create_from_string(ps) for ps in raw_properties]
+
+ # These two lines of code are optimized to the current state
+ # of the Property class. Since this function acts as the caching
+ # frontend to the PropertySet class modifying these two lines
+ # could have a severe performance penalty. Be careful.
+ # It would be faster to sort by p.id, but some projects may rely
+ # on the fact that the properties are ordered alphabetically. So,
+ # we maintain alphabetical sorting so as to maintain backward compatibility.
+ x = sorted(set(x), key=lambda p: (p.feature.name, p.value, p.condition))
+ key = tuple(p.id for p in x)
+
+ if key not in __cache:
+ __cache [key] = PropertySet(x)
+
+ return __cache [key]
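+# Usage sketch (illustrative; assumes the referenced features are declared).
+# Because results are cached and canonically ordered, equal inputs yield the
+# identical PropertySet object:
+#
+#   a = create(['<define>FOO', '<threading>multi'])
+#   b = create(['<threading>multi', '<define>FOO'])
+#   assert a is b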
+
+def create_with_validation (raw_properties):
+ """ Creates new 'PropertySet' instances after checking
+ that all properties are valid and converting implicit
+ properties into gristed form.
+ """
+ assert is_iterable_typed(raw_properties, basestring)
+ properties = [property.create_from_string(s) for s in raw_properties]
+ property.validate(properties)
+
+ return create(properties)
+
+def empty ():
+ """ Returns PropertySet with empty set of properties.
+ """
+ return create ()
+
+def create_from_user_input(raw_properties, jamfile_module, location):
+ """Creates a property-set from the input given by the user, in the
+ context of 'jamfile-module' at 'location'"""
+ assert is_iterable_typed(raw_properties, basestring)
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(location, basestring)
+ properties = property.create_from_strings(raw_properties, True)
+ properties = property.translate_paths(properties, location)
+ properties = property.translate_indirect(properties, jamfile_module)
+
+ project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None)
+ if not project_id:
+ project_id = os.path.abspath(location)
+ properties = property.translate_dependencies(properties, project_id, location)
+ properties = property.expand_subfeatures_in_conditions(properties)
+ return create(properties)
+
+
+def refine_from_user_input(parent_requirements, specification, jamfile_module,
+ location):
+ """Refines requirements with requirements provided by the user.
+ Specially handles "-<property>value" syntax in specification
+ to remove given requirements.
+ - parent-requirements -- property-set object with requirements
+ to refine
+ - specification -- string list of requirements provided by the user
+ - project-module -- the module to which context indirect features
+ will be bound.
+ - location -- the path to which path features are relative."""
+ assert isinstance(parent_requirements, PropertySet)
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(location, basestring)
+
+ if not specification:
+ return parent_requirements
+
+
+ add_requirements = []
+ remove_requirements = []
+
+ for r in specification:
+ if r[0] == '-':
+ remove_requirements.append(r[1:])
+ else:
+ add_requirements.append(r)
+
+ if remove_requirements:
+ # Need to create property set, so that path features
+ # and indirect features are translated just like they
+ # are in project requirements.
+ ps = create_from_user_input(remove_requirements,
+ jamfile_module, location)
+
+ parent_requirements = create(difference(parent_requirements.all(),
+ ps.all()))
+ specification = add_requirements
+
+ requirements = create_from_user_input(specification,
+ jamfile_module, location)
+
+ return parent_requirements.refine(requirements)
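+# Usage sketch (illustrative; the Jamfile module name and location below are
+# hypothetical placeholders, and 'parent_ps' is an existing PropertySet):
+#
+#   refined = refine_from_user_input(
+#       parent_ps, ['<optimization>speed', '-<define>OLD'],
+#       'Jamfile</home/user/proj>', '/home/user/proj')
+#   # '<define>OLD' is removed from 'parent_ps' (if present) and
+#   # '<optimization>speed' is then refined into the remainder.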
+
+class PropertySet:
+ """ Class for storing a set of properties.
+ - there's 1<->1 correspondence between identity and value. No
+ two instances of the class are equal. To maintain this property,
+ the 'PropertySet.create' rule should be used to create new instances.
+ Instances are immutable.
+
+ - each property is classified with regard to it's effect on build
+ results. Incidental properties have no effect on build results, from
+ Boost.Build point of view. Others are either free, or non-free, which we
+ call 'base'. Each property belong to exactly one of those categories and
+ it's possible to get list of properties in each category.
+
+ In addition, it's possible to get list of properties with specific
+ attribute.
+
+ - several operations, like and refine and as_path are provided. They all use
+ caching whenever possible.
+ """
+ def __init__ (self, properties=None):
+ if properties is None:
+ properties = []
+ assert is_iterable_typed(properties, property.Property)
+
+ self.all_ = properties
+ self._all_set = {p.id for p in properties}
+
+ self.incidental_ = []
+ self.free_ = []
+ self.base_ = []
+ self.dependency_ = []
+ self.non_dependency_ = []
+ self.conditional_ = []
+ self.non_conditional_ = []
+ self.propagated_ = []
+ self.link_incompatible = []
+
+ # A cache of refined properties.
+ self.refined_ = {}
+
+ # A cache of property sets created by adding properties to this one.
+ self.added_ = {}
+
+ # Cache for the default properties.
+ self.defaults_ = None
+
+ # Cache for the expanded properties.
+ self.expanded_ = None
+
+ # Cache for the expanded composite properties
+ self.composites_ = None
+
+ # Cache for property set with expanded subfeatures
+ self.subfeatures_ = None
+
+ # Cache for the property set containing propagated properties.
+ self.propagated_ps_ = None
+
+ # A map of features to its values.
+ self.feature_map_ = None
+
+ # A tuple (target path, is relative to build directory)
+ self.target_path_ = None
+
+ self.as_path_ = None
+
+ # A cache for already evaluated sets.
+ self.evaluated_ = {}
+
+ # stores the list of LazyProperty instances.
+ # these are being kept separate from the normal
+ # Property instances so that when this PropertySet
+ # tries to return one of its attributes, it
+ # will then try to evaluate the LazyProperty instances
+ # first before returning.
+ self.lazy_properties = []
+
+ for p in properties:
+ f = p.feature
+ if isinstance(p, property.LazyProperty):
+ self.lazy_properties.append(p)
+ # A feature can be both incidental and free,
+ # in which case we add it to incidental.
+ elif f.incidental:
+ self.incidental_.append(p)
+ elif f.free:
+ self.free_.append(p)
+ else:
+ self.base_.append(p)
+
+ if p.condition:
+ self.conditional_.append(p)
+ else:
+ self.non_conditional_.append(p)
+
+ if f.dependency:
+ self.dependency_.append (p)
+ elif not isinstance(p, property.LazyProperty):
+ self.non_dependency_.append (p)
+
+ if f.propagated:
+ self.propagated_.append(p)
+ if f.link_incompatible:
+ self.link_incompatible.append(p)
+
+
+ def all(self):
+ return self.all_
+
+ def raw (self):
+ """ Returns the list of stored properties.
+ """
+ # create a new list due to the LazyProperties.
+ # this gives them a chance to evaluate to their
+ # true Property(). This approach is being
+ # taken since calculations should not be using
+ # PropertySet.raw()
+ return [p._to_raw for p in self.all_]
+
+ def __str__(self):
+ return ' '.join(p._to_raw for p in self.all_)
+
+ def base (self):
+ """ Returns properties that are neither incidental nor free.
+ """
+ result = [p for p in self.lazy_properties
+ if not(p.feature.incidental or p.feature.free)]
+ result.extend(self.base_)
+ return result
+
+ def free (self):
+ """ Returns free properties which are not dependency properties.
+ """
+ result = [p for p in self.lazy_properties
+ if not p.feature.incidental and p.feature.free]
+ result.extend(self.free_)
+ return result
+
+ def non_free(self):
+ return self.base() + self.incidental()
+
+ def dependency (self):
+ """ Returns dependency properties.
+ """
+ result = [p for p in self.lazy_properties if p.feature.dependency]
+ result.extend(self.dependency_)
+ return result
+
+ def non_dependency (self):
+ """ Returns properties that are not dependencies.
+ """
+ result = [p for p in self.lazy_properties if not p.feature.dependency]
+ result.extend(self.non_dependency_)
+ return result
+
+ def conditional (self):
+ """ Returns conditional properties.
+ """
+ return self.conditional_
+
+ def non_conditional (self):
+ """ Returns properties that are not conditional.
+ """
+ return self.non_conditional_
+
+ def incidental (self):
+ """ Returns incidental properties.
+ """
+ result = [p for p in self.lazy_properties if p.feature.incidental]
+ result.extend(self.incidental_)
+ return result
+
+ def refine (self, requirements):
+ """ Refines this set's properties using the requirements passed as an argument.
+ """
+ assert isinstance(requirements, PropertySet)
+ if requirements not in self.refined_:
+ r = property.refine(self.all_, requirements.all_)
+
+ self.refined_[requirements] = create(r)
+
+ return self.refined_[requirements]
+
+ def expand (self):
+ if not self.expanded_:
+ expanded = feature.expand(self.all_)
+ self.expanded_ = create(expanded)
+ return self.expanded_
+
+ def expand_subfeatures(self):
+ if not self.subfeatures_:
+ self.subfeatures_ = create(feature.expand_subfeatures(self.all_))
+ return self.subfeatures_
+
+ def evaluate_conditionals(self, context=None):
+ assert isinstance(context, (PropertySet, type(None)))
+ if not context:
+ context = self
+
+ if context not in self.evaluated_:
+ # FIXME: figure why the call messes up first parameter
+ self.evaluated_[context] = create(
+ property.evaluate_conditionals_in_context(self.all(), context))
+
+ return self.evaluated_[context]
+
+ def propagated (self):
+ if not self.propagated_ps_:
+ self.propagated_ps_ = create (self.propagated_)
+ return self.propagated_ps_
+
+ def add_defaults (self):
+ # FIXME: this caching is invalidated when new features
+ # are declared inside non-root Jamfiles.
+ if not self.defaults_:
+ expanded = feature.add_defaults(self.all_)
+ self.defaults_ = create(expanded)
+ return self.defaults_
+
+ def as_path (self):
+ if not self.as_path_:
+
+ def path_order (p1, p2):
+
+ i1 = p1.feature.implicit
+ i2 = p2.feature.implicit
+
+ if i1 != i2:
+ return i2 - i1
+ else:
+ return cmp(p1.feature.name, p2.feature.name)
+
+ # trim redundancy
+ properties = feature.minimize(self.base_)
+
+ # sort according to path_order
+ properties.sort (path_order)
+
+ components = []
+ for p in properties:
+ f = p.feature
+ if f.implicit:
+ components.append(p.value)
+ else:
+ value = f.name.replace(':', '-') + "-" + p.value
+ if property.get_abbreviated_paths():
+ value = abbreviate_dashed(value)
+ components.append(value)
+
+ self.as_path_ = '/'.join(components)
+
+ return self.as_path_
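+ # For example (illustrative): a property set holding <toolset>gcc
+ # <variant>debug <rtti>off is rendered as "gcc/debug/rtti-off", or an
+ # abbreviated form when --abbreviate-paths is in effect.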
+
+ def target_path (self):
+ """ Computes the target path that should be used for
+ target with these properties.
+ Returns a tuple of
+ - the computed path
+ - if the path is relative to build directory, a value of
+ 'true'.
+ """
+ if not self.target_path_:
+ # The <location> feature can be used to explicitly
+ # change the location of generated targets
+ l = self.get ('<location>')
+ if l:
+ computed = l[0]
+ is_relative = False
+
+ else:
+ p = self.as_path()
+ if hash_maybe:
+ p = hash_maybe(p)
+
+ # Really, an ugly hack. The Boost regression test system requires
+ # specific target paths, and it seems that changing it to handle
+ # another directory layout is really hard. For that reason,
+ # we teach V2 to do the things the regression system requires.
+ # The value of '<location-prefix>' is prepended to the path.
+ prefix = self.get ('<location-prefix>')
+
+ if prefix:
+ if len (prefix) > 1:
+ raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)
+
+ computed = os.path.join(prefix[0], p)
+
+ else:
+ computed = p
+
+ if not computed:
+ computed = "."
+
+ is_relative = True
+
+ self.target_path_ = (computed, is_relative)
+
+ return self.target_path_
+
+ def add (self, ps):
+ """ Creates a new property set containing the properties in this one,
+ plus the ones of the property set passed as argument.
+ """
+ assert isinstance(ps, PropertySet)
+ if ps not in self.added_:
+ self.added_[ps] = create(self.all_ + ps.all())
+ return self.added_[ps]
+
+ def add_raw (self, properties):
+ """ Creates a new property set containing the properties in this one,
+ plus the ones passed as argument.
+ """
+ return self.add (create (properties))
+
+
+ def get (self, feature):
+ """ Returns all values of 'feature'.
+ """
+ if type(feature) == type([]):
+ feature = feature[0]
+ if not isinstance(feature, b2.build.feature.Feature):
+ feature = b2.build.feature.get(feature)
+ assert isinstance(feature, b2.build.feature.Feature)
+
+ if self.feature_map_ is None:
+ self.feature_map_ = {}
+
+ for v in self.all_:
+ if v.feature not in self.feature_map_:
+ self.feature_map_[v.feature] = []
+ self.feature_map_[v.feature].append(v.value)
+
+ return self.feature_map_.get(feature, [])
+
+ @cached
+ def get_properties(self, feature):
+ """Returns all contained properties associated with 'feature'"""
+ if not isinstance(feature, b2.build.feature.Feature):
+ feature = b2.build.feature.get(feature)
+ assert isinstance(feature, b2.build.feature.Feature)
+
+ result = []
+ for p in self.all_:
+ if p.feature == feature:
+ result.append(p)
+ return result
+
+ def __contains__(self, item):
+ return item.id in self._all_set
+
+def hash(p):
+ m = hashlib.md5()
+ m.update(p)
+ return m.hexdigest()
+
+hash_maybe = hash if "--hash" in bjam.variable("ARGV") else None
+
diff --git a/src/boost/tools/build/src/build/readme.txt b/src/boost/tools/build/src/build/readme.txt
new file mode 100644
index 000000000..b15055b8e
--- /dev/null
+++ b/src/boost/tools/build/src/build/readme.txt
@@ -0,0 +1,11 @@
+Copyright 2001, 2002 Dave Abrahams
+Copyright 2002 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+
+Development code for new build system. To run unit tests for jam code, execute:
+
+ bjam --debug --build-system=test
+
+Comprehensive tests require Python. See ../test/readme.txt
diff --git a/src/boost/tools/build/src/build/scanner.jam b/src/boost/tools/build/src/build/scanner.jam
new file mode 100644
index 000000000..bbb9b4db4
--- /dev/null
+++ b/src/boost/tools/build/src/build/scanner.jam
@@ -0,0 +1,163 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements scanners: objects computing implicit dependencies for files, such
+# as includes in C++.
+#
+# A scanner has a regular expression used to find the dependencies, some data
+# needed to interpret those dependencies (e.g., include paths), and code which
+# establishes the needed relationships between actual jam targets.
+#
+# Scanner objects are created by actions when they try to actualize virtual
+# targets, passed to the virtual-target.actualize() method and are then
+# associated with actual targets. It is possible to use several scanners for a
+# single virtual-target. For example, a single source file might be compiled
+# twice - each time using a different include path. In this case, two separate
+# actual targets will be created, each having a scanner of its own.
+#
+# Typically, scanners are created from target type and the action's properties,
+# using the rule 'get' in this module. Directly creating scanners is not
+# recommended, as it might create multiple equivalent but different instances,
+# and lead to unnecessary actual target duplication. However, actions can also
+# create scanners in a special way, instead of relying on just the target type.
+
+import "class" : new ;
+import property ;
+import property-set ;
+import virtual-target ;
+
+# Base scanner class.
+#
+class scanner
+{
+ rule __init__ ( )
+ {
+ }
+
+ # Returns a pattern to use for scanning.
+ #
+ rule pattern ( )
+ {
+ import errors : error : errors.error ;
+ errors.error "method must be overridden" ;
+ }
+
+ # Establish necessary relationship between targets, given an actual target
+ # being scanned and a list of pattern matches in that file.
+ #
+ rule process ( target : matches * )
+ {
+ import errors : error : errors.error ;
+ errors.error "method must be overridden" ;
+ }
+}
+
+
+# Registers a new scanner class, specifying a set of properties relevant to
+# this scanner. The constructor of that class should have one parameter: a
+# list of properties.
+#
+rule register ( scanner-class : relevant-properties * )
+{
+ .registered += $(scanner-class) ;
+ .relevant-properties.$(scanner-class) = $(relevant-properties) ;
+}
+
+
+# Common scanner class, usable when there is only one kind of includes (unlike
+# C, where "" and <> includes have different search paths).
+#
+class common-scanner : scanner
+{
+ import scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+ ISFILE $(matches) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
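+
+
+# For illustration, a concrete scanner is typically defined by deriving from
+# 'common-scanner' (or from 'scanner' directly), registering it together with
+# the properties that affect scanning, and attaching it to a target type. The
+# class name and pattern below are hypothetical, loosely modeled on the C/C++
+# include scanner that Boost.Build registers in its builtin modules:
+#
+#   class my-include-scanner : common-scanner
+#   {
+#       rule pattern ( )
+#       {
+#           return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
+#       }
+#   }
+#
+#   scanner.register my-include-scanner : include ;
+#   type.set-scanner CPP : my-include-scanner ;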
+
+
+# Returns an instance of a previously registered scanner, with the specified
+# properties.
+#
+rule get ( scanner-class : property-set )
+{
+ if ! $(scanner-class) in $(.registered)
+ {
+ import errors ;
+ errors.error "attempt to get an unregistered scanner" ;
+ }
+
+ local r = $(.rv-cache.$(property-set)) ;
+ if ! $(r)
+ {
+ r = [ property-set.create
+ [ property.select $(.relevant-properties.$(scanner-class)) :
+ [ $(property-set).raw ] ] ] ;
+ .rv-cache.$(property-set) = $(r) ;
+ }
+
+ if ! $(scanner.$(scanner-class).$(r:J=-))
+ {
+ local s = [ new $(scanner-class) [ $(r).raw ] ] ;
+ scanner.$(scanner-class).$(r:J=-) = $(s) ;
+ }
+ return $(scanner.$(scanner-class).$(r:J=-)) ;
+}
+
+
+# Installs the specified scanner on the actual target 'target'.
+#
+rule install ( scanner : target )
+{
+ HDRSCAN on $(target) = [ $(scanner).pattern ] ;
+ SCANNER on $(target) = $(scanner) ;
+ HDRRULE on $(target) = scanner.hdrrule ;
+
+ # Scanner reflects differences in properties affecting binding of 'target',
+    # which will be known when processing includes for it, and gives information
+ # on how to interpret different include types (e.g. quoted vs. those in
+ # angle brackets in C files).
+ HDRGRIST on $(target) = $(scanner) ;
+}
+
+
+# Propagate scanner settings from 'including-target' to 'targets'.
+#
+rule propagate ( scanner : targets * : including-target )
+{
+ HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
+ SCANNER on $(targets) = $(scanner) ;
+ HDRRULE on $(targets) = scanner.hdrrule ;
+ HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
+}
+
+
+rule hdrrule ( target : matches * : binding )
+{
+ local scanner = [ on $(target) return $(SCANNER) ] ;
+ $(scanner).process $(target) : $(matches) : $(binding) ;
+}
+
+
+# hdrrule must be available at global scope so it can be invoked by header
+# scanning.
+#
+IMPORT scanner : hdrrule : : scanner.hdrrule ;
diff --git a/src/boost/tools/build/src/build/scanner.py b/src/boost/tools/build/src/build/scanner.py
new file mode 100644
index 000000000..9f5fc76f2
--- /dev/null
+++ b/src/boost/tools/build/src/build/scanner.py
@@ -0,0 +1,167 @@
+# Status: ported.
+# Base revision: 45462
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements scanners: objects that compute implicit dependencies for
+# files, such as includes in C++.
+#
+# A scanner has a regular expression used to find dependencies, some
+# data needed to interpret those dependencies (for example, include
+# paths), and code which actually establishes the needed relationships
+# between actual jam targets.
+#
+# Scanner objects are created by actions when they try to actualize
+# virtual targets, passed to the 'virtual-target.actualize' method and are
+# then associated with actual targets. It is possible to use
+# several scanners for a virtual-target. For example, a single source
+# might be used by two compile actions, with different include paths.
+# In this case, two different actual targets will be created, each
+# having a scanner of its own.
+#
+# Typically, scanners are created from the target type and the action's
+# properties, using the rule 'get' in this module. Directly creating
+# scanners is not recommended, because it might create many equivalent
+# but different instances, and lead to unneeded duplication of
+# actual targets. However, actions can also create scanners in a special
+# way, instead of relying on just the target type.
+import property
+import bjam
+import os
+from b2.manager import get_manager
+from b2.util import is_iterable_typed
+
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __scanners, __rv_cache, __scanner_cache
+
+ # Maps registered scanner classes to relevant properties
+ __scanners = {}
+
+ # A cache of scanners.
+ # The key is: class_name.properties_tag, where properties_tag is the concatenation
+ # of all relevant properties, separated by '-'
+ __scanner_cache = {}
+
+reset ()
+
+
+def register(scanner_class, relevant_properties):
+    """ Registers a new scanner class, specifying a set of
+        properties relevant to this scanner. The constructor for that
+        class should have one parameter: a list of properties.
+ """
+ assert issubclass(scanner_class, Scanner)
+ assert isinstance(relevant_properties, basestring)
+ __scanners[str(scanner_class)] = relevant_properties
+
+def registered(scanner_class):
+ """ Returns true iff a scanner of that class is registered
+ """
+ return str(scanner_class) in __scanners
+
+def get(scanner_class, properties):
+ """ Returns an instance of previously registered scanner
+ with the specified properties.
+ """
+ assert issubclass(scanner_class, Scanner)
+ assert is_iterable_typed(properties, basestring)
+ scanner_name = str(scanner_class)
+
+ if not registered(scanner_name):
+ raise BaseException ("attempt to get unregistered scanner: %s" % scanner_name)
+
+ relevant_properties = __scanners[scanner_name]
+ r = property.select(relevant_properties, properties)
+
+ scanner_id = scanner_name + '.' + '-'.join(r)
+
+ if scanner_id not in __scanner_cache:
+ __scanner_cache[scanner_id] = scanner_class(r)
+
+ return __scanner_cache[scanner_id]
+
+class Scanner:
+ """ Base scanner class.
+ """
+ def __init__ (self):
+ pass
+
+ def pattern (self):
+ """ Returns a pattern to use for scanning.
+ """
+ raise BaseException ("method must be overridden")
+
+ def process (self, target, matches, binding):
+ """ Establish necessary relationship between targets,
+ given actual target being scanned, and a list of
+ pattern matches in that file.
+ """
+ raise BaseException ("method must be overridden")
+
+
+# Common scanner class, which can be used when there's only one
+# kind of includes (unlike C, where "" and <> includes have different
+# search paths).
+class CommonScanner(Scanner):
+
+ def __init__ (self, includes):
+ Scanner.__init__(self)
+ self.includes = includes
+
+ def process(self, target, matches, binding):
+
+ target_path = os.path.normpath(os.path.dirname(binding[0]))
+ bjam.call("mark-included", target, matches)
+
+ get_manager().engine().set_target_variable(matches, "SEARCH",
+ [target_path] + self.includes)
+ get_manager().scanners().propagate(self, matches)
+
+class ScannerRegistry:
+
+ def __init__ (self, manager):
+ self.manager_ = manager
+ self.count_ = 0
+ self.exported_scanners_ = {}
+
+ def install (self, scanner, target, vtarget):
+ """ Installs the specified scanner on actual target 'target'.
+ vtarget: virtual target from which 'target' was actualized.
+ """
+ assert isinstance(scanner, Scanner)
+ assert isinstance(target, basestring)
+ assert isinstance(vtarget, basestring)
+ engine = self.manager_.engine()
+ engine.set_target_variable(target, "HDRSCAN", scanner.pattern())
+ if scanner not in self.exported_scanners_:
+ exported_name = "scanner_" + str(self.count_)
+ self.count_ = self.count_ + 1
+ self.exported_scanners_[scanner] = exported_name
+ bjam.import_rule("", exported_name, scanner.process)
+ else:
+ exported_name = self.exported_scanners_[scanner]
+
+ engine.set_target_variable(target, "HDRRULE", exported_name)
+
+        # The scanner reflects differences in properties affecting the
+        # binding of 'target', which will be known when processing
+        # includes for it, and will give information on how to
+        # interpret quoted includes.
+ engine.set_target_variable(target, "HDRGRIST", str(id(scanner)))
+ pass
+
+ def propagate(self, scanner, targets):
+ assert isinstance(scanner, Scanner)
+ assert is_iterable_typed(targets, basestring) or isinstance(targets, basestring)
+ engine = self.manager_.engine()
+ engine.set_target_variable(targets, "HDRSCAN", scanner.pattern())
+ engine.set_target_variable(targets, "HDRRULE",
+ self.exported_scanners_[scanner])
+ engine.set_target_variable(targets, "HDRGRIST", str(id(scanner)))
+
diff --git a/src/boost/tools/build/src/build/targets.jam b/src/boost/tools/build/src/build/targets.jam
new file mode 100644
index 000000000..cb841f8fe
--- /dev/null
+++ b/src/boost/tools/build/src/build/targets.jam
@@ -0,0 +1,1792 @@
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports 'abstract' targets, which are targets explicitly defined in a
+# Jamfile.
+#
+# Abstract targets are represented by classes derived from 'abstract-target'
+# class. The first abstract target is 'project-target', which is created for
+# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module
+# (see project.jam).
+#
+# Project targets keep a list of 'main-target' instances. A main target is what
+# the user explicitly defines in a Jamfile. It is possible to have several
+# definitions for a main target, for example to have different lists of sources
+# for different platforms. So, main targets keep a list of alternatives.
+#
+# Each alternative is an instance of 'abstract-target'. When a main target
+# subvariant is defined by some rule, that rule will decide what class to use,
+# create an instance of that class and add it to the list of alternatives for
+# the main target.
+#
+# Rules supplied by the build system will use only targets derived from the
+# 'basic-target' class, which will provide some default behaviour. There will be
+# different classes derived from it such as 'make-target', created by the 'make'
+# rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
+#
+# +--------------------------+
+# | abstract-target |
+# +==========================+
+# | name |
+# | project |
+# | |
+# | generate(properties) = 0 |
+# +-------------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------+-----+-------------------------------+
+# | | |
+# | | |
+# +-----------+----------+ +------+------+ +-------+------+
+# | project-target | | main-target | | basic-target |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----+ generate |<>------------->| generate |
+# | main-target | +-------------+ | construct = 0|
+# +----------------------+ +-------+------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+-----------------+-----------------+------------------+
+# | | | |
+# | | | |
+# ... ---+-----+ +-------+------+ +------+------+ +-------+------+
+# | | typed-target | | make-target | | stage-target |
+# . +==============+ +=============+ +==============+
+# . | construct | | construct | | construct |
+# +--------------+ +-------------+ +--------------+
+
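+# For illustration, a Jamfile declaration such as
+#
+#   exe hello : hello.cpp ;
+#
+# results in a 'typed-target' alternative being added to the 'main-target'
+# named "hello", which in turn is owned by the 'project-target' created for
+# that Jamfile. Generating "hello" selects a viable alternative and asks it
+# to construct the actual virtual targets.
+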
+import assert ;
+import build-request ;
+import "class" : new ;
+import feature ;
+import indirect ;
+import path ;
+import property ;
+import property-set ;
+import sequence ;
+import set ;
+import toolset ;
+
+
+# Base class for all abstract targets.
+#
+class abstract-target
+{
+ import assert ;
+ import "class" ;
+ import errors ;
+ import project ;
+
+ rule __init__ ( name # Name of the target in Jamfile.
+ : project-target # The project target to which this one belongs.
+ )
+ {
+ # Note: it might seem that we don't need either name or project at all.
+ # However, there are places where we really need it. One example is
+ # error messages which should name problematic targets. Another is
+ # setting correct paths for sources and generated files.
+
+ self.name = $(name) ;
+ self.project = $(project-target) ;
+ self.location = [ errors.nearest-user-location ] ;
+ }
+
+ # Returns the name of this target.
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Returns the project for this target.
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Return the location where the target was declared.
+ rule location ( )
+ {
+ return $(self.location) ;
+ }
+
+ # Returns a user-readable name for this target.
+ rule full-name ( )
+ {
+ local location = [ $(self.project).get location ] ;
+ return $(location)/$(self.name) ;
+ }
+
+ # Generates virtual targets for this abstract target using the specified
+ # properties, unless a different value of some feature is required by the
+ # target.
+ # On success, returns:
+ # - a property-set with the usage requirements to be applied to dependants
+ # - a list of produced virtual targets, which may be empty.
+ # If 'property-set' is empty, performs the default build of this target, in
+ # a way specific to the derived class.
+ #
+ rule generate ( property-set )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ rule rename ( new-name )
+ {
+ self.name = $(new-name) ;
+ }
+}
+
+
+if --debug-building in [ modules.peek : ARGV ]
+{
+ modules.poke : .debug-building : true ;
+}
+
+
+rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Project target class (derived from 'abstract-target').
+#
+# This class has the following responsibilities:
+# - Maintaining a list of main targets in this project and building them.
+#
+# Main targets are constructed in two stages:
+# - When Jamfile is read, a number of calls to 'add-alternative' is made. At
+# that time, alternatives can also be renamed to account for inline targets.
+# - The first time 'main-target' or 'has-main-target' rule is called, all
+# alternatives are enumerated and main targets are created.
+#
+class project-target : abstract-target
+{
+ import project ;
+ import targets ;
+ import path ;
+ import print ;
+ import property-set ;
+ import set ;
+ import sequence ;
+ import toolset ;
+ import "class" : new ;
+
+ rule __init__ ( name : project-module parent-project ?
+ : requirements * : default-build * )
+ {
+ abstract-target.__init__ $(name) : $(__name__) ;
+
+ self.project-module = $(project-module) ;
+ self.location = [ project.attribute $(project-module) location ] ;
+ self.requirements = $(requirements) ;
+ self.default-build = $(default-build) ;
+
+ if $(parent-project)
+ {
+ inherit $(parent-project) ;
+ }
+ }
+
+ # This is needed only by the 'make' rule. Need to find a way to make 'make'
+ # work without this method.
+ #
+ rule project-module ( )
+ {
+ return $(self.project-module) ;
+ }
+
+ rule get ( attribute )
+ {
+ return [ project.attribute $(self.project-module) $(attribute) ] ;
+ }
+
+ rule build-dir ( )
+ {
+ if ! $(self.build-dir)
+ {
+ self.build-dir = [ get build-dir ] ;
+ if ! $(self.build-dir)
+ {
+ local location = [ $(self.project).get location ] ;
+ if $(location)
+ {
+ self.build-dir = [ path.join $(location) bin ] ;
+ }
+ else
+ {
+ local id = [ get id ] ;
+ if $(id)
+ {
+ local rid = [ MATCH ^/(.*) : $(id) ] ;
+ self.build-dir = [ path.join [ project.standalone-build-dir ] $(rid) ] ;
+ }
+ else
+ {
+ errors.error "Could not create build-dir for standalone project $(self.project-module:E=)."
+ : "Missing project id" ;
+ }
+ }
+ }
+ }
+ return $(self.build-dir) ;
+ }
+
+ # Generates all possible targets contained in this project.
+ #
+ rule generate ( property-set * )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "building project" [ name ]
+ " ('$(__name__)') with" [ $(property-set).raw ] ;
+ targets.increase-indent ;
+ }
+
+ local usage-requirements = [ property-set.empty ] ;
+ local targets ;
+
+ for local t in [ targets-to-build ]
+ {
+ local g = [ $(t).generate $(property-set) ] ;
+ usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
+ targets += $(g[2-]) ;
+ }
+ targets.decrease-indent ;
+ return $(usage-requirements) [ sequence.unique $(targets) ] ;
+ }
+
+ # Computes and returns a list of abstract-target instances which must be
+ # built when this project is built.
+ #
+ rule targets-to-build ( )
+ {
+ local result ;
+
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ # Collect all main targets here, except for "explicit" ones.
+ for local t in $(self.main-targets)
+ {
+ if ! [ $(t).name ] in $(self.explicit-targets)
+ {
+ result += $(t) ;
+ }
+ }
+
+ # Collect all projects referenced via "projects-to-build" attribute.
+ local self-location = [ get location ] ;
+ for local pn in [ get projects-to-build ]
+ {
+ result += [ find $(pn)/ ] ;
+ }
+
+ return $(result) ;
+ }
+
+    # Add 'target' to the list of targets in this project that should be built
+ # only by explicit request
+ #
+ rule mark-target-as-explicit ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.explicit-targets += $(target-name) ;
+ }
+
+ rule mark-target-as-always ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.always-targets += $(target-name) ;
+ }
+
+ # Add new target alternative
+ #
+ rule add-alternative ( target-instance )
+ {
+ if $(self.built-main-targets)
+ {
+ import errors : error : errors.error ;
+ errors.error add-alternative called when main targets are already
+ created. : in project [ full-name ] ;
+ }
+ self.alternatives += $(target-instance) ;
+ if ! ( [ $(target-instance).name ] in $(self.alternative-names) )
+ {
+ self.alternative-names += [ $(target-instance).name ] ;
+ }
+ }
+
+    # Checks if an alternative was declared for the target.
+    # Unlike checking for a main target, this does not require
+    # building the main targets and hence can be used, directly or
+    # indirectly, while loading a project.
+ #
+ rule has-alternative-for-target ( target-name )
+ {
+ if $(target-name) in $(self.alternative-names)
+ {
+ return 1 ;
+ }
+ }
+
+ # Returns a 'main-target' class instance corresponding to 'name'.
+ #
+ rule main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+ return $(self.main-target.$(name)) ;
+ }
+
+ # Returns whether a main target with the specified name exists.
+ #
+ rule has-main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ if $(self.main-target.$(name))
+ {
+ return true ;
+ }
+ }
+
+ # Worker function for the find rule not implementing any caching and simply
+ # returning nothing in case the target can not be found.
+ #
+ rule find-really ( id )
+ {
+ local result ;
+ local current-location = [ get location ] ;
+
+ local split = [ MATCH ^(.*)//(.*)$ : $(id) ] ;
+ local project-part = $(split[1]) ;
+ local target-part = $(split[2]) ;
+
+ local extra-error-message ;
+ if $(project-part)
+ {
+ # There is an explicitly specified project part in id. Looks up the
+ # project and passes the request to it.
+ local pm = [ project.find $(project-part) : $(current-location) ] ;
+ if $(pm)
+ {
+ project-target = [ project.target $(pm) ] ;
+ result = [ $(project-target).find $(target-part) : no-error ] ;
+ }
+ else
+ {
+ extra-error-message = could not resolve project reference
+ '$(project-part)' ;
+ if ! [ path.is-rooted $(project-part) ]
+ {
+ local rooted = [ path.root $(project-part) / ] ;
+ if $(rooted) && [ project.is-registered-id $(rooted) ]
+ {
+ extra-error-message += - possibly missing a leading
+ slash ('/') character. ;
+ }
+ }
+ }
+ }
+ else
+ {
+ # Interpret target-name as name of main target. Need to do this
+ # before checking for file. Consider the following scenario with a
+ # toolset not modifying its executable's names, e.g. gcc on
+ # Unix-like platforms:
+ #
+ # exe test : test.cpp ;
+ # install s : test : <location>. ;
+ #
+ # After the first build we would have a target named 'test' in the
+ # Jamfile and a file named 'test' on the disk. We need the target to
+ # override the file.
+ result = [ main-target $(id) ] ;
+
+ # Interpret id as an existing file reference.
+ if ! $(result)
+ {
+ result = [ new file-reference [ path.make $(id) ] :
+ $(self.project) ] ;
+ if ! [ $(result).exists ]
+ {
+ result = ;
+ }
+ }
+
+ # Interpret id as project-id.
+ if ! $(result)
+ {
+ local project-module = [ project.find $(id) :
+ $(current-location) ] ;
+ if $(project-module)
+ {
+ result = [ project.target $(project-module) ] ;
+ }
+ }
+ }
+
+ return $(result:E="") $(extra-error-message) ;
+ }
+
+ # Find and return the target with the specified id, treated relative to
+ # self. Id may specify either a target or a file name with the target taking
+ # priority. May report an error or return nothing if the target is not found
+ # depending on the 'no-error' parameter.
+ #
+ rule find ( id : no-error ? )
+ {
+ local v = $(.id.$(id)) ;
+ local extra-error-message ;
+ if ! $(v)
+ {
+ local r = [ find-really $(id) ] ;
+ v = $(r[1]) ;
+ extra-error-message = $(r[2-]) ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ .id.$(id) = $(v) ;
+ }
+
+ if $(v) != none
+ {
+ return $(v) ;
+ }
+ else if ! $(no-error)
+ {
+ local current-location = [ get location ] ;
+ import errors : user-error : errors.user-error ;
+ errors.user-error Unable to find file or target named
+ : " " '$(id)'
+ : referred to from project at
+ : " " '$(current-location)'
+ : $(extra-error-message) ;
+ }
+ }
+
+ rule build-main-targets ( )
+ {
+ self.built-main-targets = true ;
+ for local a in $(self.alternatives)
+ {
+ local name = [ $(a).name ] ;
+ local target = $(self.main-target.$(name)) ;
+ if ! $(target)
+ {
+ local t = [ new main-target $(name) : $(self.project) ] ;
+ self.main-target.$(name) = $(t) ;
+ self.main-targets += $(t) ;
+ target = $(self.main-target.$(name)) ;
+ }
+
+ if $(name) in $(self.always-targets)
+ {
+ $(a).always ;
+ }
+
+ $(target).add-alternative $(a) ;
+ }
+ }
+
+ # Accessor, add a constant.
+ #
+ rule add-constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ : type ? # Optional type of value.
+ )
+ {
+ switch $(type)
+ {
+ case path :
+ local r ;
+ for local v in $(value)
+ {
+ local l = $(self.location) ;
+ if ! $(l)
+ {
+ # Project corresponding to config files do not have
+ # 'location' attribute, but do have source location. It
+ # might be more reasonable to make every project have a
+ # location and use some other approach to prevent buildable
+ # targets in config files, but that has been left for later.
+ l = [ get source-location ] ;
+ }
+ v = [ path.root [ path.make $(v) ] $(l) ] ;
+ # Now make the value absolute path.
+ v = [ path.root $(v) [ path.pwd ] ] ;
+ # Constants should be in platform-native form.
+ v = [ path.native $(v) ] ;
+ r += $(v) ;
+ }
+ value = $(r) ;
+ }
+ if ! $(name) in $(self.constants)
+ {
+ self.constants += $(name) ;
+ }
+ self.constant.$(name) = $(value) ;
+ # Inject the constant in the scope of the Jamroot module.
+ modules.poke $(self.project-module) : $(name) : $(value) ;
+ }
+
+ rule inherit ( parent )
+ {
+ for local c in [ modules.peek $(parent) : self.constants ]
+ {
+ # No need to pass the type. Path constants were converted to
+ # absolute paths already by parent.
+ add-constant $(c) : [ modules.peek $(parent) : self.constant.$(c) ]
+ ;
+ }
+
+ # Import rules from parent.
+ local this-module = [ project-module ] ;
+ local parent-module = [ $(parent).project-module ] ;
+ # Do not import rules coming from 'project-rules' as they must be
+ # imported localized.
+ local user-rules = [ set.difference
+ [ RULENAMES $(parent-module) ] :
+ [ RULENAMES project-rules ] ] ;
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules)
+ ;
+ EXPORT $(this-module) : $(user-rules) ;
+
+ toolset.inherit-flags $(this-module) : $(parent-module) ;
+ }
+}
+
+
+# Helper rules to detect cycles in main target references.
+#
+local rule start-building ( main-target-instance )
+{
+ if $(main-target-instance) in $(.targets-being-built)
+ {
+ local names ;
+ for local t in $(.targets-being-built) $(main-target-instance)
+ {
+ names += [ $(t).full-name ] ;
+ }
+
+ import errors ;
+ errors.error "Recursion in main target references"
+            : "the following targets are being built currently:"
+ : $(names) ;
+ }
+ .targets-being-built += $(main-target-instance) ;
+}
+
+
+local rule end-building ( main-target-instance )
+{
+ .targets-being-built = $(.targets-being-built[1--2]) ;
+}
+
+
+# A named top-level target in Jamfile.
+#
+class main-target : abstract-target
+{
+ import assert ;
+ import feature ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import targets : start-building end-building ;
+ import utility ;
+
+ rule __init__ ( name : project )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+ }
+
+ # Add a new alternative for this target
+ rule add-alternative ( target )
+ {
+ local d = [ $(target).default-build ] ;
+ if $(self.alternatives) && ( $(self.default-build) != $(d) )
+ {
+ import errors : error : errors.error ;
+ errors.error "default build must be identical in all alternatives"
+ : "main target is" [ full-name ]
+ : "with" [ $(d).raw ]
+ : "differing from previous default build"
+ [ $(self.default-build).raw ] ;
+ }
+ else
+ {
+ self.default-build = $(d) ;
+ }
+ self.alternatives += $(target) ;
+ }
+
+ # Returns the best viable alternative for this property-set. See the
+ # documentation for selection rules.
+ #
+ rule select-alternatives ( property-set debug ? )
+ {
+ # When selecting alternatives we have to consider defaults, for example:
+ # lib l : l.cpp : <variant>debug ;
+ # lib l : l_opt.cpp : <variant>release ;
+ # will not work unless we add default value <variant>debug.
+ property-set = [ $(property-set).add-defaults ] ;
+
+ # The algorithm: we keep the current best viable alternative. When we
+        # encounter another viable alternative, we compare it with the
+        # current best.
+
+ local best ;
+ local best-properties ;
+
+ if $(self.alternatives[2-])
+ {
+ local bad ;
+ local worklist = $(self.alternatives) ;
+ while $(worklist) && ! $(bad)
+ {
+ local v = $(worklist[1]) ;
+ local properties = [ $(v).match $(property-set) $(debug) ] ;
+
+ if $(properties) != no-match
+ {
+ if ! $(best)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ if $(properties) = $(best-properties)
+ {
+ bad = true ;
+ }
+ else if $(properties) in $(best-properties)
+ {
+ # Do nothing, this alternative is worse
+ }
+ else if $(best-properties) in $(properties)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ bad = true ;
+ }
+ }
+ }
+ worklist = $(worklist[2-]) ;
+ }
+ if ! $(bad)
+ {
+ return $(best) ;
+ }
+ }
+ else
+ {
+ return $(self.alternatives) ;
+ }
+ }
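+
+    # For illustration, given the alternatives
+    #
+    #   lib network : network.cpp : <link>shared ;
+    #   lib network : network_win32.cpp : <link>shared <target-os>windows ;
+    #
+    # (hypothetical names), a build request containing <link>shared and
+    # <target-os>windows matches both; the second alternative is selected
+    # because its matched properties are a strict superset of the first
+    # one's. If neither set of matched properties contained the other, the
+    # ambiguity would be reported as an error.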
+
+ # Features are relevant here if they could affect alternative
+ # selection. That is, base, non-conditional properties that
+ # are not identical in all target alternatives.
+ rule relevant-features ( )
+ {
+ if $(self.alternatives[2-])
+ {
+ if $(self.relevant-features)
+ {
+ return $(self.relevant-features) ;
+ }
+ local all-properties ;
+ for t in $(self.alternatives)
+ {
+ local ps = [ $(t).requirements ] ;
+ ps = [ property-set.create [ $(ps).non-conditional ] ] ;
+ all-properties += [ $(ps).base ] ;
+ }
+ all-properties = [ sequence.unique $(all-properties) ] ;
+ local result ;
+ for t in $(self.alternatives)
+ {
+ local ps = [ $(t).requirements ] ;
+ ps = [ property-set.create [ $(ps).non-conditional ] ] ;
+ local properties = [ set.difference $(all-properties) : [ $(ps).base ] ] ;
+ result += $(properties:G) ;
+ }
+ result = [ sequence.transform utility.ungrist : [ sequence.unique $(result) ] ] ;
+ self.relevant-features = [ property-set.create <relevant>$(result) ] ;
+ return $(self.relevant-features) ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule apply-default-build ( property-set )
+ {
+ return [ targets.apply-default-build $(property-set) :
+ $(self.default-build) ] ;
+ }
+
+ # Select an alternative for this main target, by finding all alternatives
+ # whose requirements are satisfied by 'properties' and picking the one with
+ # the longest requirements set. Returns the result of calling 'generate' on
+ # that alternative.
+ #
+ rule generate ( property-set )
+ {
+ start-building $(__name__) ;
+
+ local all-property-sets = [ apply-default-build $(property-set) ] ;
+ local relevant = [ relevant-features ] ;
+ local usage-requirements = [ property-set.empty ] ;
+ local result ;
+ for local p in $(all-property-sets)
+ {
+ local r = [ generate-really [ $(p).add $(relevant) ] ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ }
+ }
+ end-building $(__name__) ;
+ return $(usage-requirements) [ sequence.unique $(result) ] ;
+ }
+
+    # Generates the main target with the given property set and returns a list
+    # whose first element is a property-set object containing the usage
+    # requirements of the generated target and whose remaining elements are the
+    # generated virtual targets. It is possible that no targets are generated.
+ #
+ local rule generate-really ( property-set )
+ {
+ local best-alternatives = [ select-alternatives $(property-set) ] ;
+ if ! $(best-alternatives)
+ {
+ ECHO "error: No best alternative for" [ full-name ] ;
+ select-alternatives $(property-set) debug ;
+ return [ property-set.empty ] ;
+ }
+ else
+ {
+ # Now return virtual targets for the only alternative.
+ return [ $(best-alternatives).generate $(property-set) ] ;
+ }
+ }
+
+ rule rename ( new-name )
+ {
+ abstract-target.rename $(new-name) ;
+ for local a in $(self.alternatives)
+ {
+ $(a).rename $(new-name) ;
+ }
+ }
+}
+
+
+# Abstract target referring to a source file. This is an artificial entity
+# allowing the sources of a target to be represented as a list of abstract-target
+# instances.
+#
+class file-reference : abstract-target
+{
+ import virtual-target ;
+ import property-set ;
+ import path ;
+
+ rule __init__ ( file : project )
+ {
+ abstract-target.__init__ $(file) : $(project) ;
+ }
+
+ rule generate ( properties )
+ {
+ return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
+ [ location ] : $(self.project) ] ;
+ }
+
+ # Returns true if the referred file really exists.
+ rule exists ( )
+ {
+ location ;
+ return $(self.file-path) ;
+ }
+
+ # Returns the location of target. Needed by 'testing.jam'.
+ rule location ( )
+ {
+ if ! $(self.file-location)
+ {
+ local source-location = [ $(self.project).get source-location ] ;
+ for local src-dir in $(source-location)
+ {
+ if ! $(self.file-location)
+ {
+ local location = [ path.root $(self.name) $(src-dir) ] ;
+ if [ CHECK_IF_FILE [ path.native $(location) ] ]
+ {
+ self.file-location = $(src-dir) ;
+ self.file-path = $(location) ;
+ }
+ }
+ }
+ }
+ return $(self.file-location) ;
+ }
+}
+
+
+# Given a target-reference, made in the context of 'project', returns the
+# abstract-target instance that is referred to, as well as properties explicitly
+# specified for this reference.
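+#
+# For example, the reference "/some-project//some-lib/<link>static" (names
+# hypothetical) splits into the target id "/some-project//some-lib" and the
+# explicitly specified property <link>static, which is returned as the second
+# element.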
+#
+rule resolve-reference ( target-reference : project )
+{
+ # Separate target name from properties override.
+ local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ;
+ local id = $(split[1]) ;
+ if ! $(split) || ! $(id)
+ {
+ import errors ;
+ errors.error "Malformed target reference $(target-reference)" ;
+ }
+ local sproperties = ;
+ if $(split[3])
+ {
+ sproperties = [ property.make [ feature.split $(split[3]) ] ] ;
+ sproperties = [ feature.expand $(sproperties) ] ;
+ }
+
+ # Find the target.
+ local target = [ $(project).find $(id) ] ;
+
+ return $(target) [ property-set.create $(sproperties) ] ;
+}
+
+
+# Attempts to generate the target given by a target reference, which can refer
+# either to a main target or to a file. Returns a list consisting of
+# - usage requirements
+# - generated virtual targets, if any
+#
+rule generate-from-reference (
+ target-reference # Target reference.
+ : project # Project where the reference is made.
+ : property-set # Properties of the main target that makes the reference.
+)
+{
+ local r = [ resolve-reference $(target-reference) : $(project) ] ;
+ local target = $(r[1]) ;
+ local sproperties = $(r[2]) ;
+
+ # Take properties which should be propagated and refine them with
+ # source-specific requirements.
+ local propagated = [ $(property-set).propagated ] ;
+ local rproperties = [ $(propagated).refine $(sproperties) ] ;
+ if $(rproperties[1]) = "@error"
+ {
+ import errors ;
+ errors.error
+            "When building" $(target-reference) "with properties"
+            [ $(property-set).raw ] :
+            "Invalid properties specified for" $(target-reference) ":"
+ $(rproperties[2-]) ;
+ }
+ return [ $(target).generate $(rproperties) ] ;
+}
+
+
+rule apply-default-build ( property-set : default-build )
+{
+ # 1. First, see what properties from default-build are already present in
+ # property-set.
+
+ local expanded = [ $(property-set).expand ] ;
+ local raw = [ $(property-set).raw ] ;
+ local specified-features = [ $(expanded).raw ] ;
+ specified-features = $(specified-features:G) ;
+
+ local defaults-to-apply ;
+ for local d in [ $(default-build).raw ]
+ {
+ if ! $(d:G) in $(specified-features)
+ {
+ defaults-to-apply += $(d) ;
+ }
+ }
+
+ # 2. If there are any defaults to be applied, form a new build request. Pass
+ # it through to 'expand-no-defaults' since default-build might contain
+ # "release debug" resulting in two property-sets.
+ local result ;
+ if $(defaults-to-apply)
+ {
+ # We have to compress subproperties here to prevent property lists like:
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to a cross-product property combination. That may be an indication
+ # that build-request.expand-no-defaults is the wrong rule to use here.
+ properties = [ build-request.expand-no-defaults
+ [ feature.compress-subproperties $(raw) ] $(defaults-to-apply) ] ;
+
+ if $(properties)
+ {
+ for local p in $(properties)
+ {
+ result += [ property-set.create
+ [ feature.split $(p) ] ] ;
+ }
+ }
+ else
+ {
+ result = [ property-set.empty ] ;
+ }
+ }
+ else
+ {
+ result = $(property-set) ;
+ }
+ return $(result) ;
+}
+
+
+# Given a build request and requirements, return properties common to dependency
+# build request and target requirements.
+#
+# TODO: Document exactly what 'common properties' are, whether they should
+# include default property values, whether they should contain any conditional
+# properties or should those be already processed, etc. See whether there are
+# any differences between use cases with empty and non-empty build-request as
+# well as with requirements containing and those not containing any non-free
+# features.
+#
+rule common-properties ( build-request requirements )
+{
+ # For optimization, we add free requirements directly, without using a
+ # complex algorithm. This gives the complex algorithm a better chance of
+ # caching results.
+ local free = [ $(requirements).free ] ;
+ local non-free = [ property-set.create [ $(requirements).base ]
+ [ $(requirements).incidental ] ] ;
+
+ local key = .rp.$(build-request)-$(non-free) ;
+ if ! $($(key))
+ {
+ $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
+ }
+ return [ $($(key)).add-raw $(free) ] ;
+}
+
+
+# Given a 'context' -- a set of already present properties, and 'requirements',
+# decide which extra properties should be applied to 'context'. For conditional
+# requirements, this means evaluating the condition. For indirect conditional
+# requirements, this means calling a rule. Ordinary requirements are always
+# applied.
+#
+# Handles the situation where evaluating one conditional requirement affects
+# conditions of another conditional requirements, such as:
+# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+#
+# If 'what' is 'refined' returns context refined with new requirements. If
+# 'what' is 'added' returns just the requirements to be applied.
+#
+rule evaluate-requirements ( requirements : context )
+{
+ # Apply non-conditional requirements. It is possible that further
+ # conditional requirement change a value set by non-conditional
+ # requirements. For example:
+ #
+ # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
+ #
+ # I am not sure if this should be an error, or not, especially given that
+ #
+ # <threading>single
+ #
+ # might come from project's requirements.
+
+ local unconditional = [ feature.expand [ $(requirements).non-conditional ] ]
+ ;
+
+ local raw = [ $(context).raw ] ;
+ raw = [ property.refine $(raw) : $(unconditional) ] ;
+
+ # We have collected properties that surely must be present in common
+ # properties. We now try to figure out what other properties should be added
+ # in order to satisfy rules (4)-(6) from the docs.
+
+ local defaults = [ toolset.defaults ] ;
+ defaults = [ $(defaults).raw ] ;
+
+ local conditionals = [ $(requirements).conditional ] ;
+ # The 'count' variable has one element for each conditional feature and for
+ # each occurrence of '<conditional>' feature. It is used as a loop
+    # counter: on each iteration of the loop we remove one element, and
+    # the property set should stabilize before we are done. It is assumed that
+ # #conditionals iterations should be enough for properties to propagate
+ # along conditions in any direction.
+ local count = $(conditionals) $(defaults)
+ and-once-more ;
+
+ local added-requirements ;
+ local added-defaults ;
+
+ local current = $(raw) ;
+
+ local ok ;
+ while $(count)
+ {
+ # We need to expand composites here so that the requirements can
+ # safely override composite features.
+ current = [ feature.expand-composites $(current) ] ;
+ current = [ feature.add-defaults $(current) ] ;
+ # Evaluate conditionals in context of current properties.
+ local e = [ property.evaluate-conditionals-in-context $(conditionals) :
+ $(current) ] ;
+ local d = [ property.evaluate-conditionals-in-context $(defaults) :
+ $(current) ] ;
+
+ if $(e) = $(added-requirements) && $(d) = $(added-defaults)
+ {
+ # If we got the same result, we have found the final properties.
+ count = ;
+ ok = true ;
+ }
+ else
+ {
+ # Oops, conditional evaluation results have changed. Also 'current'
+ # contains leftovers from a previous evaluation. Recompute 'current'
+ # using initial properties and conditional requirements.
+ added-requirements = $(e) ;
+ current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
+ added-defaults = $(d) ;
+ current = [ property.refine $(d) : $(current) ] ;
+ }
+ count = $(count[2-]) ;
+ }
+ if ! $(ok)
+ {
+ import errors ;
+ errors.error Can not evaluate conditional properties $(conditionals) ;
+ }
+
+ return [ property-set.create $(current) ] ;
+}
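+
+
+# For illustration, with the conditional requirements
+#
+#   <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+#
+# and a context that already contains <toolset>gcc, the first pass of the loop
+# above adds <variant>release, the second pass additionally yields
+# <define>RELEASE, and a further pass produces no change, at which point the
+# evaluation stops.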
+
+
+rule common-properties2 ( build-request requirements )
+{
+ return [ evaluate-requirements $(requirements) : $(build-request) ] ;
+}
+
+
+rule push-target ( target )
+{
+ .targets = $(target) $(.targets) ;
+}
+
+rule pop-target ( )
+{
+ .targets = $(.targets[2-]) ;
+}
+
+# Return the metatarget that is currently being generated.
+rule current ( )
+{
+ return $(.targets[1]) ;
+}
+
+
+# Implements the most standard way of constructing a main target alternative
+# from sources. Allows sources to be either files or other main targets and
+# handles generation of those dependency targets.
+#
+class basic-target : abstract-target
+{
+ import build-request ;
+ import build-system ;
+ import "class" : new ;
+ import feature ;
+ import property ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import targets ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+
+ self.sources = $(sources) ;
+ if ! $(requirements)
+ {
+ requirements = [ property-set.empty ] ;
+ }
+ self.requirements = $(requirements) ;
+ if ! $(default-build)
+ {
+ default-build = [ property-set.empty ] ;
+ }
+ self.default-build = $(default-build) ;
+ if ! $(usage-requirements)
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ self.usage-requirements = $(usage-requirements) ;
+
+ if $(sources:G)
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error properties found "in" the 'sources' parameter
+ "for" [ full-name ] ;
+ }
+ }
+
+ rule always ( )
+ {
+ self.always = 1 ;
+ }
+
+ # Returns the list of abstract-targets which are used as sources. The extra
+ # properties specified for sources are not represented. The only user for
+ # this rule at the moment is the "--dump-tests" feature of the test system.
+ #
+ rule sources ( )
+ {
+ if ! $(self.source-targets)
+ {
+ for local s in $(self.sources)
+ {
+ self.source-targets += [ targets.resolve-reference $(s) :
+ $(self.project) ] ;
+ }
+ }
+ return $(self.source-targets) ;
+ }
+
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule default-build ( )
+ {
+ return $(self.default-build) ;
+ }
+
+ # Returns the alternative condition for this alternative, if the condition
+ # is satisfied by 'property-set'.
+ #
+ rule match ( property-set debug ? )
+ {
+ # The condition is composed of all base non-conditional properties. We
+ # only expand subfeatures in the condition. We do not expand
+ # composites. We want to be able to put
+ # <toolset>msvc-6.0
+ # in requirements. On the other hand, if we have <variant>release as a
+ # condition it does not make sense to require <optimization>full to be
+ # in the build request just to select this variant.
+ local bcondition = [ $(self.requirements).base ] ;
+ local ccondition = [ $(self.requirements).conditional ] ;
+ local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
+ condition = [ feature.expand-subfeatures $(condition) : unchecked ] ;
+ if $(debug)
+ {
+ ECHO " next alternative: required properties:"
+ $(condition:E=(empty)) ;
+ }
+
+ if $(condition) in [ $(property-set).raw ]
+ {
+ if $(debug)
+ {
+ ECHO " matched" ;
+ }
+ return $(condition) ;
+ }
+ else
+ {
+ if $(debug)
+ {
+ ECHO " not matched" ;
+ }
+ return no-match ;
+ }
+ }
+
+ # Takes a target reference, which might be either target id or a dependency
+ # property, and generates that target using 'property-set' as a build
+ # request.
+ #
+ # The results are added to the variable called 'result-var'. Usage
+ # requirements are added to the variable called 'usage-requirements-var'.
+ #
+ rule generate-dependencies ( dependencies * : property-set : result-var
+ usage-requirements-var )
+ {
+ for local dependency in $(dependencies)
+ {
+ local grist = $(dependency:G) ;
+ local id = $(dependency:G=) ;
+ local result = [ targets.generate-from-reference $(id) :
+ $(self.project) : $(property-set) ] ;
+
+ $(result-var) += $(result[2-]:G=$(grist)) ;
+ $(usage-requirements-var) += [ $(result[1]).raw ] ;
+ }
+ }
+
+ # Determines final build properties, generates sources, and calls
+ # 'construct'. This method should not be overridden.
+ #
+ rule generate ( property-set )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ local fn = [ full-name ] ;
+ ECHO [ targets.indent ] "Building target '$(fn)'" ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] Build "request:" $(property-set)
+ [ $(property-set).raw ] ;
+ local cf = [ build-system.command-line-free-features ] ;
+ ECHO [ targets.indent ] Command line free "features:" [ $(cf).raw ] ;
+ ECHO [ targets.indent ] Target "requirements:"
+ [ $(self.requirements).raw ] ;
+ }
+ targets.push-target $(__name__) ;
+
+        # Apply free features from the command line. If the user said
+        #   define=FOO
+        # they most likely want this define to be set for all compiles.
+        # Do this before checking whether the target was already built.
+ property-set = [ $(property-set).add
+ [ build-system.command-line-free-features ] ] ;
+
+ if ! $(self.generated.$(property-set))
+ {
+ local rproperties = [ targets.common-properties $(property-set)
+ $(self.requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Common properties: "
+ [ $(rproperties).raw ] ;
+ }
+
+ if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get
+ <build> ] != no )
+ {
+ local source-targets ;
+ local properties = [ $(rproperties).non-dependency ] ;
+ local usage-requirements ;
+
+ generate-dependencies [ $(rproperties).dependency ] :
+ $(rproperties) : properties usage-requirements ;
+
+ generate-dependencies $(self.sources) : $(rproperties) :
+ source-targets usage-requirements ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Usage requirements for"
+ $(self.name)": " $(usage-requirements) ;
+ }
+
+ rproperties = [ property-set.create $(properties)
+ $(usage-requirements) ] ;
+ usage-requirements = [ property-set.create $(usage-requirements)
+ ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Build properties: "
+ [ $(rproperties).raw ] ;
+ }
+
+ local extra = [ $(rproperties).get <source> ] ;
+ source-targets += $(extra:G=) ;
+ # We might get duplicate sources, for example if we link to two
+ # libraries having the same <library> usage requirement. Use
+ # stable sort, since for some targets the order is important,
+ # e.g. RUN_PY targets need a python source to come first.
+ source-targets = [ sequence.unique $(source-targets) : stable ]
+ ;
+
+ local result = [ construct $(self.name) : $(source-targets) :
+ $(rproperties) ] ;
+
+ if $(result)
+ {
+ local gur = $(result[1]) ;
+ result = $(result[2-]) ;
+
+ # Relevant is automatically applied to usage requirements
+ # and only applies for propagated features
+ local relevant = [ propagated-relevant
+ [ $(gur).get <relevant> ]
+ [ $(rproperties).get <relevant> ] ] ;
+ gur = [ property-set.create
+ [ property.change [ $(gur).raw ] : <relevant> ]
+ <relevant>$(relevant) ] ;
+
+ local s = [ create-subvariant $(result)
+ : [ virtual-target.recent-targets ]
+ : $(property-set) : $(source-targets)
+ : $(rproperties) : $(usage-requirements) ] ;
+ virtual-target.clear-recent-targets ;
+
+ if $(self.always)
+ {
+ for local t in [ $(s).created-targets ]
+ {
+ $(t).always ;
+ }
+ }
+
+ local ur = [ compute-usage-requirements $(s) ] ;
+ ur = [ $(ur).add $(gur) ] ;
+ $(s).set-usage-requirements $(ur) ;
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ }
+
+ self.generated.$(property-set) = $(ur) $(result) ;
+ }
+ }
+ else
+ {
+ if $(rproperties[1]) = "@error"
+ {
+ ECHO [ targets.indent ] "Skipping build of:" [ full-name ]
+ "cannot compute common properties" ;
+ }
+ else if [ $(rproperties).get <build> ] = no
+ {
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+ }
+ else
+ {
+ ECHO [ targets.indent ] "Skipping build of: " [ full-name ]
+ " unknown reason" ;
+ }
+
+ # We are here either because there has been an error computing
+ # properties or there is <build>no in properties. In the latter
+ # case we do not want any diagnostic. In the former case, we
+ # need diagnostics. FIXME
+
+ # If this target fails to build, add <build>no to properties to
+ # cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it is not clear if that is a good idea anyway. The alias
+ # target, for example, should not fail to build if a
+ # dependency fails.
+ self.generated.$(property-set) = [ property-set.create <build>no
+ ] ;
+ }
+ }
+ else
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Already built" ;
+ local ur = $(self.generated.$(property-set)) ;
+ ur = $(ur[0]) ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ targets.decrease-indent ;
+ }
+ }
+
+ targets.pop-target ;
+ targets.decrease-indent ;
+ return $(self.generated.$(property-set)) ;
+ }
+
+ # Given the set of generated targets, and refined build properties,
+ # determines and sets appropriate usage requirements on those targets.
+ #
+ rule compute-usage-requirements ( subvariant )
+ {
+ local rproperties = [ $(subvariant).build-properties ] ;
+ xusage-requirements =
+ [ $(self.usage-requirements).evaluate-conditionals
+ $(rproperties) ] ;
+
+ # Filter out non-propagated <relevant> properties
+ local relevant ;
+ for local r in [ $(xusage-requirements).get <relevant> ]
+ {
+ local check = [ MATCH "(.*):<relevant>(.*)" : $(r) ] ;
+ if $(check) { check = $(check[2]) ; }
+ else { check = $(r) ; }
+ if propagated in [ feature.attributes <$(check)> ]
+ {
+ relevant += $(r) ;
+ }
+ }
+ local raw = [ $(xusage-requirements).raw ] ;
+ local free = [ property.take free : $(raw) ] ;
+ if $(free) != $(raw)
+ {
+ if ! $(self.warned-usage-requirements)
+ {
+ self.warned-usage-requirements = true ;
+ ECHO "warning:" non-free usage requirements
+ [ set.difference $(raw) : $(free) ] ignored ;
+ ECHO "warning:" in main-target [ name ] at [ location ] ;
+ }
+ }
+ xusage-requirements = [ property-set.create
+ [ property.change $(free) : <relevant> ]
+ <relevant>$(relevant) ] ;
+
+ # We generate all dependency properties and add them, as well as their
+ # usage requirements, to the result.
+ local extra ;
+ generate-dependencies [ $(xusage-requirements).dependency ] :
+ $(rproperties) : extra extra ;
+
+ local result = [ property-set.create
+ [ $(xusage-requirements).non-dependency ] $(extra) ] ;
+
+ # Propagate usage requirements we got from sources, except for the
+ # <pch-header> and <pch-file> features.
+ #
+ # That feature specifies which pch file to use, and should apply only to
+ # direct dependents. Consider:
+ #
+ # pch pch1 : ...
+ # lib lib1 : ..... pch1 ;
+ # pch pch2 :
+ # lib lib2 : pch2 lib1 ;
+ #
+ # Here, lib2 should not get <pch-header> property from pch1.
+ #
+ # Essentially, when those two features are in usage requirements, they
+ # are propagated only to direct dependents. We might need a more general
+ # mechanism, but for now, only those two features are special.
+ #
+ # TODO - Actually there are more possible candidates like for instance
+ # when listing static library X as a source for another static library.
+ # Then static library X will be added as a <source> property to the
+ # second library's usage requirements but those requirements should last
+ # only up to the first executable or shared library that actually links
+ # to it.
+ local raw = [ $(subvariant).sources-usage-requirements ] ;
+ raw = [ $(raw).raw ] ;
+ raw = [ property.change $(raw) : <pch-header> ] ;
+ raw = [ property.change $(raw) : <pch-file> ] ;
+ return [ $(result).add [ property-set.create $(raw) ] ] ;
+ }
+
+ local rule propagated-relevant ( values * )
+ {
+ local result ;
+ for local v in [ feature.expand-relevant $(values) ]
+ {
+ if propagated in [ feature.attributes <$(v)> ]
+ {
+ result += $(v) ;
+ }
+ }
+ return $(result) ;
+ }
+
+    # Creates a new subvariant instance for the given targets.
+ # 'root-targets' - virtual targets to be returned to dependants
+ # 'all-targets' - virtual targets created while building this main target
+ # 'build-request' - property-set instance with requested build properties
+ #
+ local rule create-subvariant ( root-targets * : all-targets * :
+ build-request : sources * : rproperties : usage-requirements )
+ {
+ for local e in $(root-targets)
+ {
+ $(e).root true ;
+ }
+
+ # Process all virtual targets that will be created if this main target
+ # is created.
+ local s = [ new subvariant $(__name__) : $(build-request) : $(sources) :
+ $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
+ for local v in $(all-targets)
+ {
+ if ! [ $(v).creating-subvariant ]
+ {
+ $(v).creating-subvariant $(s) ;
+ }
+ }
+ return $(s) ;
+ }
+
+ # Constructs virtual targets for this abstract target and the dependency
+ # graph. Returns a usage-requirements property-set and a list of virtual
+ # targets. Should be overridden in derived classes.
+ #
+ rule construct ( name : source-targets * : properties * )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+class typed-target : basic-target
+{
+ import generators ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ self.type = $(type) ;
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) $(name:S=)
+ : $(self.type)
+ : [ property-set.create [ $(property-set).raw ]
+ <main-target-type>$(self.type) ]
+ : $(source-targets) : true ] ;
+ if ! $(r)
+ {
+ local viable-generators = [ generators.find-viable-generators
+ $(self.type) : $(property-set) ] ;
+ ECHO "WARNING: Unable to construct" [ full-name ]
+ "of type" $(self.type)
+ "with these properties:" [ $(property-set).raw ] ;
+ ECHO "WARNING: Considered these as possible generators:" ;
+ for local gen in $(viable-generators)
+ {
+ ECHO "WARNING:" [ $(gen).id ]
+ "with source types {" [ $(gen).source-types ] "}"
+ "and requirements {" [ $(gen).requirements ] "}" ;
+ }
+
+            # Are there any top-level generators for this type/property set?
+ if ! [ generators.find-viable-generators $(self.type) :
+ $(property-set) ]
+ {
+ ECHO "error: no generators were found for type '$(self.type)'" ;
+ ECHO "error: and the requested properties" ;
+ ECHO "error: make sure you've configured the needed tools" ;
+ ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+ EXIT "To debug this problem, try the --debug-generators option."
+ ;
+ }
+ }
+ return $(r) ;
+ }
+}
+
+
+# Return the list of sources to use if a main target rule is invoked with
+# 'sources'. If there are any objects in 'sources', they are treated as main
+# target instances, and the names of such targets are adjusted to be
+# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if
+# a non-empty value is passed as the 'no-renaming' parameter.
+#
+rule main-target-sources ( sources * : main-target-name : no-renaming ? )
+{
+ local result ;
+ for local t in $(sources)
+ {
+ if [ class.is-instance $(t) ]
+ {
+ local name = [ $(t).name ] ;
+ if ! $(no-renaming)
+ {
+ name = $(main-target-name)__$(name) ;
+ $(t).rename $(name) ;
+ }
+ # Inline targets are not built by default.
+ local p = [ $(t).project ] ;
+ $(p).mark-target-as-explicit $(name) ;
+ result += $(name) ;
+ }
+ else
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns the requirements to use when declaring a main target, obtained by
+# translating all specified property paths and refining project requirements
+# with the ones specified for the target.
+#
+rule main-target-requirements (
+ specification * # Properties explicitly specified for the main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local requirements = [ property-set.refine-from-user-input
+ [ $(project).get requirements ] : $(specification) :
+ [ $(project).project-module ] : [ $(project).get location ] ] ;
+ if $(requirements[1]) = "@error"
+ {
+ import errors ;
+ errors.error "Conflicting requirements for target:" $(requirements) ;
+ }
+ local result = [ $(requirements).add [ toolset.requirements ] ] ;
+ return [ $(result).add-raw [ property.evaluate-conditional-relevance [ $(result).raw ] ] ] ;
+}
+
+
+# Returns the usage requirements to use when declaring a main target, which are
+# obtained by translating all specified property paths and adding project's
+# usage requirements.
+#
+rule main-target-usage-requirements (
+ specification * # Use-properties explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local project-usage-requirements = [ $(project).get usage-requirements ] ;
+
+ # We do not use 'refine-from-user-input' because:
+ # - I am not sure if removing parent's usage requirements makes sense
+ # - refining usage requirements is not needed, since usage requirements are
+ # always free.
+ local usage-requirements = [ property-set.create-from-user-input
+ $(specification)
+ : [ $(project).project-module ] [ $(project).get location ] ] ;
+
+ local result = [ $(project-usage-requirements).add $(usage-requirements) ] ;
+ local relevant =
+ [ property.evaluate-conditional-relevance [ $(result).raw ] ] ;
+ return [ $(result).add-raw $(relevant) ] ;
+}
+
+
+# Return the default build value to use when declaring a main target, which is
+# obtained by using the specified value if not empty and parent's default build
+# attribute otherwise.
+#
+rule main-target-default-build (
+ specification * # Default build explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local result ;
+ if $(specification)
+ {
+ result = $(specification) ;
+ }
+ else
+ {
+ result = [ $(project).get default-build ] ;
+ }
+ return [ property-set.create-with-validation $(result) ] ;
+}
+
+
+# Registers the specified target as a main target alternative and returns it.
+#
+rule main-target-alternative ( target )
+{
+ local ptarget = [ $(target).project ] ;
+ $(ptarget).add-alternative $(target) ;
+ return $(target) ;
+}
+
+
+# Creates a metatarget with the specified properties, using 'klass' as the
+# class. The 'name', 'sources', 'requirements', 'default-build' and
+# 'usage-requirements' are assumed to be in the form specified by the user in
+# the Jamfile corresponding to 'project'.
+#
+rule create-metatarget ( klass : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [ targets.main-target-alternative [ new $(klass) $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ] ;
+}
+
+
+# Creates a typed-target with the specified properties. The 'name', 'sources',
+# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
+# the form specified by the user in the Jamfile corresponding to 'project'.
+#
+rule create-typed-target ( type : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [ targets.main-target-alternative [ new typed-target $(name) :
+ $(project) : $(type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ] ;
+}
diff --git a/src/boost/tools/build/src/build/targets.py b/src/boost/tools/build/src/build/targets.py
new file mode 100644
index 000000000..45bbe497e
--- /dev/null
+++ b/src/boost/tools/build/src/build/targets.py
@@ -0,0 +1,1523 @@
+# Status: ported.
+# Base revision: 64488
+
+# Copyright Vladimir Prus 2002-2007.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
+#
+# Abstract targets are represented by classes derived from 'AbstractTarget' class.
+# The first abstract target is 'project_target', which is created for each
+# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
+# (see project.jam).
+#
+# Project targets keep a list of 'MainTarget' instances.
+# A main target is what the user explicitly defines in a Jamfile. It is
+# possible to have several definitions for a main target, for example to have
+# different lists of sources for different platforms. So, main targets
+# keep a list of alternatives.
+#
+# Each alternative is an instance of 'AbstractTarget'. When a main target
+# subvariant is defined by some rule, that rule will decide what class to
+# use, create an instance of that class and add it to the list of alternatives
+# for the main target.
+#
+# Rules supplied by the build system will use only targets derived
+# from 'BasicTarget' class, which will provide some default behaviour.
+# There will be two classes derived from it, 'make-target', created by the
+# 'make' rule, and 'TypedTarget', created by rules such as 'exe' and 'dll'.
+
+#
+# +------------------------+
+# |AbstractTarget |
+# +========================+
+# |name |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project_target | | MainTarget | | BasicTarget |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# | main-target | +-------------+ | construct = 0|
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
+# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
+# | | TypedTarget | | make-target | | stage-target |
+# . +==============+ +=============+ +==============+
+# . | construct | | construct | | construct |
+# +--------------+ +-------------+ +--------------+
+
+import re
+import os.path
+import sys
+
+from b2.manager import get_manager
+
+from b2.util.utility import *
+import property, project, virtual_target, property_set, feature, generators, toolset
+from virtual_target import Subvariant
+from b2.exceptions import *
+from b2.util.sequence import unique
+from b2.util import path, bjam_signature, safe_isinstance, is_iterable_typed
+from b2.build import errors
+from b2.build.errors import user_error_checkpoint
+
+import b2.build.build_request as build_request
+
+import b2.util.set
+_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
+
+class TargetRegistry:
+
+ def __init__ (self):
+ # All targets that are currently being built.
+ # Only the key is id (target), the value is the actual object.
+ self.targets_being_built_ = {}
+
+ # Current indent for debugging messages
+ self.indent_ = ""
+
+ self.debug_building_ = "--debug-building" in bjam.variable("ARGV")
+
+ self.targets_ = []
+
+ def main_target_alternative (self, target):
+ """ Registers the specified target as a main target alternative.
+ Returns 'target'.
+ """
+ assert isinstance(target, AbstractTarget)
+ target.project ().add_alternative (target)
+ return target
+
+ def main_target_sources (self, sources, main_target_name, no_renaming=0):
+ """Return the list of sources to use, if main target rule is invoked
+ with 'sources'. If there are any objects in 'sources', they are treated
+ as main target instances, and the names of such targets are adjusted to
+ be '<name_of_this_target>__<name_of_source_target>'. Such renaming
+ is disabled if a non-empty value is passed for the 'no_renaming' parameter."""
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(main_target_name, basestring)
+ assert isinstance(no_renaming, (int, bool))
+ result = []
+
+ for t in sources:
+
+ t = b2.util.jam_to_value_maybe(t)
+
+ if isinstance (t, AbstractTarget):
+ name = t.name ()
+
+ if not no_renaming:
+ name = main_target_name + '__' + name
+ t.rename (name)
+
+ # Inline targets are not built by default.
+ p = t.project()
+ p.mark_targets_as_explicit([name])
+ result.append(name)
+
+ else:
+ result.append (t)
+
+ return result
+
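+    # An illustrative, standalone sketch of the renaming convention described
+    # above (simplified; hypothetical names, not the b2 API). Inline object
+    # sources are prefixed with the enclosing target's name, while plain file
+    # or target references pass through unchanged:
+    #
+    #   def rename_inline_sources(main_target_name, sources):
+    #       renamed = []
+    #       for s in sources:
+    #           if isinstance(s, str):
+    #               renamed.append(s)              # ordinary reference
+    #           else:                              # assumed inline target object with .name
+    #               renamed.append(main_target_name + '__' + s.name)
+    #       return renamed
+    #
+    #   # rename_inline_sources('app', ['main.cpp']) -> ['main.cpp']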
+
+ def main_target_requirements(self, specification, project):
+ """Returns the requirements to use when declaring a main target,
+ which are obtained by
+ - translating all specified property paths, and
+ - refining project requirements with the ones specified for the target
+
+ 'specification' is the list of properties explicitly specified for the
+ main target
+ 'project' is the project where the main target is to be declared."""
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
+ # create a copy since the list is being modified
+ specification = list(specification)
+ specification.extend(toolset.requirements())
+
+ requirements = property_set.refine_from_user_input(
+ project.get("requirements"), specification,
+ project.project_module(), project.get("location"))
+
+ return requirements
+
+ def main_target_usage_requirements (self, specification, project):
+ """ Returns the usage requirements to use when declaring a main target,
+ which are obtained by
+ - translating all specified property paths, and
+ - adding project's usage requirements
+ specification: Use-properties explicitly specified for a main target
+ project: Project where the main target is to be declared
+ """
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
+ project_usage_requirements = project.get ('usage-requirements')
+
+ # We do not use 'refine_from_user_input' because:
+ # - it is not clear whether removing the parent's usage requirements makes
+ # sense, and
+ # - refining usage requirements is not needed, since usage requirements
+ # are always free.
+ usage_requirements = property_set.create_from_user_input(
+ specification, project.project_module(), project.get("location"))
+
+ return project_usage_requirements.add (usage_requirements)
+
+ def main_target_default_build (self, specification, project):
+ """ Return the default build value to use when declaring a main target,
+ which is obtained by using the specified value if not empty and the
+ parent's default build attribute otherwise.
+ specification: Default build explicitly specified for a main target
+ project: Project where the main target is to be declared
+ """
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
+ if specification:
+ return property_set.create_with_validation(specification)
+ else:
+ return project.get ('default-build')
+
+ def start_building (self, main_target_instance):
+ """ Helper rule to detect cycles in main target references.
+ """
+ assert isinstance(main_target_instance, MainTarget)
+ if id(main_target_instance) in self.targets_being_built_:
+ names = []
+ for t in self.targets_being_built_.values() + [main_target_instance]:
+ names.append (t.full_name())
+
+ get_manager().errors()("Recursion in main target references\n")
+
+ self.targets_being_built_[id(main_target_instance)] = main_target_instance
+
+ def end_building (self, main_target_instance):
+ assert isinstance(main_target_instance, MainTarget)
+ assert (id(main_target_instance) in self.targets_being_built_)
+ del self.targets_being_built_ [id (main_target_instance)]
+
+ def create_typed_target (self, type, project, name, sources, requirements, default_build, usage_requirements):
+ """ Creates a TypedTarget with the specified properties.
+ The 'name', 'sources', 'requirements', 'default_build' and
+ 'usage_requirements' are assumed to be in the form specified
+ by the user in Jamfile corresponding to 'project'.
+ """
+ assert isinstance(type, basestring)
+ assert isinstance(project, ProjectTarget)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ return self.main_target_alternative (TypedTarget (name, project, type,
+ self.main_target_sources (sources, name),
+ self.main_target_requirements (requirements, project),
+ self.main_target_default_build (default_build, project),
+ self.main_target_usage_requirements (usage_requirements, project)))
+
+ def increase_indent(self):
+ self.indent_ += " "
+
+ def decrease_indent(self):
+ self.indent_ = self.indent_[0:-4]
+
+ def logging(self):
+ return self.debug_building_
+
+ def log(self, message):
+ if self.debug_building_:
+ print self.indent_ + message
+
+ def push_target(self, target):
+ assert isinstance(target, AbstractTarget)
+ self.targets_.append(target)
+
+ def pop_target(self):
+ self.targets_ = self.targets_[:-1]
+
+ def current(self):
+ return self.targets_[0]
+
+
+class GenerateResult:
+
+ def __init__ (self, ur=None, targets=None):
+ if not targets:
+ targets = []
+ assert isinstance(ur, property_set.PropertySet) or ur is None
+ assert is_iterable_typed(targets, virtual_target.VirtualTarget)
+
+ self.__usage_requirements = ur
+ self.__targets = targets
+
+ if not self.__usage_requirements:
+ self.__usage_requirements = property_set.empty ()
+
+ def usage_requirements (self):
+ return self.__usage_requirements
+
+ def targets (self):
+ return self.__targets
+
+ def extend (self, other):
+ assert (isinstance (other, GenerateResult))
+
+ self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ())
+ self.__targets.extend (other.targets ())
+
+class AbstractTarget:
+ """ Base class for all abstract targets.
+ """
+ def __init__ (self, name, project, manager = None):
+ """ name: name of the target
+ project: the project target to which this one belongs
+ manager: the Manager object. If None, project.manager() is used.
+ """
+ assert isinstance(name, basestring)
+ assert (isinstance (project, ProjectTarget))
+ # Note: it might seem that we don't need either name or project at all.
+ # However, there are places where we really need it. One example is error
+ # messages which should name problematic targets. Another is setting correct
+ # paths for sources and generated files.
+
+ # Why allow manager to be specified? Because otherwise project target could not derive
+ # from this class.
+ if manager:
+ self.manager_ = manager
+ else:
+ self.manager_ = project.manager ()
+
+ self.name_ = name
+ self.project_ = project
+ self.location_ = errors.nearest_user_location()
+
+ def manager (self):
+ return self.manager_
+
+ def name (self):
+ """ Returns the name of this target.
+ """
+ return self.name_
+
+ def project (self):
+ """ Returns the project for this target.
+ """
+ return self.project_
+
+ def location (self):
+ """ Return the location where the target was declared.
+ """
+ return self.location_
+
+ def full_name (self):
+ """ Returns a user-readable name for this target.
+ """
+ location = self.project ().get ('location')
+ return location + '/' + self.name_
+
+ def generate (self, property_set):
+ """ Takes a property set. Generates virtual targets for this abstract
+ target, using the specified properties, unless a different value of some
+ feature is required by the target.
+ On success, returns a GenerateResult instance with:
+ - a property_set with the usage requirements to be
+ applied to dependents
+ - a list of produced virtual targets, which may be
+ empty.
+ If 'property_set' is empty, performs default build of this
+ target, in a way specific to derived class.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def rename (self, new_name):
+ assert isinstance(new_name, basestring)
+ self.name_ = new_name
+
+class ProjectTarget (AbstractTarget):
+ """ Project target class (derived from 'AbstractTarget')
+
+ This class has the following responsibilities:
+ - maintaining a list of main targets in this project and
+ building them
+
+ Main targets are constructed in two stages:
+ - When Jamfile is read, a number of calls to 'add_alternative' is made.
+ At that time, alternatives can also be renamed to account for inline
+ targets.
+ - The first time the 'main_target' or 'has_main_target' rule is called,
+ all alternatives are enumerated and main targets are created.
+ """
+ def __init__ (self, manager, name, project_module, parent_project, requirements, default_build):
+ assert isinstance(project_module, basestring)
+ assert isinstance(parent_project, (ProjectTarget, type(None)))
+ assert isinstance(requirements, (type(None), property_set.PropertySet))
+ assert isinstance(default_build, (type(None), property_set.PropertySet))
+ AbstractTarget.__init__ (self, name, self, manager)
+
+ self.project_module_ = project_module
+ self.location_ = manager.projects().attribute (project_module, 'location')
+ self.requirements_ = requirements
+ self.default_build_ = default_build
+
+ self.build_dir_ = None
+
+ # A cache of IDs
+ self.ids_cache_ = {}
+
+ # True if main targets have already been built.
+ self.built_main_targets_ = False
+
+ # A list of the registered alternatives for this project.
+ self.alternatives_ = []
+
+ # A map from main target name to the target corresponding
+ # to it.
+ self.main_target_ = {}
+
+ # Targets marked as explicit.
+ self.explicit_targets_ = set()
+
+ # Targets marked as always
+ self.always_targets_ = set()
+
+ # The constants defined for this project.
+ self.constants_ = {}
+
+ # Whether targets for all main targets have already been created.
+ self.built_main_targets_ = 0
+
+ if parent_project:
+ self.inherit (parent_project)
+
+
+ # TODO: This is needed only by the 'make' rule. Need to find the
+ # way to make 'make' work without this method.
+ def project_module (self):
+ return self.project_module_
+
+ def get (self, attribute):
+ assert isinstance(attribute, basestring)
+ return self.manager().projects().attribute(
+ self.project_module_, attribute)
+
+ def build_dir (self):
+ if not self.build_dir_:
+ self.build_dir_ = self.get ('build-dir')
+ if not self.build_dir_:
+ self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin')
+
+ return self.build_dir_
+
+ def generate (self, ps):
+ """ Generates all possible targets contained in this project.
+ """
+ assert isinstance(ps, property_set.PropertySet)
+ self.manager_.targets().log(
+ "Building project '%s' with '%s'" % (self.name (), str(ps)))
+ self.manager_.targets().increase_indent ()
+
+ result = GenerateResult ()
+
+ for t in self.targets_to_build ():
+ g = t.generate (ps)
+ result.extend (g)
+
+ self.manager_.targets().decrease_indent ()
+ return result
+
+ def targets_to_build (self):
+ """ Computes and returns a list of AbstractTarget instances which
+ must be built when this project is built.
+ """
+ result = []
+
+ if not self.built_main_targets_:
+ self.build_main_targets ()
+
+ # Collect all main targets here, except for "explicit" ones.
+ for n, t in self.main_target_.iteritems ():
+ if not t.name () in self.explicit_targets_:
+ result.append (t)
+
+ # Collect all projects referenced via "projects-to-build" attribute.
+ self_location = self.get ('location')
+ for pn in self.get ('projects-to-build'):
+ result.append (self.find(pn + "/"))
+
+ return result
+
+ def mark_targets_as_explicit (self, target_names):
+ """Add 'target' to the list of targets in this project
+ that should be built only by explicit request."""
+
+ # Record the name of the target, not instance, since this
+ # rule is called before main target instances are created.
+ assert is_iterable_typed(target_names, basestring)
+ self.explicit_targets_.update(target_names)
+
+ def mark_targets_as_always(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
+ self.always_targets_.update(target_names)
+
+ def add_alternative (self, target_instance):
+ """ Add new target alternative.
+ """
+ assert isinstance(target_instance, AbstractTarget)
+ if self.built_main_targets_:
+ raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())
+
+ self.alternatives_.append (target_instance)
+
+ def main_target (self, name):
+ assert isinstance(name, basestring)
+ if not self.built_main_targets_:
+ self.build_main_targets()
+
+ return self.main_target_[name]
+
+ def has_main_target (self, name):
+ """Tells if a main target with the specified name exists."""
+ assert isinstance(name, basestring)
+ if not self.built_main_targets_:
+ self.build_main_targets()
+
+ return name in self.main_target_
+
+ def create_main_target (self, name):
+ """ Returns a 'MainTarget' class instance corresponding to the 'name'.
+ """
+ assert isinstance(name, basestring)
+ if not self.built_main_targets_:
+ self.build_main_targets ()
+
+ return self.main_target_.get (name, None)
+
+
+ def find_really(self, id):
+ """ Find and return the target with the specified id, treated
+ relative to self.
+ """
+ assert isinstance(id, basestring)
+
+ result = None
+ current_location = self.get ('location')
+
+ __re_split_project_target = re.compile (r'(.*)//(.*)')
+ split = __re_split_project_target.match (id)
+
+ project_part = None
+ target_part = None
+
+ if split:
+ project_part = split.group(1)
+ target_part = split.group(2)
+ if not target_part:
+ get_manager().errors()(
+ 'Project ID, "{}", is not a valid target reference. There should '
+ 'be either a target name after the "//" or the "//" should be removed '
+ 'from the target reference.'
+ .format(id)
+ )
+
+
+ project_registry = self.project_.manager ().projects ()
+
+ extra_error_message = ''
+ if project_part:
+ # There is an explicit project part in the id. Look up the
+ # project and pass the request to it.
+ pm = project_registry.find (project_part, current_location)
+
+ if pm:
+ project_target = project_registry.target (pm)
+ result = project_target.find (target_part, no_error=1)
+
+ else:
+ extra_error_message = "error: could not find project '%s'" % project_part
+
+ else:
+ # Interpret target-name as name of main target
+ # Need to do this before checking for file. Consider this:
+ #
+ # exe test : test.cpp ;
+ # install s : test : <location>. ;
+ #
+ # After first build we'll have target 'test' in Jamfile and file
+ # 'test' on the disk. We need target to override the file.
+
+ result = None
+ if self.has_main_target(id):
+ result = self.main_target(id)
+
+ if not result:
+ result = FileReference (self.manager_, id, self.project_)
+ if not result.exists ():
+ # File actually does not exist.
+ # Reset 'target' so that an error is issued.
+ result = None
+
+
+ if not result:
+ # Interpret id as project-id
+ project_module = project_registry.find (id, current_location)
+ if project_module:
+ result = project_registry.target (project_module)
+
+ return result
+
+ def find (self, id, no_error = False):
+ assert isinstance(id, basestring)
+ assert isinstance(no_error, int) # also matches bools
+ v = self.ids_cache_.get (id, None)
+
+ if not v:
+ v = self.find_really (id)
+ self.ids_cache_ [id] = v
+
+ if v or no_error:
+ return v
+
+ raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location')))
+
+
+ def build_main_targets (self):
+ self.built_main_targets_ = True
+
+ for a in self.alternatives_:
+ name = a.name ()
+ if name not in self.main_target_:
+ t = MainTarget (name, self.project_)
+ self.main_target_ [name] = t
+
+ if name in self.always_targets_:
+ a.always()
+
+ self.main_target_ [name].add_alternative (a)
+
+ def add_constant(self, name, value, path=0):
+ """Adds a new constant for this project.
+
+ The constant will be available for use in the Jamfile
+ module for this project. If 'path' is true,
+ the constant will be interpreted relative
+ to the location of the project.
+ """
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(value, basestring)
+ assert isinstance(path, int) # will also match bools
+ if path:
+ l = self.location_
+ if not l:
+ # Projects corresponding to config files do not have a
+ # 'location' attribute, but they do have a source location.
+ # It might be more reasonable to make every project have
+ # a location and use some other approach to prevent buildable
+ # targets in config files, but that's for later.
+ l = self.get('source-location')
+
+ value = os.path.join(l, value[0])
+ # Now make the value absolute path. Constants should be in
+ # platform-native form.
+ value = [os.path.normpath(os.path.join(os.getcwd(), value))]
+
+ self.constants_[name] = value
+ bjam.call("set-variable", self.project_module(), name, value)
+
+ def inherit(self, parent_project):
+ assert isinstance(parent_project, ProjectTarget)
+ for c in parent_project.constants_:
+ # No need to pass the type. Path constants were converted to
+ # absolute paths already by parent.
+ self.add_constant(c, parent_project.constants_[c])
+
+ # Import rules from parent
+ this_module = self.project_module()
+ parent_module = parent_project.project_module()
+
+ rules = bjam.call("RULENAMES", parent_module)
+ if not rules:
+ rules = []
+ user_rules = [x for x in rules
+ if x not in self.manager().projects().project_rules().all_names()]
+ if user_rules:
+ bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
+
+class MainTarget (AbstractTarget):
+ """ A named top-level target in Jamfile.
+ """
+ def __init__ (self, name, project):
+ AbstractTarget.__init__ (self, name, project)
+ self.alternatives_ = []
+ self.best_alternative = None
+ self.default_build_ = property_set.empty ()
+
+ def add_alternative (self, target):
+ """ Add a new alternative for this target.
+ """
+ assert isinstance(target, BasicTarget)
+ d = target.default_build ()
+
+ if self.alternatives_ and self.default_build_ != d:
+ get_manager().errors()("default build must be identical in all alternatives\n"
+ "main target is '%s'\n"
+ "with '%s'\n"
+ "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ()))
+
+ else:
+ self.default_build_ = d
+
+ self.alternatives_.append (target)
+
+ def __select_alternatives (self, property_set_, debug):
+ """ Returns the best viable alternative for this property_set.
+ See the documentation for selection rules.
+ # TODO: shouldn't this be 'alternative' (singular)?
+ """
+ # When selecting alternatives we have to consider defaults,
+ # for example:
+ # lib l : l.cpp : <variant>debug ;
+ # lib l : l_opt.cpp : <variant>release ;
+ # won't work unless we add default value <variant>debug.
+ assert isinstance(property_set_, property_set.PropertySet)
+ assert isinstance(debug, int) # also matches bools
+
+ property_set_ = property_set_.add_defaults ()
+
+ # The algorithm: we keep the current best viable alternative.
+ # When we've got new best viable alternative, we compare it
+ # with the current one.
+ best = None
+ best_properties = None
+
+ if len (self.alternatives_) == 0:
+ return None
+
+ if len (self.alternatives_) == 1:
+ return self.alternatives_ [0]
+
+ if debug:
+ print "Property set for selection:", property_set_
+
+ for v in self.alternatives_:
+ properties = v.match (property_set_, debug)
+
+ if properties is not None:
+ if not best:
+ best = v
+ best_properties = properties
+
+ else:
+ if b2.util.set.equal (properties, best_properties):
+ return None
+
+ elif b2.util.set.contains (properties, best_properties):
+ # Do nothing, this alternative is worse
+ pass
+
+ elif b2.util.set.contains (best_properties, properties):
+ best = v
+ best_properties = properties
+
+ else:
+ return None
+
+ return best
+
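+    # A simplified, self-contained sketch of the selection rule used above
+    # (hypothetical helper, not the b2 API): each alternative's condition is a
+    # set of properties; an alternative is viable if its condition is a subset
+    # of the requested properties, and the viable alternative with the largest
+    # condition wins. Equal or incomparable viable conditions make the choice
+    # ambiguous (None here, where b2 reports an error).
+    #
+    #   def select_alternative(request, alternatives):
+    #       # 'alternatives' is a list of (condition, payload) pairs;
+    #       # 'request' and each condition are sets of "<feature>value" strings.
+    #       best = None
+    #       for condition, payload in alternatives:
+    #           if not condition <= request:
+    #               continue                      # not viable
+    #           if best is None or best[0] < condition:
+    #               best = (condition, payload)   # strictly more specific
+    #           elif not condition < best[0]:
+    #               return None                   # equal or incomparable: ambiguous
+    #       return best[1] if best else None
+    #
+    #   # select_alternative({'<variant>debug', '<link>shared'},
+    #   #     [({'<variant>debug'}, 'a'), (set(), 'b')]) -> 'a'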
+ def apply_default_build (self, property_set_):
+ assert isinstance(property_set_, property_set.PropertySet)
+ return apply_default_build(property_set_, self.default_build_)
+
+ def generate (self, ps):
+ """ Selects an alternative for this main target by finding all alternatives
+ whose requirements are satisfied by 'ps' and picking the one with the
+ longest requirements set.
+ Returns the result of calling 'generate' on that alternative.
+ """
+ assert isinstance(ps, property_set.PropertySet)
+ self.manager_.targets ().start_building (self)
+
+ # We want composite properties in the build request to act as if
+ # all the properties they expand to were explicitly specified.
+ ps = ps.expand ()
+
+ all_property_sets = self.apply_default_build (ps)
+
+ result = GenerateResult ()
+
+ for p in all_property_sets:
+ result.extend (self.__generate_really (p))
+
+ self.manager_.targets ().end_building (self)
+
+ return result
+
+ def __generate_really (self, prop_set):
+ """ Generates the main target with the given property set
+ and returns a GenerateResult instance containing the usage
+ requirements of the generated target and the generated
+ virtual targets. It is possible that no targets are
+ generated.
+ """
+ assert isinstance(prop_set, property_set.PropertySet)
+ best_alternative = self.__select_alternatives (prop_set, debug=0)
+ self.best_alternative = best_alternative
+
+ if not best_alternative:
+ # FIXME: revive.
+ # self.__select_alternatives(prop_set, debug=1)
+ self.manager_.errors()(
+ "No best alternative for '%s'.\n"
+ % (self.full_name(),))
+
+ result = best_alternative.generate (prop_set)
+
+ # Now return virtual targets for the only alternative
+ return result
+
+ def rename(self, new_name):
+ assert isinstance(new_name, basestring)
+ AbstractTarget.rename(self, new_name)
+ for a in self.alternatives_:
+ a.rename(new_name)
+
+class FileReference (AbstractTarget):
+ """ Abstract target which refers to a source file.
+ This is an artificial target; it is useful so that the sources of
+ a target can be represented as a list of abstract target instances.
+ """
+ def __init__ (self, manager, file, project):
+ AbstractTarget.__init__ (self, file, project)
+ self.file_location_ = None
+
+ def generate (self, properties):
+ return GenerateResult (None, [
+ self.manager_.virtual_targets ().from_file (
+ self.name_, self.location(), self.project_) ])
+
+ def exists (self):
+ """ Returns true if the referred file really exists.
+ """
+ if self.location ():
+ return True
+ else:
+ return False
+
+ def location (self):
+ # Returns the location of target. Needed by 'testing.jam'
+ if not self.file_location_:
+ source_location = self.project_.get('source-location')
+
+ for src_dir in source_location:
+ location = os.path.join(src_dir, self.name())
+ if os.path.isfile(location):
+ self.file_location_ = src_dir
+ self.file_path = location
+ break
+
+ return self.file_location_
+
+def resolve_reference(target_reference, project):
+ """ Given a target_reference, made in context of 'project',
+ returns the AbstractTarget instance that is referred to, as well
+ as properties explicitly specified for this reference.
+ """
+ # Separate target name from properties override
+ assert isinstance(target_reference, basestring)
+ assert isinstance(project, ProjectTarget)
+ split = _re_separate_target_from_properties.match (target_reference)
+ if not split:
+ raise BaseException ("Invalid reference: '%s'" % target_reference)
+
+ id = split.group (1)
+
+ sproperties = []
+
+ if split.group (3):
+ sproperties = property.create_from_strings(feature.split(split.group(3)))
+ sproperties = feature.expand_composites(sproperties)
+
+ # Find the target
+ target = project.find (id)
+
+ return (target, property_set.create(sproperties))
+
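+# An illustrative, standalone sketch of how a target reference is split into a
+# target id and its source-specific properties (the regex mirrors
+# _re_separate_target_from_properties above; the helper name is hypothetical).
+# Group 3, if present, may hold several '/'-separated properties:
+#
+#   import re
+#   _split = re.compile(r'^([^<]*)(/(<.*))?$')
+#
+#   def split_reference(ref):
+#       m = _split.match(ref)
+#       if not m:
+#           raise ValueError("invalid reference: %r" % ref)
+#       return m.group(1), m.group(3) or ''
+#
+#   # split_reference('/boost//regex/<link>static')
+#   #   -> ('/boost//regex', '<link>static')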
+def generate_from_reference(target_reference, project, property_set_):
+ """ Attempts to generate the target given by target reference, which
+ can refer both to a main target or to a file.
+ Returns a list consisting of
+ - usage requirements
+ - generated virtual targets, if any
+ target_reference: Target reference
+ project: Project where the reference is made
+ property_set: Properties of the main target that makes the reference
+ """
+ assert isinstance(target_reference, basestring)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(property_set_, property_set.PropertySet)
+ target, sproperties = resolve_reference(target_reference, project)
+
+ # Take properties which should be propagated and refine them
+ # with source-specific requirements.
+ propagated = property_set_.propagated()
+ rproperties = propagated.refine(sproperties)
+
+ return target.generate(rproperties)
+
+
+
+class BasicTarget (AbstractTarget):
+ """ Implements the most standard way of constructing a main target
+ alternative from sources. Allows sources to be either files or
+ other main targets and handles generation of those dependency
+ targets.
+ """
+ def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None):
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(requirements, property_set.PropertySet) or requirements is None
+ assert isinstance(default_build, property_set.PropertySet) or default_build is None
+ assert isinstance(usage_requirements, property_set.PropertySet) or usage_requirements is None
+ AbstractTarget.__init__ (self, name, project)
+
+ for s in sources:
+ if get_grist (s):
+ raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
+
+ self.sources_ = sources
+
+ if not requirements: requirements = property_set.empty ()
+ self.requirements_ = requirements
+
+ if not default_build: default_build = property_set.empty ()
+ self.default_build_ = default_build
+
+ if not usage_requirements: usage_requirements = property_set.empty ()
+ self.usage_requirements_ = usage_requirements
+
+ # A cache for resolved references
+ self.source_targets_ = None
+
+ # A cache for generated targets
+ self.generated_ = {}
+
+ # A cache for build requests
+ self.request_cache = {}
+
+ # Result of 'capture_user_context' has everything. For example, if this
+ # target is declared as a result of loading a Jamfile which was loaded when
+ # building target B, which was requested from A, then we will have A, B and
+ # the Jamroot location in the context. We only care about the Jamroot
+ # location most of the time.
+ self.user_context_ = self.manager_.errors().capture_user_context()[-1:]
+
+ self.always_ = False
+
+ def always(self):
+ self.always_ = True
+
+ def sources (self):
+ """ Returns the list of AbstractTargets which are used as sources.
+ The extra properties specified for sources are not represented.
+ The only use of this rule at the moment is the '--dump-tests'
+ feature of the test system.
+ """
+ if self.source_targets_ == None:
+ self.source_targets_ = []
+ for s in self.sources_:
+ self.source_targets_.append(resolve_reference(s, self.project_)[0])
+
+ return self.source_targets_
+
+ def requirements (self):
+ return self.requirements_
+
+ def default_build (self):
+ return self.default_build_
+
+ def common_properties (self, build_request, requirements):
+ """ Given build request and requirements, return properties
+ common to dependency build request and target build
+ properties.
+ """
+ # For optimization, we add free unconditional requirements directly,
+ # without using the complex algorithm.
+ # This gives the complex algorithm a better chance of caching results.
+ # The exact effect of this "optimization" is no longer clear.
+ assert isinstance(build_request, property_set.PropertySet)
+ assert isinstance(requirements, property_set.PropertySet)
+ free_unconditional = []
+ other = []
+ for p in requirements.all():
+ if p.feature.free and not p.condition and p.feature.name != 'conditional':
+ free_unconditional.append(p)
+ else:
+ other.append(p)
+ other = property_set.create(other)
+
+ key = (build_request, other)
+ if key not in self.request_cache:
+ self.request_cache[key] = self.__common_properties2 (build_request, other)
+
+ return self.request_cache[key].add_raw(free_unconditional)
+
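+    # A simplified sketch of the split performed above (hypothetical names and
+    # data model, not the b2 API): free, unconditional properties do not
+    # influence the expensive refinement, so they are kept out of the cache key
+    # and merged into the cached result afterwards. Property sets are modelled
+    # here as frozensets and the '<conditional>' special case is omitted.
+    #
+    #   def common_properties_cached(cache, build_request, requirements, refine):
+    #       free = frozenset(p for p in requirements
+    #                        if p.is_free and not p.condition)
+    #       rest = frozenset(requirements) - free
+    #       key = (build_request, rest)
+    #       if key not in cache:
+    #           cache[key] = refine(build_request, rest)
+    #       return cache[key] | free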
+ # Given 'context' -- a set of already present properties, and 'requirements',
+ # decide which extra properties should be applied to 'context'.
+ # For conditional requirements, this means evaluating condition. For
+ # indirect conditional requirements, this means calling a rule. Ordinary
+ # requirements are always applied.
+ #
+ # Handles situation where evaluating one conditional requirements affects
+ # condition of another conditional requirements, for example:
+ #
+ # <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+ #
+ # If 'what' is 'refined' returns context refined with new requirements.
+ # If 'what' is 'added' returns just the requirements that must be applied.
+ def evaluate_requirements(self, requirements, context, what):
+ # Apply non-conditional requirements.
+ # It is possible that further conditional requirements change
+ # a value set by non-conditional requirements. For example:
+ #
+ # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
+ #
+ # I'm not sure if this should be an error, or not, especially given that
+ #
+ # <threading>single
+ #
+ # might come from project's requirements.
+ assert isinstance(requirements, property_set.PropertySet)
+ assert isinstance(context, property_set.PropertySet)
+ assert isinstance(what, basestring)
+ unconditional = feature.expand(requirements.non_conditional())
+
+ context = context.refine(property_set.create(unconditional))
+
+ # We've collected properties that surely must be present in common
+ # properties. We now try to figure out what other properties
+ # should be added in order to satisfy rules (4)-(6) from the docs.
+
+ conditionals = property_set.create(requirements.conditional())
+
+ # It's supposed that #conditionals iterations
+ # should be enough for properties to propagate along conditions in any
+ # direction.
+ max_iterations = len(conditionals.all()) +\
+ len(requirements.get("<conditional>")) + 1
+
+ added_requirements = []
+ current = context
+
+ # It's assumed that ordinary conditional requirements can't add
+ # <indirect-conditional> properties, and that rules referred to
+ # by <indirect-conditional> properties can't add new
+ # <indirect-conditional> properties. So the list of indirect conditionals
+ # does not change.
+ indirect = requirements.get("<conditional>")
+
+ ok = 0
+ for i in range(0, max_iterations):
+
+ e = conditionals.evaluate_conditionals(current).all()[:]
+
+ # Evaluate indirect conditionals.
+ for i in indirect:
+ new = None
+ i = b2.util.jam_to_value_maybe(i)
+ if callable(i):
+ # This is Python callable, yeah.
+ new = i(current)
+ else:
+ # Name of bjam function. Because bjam is unable to handle
+ # list of Property, pass list of strings.
+ br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
+ if br:
+ new = property.create_from_strings(br)
+ if new:
+ new = property.translate_paths(new, self.project().location())
+ e.extend(new)
+
+ if e == added_requirements:
+ # If we got the same result, we've found final properties.
+ ok = 1
+ break
+ else:
+ # Oops, results of evaluation of conditionals have changed.
+ # Also 'current' contains leftover from previous evaluation.
+ # Recompute 'current' using initial properties and conditional
+ # requirements.
+ added_requirements = e
+ current = context.refine(property_set.create(feature.expand(e)))
+
+ if not ok:
+ self.manager().errors()("Can't evaluate conditional properties "
+ + str(conditionals))
+
+
+ if what == "added":
+ return property_set.create(unconditional + added_requirements)
+ elif what == "refined":
+ return current
+ else:
+ self.manager().errors()("Invalid value of the 'what' parameter")
+
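+    # A standalone sketch of the fixed-point iteration used above (hypothetical
+    # helper, not the b2 API): conditional rules are re-evaluated against the
+    # refined context until the set of properties they add stops changing, so
+    # that one conditional can trigger another, e.g.
+    # <toolset>gcc:<variant>release followed by <variant>release:<define>RELEASE.
+    # Refinement is simplified to set union here.
+    #
+    #   def evaluate_conditionals(context, rules, max_iterations):
+    #       # 'rules' is a list of callables mapping a property set (a frozenset
+    #       # of "<feature>value" strings) to the properties they add.
+    #       added = frozenset()
+    #       current = frozenset(context)
+    #       for _ in range(max_iterations):
+    #           new = frozenset().union(*(frozenset(r(current)) for r in rules))
+    #           if new == added:
+    #               return current                 # reached a fixed point
+    #           added = new
+    #           current = frozenset(context) | added
+    #       raise RuntimeError("conditional properties did not converge")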
+ def __common_properties2(self, build_request, requirements):
+ # This guarantees that default properties are present
+ # in result, unless they are overridden by some requirement.
+ # TODO: There is possibility that we've added <foo>bar, which is composite
+ # and expands to <foo2>bar2, but default value of <foo2> is not bar2,
+ # in which case it's not clear what to do.
+ #
+ assert isinstance(build_request, property_set.PropertySet)
+ assert isinstance(requirements, property_set.PropertySet)
+ build_request = build_request.add_defaults()
+ # Features added by 'add_defaults' can be composite and expand
+ # to features without default values -- so they are not added yet.
+ # It could be clearer/faster to expand only newly added properties
+ # but that's not critical.
+ build_request = build_request.expand()
+
+ return self.evaluate_requirements(requirements, build_request,
+ "refined")
+
+ def match (self, property_set_, debug):
+ """ Returns the alternative condition for this alternative, if
+ the condition is satisfied by 'property_set'.
+ """
+ # The condition is composed of all base non-conditional properties.
+ # It's not clear if we should expand 'self.requirements_' or not.
+ # For one thing, it would be nice to be able to put
+ # <toolset>msvc-6.0
+ # in requirements.
+ # On the other hand, if we have <variant>release in condition it
+ # does not make sense to require <optimization>full to be in
+ # build request just to select this variant.
+ assert isinstance(property_set_, property_set.PropertySet)
+ bcondition = self.requirements_.base ()
+ ccondition = self.requirements_.conditional ()
+ condition = b2.util.set.difference (bcondition, ccondition)
+
+ if debug:
+ print " next alternative: required properties:", [str(p) for p in condition]
+
+ if b2.util.set.contains (condition, property_set_.all()):
+
+ if debug:
+ print " matched"
+
+ return condition
+
+ else:
+ return None
+
+
+ def generate_dependency_targets (self, target_ids, property_set_):
+ assert is_iterable_typed(target_ids, basestring)
+ assert isinstance(property_set_, property_set.PropertySet)
+ targets = []
+ usage_requirements = []
+ for id in target_ids:
+
+ result = generate_from_reference(id, self.project_, property_set_)
+ targets += result.targets()
+ usage_requirements += result.usage_requirements().all()
+
+ return (targets, usage_requirements)
+
+ def generate_dependency_properties(self, properties, ps):
+ """ Takes a list of dependency properties whose values are target
+ references and generates those targets using 'ps' as the build
+ request.
+
+ Returns a tuple (result_properties, usage_requirements).
+ """
+ assert is_iterable_typed(properties, property.Property)
+ assert isinstance(ps, property_set.PropertySet)
+ result_properties = []
+ usage_requirements = []
+ for p in properties:
+
+ result = generate_from_reference(p.value, self.project_, ps)
+
+ for t in result.targets():
+ result_properties.append(property.Property(p.feature, t))
+
+ usage_requirements += result.usage_requirements().all()
+
+ return (result_properties, usage_requirements)
+
+
+
+
+ @user_error_checkpoint
+ def generate (self, ps):
+ """ Determines final build properties, generates sources,
+ and calls 'construct'. This method should not be
+ overridden.
+ """
+ assert isinstance(ps, property_set.PropertySet)
+ self.manager_.errors().push_user_context(
+ "Generating target " + self.full_name(), self.user_context_)
+
+ if self.manager().targets().logging():
+ self.manager().targets().log(
+ "Building target '%s'" % self.name_)
+ self.manager().targets().increase_indent ()
+ self.manager().targets().log(
+ "Build request: '%s'" % str (ps.raw ()))
+ cf = self.manager().command_line_free_features()
+ self.manager().targets().log(
+ "Command line free features: '%s'" % str (cf.raw ()))
+ self.manager().targets().log(
+ "Target requirements: %s'" % str (self.requirements().raw ()))
+
+ self.manager().targets().push_target(self)
+
+ if ps not in self.generated_:
+
+ # Apply free features from the command line. If the user
+ # said
+ # define=FOO
+ # they most likely want this define to be set for all compiles.
+ ps = ps.refine(self.manager().command_line_free_features())
+ rproperties = self.common_properties (ps, self.requirements_)
+
+ self.manager().targets().log(
+ "Common properties are '%s'" % str (rproperties))
+
+ if rproperties.get("<build>") != ["no"]:
+
+ result = GenerateResult ()
+
+ properties = rproperties.non_dependency ()
+
+ (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties)
+ properties += p
+ assert all(isinstance(p, property.Property) for p in properties)
+ usage_requirements = u
+
+ (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties)
+ usage_requirements += u
+
+ self.manager_.targets().log(
+ "Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements))
+
+ # FIXME:
+
+ rproperties = property_set.create(properties + usage_requirements)
+ usage_requirements = property_set.create (usage_requirements)
+
+ self.manager_.targets().log(
+ "Build properties: '%s'" % str(rproperties))
+
+ source_targets += rproperties.get('<source>')
+
+ # We might get duplicate sources, for example if
+ # we link to two libraries which have the same <library> in
+ # usage requirements.
+ # Use a stable sort, since for some targets the order is
+ # important. E.g. RUN_PY targets need the Python source to come
+ # first.
+ source_targets = unique(source_targets, stable=True)
+
+ # FIXME: figure out why this call messes up source_targets in place
+ result = self.construct (self.name_, source_targets[:], rproperties)
+
+ if result:
+ assert len(result) == 2
+ gur = result [0]
+ result = result [1]
+
+ if self.always_:
+ for t in result:
+ t.always()
+
+ s = self.create_subvariant (
+ result,
+ self.manager().virtual_targets().recent_targets(), ps,
+ source_targets, rproperties, usage_requirements)
+ self.manager().virtual_targets().clear_recent_targets()
+
+ ur = self.compute_usage_requirements (s)
+ ur = ur.add (gur)
+ s.set_usage_requirements (ur)
+
+ self.manager_.targets().log (
+ "Usage requirements from '%s' are '%s'" %
+ (self.name(), str(rproperties)))
+
+ self.generated_[ps] = GenerateResult (ur, result)
+ else:
+ self.generated_[ps] = GenerateResult (property_set.empty(), [])
+ else:
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+
+ # If this target fails to build, add <build>no to properties
+ # to cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it's not clear if that's a good idea anyway. The alias
+ # target, for example, should not fail to build if a dependency
+ # fails.
+ self.generated_[ps] = GenerateResult(
+ property_set.create(["<build>no"]), [])
+ else:
+ self.manager().targets().log ("Already built")
+
+ self.manager().targets().pop_target()
+ self.manager().targets().decrease_indent()
+
+ return self.generated_[ps]
+
+ def compute_usage_requirements (self, subvariant):
+ """ Given the set of generated targets, and refined build
+ properties, determines and sets appropriate usage requirements
+ on those targets.
+ """
+ assert isinstance(subvariant, virtual_target.Subvariant)
+ rproperties = subvariant.build_properties ()
+ xusage_requirements =self.evaluate_requirements(
+ self.usage_requirements_, rproperties, "added")
+
+ # We generate all dependency properties and add them,
+ # as well as their usage requirements, to result.
+ (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
+ extra = r1 + r2
+
+ result = property_set.create (xusage_requirements.non_dependency () + extra)
+
+ # Propagate usage requirements we've got from sources, except
+ # for the <pch-header> and <pch-file> features.
+ #
+ # That feature specifies which pch file to use, and should apply
+ # only to direct dependents. Consider:
+ #
+ # pch pch1 : ...
+ # lib lib1 : ..... pch1 ;
+ # pch pch2 :
+ # lib lib2 : pch2 lib1 ;
+ #
+ # Here, lib2 should not get <pch-header> property from pch1.
+ #
+ # Essentially, when those two features are in usage requirements,
+ # they are propagated only to direct dependents. We might need
+ # a more general mechanism, but for now, only those two
+ # features are special.
+ properties = []
+ for p in subvariant.sources_usage_requirements().all():
+ if p.feature.name not in ('pch-header', 'pch-file'):
+ properties.append(p)
+ if 'shared' in rproperties.get('link'):
+ new_properties = []
+ for p in properties:
+ if p.feature.name != 'library':
+ new_properties.append(p)
+ properties = new_properties
+
+ result = result.add_raw(properties)
+ return result
+
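+    # A minimal sketch of the source-usage-requirement filtering described in
+    # the comment above (hypothetical names; properties modelled with a plain
+    # string 'feature' attribute): <pch-header>/<pch-file> stop at direct
+    # dependents, and <library> is dropped when linking a shared library.
+    #
+    #   def filter_propagated(source_usage, shared_link):
+    #       kept = [p for p in source_usage
+    #               if p.feature not in ('pch-header', 'pch-file')]
+    #       if shared_link:
+    #           kept = [p for p in kept if p.feature != 'library']
+    #       return kept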
+ def create_subvariant (self, root_targets, all_targets,
+ build_request, sources,
+ rproperties, usage_requirements):
+ """Creates a new Subvariant instance for the given targets.
+ - 'root_targets': the virtual targets that will be returned to dependents
+ - 'all_targets': all virtual targets created while building this
+ main target
+ - 'build_request': the property_set instance with the requested
+ build properties"""
+ assert is_iterable_typed(root_targets, virtual_target.VirtualTarget)
+ assert is_iterable_typed(all_targets, virtual_target.VirtualTarget)
+ assert isinstance(build_request, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(rproperties, property_set.PropertySet)
+ assert isinstance(usage_requirements, property_set.PropertySet)
+
+ for e in root_targets:
+ e.root (True)
+
+ s = Subvariant (self, build_request, sources,
+ rproperties, usage_requirements, all_targets)
+
+ for v in all_targets:
+ if not v.creating_subvariant():
+ v.creating_subvariant(s)
+
+ return s
+
+ def construct (self, name, source_targets, properties):
+ """ Constructs the virtual targets for this abstract target and
+ the dependency graph. Returns a tuple consisting of the properties and the list of virtual targets.
+ Should be overridden in derived classes.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+
+class TypedTarget (BasicTarget):
+ import generators
+
+ def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
+ assert isinstance(type, basestring)
+ BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
+ self.type_ = type
+
+ def __jam_repr__(self):
+ return b2.util.value_to_jam(self)
+
+ def type (self):
+ return self.type_
+
+ def construct (self, name, source_targets, prop_set):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ r = generators.construct (self.project_, os.path.splitext(name)[0],
+ self.type_,
+ prop_set.add_raw(['<main-target-type>' + self.type_]),
+ source_targets, True)
+
+ if not r:
+ print "warning: Unable to construct '%s'" % self.full_name ()
+
+ # Are there any top-level generators for this type/property set?
+ if not generators.find_viable_generators (self.type_, prop_set):
+ print "error: no generators were found for type '" + self.type_ + "'"
+ print "error: and the requested properties"
+ print "error: make sure you've configured the needed tools"
+ print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
+
+ print "To debug this problem, try the --debug-generators option."
+ sys.exit(1)
+
+ return r
+
+def apply_default_build(property_set_, default_build):
+ # 1. First, see what properties from default_build
+ # are already present in property_set.
+ assert isinstance(property_set_, property_set.PropertySet)
+ assert isinstance(default_build, property_set.PropertySet)
+
+ defaults_to_apply = []
+ for d in default_build.all():
+ if not property_set_.get(d.feature):
+ defaults_to_apply.append(d)
+
+ # 2. If there's any defaults to be applied, form the new
+ # build request. Pass it through 'expand_no_defaults', since
+ # default_build might contain "release debug", which will
+ # result in two property_sets.
+ result = []
+ if defaults_to_apply:
+
+ # We have to compress subproperties here to prevent
+ # property lists like:
+ #
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ #
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to cross-product property combination. That may
+ # be an indication that
+ # build_request.expand-no-defaults is the wrong rule
+ # to use here.
+ properties = build_request.expand_no_defaults(
+ [property_set.create([p]) for p in
+ feature.compress_subproperties(property_set_.all()) + defaults_to_apply]
+ )
+
+ if properties:
+ for p in properties:
+ result.append(property_set.create(feature.expand(p.all())))
+ else:
+ result = [property_set.empty()]
+
+ else:
+ result.append (property_set_)
+
+ return result
+
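+# A simplified, standalone sketch of the default-build expansion above
+# (hypothetical representation, not the b2 API: a build request is a dict of
+# feature -> value and the default build maps each feature to one or more
+# alternative values). Only features absent from the request are filled in,
+# and multiple default values fan out into multiple build requests:
+#
+#   from itertools import product
+#
+#   def apply_defaults(request, default_build):
+#       missing = {f: vals for f, vals in default_build.items()
+#                  if f not in request}
+#       if not missing:
+#           return [dict(request)]
+#       keys = sorted(missing)
+#       return [dict(request, **dict(zip(keys, combo)))
+#               for combo in product(*(missing[k] for k in keys))]
+#
+#   # apply_defaults({'toolset': 'gcc'}, {'variant': ['debug', 'release']})
+#   #   -> [{'toolset': 'gcc', 'variant': 'debug'},
+#   #       {'toolset': 'gcc', 'variant': 'release'}]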
+
+def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
+ assert isinstance(name, basestring)
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ TypedTarget(name, project, type,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+
+def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ klass(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+def metatarget_function_for_class(class_):
+
+ @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"]))
+ def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ class_(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+ return create_metatarget
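+# A minimal sketch of the factory pattern used by metatarget_function_for_class
+# (hypothetical names, no bjam integration): the factory captures the target
+# class in a closure and returns a declaration function with the conventional
+# (name, sources, requirements, ...) signature.
+#
+#   def declaration_function_for(target_class, registry):
+#       def declare(name, sources, requirements=(), default_build=(),
+#                   usage_requirements=()):
+#           return registry.register(
+#               target_class(name, list(sources), list(requirements),
+#                            list(default_build), list(usage_requirements)))
+#       return declare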
diff --git a/src/boost/tools/build/src/build/toolset.jam b/src/boost/tools/build/src/build/toolset.jam
new file mode 100644
index 000000000..d103f6b70
--- /dev/null
+++ b/src/boost/tools/build/src/build/toolset.jam
@@ -0,0 +1,703 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for toolset definition.
+
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import path ;
+import property ;
+import regex ;
+import sequence ;
+import set ;
+import property-set ;
+import order ;
+import "class" : new ;
+import utility ;
+
+
+.flag-no = 1 ;
+
+.ignore-requirements = ;
+
+# This is used only for testing, to make sure we do not get random extra
+# elements in paths.
+if --ignore-toolset-requirements in [ modules.peek : ARGV ]
+{
+ .ignore-requirements = 1 ;
+}
+
+
+# Initializes an additional toolset-like module. First loads the
+# 'toolset-module' and then calls its 'init' rule with the trailing arguments.
+#
+rule using ( toolset-module : * )
+{
+ import $(toolset-module) ;
+ $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
+ ;
+}
+
+
+# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
+# converted to '<toolset>gcc/<toolset-version>3.2'.
+#
+local rule normalize-condition ( property-sets * )
+{
+ local result ;
+ for local p in $(property-sets)
+ {
+ local split = [ feature.split $(p) ] ;
+ local expanded = [ feature.expand-subfeatures [ feature.split $(p) ] ] ;
+ result += $(expanded:J=/) ;
+ }
+ return $(result) ;
+}
+
+
+# Specifies if the 'flags' rule should check that the invoking module is the
+# same as the module we are setting the flag for. 'v' can be either 'checked' or
+# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore
+# the setting that was in effect before calling this rule.
+#
+rule push-checking-for-flags-module ( v )
+{
+ .flags-module-checking = $(v) $(.flags-module-checking) ;
+}
+
+rule pop-checking-for-flags-module ( )
+{
+ .flags-module-checking = $(.flags-module-checking[2-]) ;
+}
+
+
+# Specifies features that are referenced by the action rule.
+# This is necessary in order to detect that these features
+# are relevant.
+#
+rule uses-features ( rule-or-module : features * : unchecked ? )
+{
+ local caller = [ CALLER_MODULE ] ;
+ if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
+ && [ MATCH "(Jamfile<.*)" : $(caller) ]
+ {
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+ # Jamfile module (this will be considered as rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule-or-module = $(caller).$(rule-or-module) ;
+ }
+ else
+ {
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ if $(unchecked) != unchecked
+ && $(.flags-module-checking[1]) != unchecked
+ && $(module_) != $(caller)
+ {
+ errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
+ }
+ }
+ .uses-features.$(rule-or-module) += $(features) ;
+}
+
+# Specifies the flags (variables) that must be set on targets under certain
+# conditions, described by arguments.
+#
+rule flags (
+ rule-or-module # If contains a dot, should be a rule name. The flags will
+ # be applied when that rule is used to set up build
+ # actions.
+ #
+ # If does not contain dot, should be a module name. The
+ # flag will be applied for all rules in that module. If
+ # module for rule is different from the calling module, an
+ # error is issued.
+
+ variable-name # Variable that should be set on target.
+ condition * : # A condition when this flag should be applied. Should be a
+ # set of property sets. If one of those property sets is
+ # contained in the build properties, the flag will be used.
+ # Implied values are not allowed: "<toolset>gcc" should be
+ # used, not just "gcc". Subfeatures, like in
+ # "<toolset>gcc-3.2" are allowed. If left empty, the flag
+ # will be used unconditionally.
+ #
+ # Property sets may use value-less properties ('<a>' vs.
+ # '<a>value') to match absent properties. This allows
+ # separately matching:
+ #
+ # <architecture>/<address-model>64
+ # <architecture>ia64/<address-model>
+ #
+ # Where both features are optional. Without this syntax
+ # we would be forced to define "default" values.
+
+ values * : # The value to add to variable. If <feature> is specified,
+ # then the value of 'feature' will be added.
+ unchecked ? # If value 'unchecked' is passed, will not test that flags
+ # are set for the calling module.
+ : hack-hack ? # For
+ # flags rule OPTIONS <cxx-abi> : -model ansi
+ # Treat <cxx-abi> as condition
+ # FIXME: ugly hack.
+)
+{
+ local caller = [ CALLER_MODULE ] ;
+ if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
+ && [ MATCH "(Jamfile<.*)" : $(caller) ]
+ {
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+ # Jamfile module (this will be considered as rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule-or-module = $(caller).$(rule-or-module) ;
+ }
+ else
+ {
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ if $(unchecked) != unchecked
+ && $(.flags-module-checking[1]) != unchecked
+ && $(module_) != $(caller)
+ {
+ errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
+ }
+ }
+
+ if $(condition) && ! $(condition:G=) && ! $(hack-hack)
+ {
+ # We have condition in the form '<feature>', that is, without value.
+ # That is an older syntax:
+ # flags gcc.link RPATH <dll-path> ;
+ # for compatibility, convert it to
+ # flags gcc.link RPATH : <dll-path> ;
+ values = $(condition) ;
+ condition = ;
+ }
+
+ if $(condition)
+ {
+ property.validate-property-sets $(condition) ;
+ condition = [ normalize-condition $(condition) ] ;
+ }
+
+ add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
+}
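+
+# Typical usage from a toolset module (illustrative values):
+#
+#   toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
+#
+# This appends "-O3" to the OPTIONS variable on targets set up by the
+# gcc.compile actions whenever <optimization>speed is in the build property
+# set.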
+
+
+# Adds a new flag setting with the specified values. Does no checking.
+#
+local rule add-flag ( rule-or-module : variable-name : condition * : values * )
+{
+ .$(rule-or-module).flags += $(.flag-no) ;
+
+ # Store all flags for a module.
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ .module-flags.$(module_) += $(.flag-no) ;
+ # Store flag-no -> rule-or-module mapping.
+ .rule-or-module.$(.flag-no) = $(rule-or-module) ;
+
+ .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
+ .$(rule-or-module).values.$(.flag-no) += $(values) ;
+ .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
+
+ .flag-no = [ numbers.increment $(.flag-no) ] ;
+}
+
+
+# Returns the first element of 'property-sets' which is a subset of
+# 'properties' or an empty list if no such element exists.
+#
+rule find-property-subset ( property-sets * : properties * )
+{
+ # Cut property values off.
+ local prop-keys = $(properties:G) ;
+
+ local result ;
+ for local s in $(property-sets)
+ {
+ if ! $(result)
+ {
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+
+ local set = [ feature.split $(s) ] ;
+
+ # Find the set of features that
+ # - have no property specified in required property set
+ # - are omitted in the build property set.
+ local default-props ;
+ for local i in $(set)
+ {
+ # If $(i) is a value-less property it should match default value
+ # of an optional property. See the first line in the example
+ # below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
+ {
+ default-props += $(i) ;
+ }
+ }
+
+ if $(set) in $(properties) $(default-props)
+ {
+ result = $(s) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a value to be added to some flag for some target based on the flag's
+# value definition and the given target's property set.
+#
+rule handle-flag-value ( value * : properties * )
+{
+ local result ;
+ if $(value:G)
+ {
+ local matches = [ property.select $(value) : $(properties) ] ;
+ local order ;
+ for local p in $(matches)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ if dependency in $(att)
+ {
+ # The value of a dependency feature is a target and needs to be
+ # actualized.
+ result += [ $(p:G=).actualize ] ;
+ }
+ else if path in $(att) || free in $(att)
+ {
+ local values ;
+ # Treat features with && in the value specially -- each
+ # &&-separated element is considered a separate value. This is
+ # needed to handle searched libraries or include paths, which
+ # may need to be in a specific order.
+ if ! [ MATCH (&&) : $(p:G=) ]
+ {
+ values = $(p:G=) ;
+ }
+ else
+ {
+ values = [ regex.split $(p:G=) "&&" ] ;
+ }
+ if path in $(att)
+ {
+ values = [ sequence.transform path.native : $(values) ] ;
+ }
+ result += $(values) ;
+ if $(values[2])
+ {
+ if ! $(order)
+ {
+ order = [ new order ] ;
+ }
+ local prev ;
+ for local v in $(values)
+ {
+ if $(prev)
+ {
+ $(order).add-pair $(prev) $(v) ;
+ }
+ prev = $(v) ;
+ }
+ }
+ }
+ else
+ {
+ result += $(p:G=) ;
+ }
+ }
+ if $(order)
+ {
+ result = [ $(order).order [ sequence.unique $(result) : stable ] ] ;
+ DELETE_MODULE $(order) ;
+ }
+ }
+ else
+ {
+ result += $(value) ;
+ }
+ return $(result) ;
+}
+
+
+# Given a rule name and a property set, returns a list of interleaved variable
+# names and values which must be set on targets for that rule/property-set
+# combination.
+#
+rule set-target-variables-aux ( rule-or-module : property-set )
+{
+ local result ;
+ properties = [ $(property-set).raw ] ;
+ for local f in $(.$(rule-or-module).flags)
+ {
+ local variable = $(.$(rule-or-module).variable.$(f)) ;
+ local condition = $(.$(rule-or-module).condition.$(f)) ;
+ local values = $(.$(rule-or-module).values.$(f)) ;
+
+ if ! $(condition) ||
+ [ find-property-subset $(condition) : $(properties) ]
+ {
+ local processed ;
+ for local v in $(values)
+ {
+ # The value might be <feature-name> so needs special treatment.
+ processed += [ handle-flag-value $(v) : $(properties) ] ;
+ }
+ for local r in $(processed)
+ {
+ result += $(variable) $(r) ;
+ }
+ }
+ }
+
+ # Strip away last dot separated part and recurse.
+ local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
+ }
+ return $(result) ;
+}
+
+rule relevant-features ( rule-or-module )
+{
+ local result ;
+ if ! $(.relevant-features.$(rule-or-module))
+ {
+ for local f in $(.$(rule-or-module).flags)
+ {
+ local condition = $(.$(rule-or-module).condition.$(f)) ;
+ local values = $(.$(rule-or-module).values.$(f)) ;
+
+ for local c in $(condition)
+ {
+ for local p in [ feature.split $(c) ]
+ {
+ if $(p:G)
+ {
+ result += $(p:G) ;
+ }
+ else
+ {
+ local temp = [ feature.expand-subfeatures $(p) ] ;
+ result += $(temp:G) ;
+ }
+ }
+ }
+
+ for local v in $(values)
+ {
+ if $(v:G)
+ {
+ result += $(v:G) ;
+ }
+ }
+ }
+
+ # Strip away last dot separated part and recurse.
+ local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ relevant-features $(next[1]) ] ;
+ }
+ result = [ sequence.unique $(result) ] ;
+ if $(result[1]) = ""
+ {
+ result = $(result) ;
+ }
+ .relevant-features.$(rule-or-module) = $(result) ;
+ return $(result) ;
+ }
+ else
+ {
+ return $(.relevant-features.$(rule-or-module)) ;
+ }
+}
+
+# Returns a list of all the features which were
+# passed to uses-features.
+local rule used-features ( rule-or-module )
+{
+ if ! $(.used-features.$(rule-or-module))
+ {
+ local result = $(.uses-features.$(rule-or-module)) ;
+
+ # Strip away last dot separated part and recurse.
+ local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ used-features $(next[1]) ] ;
+ }
+ result = [ sequence.unique $(result) ] ;
+ if $(result[1]) = ""
+ {
+ result = $(result) ;
+ }
+ .used-features.$(rule-or-module) = $(result) ;
+ return $(result) ;
+ }
+ else
+ {
+ return $(.used-features.$(rule-or-module)) ;
+ }
+}
+
+rule filter-property-set ( rule-or-module : property-set )
+{
+ local key = .filtered.property-set.$(rule-or-module).$(property-set) ;
+ if ! $($(key))
+ {
+ local relevant = [ relevant-features $(rule-or-module) ] ;
+ local result ;
+ for local p in [ $(property-set).raw ]
+ {
+ if $(p:G) in $(relevant)
+ {
+ result += $(p) ;
+ }
+ }
+ $(key) = [ property-set.create $(result) ] ;
+ }
+ return $($(key)) ;
+}
+
+rule set-target-variables ( rule-or-module targets + : property-set )
+{
+ property-set = [ filter-property-set $(rule-or-module) : $(property-set) ] ;
+ local key = .stv.$(rule-or-module).$(property-set) ;
+ local settings = $($(key)) ;
+ if ! $(settings)
+ {
+ settings = [ set-target-variables-aux $(rule-or-module) :
+ $(property-set) ] ;
+
+ if ! $(settings)
+ {
+ settings = none ;
+ }
+ $(key) = $(settings) ;
+ }
+
+ if $(settings) != none
+ {
+ local var-name = ;
+ for local name-or-value in $(settings)
+ {
+ if $(var-name)
+ {
+ $(var-name) on $(targets) += $(name-or-value) ;
+ var-name = ;
+ }
+ else
+ {
+ var-name = $(name-or-value) ;
+ }
+ }
+ }
+}
+
+
+# Returns a property-set indicating which features are relevant
+# for the given rule.
+#
+rule relevant ( rule-name )
+{
+ if ! $(.relevant-features-ps.$(rule-name))
+ {
+ local features = [ sequence.transform utility.ungrist :
+ [ relevant-features $(rule-name) ]
+ [ used-features $(rule-name) ] ] ;
+ .relevant-features-ps.$(rule-name) =
+ [ property-set.create <relevant>$(features) ] ;
+ }
+ return $(.relevant-features-ps.$(rule-name)) ;
+}
+
+
+# Make toolset 'toolset', defined in a module of the same name, inherit from
+# 'base'.
+# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
+# Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
+# property in requires is adjusted too.
+# 3. All flags are inherited.
+# 4. All rules are imported.
+#
+rule inherit ( toolset : base )
+{
+ import $(base) ;
+ inherit-generators $(toolset) : $(base) ;
+ inherit-flags $(toolset) : $(base) ;
+ inherit-rules $(toolset) : $(base) ;
+}
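+
+# Illustrative use (hypothetical toolset name): a derived toolset module can
+# pull in everything from its base with
+#
+#   toolset.inherit my-gcc : gcc ;
+#
+# after which the generators, flags and rules defined for 'gcc' are also
+# available under the 'my-gcc' name.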
+
+
+rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
+{
+ properties ?= <toolset>$(toolset) ;
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ for local g in $(base-generators)
+ {
+ local id = [ $(g).id ] ;
+
+ if ! $(id) in $(generators-to-ignore)
+ {
+ # Some generator names have multiple periods in their name, so
+ # $(id:B=$(toolset)) does not generate the right new-id name. E.g.
+ # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
+ # not what we want. Manually parse the base and suffix. If there is
+ # a better way to do this, I would love to see it. See also the
+ # register() rule in the generators module.
+ local base = $(id) ;
+ local suffix = "" ;
+ while $(base:S)
+ {
+ suffix = $(base:S)$(suffix) ;
+ base = $(base:B) ;
+ }
+ local new-id = $(toolset)$(suffix) ;
+
+ generators.register [ $(g).clone $(new-id) : $(properties) ] ;
+ }
+ }
+}
+
+
+# Brings all flag definitions from the 'base' toolset into the 'toolset'
+# toolset. Flag definitions whose conditions make use of properties in
+# 'prohibited-properties' are ignored. Note the difference between a property
+# and a feature: <debug-symbols>on and <debug-symbols>off are distinct
+# properties of the same feature, so blocking one of them does not block the
+# other.
+#
+# The flag conditions are not altered at all, so if a condition includes a name,
+# or version of a base toolset, it will not ever match the inheriting toolset.
+# When such flag settings must be inherited, define a rule in base toolset
+# module and call it as needed.
+#
+rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
+{
+ for local f in $(.module-flags.$(base))
+ {
+ local rule-or-module = $(.rule-or-module.$(f)) ;
+ if ( [ set.difference
+ $(.$(rule-or-module).condition.$(f)) :
+ $(prohibited-properties) ]
+ || ! $(.$(rule-or-module).condition.$(f))
+ ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
+ {
+ local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
+ local new-rule-or-module ;
+ if $(rule_)
+ {
+ new-rule-or-module = $(toolset).$(rule_) ;
+ }
+ else
+ {
+ new-rule-or-module = $(toolset) ;
+ }
+
+ add-flag
+ $(new-rule-or-module)
+ : $(.$(rule-or-module).variable.$(f))
+ : $(.$(rule-or-module).condition.$(f))
+ : $(.$(rule-or-module).values.$(f)) ;
+ }
+ }
+}
+
+
+rule inherit-rules ( toolset : base : localize ? )
+{
+ # It appears that "action" creates a local rule.
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ local rules ;
+ for local g in $(base-generators)
+ {
+ rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
+ }
+ rules = [ sequence.unique $(rules) ] ;
+ IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
+ IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
+}
+
+.requirements = [ property-set.empty ] ;
+
+# Return the list of global 'toolset requirements'. Those requirements will be
+# automatically added to the requirements of any main target.
+#
+rule requirements ( )
+{
+ return $(.requirements) ;
+}
+
+
+# Adds elements to the list of global 'toolset requirements'. The requirements
+# will be automatically added to the requirements for all main targets, as if
+# they were specified literally. For best results, all requirements added should
+# be conditional or indirect conditional.
+#
+rule add-requirements ( requirements * )
+{
+ if ! $(.ignore-requirements)
+ {
+ requirements = [ property.translate-indirect $(requirements) : [ CALLER_MODULE ] ] ;
+ requirements = [ property.expand-subfeatures-in-conditions $(requirements) ] ;
+ requirements = [ property.make $(requirements) ] ;
+ .requirements = [ $(.requirements).add-raw $(requirements) ] ;
+ }
+}
+
+# Returns the global toolset defaults.
+#
+.defaults = [ property-set.empty ] ;
+
+rule defaults ( )
+{
+ return $(.defaults) ;
+}
+
+# Add elements to the list of global toolset defaults. These properties
+# should be conditional and will override the default value of the feature.
+# Do not use this for non-conditionals. Use feature.set-default instead.
+#
+rule add-defaults ( properties * )
+{
+ if ! $(.ignore-requirements)
+ {
+ properties = [ property.translate-indirect $(properties) : [ CALLER_MODULE ] ] ;
+ properties = [ property.expand-subfeatures-in-conditions $(properties) ] ;
+ properties = [ property.make $(properties) ] ;
+ .defaults = [ $(.defaults).add-raw $(properties) ] ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
+ assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
+ assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
+
+ local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
+ assert.result <a>/<b> : find-property-subset $(p-set) : ;
+ assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
+ assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
+ assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
+}
diff --git a/src/boost/tools/build/src/build/toolset.py b/src/boost/tools/build/src/build/toolset.py
new file mode 100644
index 000000000..323e7fba3
--- /dev/null
+++ b/src/boost/tools/build/src/build/toolset.py
@@ -0,0 +1,417 @@
+# Status: being ported by Vladimir Prus
+# Base revision: 40958
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+""" Support for toolset definition.
+"""
+import os
+import re
+import sys
+
+import feature, property, generators, property_set
+import b2.util.set
+import bjam
+
+from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable
+from b2.util.utility import *
+from b2.util import bjam_signature, sequence
+from b2.manager import get_manager
+
+__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
+__re_two_ampersands = re.compile ('(&&)')
+__re_first_segment = re.compile ('([^.]*).*')
+__re_first_group = re.compile (r'[^.]*\.(.*)')
+_ignore_toolset_requirements = '--ignore-toolset-requirements' in sys.argv
+
+# A single toolset flag. Specifies that when certain properties are present
+# in the build property set, certain values should be appended to some
+# variable.
+#
+# A flag applies to a specific action in a specific module. The list of all
+# flags for a module is stored, and each flag additionally records the name
+# of the rule it applies to.
+class Flag:
+
+ def __init__(self, variable_name, values, condition, rule = None):
+ assert isinstance(variable_name, basestring)
+ assert is_iterable(values) and all(
+ isinstance(v, (basestring, type(None))) for v in values)
+ assert is_iterable_typed(condition, property_set.PropertySet)
+ assert isinstance(rule, (basestring, type(None)))
+ self.variable_name = variable_name
+ self.values = values
+ self.condition = condition
+ self.rule = rule
+
+ def __str__(self):
+ return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\
+ ", " + str(self.condition) + ", " + str(self.rule) + ")")
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __module_flags, __flags, __stv
+
+ # Mapping from module name to a list of all flags that apply
+ # to either that module directly, or to any rule in that module.
+ # Each element of the list is Flag instance.
+ # So, for module named xxx this might contain flags for 'xxx',
+ # for 'xxx.compile', for 'xxx.compile.c++', etc.
+ __module_flags = {}
+
+ # Mapping from specific rule or module name to a list of Flag instances
+ # that apply to that name.
+ # Say, it might contain flags for 'xxx.compile.c++'. If there are
+ # entries for module name 'xxx', they are flags for 'xxx' itself,
+ # not including any rules in that module.
+ __flags = {}
+
+ # A cache for variable settings. The key is generated from the rule name and the properties.
+ __stv = {}
+
+reset ()
+
+# FIXME: --ignore-toolset-requirements
+def using(toolset_module, *args):
+ if isinstance(toolset_module, (list, tuple)):
+ toolset_module = toolset_module[0]
+ loaded_toolset_module = get_manager().projects().load_module(toolset_module, [os.getcwd()])
+ loaded_toolset_module.init(*args)
+
+# FIXME push-checking-for-flags-module ....
+# FIXME: investigate existing uses of 'hack-hack' parameter
+# in jam code.
+
+@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
+ ["values", "*"]))
+def flags(rule_or_module, variable_name, condition, values = []):
+ """ Specifies the flags (variables) that must be set on targets under certain
+ conditions, described by arguments.
+ rule_or_module: If contains dot, should be a rule name.
+ The flags will be applied when that rule is
+ used to set up build actions.
+
+ If does not contain dot, should be a module name.
+ The flags will be applied for all rules in that
+ module.
+ If module for rule is different from the calling
+ module, an error is issued.
+
+ variable_name: Variable that should be set on target
+
+ condition: A condition when this flag should be applied.
+ Should be set of property sets. If one of
+ those property sets is contained in build
+ properties, the flag will be used.
+ Implied values are not allowed:
+ "<toolset>gcc" should be used, not just
+ "gcc". Subfeatures, like in "<toolset>gcc-3.2"
+ are allowed. If left empty, the flag will
+ always be used.
+
+ Property sets may use value-less properties
+ ('<a>' vs. '<a>value') to match absent
+ properties. This allows separately matching
+
+ <architecture>/<address-model>64
+ <architecture>ia64/<address-model>
+
+ Where both features are optional. Without this
+ syntax we'd be forced to define a "default" value.
+
+ values: The value to add to variable. If <feature>
+ is specified, then the value of 'feature'
+ will be added.
+ """
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(variable_name, basestring)
+ assert is_iterable_typed(condition, basestring)
+ assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values)
+ caller = bjam.caller()
+ if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+ # Jamfile module (this will be considered as rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule_or_module = qualify_jam_action(rule_or_module, caller)
+ else:
+ # FIXME: revive checking that we don't set flags for a different
+ # module unintentionally
+ pass
+
+ if condition and not replace_grist (condition, ''):
+ # We have condition in the form '<feature>', that is, without
+ # value. That is the older syntax:
+ #
+ # flags gcc.link RPATH <dll-path> ;
+ # for compatibility, convert it to
+ # flags gcc.link RPATH : <dll-path> ;
+ values = [ condition ]
+ condition = None
+
+ if condition:
+ transformed = []
+ for c in condition:
+ # FIXME: 'split' might be a too raw tool here.
+ pl = [property.create_from_string(s,False,True) for s in c.split('/')]
+ pl = feature.expand_subfeatures(pl);
+ transformed.append(property_set.create(pl))
+ condition = transformed
+
+ property.validate_property_sets(condition)
+
+ __add_flag (rule_or_module, variable_name, condition, values)
+
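+# For illustration (hypothetical values, mirroring the jam-level syntax): a
+# ported toolset module might declare
+#
+#   flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
+#
+# so that '-O3' is appended to the OPTIONS variable on targets built by the
+# gcc.compile action whenever <optimization>speed is in the property set.
+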
+def set_target_variables (manager, rule_or_module, targets, ps):
+ """Sets the value of build variables on the given targets, based on the
+ flags registered for 'rule_or_module' and the property set 'ps'."""
+ assert isinstance(rule_or_module, basestring)
+ assert is_iterable_typed(targets, basestring)
+ assert isinstance(ps, property_set.PropertySet)
+ settings = __set_target_variables_aux(manager, rule_or_module, ps)
+
+ if settings:
+ for s in settings:
+ for target in targets:
+ manager.engine ().set_target_variable (target, s [0], s[1], True)
+
+def find_satisfied_condition(conditions, ps):
+ """Returns the first condition in 'conditions' that is satisfied by the
+ property set 'ps', or None if no such condition exists."""
+ assert is_iterable_typed(conditions, property_set.PropertySet)
+ assert isinstance(ps, property_set.PropertySet)
+
+ for condition in conditions:
+
+ found_all = True
+ for i in condition.all():
+
+ if i.value:
+ found = i.value in ps.get(i.feature)
+ else:
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+ # If $(i) is a value-less property it should match default
+ # value of an optional property. See the first line in the
+ # example below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ found = not ps.get(i.feature)
+
+ found_all = found_all and found
+
+ if found_all:
+ return condition
+
+ return None
+
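+# Illustrative matching semantics (hypothetical features 'a' and 'b'): given
+# one condition built from the value-less property '<a>' and another built
+# from '<a>0/<b>1', and a build property set containing only '<b>1', this
+# function returns the first condition -- the value-less '<a>' matches the
+# absent optional feature -- while the second is rejected because '<a>0' is
+# not present.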
+
+def register (toolset):
+ """ Registers a new toolset.
+ """
+ assert isinstance(toolset, basestring)
+ feature.extend('toolset', [toolset])
+
+def inherit_generators (toolset, properties, base, generators_to_ignore = []):
+ assert isinstance(toolset, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(base, basestring)
+ assert is_iterable_typed(generators_to_ignore, basestring)
+ if not properties:
+ properties = [replace_grist (toolset, '<toolset>')]
+
+ base_generators = generators.generators_for_toolset(base)
+
+ for g in base_generators:
+ id = g.id()
+
+ if not id in generators_to_ignore:
+ # Some generator names have multiple periods in their name, so
+ # $(id:B=$(toolset)) doesn't generate the right new_id name.
+ # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
+ # which is not what we want. Manually parse the base and suffix
+ # (if there's a better way to do this, I'd love to see it.)
+ # See also register in module generators.
+ (base, suffix) = split_action_id(id)
+
+ new_id = toolset + '.' + suffix
+
+ generators.register(g.clone(new_id, properties))
+
+def inherit_flags(toolset, base, prohibited_properties = []):
+ """Brings all flag definitions from the 'base' toolset into the 'toolset'
+ toolset. Flag definitions whose conditions make use of properties in
+ 'prohibited-properties' are ignored. Note the difference between a property
+ and a feature: <debug-symbols>on and <debug-symbols>off are distinct
+ properties of the same feature, so blocking one of them does not block the
+ other.
+
+ The flag conditions are not altered at all, so if a condition includes a name,
+ or version of a base toolset, it won't ever match the inheriting toolset. When
+ such flag settings must be inherited, define a rule in base toolset module and
+ call it as needed."""
+ assert isinstance(toolset, basestring)
+ assert isinstance(base, basestring)
+ assert is_iterable_typed(prohibited_properties, basestring)
+ for f in __module_flags.get(base, []):
+
+ if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
+ match = __re_first_group.match(f.rule)
+ rule_ = None
+ if match:
+ rule_ = match.group(1)
+
+ new_rule_or_module = ''
+
+ if rule_:
+ new_rule_or_module = toolset + '.' + rule_
+ else:
+ new_rule_or_module = toolset
+
+ __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
+
+
+def inherit_rules(toolset, base):
+ engine = get_manager().engine()
+ new_actions = {}
+ for action_name, action in engine.actions.iteritems():
+ module, id = split_action_id(action_name)
+ if module == base:
+ new_action_name = toolset + '.' + id
+ # make sure not to override any existing actions
+ # that may have been declared already
+ if new_action_name not in engine.actions:
+ new_actions[new_action_name] = action
+
+ engine.actions.update(new_actions)
+
+######################################################################################
+# Private functions
+
+@cached
+def __set_target_variables_aux (manager, rule_or_module, ps):
+ """ Given a rule name and a property set, returns a list of tuples of
+ variable names and values, which must be set on targets for that
+ rule/properties combination.
+ """
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(ps, property_set.PropertySet)
+ result = []
+
+ for f in __flags.get(rule_or_module, []):
+
+ if not f.condition or find_satisfied_condition (f.condition, ps):
+ processed = []
+ for v in f.values:
+ # The value might be <feature-name> so needs special
+ # treatment.
+ processed += __handle_flag_value (manager, v, ps)
+
+ for r in processed:
+ result.append ((f.variable_name, r))
+
+ # strip away last dot separated part and recurse.
+ next = __re_split_last_segment.match(rule_or_module)
+
+ if next:
+ result.extend(__set_target_variables_aux(
+ manager, next.group(1), ps))
+
+ return result
+
+def __handle_flag_value (manager, value, ps):
+ assert isinstance(value, basestring)
+ assert isinstance(ps, property_set.PropertySet)
+ result = []
+
+ if get_grist (value):
+ f = feature.get(value)
+ values = ps.get(f)
+
+ for value in values:
+
+ if f.dependency:
+ # the value of a dependency feature is a target
+ # and must be actualized
+ result.append(value.actualize())
+
+ elif f.path or f.free:
+
+ # Treat features with && in the value
+ # specially -- each &&-separated element is considered
+ # separate value. This is needed to handle searched
+ # libraries, which must be in specific order.
+ if not __re_two_ampersands.search(value):
+ result.append(value)
+
+ else:
+ result.extend(value.split ('&&'))
+ else:
+ result.append (value)
+ else:
+ result.append (value)
+
+ return sequence.unique(result, stable=True)
+
+def __add_flag (rule_or_module, variable_name, condition, values):
+ """ Adds a new flag setting with the specified values.
+ Does no checking.
+ """
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(variable_name, basestring)
+ assert is_iterable_typed(condition, property_set.PropertySet)
+ assert is_iterable(values) and all(
+ isinstance(v, (basestring, type(None))) for v in values)
+ f = Flag(variable_name, values, condition, rule_or_module)
+
+ # Grab the name of the module
+ m = __re_first_segment.match (rule_or_module)
+ assert m
+ module = m.group(1)
+
+ __module_flags.setdefault(module, []).append(f)
+ __flags.setdefault(rule_or_module, []).append(f)
+
+__requirements = []
+
+def requirements():
+ """Return the list of global 'toolset requirements'.
+ Those requirements will be automatically added to the requirements of any main target."""
+ return __requirements
+
+def add_requirements(requirements):
+ """Adds elements to the list of global 'toolset requirements'. The requirements
+ will be automatically added to the requirements for all main targets, as if
+ they were specified literally. For best results, all requirements added should
+ be conditional or indirect conditional."""
+ assert is_iterable_typed(requirements, basestring)
+
+ if not _ignore_toolset_requirements:
+ __requirements.extend(requirements)
+
+
+# Make toolset 'toolset', defined in a module of the same name,
+# inherit from 'base'
+# 1. The 'init' rule from 'base' is imported into 'toolset' with full
+# name. Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. The ids are adjusted and
+# <toolset> property in requires is adjusted too
+# 3. All flags are inherited
+# 4. All rules are imported.
+def inherit(toolset, base):
+ assert isinstance(toolset, basestring)
+ assert isinstance(base, basestring)
+ get_manager().projects().load_module(base, ['.']);
+
+ inherit_generators(toolset, [], base)
+ inherit_flags(toolset, base)
+ inherit_rules(toolset, base)
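+
+# Illustrative use (hypothetical toolset name): a ported toolset module that
+# extends gcc could call
+#
+#   inherit('my_gcc_variant', 'gcc')
+#
+# to clone gcc's generators, flags and actions under the new toolset name.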
diff --git a/src/boost/tools/build/src/build/type.jam b/src/boost/tools/build/src/build/type.jam
new file mode 100644
index 000000000..419656df0
--- /dev/null
+++ b/src/boost/tools/build/src/build/type.jam
@@ -0,0 +1,404 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Deals with target type declaration and defines target class which supports
+# typed targets.
+
+import "class" : new ;
+import feature ;
+import generators : * ;
+import os ;
+import param ;
+import project ;
+import property ;
+import scanner ;
+
+# The following import would create a circular dependency:
+# project -> project-root -> builtin -> type -> targets -> project
+# import targets ;
+
+# The feature is optional so it would never get added implicitly. It is used
+# only for internal purposes and in all cases we want to use it explicitly.
+feature.feature target-type : : composite optional ;
+
+feature.feature main-target-type : : optional incidental ;
+feature.feature base-target-type : : composite optional free ;
+
+
+# Registers a target type, possible derived from a 'base-type'. Providing a list
+# of 'suffixes' here is a shortcut for separately calling the register-suffixes
+# rule with the given suffixes and the set-generated-target-suffix rule with the
+# first given suffix.
+#
+rule register ( type : suffixes * : base-type ? )
+{
+ # Type names cannot contain hyphens, because when used as feature-values
+ # they would be interpreted as composite features which need to be
+ # decomposed.
+ switch $(type)
+ {
+ case *-* :
+ import errors ;
+ errors.error "type name \"$(type)\" contains a hyphen" ;
+ }
+
+ if $(type) in $(.types)
+ {
+ import errors ;
+ errors.error "Type $(type) is already registered." ;
+ }
+
+ {
+ .types += $(type) ;
+ .base.$(type) = $(base-type) ;
+ .derived.$(base-type) += $(type) ;
+ .bases.$(type) = $(type) $(.bases.$(base-type)) ;
+
+ # Store suffixes for generated targets.
+ .suffixes.$(type) = [ new property-map ] ;
+
+ # Store prefixes for generated targets (e.g. "lib" for library).
+ .prefixes.$(type) = [ new property-map ] ;
+
+ if $(suffixes)-is-defined
+ {
+ # Specify mapping from suffixes to type.
+ register-suffixes $(suffixes) : $(type) ;
+ # By default generated targets of 'type' will use the first of
+ # 'suffixes'. This may be overridden.
+ set-generated-target-suffix $(type) : : $(suffixes[1]) ;
+ }
+
+ feature.extend target-type : $(type) ;
+ feature.extend main-target-type : $(type) ;
+ feature.extend base-target-type : $(type) ;
+
+ feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
+ feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
+
+ # We used to declare the main target rule only when a 'main' parameter
+ # has been specified. However, it is hard to decide that a type will
+ # *never* need a main target rule and so from time to time we needed to
+ # make yet another type 'main'. So now a main target rule is defined for
+ # each type.
+ main-rule-name = [ type-to-rule-name $(type) ] ;
+ .main-target-type.$(main-rule-name) = $(type) ;
+ IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
+
+ # Adding a new derived type affects generator selection so we need to
+ # make the generator selection module update any of its cached
+ # information related to a new derived type being defined.
+ generators.update-cached-information-with-a-new-type $(type) ;
+ }
+}
+
+
+# Given a type, returns the name of the main target rule which creates targets
+# of that type.
+#
+rule type-to-rule-name ( type )
+{
+ # Lowercase everything. Convert underscores to dashes.
+ import regex ;
+ local n = [ regex.split $(type:L) "_" ] ;
+ return $(n:J=-) ;
+}
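+
+# For example, a type named SHARED_LIB maps to the main target rule name
+# "shared-lib": the name is lowercased and underscores become dashes.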
+
+
+# Given a main target rule name, returns the type for which it creates targets.
+#
+rule type-from-rule-name ( rule-name )
+{
+ return $(.main-target-type.$(rule-name)) ;
+}
+
+
+# Specifies that files with a suffix from 'suffixes' should be recognized as
+# targets of type 'type'. Issues an error if a different type is already
+# specified for any of the suffixes.
+#
+rule register-suffixes ( suffixes + : type )
+{
+ for local s in $(suffixes)
+ {
+ if ! $(.type.$(s))
+ {
+ .type.$(s) = $(type) ;
+ }
+ else if $(.type.$(s)) != $(type)
+ {
+ import errors ;
+ errors.error Attempting to specify multiple types for suffix
+ \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
+ }
+ }
+}
+
+
+# Returns true iff type has been registered.
+#
+rule registered ( type )
+{
+ if $(type) in $(.types)
+ {
+ return true ;
+ }
+}
+
+
+# Issues an error if 'type' is unknown.
+#
+rule validate ( type )
+{
+ if ! [ registered $(type) ]
+ {
+ import errors ;
+ errors.error "Unknown target type $(type)" ;
+ }
+}
+
+
+# Sets a scanner class that will be used for this 'type'.
+#
+rule set-scanner ( type : scanner )
+{
+ validate $(type) ;
+ .scanner.$(type) = $(scanner) ;
+}
+
+
+# Returns a scanner instance appropriate to 'type' and 'properties'.
+#
+rule get-scanner ( type : property-set )
+{
+ if $(.scanner.$(type))
+ {
+ return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
+ }
+}
+
+
+# Returns a base type for the given type or nothing in case the given type is
+# not derived.
+#
+rule base ( type )
+{
+ return $(.base.$(type)) ;
+}
+
+
+# Returns the given type and all of its base types in order of their distance
+# from type.
+#
+rule all-bases ( type )
+{
+ return $(.bases.$(type)) ;
+}
+
+
+# Returns the given type and all of its derived types in order of their distance
+# from type.
+#
+rule all-derived ( type )
+{
+ local result = $(type) ;
+ for local d in $(.derived.$(type))
+ {
+ result += [ all-derived $(d) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
+# indirect base.
+#
+rule is-derived ( type base )
+{
+ if $(base) in $(.bases.$(type))
+ {
+ return true ;
+ }
+}
+
+# Returns true if 'type' is either derived from or is equal to 'base'.
+#
+# TODO: It might be that is-derived and is-subtype were meant to be different
+# rules - one returning true for type = base and one not, but as currently
+# implemented they are actually the same. Clean this up.
+#
+rule is-subtype ( type base )
+{
+ return [ is-derived $(type) $(base) ] ;
+}
+
+
+
+
+# Sets a file suffix to be used when generating a target of 'type' with the
+# specified properties. Can be called with no properties if no suffix has
+# already been specified for the 'type'. The 'suffix' parameter can be an empty
+# string ("") to indicate that no suffix should be used.
+#
+# Note that this does not cause files with 'suffix' to be automatically
+# recognized as being of 'type'. Two different types can use the same suffix for
+# their generated files but only one type can be auto-detected for a file with
+# that suffix. User should explicitly specify which one using the
+# register-suffixes rule.
+#
+rule set-generated-target-suffix ( type : properties * : suffix )
+{
+ set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
+
+# Change the suffix previously registered for this type/properties combination.
+# If suffix is not yet specified, sets it.
+#
+rule change-generated-target-suffix ( type : properties * : suffix )
+{
+ change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
+
+# Returns the suffix used when generating a file of 'type' with the given
+# properties.
+#
+rule generated-target-suffix ( type : property-set )
+{
+ return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
+}
+
+
+# Sets a target prefix that should be used when generating targets of 'type'
+# with the specified properties. Can be called with empty properties if no
+# prefix for 'type' has been specified yet.
+#
+# The 'prefix' parameter can be empty string ("") to indicate that no prefix
+# should be used.
+#
+# Usage example: library names use the "lib" prefix on unix.
+#
+rule set-generated-target-prefix ( type : properties * : prefix )
+{
+ set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+# Change the prefix previously registered for this type/properties combination.
+# If prefix is not yet specified, sets it.
+#
+rule change-generated-target-prefix ( type : properties * : prefix )
+{
+ change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+rule generated-target-prefix ( type : property-set )
+{
+ return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
+}
+
+
+# Common rules for prefix/suffix provisioning follow.
+
+local rule set-generated-target-ps ( ps : type : properties * : psval )
+{
+ $(.$(ps)es.$(type)).insert $(properties) : $(psval) ;
+}
+
+
+local rule change-generated-target-ps ( ps : type : properties * : psval )
+{
+ local prev = [ $(.$(ps)es.$(type)).find-replace $(properties) : $(psval) ] ;
+ if ! $(prev)
+ {
+ set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
+ }
+}
+
+
+# Returns either prefix or suffix (as indicated by 'ps') that should be used
+# when generating a target of 'type' with the specified properties. Parameter
+# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
+# 'type', returns prefix/suffix for base type, if any.
+#
+local rule generated-target-ps ( ps : type : property-set )
+{
+ local result ;
+ local found ;
+ while $(type) && ! $(found)
+ {
+ result = [ $(.$(ps)es.$(type)).find $(property-set) ] ;
+ # If the prefix/suffix is explicitly set to an empty string, we consider
+ # prefix/suffix to be found. If we were not to compare with "", there
+ # would be no way to specify an empty prefix/suffix.
+ if $(result)-is-defined
+ {
+ found = true ;
+ }
+ type = $(.base.$(type)) ;
+ }
+ if $(result) = ""
+ {
+ result = ;
+ }
+ return $(result) ;
+}
+
+
+# Returns file type given its name. If there are several dots in filename, tries
+# each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so" will
+# be tried.
+#
+rule type ( filename )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ filename = $(filename:L) ;
+ }
+ local type ;
+ while ! $(type) && $(filename:S)
+ {
+ local suffix = $(filename:S) ;
+ type = $(.type$(suffix)) ;
+ filename = $(filename:S=) ;
+ }
+ return $(type) ;
+}
+
+
+# Rule used to construct all main targets. Note that this rule gets imported
+# into the global namespace under different alias names and the exact target
+# type to construct is selected based on the alias used to actually invoke this
+# rule.
+#
+rule main-target-rule ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+ # First discover the required target type based on the exact alias used to
+ # invoke this rule.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = $(bt[4]) ;
+ local target-type = [ type-from-rule-name $(rulename) ] ;
+
+ # This is a circular module dependency and so must be imported here.
+ import targets ;
+
+ return [ targets.create-typed-target $(target-type) : [ project.current ] :
+ $(name) : $(sources) : $(requirements) : $(default-build) :
+ $(usage-requirements) ] ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ # TODO: Add tests for all the is-derived, is-base & related type relation
+ # checking rules.
+}
diff --git a/src/boost/tools/build/src/build/type.py b/src/boost/tools/build/src/build/type.py
new file mode 100644
index 000000000..9f6237d7d
--- /dev/null
+++ b/src/boost/tools/build/src/build/type.py
@@ -0,0 +1,381 @@
+# Status: ported.
+# Base revision: 45462.
+
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+
+
+import re
+import os
+import os.path
+from b2.util.utility import replace_grist, os_name
+from b2.exceptions import *
+from b2.build import feature, property, scanner
+from b2.util import bjam_signature, is_iterable_typed
+
+
+__re_hyphen = re.compile ('-')
+
+def __register_features ():
+ """ Register features needed by this module.
+ """
+ # The feature is optional so that it is never implicitly added.
+ # It's used only for internal purposes, and in all cases we
+ # want to explicitly use it.
+ feature.feature ('target-type', [], ['composite', 'optional'])
+ feature.feature ('main-target-type', [], ['optional', 'incidental'])
+ feature.feature ('base-target-type', [], ['composite', 'optional', 'free'])
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ Note that this must be called _after_ resetting the module 'feature'.
+ """
+ global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache
+
+ __register_features ()
+
+ # Stores suffixes for generated targets.
+ __prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()]
+
+ # Maps suffixes to types
+ __suffixes_to_types = {}
+
+ # A map with all the registered types, indexed by the type name
+ # Each entry is a dictionary with following values:
+ # 'base': the name of base type or None if type has no base
+ # 'derived': a list of names of type which derive from this one
+ # 'scanner': the scanner class registered for this type, if any
+ __types = {}
+
+ # Caches suffixes for targets with certain properties.
+ __target_suffixes_cache = {}
+
+reset ()
+
+@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"]))
+def register (type, suffixes = [], base_type = None):
+ """ Registers a target type, possibly derived from a 'base-type'.
+ If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'.
+ Also, the first element gives the suffix to be used when constructing an object of
+ 'type'.
+ type: a string
+ suffixes: None or a sequence of strings
+ base_type: None or a string
+ """
+ # Type names cannot contain hyphens, because when used as
+ # feature-values they will be interpreted as composite features
+ # which need to be decomposed.
+ if __re_hyphen.search (type):
+ raise BaseException ('type name "%s" contains a hyphen' % type)
+
+ # It is possible for a type to be registered with a base type that has not
+ # been registered yet. In the check for base_type below and the following
+ # calls to setdefault(), the key `type` will be added to __types. When the
+ # base type actually gets registered, it would fail after the simple check
+ # of "type in __types"; hence the check for "'base' in __types[type]".
+ if type in __types and 'base' in __types[type]:
+ raise BaseException ('Type "%s" is already registered.' % type)
+
+ entry = __types.setdefault(type, {})
+ entry['base'] = base_type
+ entry.setdefault('derived', [])
+ entry.setdefault('scanner', None)
+
+ if base_type:
+ __types.setdefault(base_type, {}).setdefault('derived', []).append(type)
+
+ if len (suffixes) > 0:
+ # Generated targets of 'type' will use the first of 'suffixes'
+ # (this may be overridden)
+ set_generated_target_suffix (type, [], suffixes [0])
+
+ # Specify mapping from suffixes to type
+ register_suffixes (suffixes, type)
+
+ feature.extend('target-type', [type])
+ feature.extend('main-target-type', [type])
+ feature.extend('base-target-type', [type])
+
+ if base_type:
+ feature.compose ('<target-type>' + type, [replace_grist (base_type, '<base-target-type>')])
+ feature.compose ('<base-target-type>' + type, ['<base-target-type>' + base_type])
+
+ import b2.build.generators as generators
+ # Adding a new derived type affects generator selection so we need to
+ # make the generator selection module update any of its cached
+ # information related to a new derived type being defined.
+ generators.update_cached_information_with_a_new_type(type)
+
+ # FIXME: resolving recursive dependency.
+ from b2.manager import get_manager
+ get_manager().projects().project_rules().add_rule_for_type(type)
+
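+# Illustrative use (hypothetical type and suffixes):
+#
+#   register('MARKDOWN', ['md', 'markdown'])
+#
+# maps the "md" and "markdown" suffixes to the MARKDOWN type, makes "md" the
+# suffix used for generated MARKDOWN targets, and exposes a main target rule
+# for the new type.
+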
+# FIXME: quick hack.
+def type_from_rule_name(rule_name):
+ assert isinstance(rule_name, basestring)
+ return rule_name.upper().replace("-", "_")
+
+
+def register_suffixes (suffixes, type):
+ """ Specifies that targets with a suffix from 'suffixes' have the type 'type'.
+ If a different type is already specified for any of the suffixes, issues an error.
+ """
+ assert is_iterable_typed(suffixes, basestring)
+ assert isinstance(type, basestring)
+ for s in suffixes:
+ if s in __suffixes_to_types:
+ old_type = __suffixes_to_types [s]
+ if old_type != type:
+ raise BaseException ('Attempting to specify type for suffix "%s"\nOld type: "%s", New type "%s"' % (s, old_type, type))
+ else:
+ __suffixes_to_types [s] = type
+
+def registered (type):
+ """ Returns true iff type has been registered.
+ """
+ assert isinstance(type, basestring)
+ return type in __types
+
+def validate (type):
+ """ Issues an error if 'type' is unknown.
+ """
+ assert isinstance(type, basestring)
+ if not registered (type):
+ raise BaseException ("Unknown target type '%s'" % type)
+
+def set_scanner (type, scanner):
+ """ Sets a scanner class that will be used for this 'type'.
+ """
+ if __debug__:
+ from .scanner import Scanner
+ assert isinstance(type, basestring)
+ assert issubclass(scanner, Scanner)
+ validate (type)
+ __types [type]['scanner'] = scanner
+
+def get_scanner (type, prop_set):
+ """ Returns a scanner instance appropriate to 'type' and 'property_set'.
+ """
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(prop_set, PropertySet)
+ if registered (type):
+ scanner_type = __types [type]['scanner']
+ if scanner_type:
+ return scanner.get (scanner_type, prop_set.raw ())
+ pass
+
+ return None
+
+def base(type):
+ """Returns a base type for the given type or nothing in case the given type is
+ not derived."""
+ assert isinstance(type, basestring)
+ return __types[type]['base']
+
+def all_bases (type):
+ """ Returns type and all of its bases, in the order of their distance from type.
+ """
+ assert isinstance(type, basestring)
+ result = []
+ while type:
+ result.append (type)
+ type = __types [type]['base']
+
+ return result
+
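+# For example (hypothetical registrations): if ZIPPED_DOC is registered with
+# base type DOC, then all_bases('ZIPPED_DOC') == ['ZIPPED_DOC', 'DOC'] and
+# all_derived('DOC') == ['DOC', 'ZIPPED_DOC'].
+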
+def all_derived (type):
+ """ Returns type and all classes that derive from it, in the order of their distance from type.
+ """
+ assert isinstance(type, basestring)
+ result = [type]
+ for d in __types [type]['derived']:
+ result.extend (all_derived (d))
+
+ return result
+
+def is_derived (type, base):
+ """ Returns true if 'type' is 'base' or has 'base' as its direct or indirect base.
+ """
+ assert isinstance(type, basestring)
+ assert isinstance(base, basestring)
+ # TODO: this isn't very efficient, especially for bases close to type
+ if base in all_bases (type):
+ return True
+ else:
+ return False
+
+def is_subtype (type, base):
+ """ Same as is_derived. Should be removed.
+ """
+ assert isinstance(type, basestring)
+ assert isinstance(base, basestring)
+ # TODO: remove this method
+ return is_derived (type, base)
+
+@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
+def set_generated_target_suffix (type, properties, suffix):
+ """ Sets a target suffix that should be used when generating target
+ of 'type' with the specified properties. Can be called with
+ empty properties if no suffix for 'type' was specified yet.
+ This does not automatically specify that files with 'suffix' have
+ 'type' --- two different types can use the same suffix for
+ generating, but only one type should be auto-detected for
+ a file with that suffix. User should explicitly specify which
+ one.
+
+ The 'suffix' parameter can be empty string ("") to indicate that
+ no suffix should be used.
+ """
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(suffix, basestring)
+ set_generated_target_ps(1, type, properties, suffix)
+
+
+
+def change_generated_target_suffix (type, properties, suffix):
+ """ Change the suffix previously registered for this type/properties
+ combination. If suffix is not yet specified, sets it.
+ """
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(suffix, basestring)
+ change_generated_target_ps(1, type, properties, suffix)
+
+def generated_target_suffix(type, properties):
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(properties, PropertySet)
+ return generated_target_ps(1, type, properties)
+
+
+@bjam_signature((["type"], ["properties", "*"], ["prefix"]))
+def set_generated_target_prefix(type, properties, prefix):
+ """
+ Sets a file prefix to be used when generating a target of 'type' with the
+ specified properties. Can be called with no properties if no prefix has
+ already been specified for the 'type'. The 'prefix' parameter can be an empty
+ string ("") to indicate that no prefix should be used.
+
+ Note that this does not cause files with 'prefix' to be automatically
+ recognized as being of 'type'. Two different types can use the same prefix for
+ their generated files but only one type can be auto-detected for a file with
+ that prefix. User should explicitly specify which one using the
+ register-prefixes rule.
+
+ Usage example: library names use the "lib" prefix on unix.
+ """
+ set_generated_target_ps(0, type, properties, prefix)
+
+# Change the prefix previously registered for this type/properties combination.
+# If prefix is not yet specified, sets it.
+def change_generated_target_prefix(type, properties, prefix):
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(prefix, basestring)
+ change_generated_target_ps(0, type, properties, prefix)
+
+def generated_target_prefix(type, properties):
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(properties, PropertySet)
+ return generated_target_ps(0, type, properties)
+
+def set_generated_target_ps(is_suffix, type, properties, val):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(val, basestring)
+ properties.append ('<target-type>' + type)
+ __prefixes_suffixes[is_suffix].insert (properties, val)
+
+def change_generated_target_ps(is_suffix, type, properties, val):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(val, basestring)
+ properties.append ('<target-type>' + type)
+ prev = __prefixes_suffixes[is_suffix].find_replace(properties, val)
+ if not prev:
+ set_generated_target_ps(is_suffix, type, properties, val)
+
+# Returns either prefix or suffix (as indicated by 'is_suffix') that should be used
+# when generating a target of 'type' with the specified properties.
+# If no prefix/suffix is specified for 'type', returns prefix/suffix for
+# base type, if any.
+def generated_target_ps_real(is_suffix, type, properties):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ result = ''
+ found = False
+ while type and not found:
+ result = __prefixes_suffixes[is_suffix].find (['<target-type>' + type] + properties)
+
+ # Note that if the string is empty (""), but not null, we consider
+ # suffix found. Setting prefix or suffix to empty string is fine.
+ if result is not None:
+ found = True
+
+ type = __types [type]['base']
+
+ if not result:
+ result = ''
+ return result
+
+def generated_target_ps(is_suffix, type, prop_set):
+ """ Returns the suffix that should be used when generating a target of 'type'
+ with the specified properties. If no suffix was specified for 'type',
+ returns the suffix for the base type, if any.
+ """
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert isinstance(prop_set, PropertySet)
+ key = (is_suffix, type, prop_set)
+ v = __target_suffixes_cache.get(key, None)
+
+ if not v:
+ v = generated_target_ps_real(is_suffix, type, prop_set.raw())
+ __target_suffixes_cache [key] = v
+
+ return v
+
+def type(filename):
+ """ Returns file type given its name. If there are several dots in filename,
+ tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
+ "so" will be tried.
+ """
+ assert isinstance(filename, basestring)
+ while 1:
+ filename, suffix = os.path.splitext (filename)
+ if not suffix: return None
+ suffix = suffix[1:]
+
+ if suffix in __suffixes_to_types:
+ return __suffixes_to_types[suffix]
+
+# NOTE: moved from tools/types/register
+def register_type (type, suffixes, base_type = None, os = []):
+ """ Register the given type on the specified OSes, or on all OSes
+ if 'os' is not specified. This rule is injected into each of the type
+ modules for the sake of convenience.
+ """
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(suffixes, basestring)
+ assert isinstance(base_type, basestring) or base_type is None
+ assert is_iterable_typed(os, basestring)
+ if registered (type):
+ return
+
+ if not os or os_name () in os:
+ register (type, suffixes, base_type)
diff --git a/src/boost/tools/build/src/build/version.jam b/src/boost/tools/build/src/build/version.jam
new file mode 100644
index 000000000..e6f0c8fe0
--- /dev/null
+++ b/src/boost/tools/build/src/build/version.jam
@@ -0,0 +1,166 @@
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import numbers ;
+
+
+# Mirror engine JAM_VERSION
+.major = "4" ;
+.minor = "2" ;
+
+
+rule boost-build ( )
+{
+ return "$(.major).$(.minor)-git" ;
+}
+
+
+rule print ( )
+{
+ if [ verify-engine-version ]
+ {
+ ECHO "B2" [ boost-build ] ;
+ }
+}
+
+
+rule verify-engine-version ( )
+{
+ local v = [ modules.peek : JAM_VERSION ] ;
+
+ if $(v[1]) != $(.major) || $(v[2]) != $(.minor)
+ {
+ local argv = [ modules.peek : ARGV ] ;
+ local e = $(argv[1]) ;
+ local l = [ modules.binding version ] ;
+ l = $(l:D) ;
+ l = $(l:D) ;
+ ECHO "warning: mismatched versions of B2 engine and core" ;
+ ECHO "warning: B2 engine ($(e)) is $(v:J=.)" ;
+ ECHO "warning: B2 core (at $(l)) is" [ boost-build ] ;
+ }
+ else
+ {
+ return true ;
+ }
+}
+
+
+# Utility rule for testing whether all elements in a sequence are equal to 0.
+#
+local rule is-all-zeroes ( sequence * )
+{
+ local result = "true" ;
+ for local e in $(sequence)
+ {
+ if $(e) != "0"
+ {
+ result = "" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns "true" if the first version is less than the second one.
+#
+rule version-less ( lhs + : rhs + )
+{
+ numbers.check $(lhs) ;
+ numbers.check $(rhs) ;
+
+ local done ;
+ local result ;
+
+ while ! $(done) && $(lhs) && $(rhs)
+ {
+ if [ numbers.less $(lhs[1]) $(rhs[1]) ]
+ {
+ done = "true" ;
+ result = "true" ;
+ }
+ else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
+ {
+ done = "true" ;
+ }
+ else
+ {
+ lhs = $(lhs[2-]) ;
+ rhs = $(rhs[2-]) ;
+ }
+ }
+ if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
+ {
+ result = "true" ;
+ }
+
+ return $(result) ;
+}
+
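The semantics of version-less, including the trailing-zero cases exercised by __test__ further down, can be restated as a short Python sketch; this is an illustration only, not code from the build system itself.

# Hypothetical Python rendering of version-less: compare element by element;
# when the left list runs out first, the right side only wins if its extra
# trailing components are not all zeroes.
def version_less(lhs, rhs):
    for a, b in zip(lhs, rhs):
        if a < b:
            return True
        if b < a:
            return False
    extra = rhs[len(lhs):]
    return len(lhs) < len(rhs) and any(c != 0 for c in extra)

assert version_less([3, 1, 20], [3, 4, 10])
assert not version_less([3, 1, 10], [3, 1, 10, 0, 0])  # trailing zeroes do not count
assert version_less([3, 1, 10], [3, 1, 10, 5, 1])
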
+
+# Returns "true" if the current JAM version version is at least the given
+# version.
+#
+rule check-jam-version ( version + )
+{
+ local version-tag = $(version:J=.) ;
+ if ! $(version-tag)
+ {
+ import errors ;
+ errors.error Invalid version "specifier:" : $(version:E="(undefined)") ;
+ }
+
+ if ! $(.jam-version-check.$(version-tag))-is-defined
+ {
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ if ! $(jam-version)
+ {
+ import errors ;
+ errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
+ "installation is most likely terribly outdated." ;
+ }
+ .jam-version-check.$(version-tag) = "true" ;
+ if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
+ {
+ .jam-version-check.$(version-tag) = "" ;
+ }
+ }
+ return $(.jam-version-check.$(version-tag)) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ local future-version = $(jam-version) ;
+ future-version += "1" ;
+
+ assert.true check-jam-version $(jam-version) ;
+ assert.false check-jam-version $(future-version) ;
+
+ assert.true version-less 0 : 1 ;
+ assert.false version-less 0 : 0 ;
+ assert.true version-less 1 : 2 ;
+ assert.false version-less 1 : 1 ;
+ assert.false version-less 2 : 1 ;
+ assert.true version-less 3 1 20 : 3 4 10 ;
+ assert.false version-less 3 1 10 : 3 1 10 ;
+ assert.false version-less 3 4 10 : 3 1 20 ;
+ assert.true version-less 3 1 20 5 1 : 3 4 10 ;
+ assert.false version-less 3 1 10 5 1 : 3 1 10 ;
+ assert.false version-less 3 4 10 5 1 : 3 1 20 ;
+ assert.true version-less 3 1 20 : 3 4 10 5 1 ;
+ assert.true version-less 3 1 10 : 3 1 10 5 1 ;
+ assert.false version-less 3 4 10 : 3 1 20 5 1 ;
+ assert.false version-less 3 1 10 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 0 : 3 1 10 ;
+ assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
+ assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
+
+ # TODO: Add tests for invalid input data being sent to version-less.
+}
diff --git a/src/boost/tools/build/src/build/version.py b/src/boost/tools/build/src/build/version.py
new file mode 100644
index 000000000..88299060e
--- /dev/null
+++ b/src/boost/tools/build/src/build/version.py
@@ -0,0 +1,38 @@
+import os
+import sys
+
+import bjam
+
+
+from b2.manager import get_manager
+
+
+MANAGER = get_manager()
+ERROR_HANDLER = MANAGER.errors()
+
+_major = "2015"
+_minor = "07"
+
+
+def boost_build():
+ return "{}.{}-git".format(_major, _minor)
+
+
+def verify_engine_version():
+ major, minor, _ = v = bjam.variable('JAM_VERSION')
+ if major != _major or minor != _minor:
+ from textwrap import dedent
+ engine = sys.argv[0]
+ core = os.path.dirname(os.path.dirname(__file__))
+ print dedent("""\
+ warning: mismatched version of Boost.Build engine core
+ warning: Boost.Build engine "{}" is "{}"
+ warning: Boost.Build core at {} is {}
+ """.format(engine, '.'.join(v), core, boost_build()))
+ return False
+ return True
+
+
+def report():
+ if verify_engine_version():
+ print "Boost.Build " + boost_build()
diff --git a/src/boost/tools/build/src/build/virtual-target.jam b/src/boost/tools/build/src/build/virtual-target.jam
new file mode 100644
index 000000000..d7b84321c
--- /dev/null
+++ b/src/boost/tools/build/src/build/virtual-target.jam
@@ -0,0 +1,1394 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements virtual targets, which correspond to actual files created during a
+# build, but are not yet targets in the Jam sense. They are needed, for example,
+# when searching for possible transformation sequences, when it is not yet known
+# whether a particular target should be created at all.
+#
+# +--------------------------+
+# | virtual-target |
+# +==========================+
+# | actualize |
+# +--------------------------+
+# | actualize-action() = 0 |
+# | actualize-location() = 0 |
+# +----------------+---------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# +---------------------+ +-------+--------------+
+# | action | | abstract-file-target |
+# +=====================| * +======================+
+# | action-name | +--+ action |
+# | properties | | +----------------------+
+# +---------------------+--+ | actualize-action() |
+# | actualize() |0..1 +-----------+----------+
+# | path() | |
+# | adjust-properties() | sources |
+# | actualize-sources() | targets |
+# +------+--------------+ ^
+# | / \
+# ^ +-+-+
+# / \ |
+# +-+-+ +-------------+-------------+
+# | | |
+# | +------+---------------+ +--------+-------------+
+# | | file-target | | searched-lib-target |
+# | +======================+ +======================+
+# | | actualize-location() | | actualize-location() |
+# | +----------------------+ +----------------------+
+# |
+# +-+------------------------------+
+# | |
+# +----+----------------+ +---------+-----------+
+# | compile-action | | link-action |
+# +=====================+ +=====================+
+# | adjust-properties() | | adjust-properties() |
+# +---------------------+ | actualize-sources() |
+# +---------------------+
+#
+# The 'compile-action' and 'link-action' classes are not defined here but in
+# builtin.jam modules. They are shown in the diagram to give the big picture.
+
+import "class" : new ;
+import feature ;
+import path ;
+import property-set ;
+import sequence ;
+import set ;
+import toolset ;
+import type ;
+import utility ;
+
+
+# Models a potential target. It can be converted into a Jam target and used in
+# building, if needed. However, it can also be dropped, which allows us to
+# search for different transformations and select only one.
+#
+class virtual-target
+{
+ import scanner ;
+ import sequence ;
+ import utility ;
+ import virtual-target ;
+
+ rule __init__ (
+ name # Target/project name.
+ : project # Project to which this target belongs.
+ )
+ {
+ self.name = $(name) ;
+ self.project = $(project) ;
+ self.dependencies = ;
+ }
+
+ # Name of this target.
+ #
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Project of this target.
+ #
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Adds additional 'virtual-target' instances this one depends on.
+ #
+ rule depends ( d + )
+ {
+ self.dependencies = [ sequence.merge $(self.dependencies) :
+ [ sequence.insertion-sort $(d) ] ] ;
+ }
+
+ rule dependencies ( )
+ {
+ return $(self.dependencies) ;
+ }
+
+ rule always ( )
+ {
+ .always = 1 ;
+ }
+
+ rule fail-expected ( )
+ {
+ .fail-expected = 1 ;
+ }
+
+ # Generates all the actual targets and sets up build actions for this
+ # target.
+ #
+ # If 'scanner' is specified, creates an additional target with the same
+ # location as the actual target, which will depend on the actual target and
+ # be associated with a 'scanner'. That additional target is returned. See
+ # the docs (#dependency_scanning) for rationale. Target must correspond to a
+ # file if 'scanner' is specified.
+ #
+ # If scanner is not specified then the actual target is returned.
+ #
+ rule actualize ( scanner ? )
+ {
+ local actual-name = [ actualize-no-scanner ] ;
+
+ if $(.always)
+ {
+ ALWAYS $(actual-name) ;
+ }
+
+ if $(.fail-expected)
+ {
+ FAIL_EXPECTED $(actual-name) ;
+ }
+
+ if ! $(scanner)
+ {
+ return $(actual-name) ;
+ }
+ else
+ {
+ # Add the scanner instance to the grist for name.
+ local g = [ sequence.join [ utility.ungrist $(actual-name:G) ]
+ $(scanner) : - ] ;
+ local name = $(actual-name:G=$(g)) ;
+
+ if ! $(self.made.$(scanner))
+ {
+ self.made.$(scanner) = true ;
+ actualize-location $(name) ;
+ scanner.install $(scanner) : $(name) ;
+ }
+ return $(name) ;
+ }
+ }
+
+# private: (overridables)
+
+ # Sets/gets the 'root' flag. Target is root if it directly corresponds to
+ # some variant of a main target.
+ #
+ rule root ( set ? )
+ {
+ if $(set)
+ {
+ self.root = true ;
+ }
+ return $(self.root) ;
+ }
+
+
+ # Sets up build actions for 'target'. Should call appropriate rules and set
+ # target variables.
+ #
+ rule actualize-action ( target )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Sets up variables on 'target' which specify its location.
+ #
+ rule actualize-location ( target )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # If the target is a generated one, returns the path where it will be
+ # generated. Otherwise, returns an empty list.
+ #
+ rule path ( )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Returns the actual target name to be used in case when no scanner is
+ # involved.
+ #
+ rule actual-name ( )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Returns additional properties that are relevant for this target
+ # beyond those required by the action.
+ #
+ rule relevant ( )
+ {
+ return [ property-set.empty ] ;
+ }
+
+# implementation
+ rule actualize-no-scanner ( )
+ {
+ # In fact, we just need to merge virtual-target with
+ # abstract-file-target as the latter is the only class derived from the
+ # former. But that has been left for later.
+
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+# Target corresponding to a file. The exact mapping for file is not yet
+# specified in this class. (TODO: Actually, the class name could be better...)
+#
+# May be a source file (when no action is specified) or a derived file
+# (otherwise).
+#
+# The target's grist is a concatenation of its project's location, action
+# properties (for derived targets) and, optionally, value identifying the main
+# target.
+#
+class abstract-file-target : virtual-target
+{
+ import project ;
+ import regex ;
+ import sequence ;
+ import path ;
+ import type ;
+ import property-set ;
+ import indirect ;
+
+ rule __init__ (
+ name # Target's name.
+        exact ?   # If non-empty, the name is exactly the name the created file
+ # should have. Otherwise, the '__init__' method will add a
+ # suffix obtained from 'type' by calling
+ # 'type.generated-target-suffix'.
+ : type ? # Target's type.
+ : project
+ : action ?
+ )
+ {
+ virtual-target.__init__ $(name) : $(project) ;
+
+ self.type = $(type) ;
+ self.action = $(action) ;
+ if $(action)
+ {
+ $(action).add-targets $(__name__) ;
+
+ if $(self.type) && ! $(exact)
+ {
+ _adjust-name $(name) ;
+ }
+ }
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ # Sets the path. When generating target name, it will override any path
+ # computation from properties.
+ #
+ rule set-path ( path )
+ {
+ self.path = [ path.native $(path) ] ;
+ }
+
+ # Returns the currently set action.
+ #
+ rule action ( )
+ {
+ return $(self.action) ;
+ }
+
+ # Gets or sets the subvariant which created this target. Subvariant is set
+ # when target is brought into existence and is never changed after that. In
+ # particular, if a target is shared by multiple subvariants, only the first
+ # one is stored.
+ #
+ rule creating-subvariant ( s ? # If specified, specifies the value to set,
+ # which should be a 'subvariant' class
+ # instance.
+ )
+ {
+ if $(s) && ! $(self.creating-subvariant)
+ {
+ self.creating-subvariant = $(s) ;
+ }
+ return $(self.creating-subvariant) ;
+ }
+
+ rule actualize-action ( target )
+ {
+ if $(self.action)
+ {
+ $(self.action).actualize ;
+ }
+ }
+
+ # Return a human-readable representation of this target. If this target has
+ # an action, that is:
+ #
+ # { <action-name>-<self.name>.<self.type> <action-sources>... }
+ #
+ # otherwise, it is:
+ #
+ # { <self.name>.<self.type> }
+ #
+ rule str ( )
+ {
+ local action = [ action ] ;
+ local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
+
+ if $(action)
+ {
+ local sources = [ $(action).sources ] ;
+ local action-name = [ $(action).action-name ] ;
+
+ local ss ;
+ for local s in $(sources)
+ {
+ ss += [ $(s).str ] ;
+ }
+
+ return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
+ }
+ else
+ {
+ return "{" $(name-dot-type) "}" ;
+ }
+ }
+
+ rule less ( a )
+ {
+ if [ str ] < [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+ rule equal ( a )
+ {
+ if [ str ] = [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+# private:
+ rule actual-name ( )
+ {
+ if ! $(self.actual-name)
+ {
+ local grist = [ grist ] ;
+ local basename = [ path.native $(self.name) ] ;
+ self.actual-name = <$(grist)>$(basename) ;
+ }
+ return $(self.actual-name) ;
+ }
+
+ # Helper to 'actual-name', above. Computes a unique prefix used to
+ # distinguish this target from other targets with the same name creating
+ # different files.
+ #
+ rule grist ( )
+ {
+ # Depending on target, there may be different approaches to generating
+ # unique prefixes. We generate prefixes in the form:
+ # <one letter approach code> <the actual prefix>
+ local path = [ path ] ;
+ if $(path)
+ {
+ # The target will be generated to a known path. Just use the path
+ # for identification, since path is as unique as it can get.
+ return p$(path) ;
+ }
+ else
+ {
+ # File is either source, which will be searched for, or is not a
+ # file at all. Use the location of project for distinguishing.
+ local project-location = [ $(self.project).get location ] ;
+ local location-grist = [ sequence.join [ regex.split
+ $(project-location) "/" ] : "!" ] ;
+
+ if $(self.action)
+ {
+ local ps = [ $(self.action).properties ] ;
+ local property-grist = [ $(ps).as-path ] ;
+ # 'property-grist' can be empty when 'ps' is an empty property
+ # set.
+ if $(property-grist)
+ {
+ location-grist = $(location-grist)/$(property-grist) ;
+ }
+ }
+
+ return l$(location-grist) ;
+ }
+ }
+
+ # Given the target name specified in constructor, returns the name which
+ # should be really used, by looking at the <tag> properties. Tag properties
+ # need to be specified as <tag>@rule-name. This makes Boost Build call the
+ # specified rule with the target name, type and properties to get the new
+ # name. If no <tag> property is specified or the rule specified by <tag>
+ # returns nothing, returns the result of calling
+ # virtual-target.add-prefix-and-suffix.
+ #
+ rule _adjust-name ( specified-name )
+ {
+ local ps ;
+ if $(self.action)
+ {
+ ps = [ $(self.action).properties ] ;
+ }
+ else
+ {
+ ps = [ property-set.empty ] ;
+ }
+
+ # Add this target object for use in getting additional information
+ # when tagging.
+ ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
+
+ local tag = [ $(ps).get <tag> ] ;
+
+ if $(tag)
+ {
+ local rule-name = [ MATCH ^@(.*) : $(tag) ] ;
+ if $(rule-name)
+ {
+ if $(tag[2])
+ {
+ import errors : error : errors.error ;
+ errors.error <tag>@rulename is present but is not the only
+ <tag> feature. ;
+ }
+
+ self.name = [ indirect.call $(rule-name) $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ else
+ {
+ import errors : error : errors.error ;
+ errors.error <tag> property value must be '@rule-name'. ;
+ }
+ }
+
+ # If there is no tag or the tag rule returned nothing.
+ if ! $(tag) || ! $(self.name)
+ {
+ self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ }
+
+ rule actualize-no-scanner ( )
+ {
+ local name = [ actual-name ] ;
+
+ # Do anything only on the first invocation.
+ if ! $(self.made-no-scanner)
+ {
+ self.made-no-scanner = true ;
+
+ if $(self.action)
+ {
+ # For non-derived target, we do not care if there are several
+ # virtual targets that refer to the same name. One case when
+ # this is unavoidable is when the file name is main.cpp and two
+ # targets have types CPP (for compiling) and MOCCABLE_CPP (for
+ # conversion to H via Qt tools).
+ virtual-target.register-actual-name $(name) : $(__name__) ;
+ }
+
+ for local i in $(self.dependencies)
+ {
+ DEPENDS $(name) : [ $(i).actualize ] ;
+ }
+
+ actualize-location $(name) ;
+ actualize-action $(name) ;
+ }
+ return $(name) ;
+ }
+}
+
+
+# Prepends the prefix and appends the suffix appropriate to the
+# 'type/property-set' combination to the specified name and returns the result.
+#
+rule add-prefix-and-suffix ( specified-name : type ? : property-set )
+{
+ local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
+
+ # Handle suffixes for which no leading dot is desired. Those are specified
+ # by enclosing them in <...>. Needed by python so it can create "_d.so"
+ # extensions, for example.
+ if $(suffix:G)
+ {
+ suffix = [ utility.ungrist $(suffix) ] ;
+ }
+ else
+ {
+ suffix = .$(suffix) ;
+ }
+
+ local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
+
+ if [ MATCH ^($(prefix)) : $(specified-name) ]
+ {
+ prefix = ;
+ }
+ return $(prefix:E="")$(specified-name)$(suffix:E="") ;
+}
+
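The gristed-suffix convention handled above can be sketched in Python with hypothetical inputs; this mirrors the rule's behaviour rather than reproducing any b2 API.

# Hypothetical sketch of add-prefix-and-suffix: a suffix wrapped in <...> is
# attached without a leading dot (e.g. to build "foo_d.so"), and an already
# present prefix is not doubled.
def add_prefix_and_suffix(name, prefix, suffix):
    if suffix.startswith('<') and suffix.endswith('>'):
        suffix = suffix[1:-1]            # gristed: no leading dot
    elif suffix:
        suffix = '.' + suffix
    if prefix and name.startswith(prefix):
        prefix = ''
    return prefix + name + suffix

assert add_prefix_and_suffix('hello', '', 'exe') == 'hello.exe'
assert add_prefix_and_suffix('foo', '', '<_d.so>') == 'foo_d.so'
assert add_prefix_and_suffix('libfoo', 'lib', 'a') == 'libfoo.a'
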
+
+# File targets with explicitly known location.
+#
+# The file path is determined as
+# * Value passed to the 'set-path' method, if any.
+# * For derived files, project's build dir, joined with components that
+# describe action properties. If free properties are not equal to the
+# project's reference properties an element with the name of the main
+# target is added.
+# * For source files, project's source dir.
+#
+# The file suffix is determined as:
+# * The value passed to the 'suffix' method, if any.
+# * The suffix corresponding to the target's type.
+#
+class file-target : abstract-file-target
+{
+ import "class" : new ;
+ import common ;
+
+ rule __init__ (
+ name exact ?
+ : type ? # Optional type for this target.
+ : project
+ : action ?
+ : path ?
+ )
+ {
+ abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) :
+ $(action) ;
+
+ self.path = $(path) ;
+ }
+
+ rule clone-with-different-type ( new-type )
+ {
+ return [ new file-target $(self.name) exact : $(new-type) :
+ $(self.project) : $(self.action) : $(self.path) ] ;
+ }
+
+ rule actualize-location ( target )
+ {
+ # Scanner targets are always bound to already existing files in already
+ # existing folder. They need to be marked as depending on their base
+ # target (i.e. the target being scanned) but, unlike regular
+ # dependencies set up by the DEPENDS rule, they must not depend on any
+ # targets already marked as included by the base target. Otherwise such
+ # an included file being newer than the file being scanned would cause
+ # the scanner target to be updated, further causing any target depending
+ # on that scanner target to be rebuilt. This is the exact relationship
+ # as set up by Boost Jam's SEARCH binding method (needed to support
+ # searching for generated targets) so we want to bind scanner targets
+ # using this method instead of explicitly specifying their location
+ # using LOCATE.
+ #
+ # FIXME: We recognize scanner targets by their given name being
+ # different from this target's actual name. This is a hack and should be
+ # cleaned up by reorganizing who knows about scanners in the
+ # virtual-target/abstract-file-target/file-target/notfile-target/
+ # searched-lib-target/... class hierarchy.
+ local is-scanner-target ;
+ if $(target) != [ actual-name ]
+ {
+ is-scanner-target = true ;
+ }
+
+ if $(self.action) && ! $(is-scanner-target)
+ {
+ # This is a derived file.
+ local path = [ path ] ;
+ LOCATE on $(target) = $(path) ;
+
+ # Make sure the path exists.
+ DEPENDS $(target) : $(path) ;
+ common.MkDir $(path) ;
+
+ # It is possible that the target name includes a directory too, for
+ # example when installing headers. Create that directory.
+ if $(target:D)
+ {
+ local d = $(target:D) ;
+ d = $(d:R=$(path)) ;
+ DEPENDS $(target) : $(d) ;
+ common.MkDir $(d) ;
+ }
+
+ # For a real file target, we create a fake target depending on the
+ # real target. This allows us to run
+ #
+ # b2 hello.o
+ #
+ # without trying to guess the name of the real target. Note that the
+ # target has no directory name and uses a special <e> grist.
+ #
+ # First, that means that "b2 hello.o" will build all known hello.o
+ # targets. Second, the <e> grist makes sure this target will not be
+ # confused with other targets, for example, if we have subdir 'test'
+            # with target 'test' in it that includes a 'test.o' file, then the
+            # target for the directory will be just 'test', the target for test.o
+            # will be <ptest/bin/gcc/debug>test.o, and the target we create below
+            # will be <e>test.o.
+ DEPENDS $(target:G=e) : $(target) ;
+ # Allow b2 <path-to-file>/<file> to work. This will not catch all
+ # possible ways to refer to the path (relative/absolute, extra ".",
+ # various "..", but should help in obvious cases.
+ DEPENDS $(target:G=e:R=$(path)) : $(target) ;
+ }
+ else
+ {
+ SEARCH on $(target) = [ path.native $(self.path) ] ;
+ }
+ }
+
+ # Returns the directory for this target.
+ #
+ rule path ( )
+ {
+ if ! $(self.path)
+ {
+ if $(self.action)
+ {
+ local p = [ $(self.action).properties ] ;
+ local path,relative-to-build-dir = [ $(p).target-path ] ;
+ local path = $(path,relative-to-build-dir[1]) ;
+ local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
+
+ if $(relative-to-build-dir)
+ {
+ path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
+ }
+
+ self.path = [ path.native $(path) ] ;
+ }
+ }
+ return $(self.path) ;
+ }
+}
+
+
+class notfile-target : abstract-file-target
+{
+ rule __init__ ( name : project : action ? )
+ {
+ abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
+ }
+
+ # Returns nothing to indicate that the target's path is not known.
+ #
+ rule path ( )
+ {
+ return ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ ALWAYS $(target) ;
+ # TEMPORARY $(target) ;
+ NOUPDATE $(target) ;
+ }
+}
+
+
+# Class representing an action. Both 'targets' and 'sources' should list
+# instances of 'virtual-target'. Action name should name a rule with this
+# prototype:
+# rule action-name ( targets + : sources * : properties * )
+# Targets and sources are passed as actual Jam targets. The rule may not
+# establish additional dependency relationships.
+#
+class action
+{
+ import "class" ;
+ import indirect ;
+ import path ;
+ import property-set ;
+ import set : difference ;
+ import toolset ;
+ import type ;
+
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ self.sources = $(sources) ;
+
+ self.action-name = [ indirect.make-qualified $(action-name) ] ;
+
+ if ! $(property-set)
+ {
+ property-set = [ property-set.empty ] ;
+ }
+
+ if ! [ class.is-instance $(property-set) ]
+ {
+ import errors : error : errors.error ;
+ errors.error "Property set instance required" ;
+ }
+
+ self.properties = $(property-set) ;
+ }
+
+ rule add-targets ( targets * )
+ {
+ self.targets += $(targets) ;
+ }
+
+ rule replace-targets ( old-targets * : new-targets * )
+ {
+ self.targets = [ set.difference $(self.targets) : $(old-targets) ] ;
+ self.targets += $(new-targets) ;
+ }
+
+ rule targets ( )
+ {
+ return $(self.targets) ;
+ }
+
+ rule sources ( )
+ {
+ return $(self.sources) ;
+ }
+
+ rule action-name ( )
+ {
+ return $(self.action-name) ;
+ }
+
+ rule properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ # Generates actual build instructions.
+ #
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+
+ local ps = [ properties ] ;
+ local properties = [ adjust-properties $(ps) ] ;
+
+ local actual-targets ;
+ for local i in [ targets ]
+ {
+ actual-targets += [ $(i).actualize ] ;
+ }
+
+ actualize-sources [ sources ] : $(properties) ;
+
+ DEPENDS $(actual-targets) : $(self.actual-sources)
+ $(self.dependency-only-sources) ;
+
+ # Action name can include additional rule arguments, which should
+ # not be passed to 'set-target-variables'.
+ toolset.set-target-variables
+ [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
+ : $(properties) ;
+
+ # Reflect ourselves in a variable for the target. This allows
+ # looking up additional info for the action given the raw target.
+ # For example to debug or output action information from action
+ # rules.
+ .action on $(actual-targets) = $(__name__) ;
+
+ #indirect.call $(self.action-name) $(actual-targets)
+ # : $(self.actual-sources) : [ $(properties).raw ] ;
+ execute $(self.action-name) $(actual-targets)
+ : $(self.actual-sources) : [ $(properties).raw ] ;
+
+ # Since we set up the creating action here, we set up the action for
+ # cleaning up as well.
+ common.Clean clean-all : $(actual-targets) ;
+ }
+ }
+
+ # Helper for 'actualize-sources'. For each passed source, actualizes it with
+ # the appropriate scanner. Returns the actualized virtual targets.
+ #
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result = ;
+ for local i in $(sources)
+ {
+ local scanner ;
+ if [ $(i).type ]
+ {
+ scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
+ }
+ result += [ $(i).actualize $(scanner) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Creates actual Jam targets for sources. Initializes the following member
+ # variables:
+ # 'self.actual-sources' -- sources passed to the updating action.
+ # 'self.dependency-only-sources' -- sources marked as dependencies, but
+ # are not used otherwise.
+ #
+ # New values will be *appended* to the variables. They may be non-empty if
+ # caller wants it.
+ #
+ rule actualize-sources ( sources * : property-set )
+ {
+ local dependencies = [ $(self.properties).get <dependency> ] ;
+
+ self.dependency-only-sources +=
+ [ actualize-source-type $(dependencies) : $(property-set) ] ;
+ self.actual-sources +=
+ [ actualize-source-type $(sources) : $(property-set) ] ;
+
+ # This is used to help b2 find dependencies in generated headers and
+ # other main targets, e.g. in:
+ #
+ # make a.h : ....... ;
+ # exe hello : hello.cpp : <implicit-dependency>a.h ;
+ #
+ # For b2 to find the dependency the generated target must be
+ # actualized (i.e. have its Jam target constructed). In the above case,
+ # if we are building just hello ("b2 hello"), 'a.h' will not be
+ # actualized unless we do it here.
+ local implicit = [ $(self.properties).get <implicit-dependency> ] ;
+ for local i in $(implicit)
+ {
+ $(i:G=).actualize ;
+ }
+ }
+
+ # Determines real properties when trying to build with 'properties'. This is
+ # the last chance to fix properties, for example to adjust includes to get
+ # generated headers correctly. Default implementation simply returns its
+ # argument.
+ #
+ rule adjust-properties ( property-set )
+ {
+ return $(property-set) ;
+ }
+
+ # Execute the action rule on the given targets, sources, and properties.
+ # Since this does the final call to the engine action rule this takes
+ # engine level targets and raw properties. One could override this, for
+ # example, to set additional variables on the target that might be
+ # difficult to determine just using toolset flags.
+ # Note, you must call this base rule when overriding as otherwise the
+ # actions will not execute and the engine will not run commands.
+ #
+ rule execute ( action-name targets + : sources * : properties * )
+ {
+ indirect.call $(action-name) $(targets) : $(sources) : $(properties) ;
+ }
+}
+
+
+# Action class which does nothing --- it produces the targets with specific
+# properties out of nowhere. It is needed to distinguish virtual targets with
+# different properties that are known to exist and have no actions which create
+# them.
+#
+class null-action : action
+{
+ rule __init__ ( property-set ? )
+ {
+ action.__init__ : .no-action : $(property-set) ;
+ }
+
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+ for local i in [ targets ]
+ {
+ $(i).actualize ;
+ }
+ }
+ }
+}
+
+
+# Class which acts exactly like 'action', except that its sources are not
+# scanned for dependencies.
+#
+class non-scanning-action : action
+{
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ action.__init__ $(sources) : $(action-name) : $(property-set) ;
+ }
+
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result ;
+ for local i in $(sources)
+ {
+ result += [ $(i).actualize ] ;
+ }
+ return $(result) ;
+ }
+}
+
+
+# Creates a virtual target with an appropriate name and type from 'file'. If a
+# target with that name in that project already exists, returns that already
+# created target.
+#
+# FIXME: a more correct way would be to compute the path to the file, based on
+# name and source location for the project, and use that path to determine if
+# the target has already been created. This logic should be shared with how we
+# usually find targets identified by a specific target id. It should also be
+# updated to work correctly when the file is specified using both relative and
+# absolute paths.
+#
+# TODO: passing a project with all virtual targets is starting to be annoying.
+#
+rule from-file ( file : file-loc : project )
+{
+ import type ; # Had to do this here to break a circular dependency.
+
+ # Check whether we already created a target corresponding to this file.
+ local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
+
+ if $(.files.$(path))
+ {
+ return $(.files.$(path)) ;
+ }
+ else
+ {
+ local name = [ path.make $(file) ] ;
+ local type = [ type.type $(file) ] ;
+ local result ;
+
+ result = [ new file-target $(file) : $(type) : $(project) : :
+ $(file-loc) ] ;
+
+ .files.$(path) = $(result) ;
+ return $(result) ;
+ }
+}
+
+
+# Registers a new virtual target. Checks if there is already a registered target
+# with the same name, type, project and subvariant properties as well as the
+# same sources and equal action. If such target is found it is returned and a
+# new 'target' is not registered. Otherwise, 'target' is registered and
+# returned.
+#
+rule register ( target )
+{
+ local signature = [ sequence.join [ $(target).path ] [ $(target).name ] : -
+ ] ;
+
+ local result ;
+ for local t in $(.cache.$(signature))
+ {
+ local a1 = [ $(t).action ] ;
+ local a2 = [ $(target).action ] ;
+
+ if ! $(result)
+ {
+ if ! $(a1) && ! $(a2)
+ {
+ result = $(t) ;
+ }
+ else if $(a1) && $(a2) &&
+ ( [ $(a1).action-name ] = [ $(a2).action-name ] ) &&
+ ( [ $(a1).sources ] = [ $(a2).sources ] )
+ {
+ local ps1 = [ $(a1).properties ] ;
+ local ps2 = [ $(a2).properties ] ;
+ local relevant = [ toolset.relevant [ $(a1).action-name ] ] ;
+ relevant = [ $(relevant).add [ $(target).relevant ] ] ;
+ local p1 = [ $(ps1).relevant $(relevant) ] ;
+ local p2 = [ $(ps2).relevant $(relevant) ] ;
+ if $(p1) = $(p2)
+ {
+ result = $(t) ;
+ }
+ }
+ }
+ }
+
+ if ! $(result)
+ {
+ .cache.$(signature) += $(target) ;
+ result = $(target) ;
+ }
+
+ .recent-targets += $(result) ;
+ .all-targets += $(result) ;
+
+ return $(result) ;
+}
+
+
+# Each target returned by 'register' is added to the .recent-targets list,
+# returned by this function. This allows us to find all virtual targets created
+# when building a specific main target, even those constructed only as
+# intermediate targets.
+#
+rule recent-targets ( )
+{
+ return $(.recent-targets) ;
+}
+
+
+rule clear-recent-targets ( )
+{
+ .recent-targets = ;
+}
+
+
+# Returns all virtual targets ever created.
+#
+rule all-targets ( )
+{
+ return $(.all-targets) ;
+}
+
+
+# Returns all targets from 'targets' with types equal to 'type' or derived from
+# it.
+#
+rule select-by-type ( type : targets * )
+{
+ local result ;
+ for local t in $(targets)
+ {
+ if [ type.is-subtype [ $(t).type ] $(type) ]
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+rule register-actual-name ( actual-name : virtual-target )
+{
+ if $(.actual.$(actual-name))
+ {
+ local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ;
+ local cmt1-name ;
+ if $(cs1)-is-defined
+ {
+ local cmt1 = [ $(cs1).main-target ] ;
+ cmt1-name = [ $(cmt1).full-name ] ;
+ }
+ local cs2 = [ $(virtual-target).creating-subvariant ] ;
+ local cmt2-name ;
+ if $(cs2)-is-defined
+ {
+ local cmt2 = [ $(cs2).main-target ] ;
+ cmt2-name = [ $(cmt2).full-name ] ;
+ }
+ local extra-error-information ;
+ if ! $(cs1)-is-defined || ! $(cs2)-is-defined
+ {
+ extra-error-information = Encountered a virtual-target without a
+ creating subvariant. It could be the virtual target has not been
+ registered via the virtual-target.register rule. ;
+ }
+
+ local action1 = [ $(.actual.$(actual-name)).action ] ;
+ local action2 = [ $(virtual-target).action ] ;
+ local properties-added ;
+ local properties-removed ;
+ if $(action1) && $(action2)
+ {
+ local p1 = [ $(action1).properties ] ;
+ local p2 = [ $(action2).properties ] ;
+ # Only show features that are relevant for either target.
+ local relevant = [ $(p1).get <relevant> ] [ $(p2).get <relevant> ] ;
+ relevant = [ feature.expand-relevant $(relevant) ] ;
+ # The presence of relevant can potentially mess things up,
+ # so we always need to show it.
+ relevant += relevant ;
+ relevant = [ property-set.create <relevant>$(relevant) ] ;
+ p1 = [ $(p1).relevant $(relevant) ] ;
+ p2 = [ $(p2).relevant $(relevant) ] ;
+ p1 = [ $(p1).raw ] ;
+ p2 = [ $(p2).raw ] ;
+ properties-removed = [ set.difference $(p1) : $(p2) ] ;
+ properties-removed ?= "none" ;
+ properties-added = [ set.difference $(p2) : $(p1) ] ;
+ properties-added ?= "none" ;
+ }
+ import errors : user-error : errors.user-error ;
+ errors.user-error "Name clash for '$(actual-name)'"
+ : ""
+ : "Tried to build the target twice, with property sets having "
+ : "these incompatible properties:"
+ : ""
+ : " - " $(properties-removed)
+ : " - " $(properties-added)
+ : ""
+ : "Please make sure to have consistent requirements for these "
+ : "properties everywhere in your project, especially for install"
+ : "targets."
+ ;
+ }
+ else
+ {
+ .actual.$(actual-name) = $(virtual-target) ;
+ }
+}
+
+
+# Traverses the dependency graph of 'target' and returns all targets that will be
+# created before this one is created. If the root of some dependency graph is
+# found during traversal, it is either included or not, depending on the
+# 'include-roots' value. In either case traversal stops at root targets, i.e.
+# root target sources are not traversed.
+#
+rule traverse ( target : include-roots ? : include-sources ? )
+{
+ local result ;
+ if [ $(target).action ]
+ {
+ local action = [ $(target).action ] ;
+ # This includes the 'target' as well.
+ result += [ $(action).targets ] ;
+
+ for local t in [ $(action).sources ]
+ {
+ if ! [ $(t).root ]
+ {
+ result += [ traverse $(t) : $(include-roots) :
+ $(include-sources) ] ;
+ }
+ else if $(include-roots)
+ {
+ result += $(t) ;
+ }
+ }
+ }
+ else if $(include-sources)
+ {
+ result = $(target) ;
+ }
+ return $(result) ;
+}
+
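A rough Python restatement of the traversal above, using hypothetical node objects with plain 'action', 'root', 'targets' and 'sources' attributes rather than the real virtual-target classes.

# Hypothetical sketch: walk backwards through actions and their sources,
# stopping at root targets; roots and plain sources are collected only on
# request, matching the include-roots / include-sources flags above.
def traverse(target, include_roots=False, include_sources=False):
    result = []
    if target.action:
        result += target.action.targets          # includes 'target' itself
        for s in target.action.sources:
            if not s.root:
                result += traverse(s, include_roots, include_sources)
            elif include_roots:
                result.append(s)
    elif include_sources:
        result.append(target)
    return result
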
+
+# Takes an 'action' instance and creates a new instance of it and all targets
+# produced by the action. The rule-name and properties are set to
+# 'new-rule-name' and 'new-properties', if those are specified. Returns the
+# cloned action.
+#
+rule clone-action ( action : new-project : new-action-name ? : new-properties ?
+ )
+{
+ if ! $(new-action-name)
+ {
+ new-action-name = [ $(action).action-name ] ;
+ }
+ if ! $(new-properties)
+ {
+ new-properties = [ $(action).properties ] ;
+ }
+
+ local action-class = [ modules.peek $(action) : __class__ ] ;
+ local cloned-action = [ class.new $(action-class)
+ [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
+
+ local cloned-targets ;
+ for local target in [ $(action).targets ]
+ {
+ local n = [ $(target).name ] ;
+ # Do not modify produced target names.
+ local cloned-target = [ class.new file-target $(n) exact :
+ [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
+ local d = [ $(target).dependencies ] ;
+ if $(d)
+ {
+ $(cloned-target).depends $(d) ;
+ }
+ $(cloned-target).root [ $(target).root ] ;
+ $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
+
+ cloned-targets += $(cloned-target) ;
+ }
+
+ return $(cloned-action) ;
+}
+
+
+class subvariant
+{
+ import sequence ;
+ import type ;
+
+ rule __init__ ( main-target # The instance of main-target class.
+ : property-set # Properties requested for this target.
+ : sources *
+ : build-properties # Actually used properties.
+ : sources-usage-requirements # Properties propagated from sources.
+ : created-targets * ) # Top-level created targets.
+ {
+ self.main-target = $(main-target) ;
+ self.properties = $(property-set) ;
+ self.sources = $(sources) ;
+ self.build-properties = $(build-properties) ;
+ self.sources-usage-requirements = $(sources-usage-requirements) ;
+ self.created-targets = $(created-targets) ;
+
+ # Pre-compose a list of other dependency graphs this one depends on.
+ local deps = [ $(build-properties).get <implicit-dependency> ] ;
+ for local d in $(deps)
+ {
+ self.other-dg += [ $(d:G=).creating-subvariant ] ;
+ }
+
+ self.other-dg = [ sequence.unique $(self.other-dg) ] ;
+ }
+
+ rule main-target ( )
+ {
+ return $(self.main-target) ;
+ }
+
+ rule created-targets ( )
+ {
+ return $(self.created-targets) ;
+ }
+
+ rule requested-properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ rule build-properties ( )
+ {
+ return $(self.build-properties) ;
+ }
+
+ rule sources-usage-requirements ( )
+ {
+ return $(self.sources-usage-requirements) ;
+ }
+
+ rule set-usage-requirements ( usage-requirements )
+ {
+ self.usage-requirements = $(usage-requirements) ;
+ }
+
+ rule usage-requirements ( )
+ {
+ return $(self.usage-requirements) ;
+ }
+
+ # Returns all targets referenced by this subvariant, either directly or
+ # indirectly, and either as sources, or as dependency properties. Targets
+ # referred to using the dependency property are returned as properties, not
+ # targets.
+ #
+ rule all-referenced-targets ( theset )
+ {
+ # Find directly referenced targets.
+ local deps = [ $(self.build-properties).dependency ] ;
+ local all-targets = $(self.sources) $(deps) ;
+
+ # Find other subvariants.
+ local r ;
+ for local t in $(all-targets)
+ {
+ if ! [ $(theset).contains $(t) ]
+ {
+ $(theset).add $(t) ;
+ r += [ $(t:G=).creating-subvariant ] ;
+ }
+ }
+ r = [ sequence.unique $(r) ] ;
+ for local s in $(r)
+ {
+ if $(s) != $(__name__)
+ {
+ $(s).all-referenced-targets $(theset) ;
+ }
+ }
+ }
+
+ # Returns the properties specifying implicit include paths to generated
+ # headers. This traverses all targets in this subvariant and subvariants
+ # referred by <implicit-dependency> properties. For all targets of type
+ # 'target-type' (or for all targets, if 'target-type' is not specified), the
+ # result will contain <$(feature)>path-to-that-target.
+ #
+ rule implicit-includes ( feature : target-type ? )
+ {
+ local key = ii$(feature)-$(target-type:E="") ;
+ if ! $($(key))-is-not-empty
+ {
+ local target-paths = [ all-target-directories $(target-type) ] ;
+ target-paths = [ sequence.unique $(target-paths) ] ;
+ local result = $(target-paths:G=$(feature)) ;
+ if ! $(result)
+ {
+ result = "" ;
+ }
+ $(key) = $(result) ;
+ }
+ if $($(key)) = ""
+ {
+ return ;
+ }
+ else
+ {
+ return $($(key)) ;
+ }
+ }
+
+ rule all-target-directories ( target-type ? )
+ {
+ if ! $(self.target-directories.$(target-type:E=))
+ {
+ compute-target-directories $(target-type) ;
+ }
+ return $(self.target-directories.$(target-type:E=)) ;
+ }
+
+ rule compute-target-directories ( target-type ? )
+ {
+ local result ;
+ for local t in $(self.created-targets)
+ {
+ # Skip targets of the wrong type.
+ local type = [ $(t).type ] ;
+ if ! $(target-type) ||
+ ( $(type) && [ type.is-derived $(type) $(target-type) ] )
+ {
+ result = [ sequence.merge $(result) : [ $(t).path ] ] ;
+ }
+ }
+ for local d in $(self.other-dg)
+ {
+ result += [ $(d).all-target-directories $(target-type) ] ;
+ }
+ self.target-directories.$(target-type:E=) = $(result) ;
+ }
+}
diff --git a/src/boost/tools/build/src/build/virtual_target.py b/src/boost/tools/build/src/build/virtual_target.py
new file mode 100644
index 000000000..8dfd9fbf0
--- /dev/null
+++ b/src/boost/tools/build/src/build/virtual_target.py
@@ -0,0 +1,1175 @@
+# Status: ported.
+# Base revision: 64488.
+#
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# Implements virtual targets, which correspond to actual files created during a
+# build, but are not yet targets in the Jam sense. They are needed, for example,
+# when searching for possible transformation sequences, when it is not yet known
+# whether a particular target should be created at all.
+#
+#
+# +--------------------------+
+# | VirtualTarget |
+# +==========================+
+# | actualize |
+# +--------------------------+
+# | actualize_action() = 0 |
+# | actualize_location() = 0 |
+# +----------------+---------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# +---------------------+ +-------+--------------+
+# | Action | | AbstractFileTarget |
+# +=====================| * +======================+
+# | action_name | +--+ action |
+# | properties | | +----------------------+
+# +---------------------+--+ | actualize_action() |
+# | actualize() |0..1 +-----------+----------+
+# | path() | |
+# | adjust_properties() | sources |
+# | actualize_sources() | targets |
+# +------+--------------+ ^
+# | / \
+# ^ +-+-+
+# / \ |
+# +-+-+ +-------------+-------------+
+# | | |
+# | +------+---------------+ +--------+-------------+
+# | | FileTarget | | SearchedLibTarget |
+# | +======================+ +======================+
+# | | actualize-location() | | actualize-location() |
+# | +----------------------+ +----------------------+
+# |
+# +-+------------------------------+
+# | |
+# +----+----------------+ +---------+-----------+
+# | CompileAction | | LinkAction |
+# +=====================+ +=====================+
+# | adjust_properties() | | adjust_properties() |
+# +---------------------+ | actualize_sources() |
+# +---------------------+
+#
+# The 'CompileAction' and 'LinkAction' classes are defined not here,
+# but in builtin.jam modules. They are shown in the diagram to give
+# the big picture.
+
+import bjam
+
+import re
+import os.path
+import string
+import types
+
+from b2.util import path, utility, set, is_iterable_typed
+from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
+from b2.util.sequence import unique
+from b2.tools import common
+from b2.exceptions import *
+import b2.build.type
+import b2.build.property_set as property_set
+
+import b2.build.property as property
+
+from b2.manager import get_manager
+from b2.util import bjam_signature
+
+__re_starts_with_at = re.compile ('^@(.*)')
+
+class VirtualTargetRegistry:
+ def __init__ (self, manager):
+ self.manager_ = manager
+
+ # A cache for FileTargets
+ self.files_ = {}
+
+ # A cache for targets.
+ self.cache_ = {}
+
+ # A map of actual names to virtual targets.
+ # Used to make sure we don't associate same
+ # actual target to two virtual targets.
+ self.actual_ = {}
+
+ self.recent_targets_ = []
+
+ # All targets ever registered
+ self.all_targets_ = []
+
+ self.next_id_ = 0
+
+ def register (self, target):
+ """ Registers a new virtual target. Checks if there's already registered target, with the same
+ name, type, project and subvariant properties, and also with the same sources
+ and equal action. If such target is found it is returned and 'target' is not registered.
+ Otherwise, 'target' is registered and returned.
+ """
+ assert isinstance(target, VirtualTarget)
+ if target.path():
+ signature = target.path() + "-" + target.name()
+ else:
+ signature = "-" + target.name()
+
+ result = None
+ if signature not in self.cache_:
+ self.cache_ [signature] = []
+
+ for t in self.cache_ [signature]:
+ a1 = t.action ()
+ a2 = target.action ()
+
+ # TODO: why are we checking for not result?
+ if not result:
+ if not a1 and not a2:
+ result = t
+ else:
+ if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources ():
+ ps1 = a1.properties ()
+ ps2 = a2.properties ()
+ p1 = ps1.base () + ps1.free () +\
+ b2.util.set.difference(ps1.dependency(), ps1.incidental())
+ p2 = ps2.base () + ps2.free () +\
+ b2.util.set.difference(ps2.dependency(), ps2.incidental())
+ if p1 == p2:
+ result = t
+
+ if not result:
+ self.cache_ [signature].append (target)
+ result = target
+
+ # TODO: Don't append if we found pre-existing target?
+ self.recent_targets_.append(result)
+ self.all_targets_.append(result)
+
+ return result
+
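The deduplication performed by register() can be pictured with a stripped-down sketch; the toy dictionaries below stand in for real VirtualTarget and Action instances and are not part of the b2 API.

# Hypothetical, stripped-down registry: targets are grouped by a path/name
# signature and an existing target is reused when its action compares equal.
class ToyRegistry:
    def __init__(self):
        self.cache = {}

    def register(self, target):
        signature = (target['path'], target['name'])
        for existing in self.cache.setdefault(signature, []):
            if existing['action'] == target['action']:
                return existing          # reuse the previously registered target
        self.cache[signature].append(target)
        return target

reg = ToyRegistry()
a = {'path': 'bin', 'name': 'hello.o', 'action': ('gcc.compile', ('hello.cpp',))}
b = dict(a)
assert reg.register(a) is a
assert reg.register(b) is a              # the duplicate is not registered again
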
+ def from_file (self, file, file_location, project):
+ """ Creates a virtual target with appropriate name and type from 'file'.
+ If a target with that name in that project was already created, returns that already
+ created target.
+ TODO: more correct way would be to compute path to the file, based on name and source location
+ for the project, and use that path to determine if the target was already created.
+ TODO: passing project with all virtual targets starts to be annoying.
+ """
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(file, basestring)
+ assert isinstance(file_location, basestring)
+ assert isinstance(project, ProjectTarget)
+ # Check if we've created a target corresponding to this file.
+ path = os.path.join(os.getcwd(), file_location, file)
+ path = os.path.normpath(path)
+
+ if path in self.files_:
+ return self.files_ [path]
+
+ file_type = b2.build.type.type (file)
+
+ result = FileTarget (file, file_type, project,
+ None, file_location)
+ self.files_ [path] = result
+
+ return result
+
+ def recent_targets(self):
+ """Each target returned by 'register' is added to a list of
+ 'recent-target', returned by this function. So, this allows
+ us to find all targets created when building a given main
+ target, even if the target."""
+
+ return self.recent_targets_
+
+ def clear_recent_targets(self):
+ self.recent_targets_ = []
+
+ def all_targets(self):
+ # Returns all virtual targets ever created
+ return self.all_targets_
+
+ # Returns all targets from 'targets' with types
+ # equal to 'type' or derived from it.
+ def select_by_type(self, type, targets):
+        return [t for t in targets if b2.build.type.is_subtype(t.type(), type)]
+
+ def register_actual_name (self, actual_name, virtual_target):
+ assert isinstance(actual_name, basestring)
+ assert isinstance(virtual_target, VirtualTarget)
+ if actual_name in self.actual_:
+ cs1 = self.actual_ [actual_name].creating_subvariant ()
+ cs2 = virtual_target.creating_subvariant ()
+ cmt1 = cs1.main_target ()
+ cmt2 = cs2.main_target ()
+
+ action1 = self.actual_ [actual_name].action ()
+ action2 = virtual_target.action ()
+
+ properties_added = []
+ properties_removed = []
+ if action1 and action2:
+ p1 = action1.properties ()
+ p1 = p1.raw ()
+ p2 = action2.properties ()
+ p2 = p2.raw ()
+
+ properties_removed = set.difference (p1, p2)
+ if not properties_removed:
+ properties_removed = ["none"]
+
+ properties_added = set.difference (p2, p1)
+ if not properties_added:
+ properties_added = ["none"]
+
+ # FIXME: Revive printing of real location.
+ get_manager().errors()(
+ "Duplicate name of actual target: '%s'\n"
+ "previous virtual target '%s'\n"
+ "created from '%s'\n"
+ "another virtual target '%s'\n"
+ "created from '%s'\n"
+ "added properties:\n%s\n"
+ "removed properties:\n%s\n"
+ % (actual_name,
+ self.actual_ [actual_name], cmt1.project().location(),
+ virtual_target,
+ cmt2.project().location(),
+ '\n'.join('\t' + p for p in properties_added),
+ '\n'.join('\t' + p for p in properties_removed)))
+
+ else:
+ self.actual_ [actual_name] = virtual_target
+
+
+ def add_suffix (self, specified_name, file_type, prop_set):
+ """ Appends the suffix appropriate to 'type/property_set' combination
+ to the specified name and returns the result.
+ """
+ assert isinstance(specified_name, basestring)
+ assert isinstance(file_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
+
+ if suffix:
+ return specified_name + '.' + suffix
+
+ else:
+ return specified_name
+
+class VirtualTarget:
+ """ Potential target. It can be converted into jam target and used in
+ building, if needed. However, it can be also dropped, which allows
+ to search for different transformation and select only one.
+ name: name of this target.
+ project: project to which this target belongs.
+ """
+ def __init__ (self, name, project):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(name, basestring)
+ assert isinstance(project, ProjectTarget)
+ self.name_ = name
+ self.project_ = project
+ self.dependencies_ = []
+ self.always_ = False
+
+        # Caches whether dependencies for scanners have already been set.
+ self.made_ = {}
+
+ def manager(self):
+ return self.project_.manager()
+
+ def virtual_targets(self):
+ return self.manager().virtual_targets()
+
+ def name (self):
+ """ Name of this target.
+ """
+ return self.name_
+
+ def project (self):
+ """ Project of this target.
+ """
+ return self.project_
+
+ def depends (self, d):
+ """ Adds additional instances of 'VirtualTarget' that this
+ one depends on.
+ """
+        # Keep the dependency list unique and sorted.
+        self.dependencies_ = sorted(unique(self.dependencies_ + d))
+
+ def dependencies (self):
+ return self.dependencies_
+
+ def always(self):
+ self.always_ = True
+
+ def actualize (self, scanner = None):
+ """ Generates all the actual targets and sets up build actions for
+ this target.
+
+ If 'scanner' is specified, creates an additional target
+ with the same location as actual target, which will depend on the
+ actual target and be associated with 'scanner'. That additional
+ target is returned. See the docs (#dependency_scanning) for rationale.
+ Target must correspond to a file if 'scanner' is specified.
+
+ If scanner is not specified, then actual target is returned.
+ """
+ if __debug__:
+ from .scanner import Scanner
+ assert scanner is None or isinstance(scanner, Scanner)
+ actual_name = self.actualize_no_scanner ()
+
+ if self.always_:
+ bjam.call("ALWAYS", actual_name)
+
+ if not scanner:
+ return actual_name
+
+ else:
+ # Add the scanner instance to the grist for name.
+ g = '-'.join ([ungrist(get_grist(actual_name)), str(id(scanner))])
+
+ name = replace_grist (actual_name, '<' + g + '>')
+
+ if name not in self.made_:
+ self.made_ [name] = True
+
+ self.project_.manager ().engine ().add_dependency (name, actual_name)
+
+ self.actualize_location (name)
+
+ self.project_.manager ().scanners ().install (scanner, name, str (self))
+
+ return name
+
+# private: (overridables)
+
+ def actualize_action (self, target):
+ """ Sets up build actions for 'target'. Should call appropriate rules
+ and set target variables.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def actualize_location (self, target):
+ """ Sets up variables on 'target' which specify its location.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def path (self):
+ """ If the target is generated one, returns the path where it will be
+ generated. Otherwise, returns empty list.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def actual_name (self):
+ """ Return that actual target name that should be used
+ (for the case where no scanner is involved)
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+
+class AbstractFileTarget (VirtualTarget):
+ """ Target which correspond to a file. The exact mapping for file
+ is not yet specified in this class. (TODO: Actually, the class name
+ could be better...)
+
+ May be a source file (when no action is specified), or
+ derived file (otherwise).
+
+ The target's grist is concatenation of project's location,
+ properties of action (for derived files), and, optionally,
+ value identifying the main target.
+
+ exact: If non-empty, the name is exactly the name
+ created file should have. Otherwise, the '__init__'
+ method will add suffix obtained from 'type' by
+ calling 'type.generated-target-suffix'.
+
+ type: optional type of this target.
+ """
+ def __init__ (self, name, type, project, action = None, exact=False):
+ assert isinstance(type, basestring) or type is None
+ assert action is None or isinstance(action, Action)
+ assert isinstance(exact, (int, bool))
+ VirtualTarget.__init__ (self, name, project)
+
+ self.type_ = type
+
+ self.action_ = action
+ self.exact_ = exact
+
+ if action:
+ action.add_targets ([self])
+
+        if self.type_ and not exact:
+ self.__adjust_name (name)
+
+
+ self.actual_name_ = None
+ self.path_ = None
+ self.intermediate_ = False
+ self.creating_subvariant_ = None
+
+ # True if this is a root target.
+ self.root_ = False
+
+ def type (self):
+ return self.type_
+
+ def set_path (self, path):
+ """ Sets the path. When generating target name, it will override any path
+ computation from properties.
+ """
+ assert isinstance(path, basestring)
+ self.path_ = os.path.normpath(path)
+
+ def action (self):
+ """ Returns the action.
+ """
+ return self.action_
+
+ def root (self, set = None):
+ """ Sets/gets the 'root' flag. Target is root is it directly correspods to some
+ variant of a main target.
+ """
+ assert isinstance(set, (int, bool, type(None)))
+ if set:
+ self.root_ = True
+ return self.root_
+
+ def creating_subvariant (self, s = None):
+ """ Gets or sets the subvariant which created this target. Subvariant
+ is set when target is brought into existence, and is never changed
+ after that. In particual, if target is shared by subvariant, only
+ the first is stored.
+ s: If specified, specified the value to set,
+ which should be instance of 'subvariant' class.
+ """
+ assert s is None or isinstance(s, Subvariant)
+ if s and not self.creating_subvariant ():
+ if self.creating_subvariant ():
+ raise BaseException ("Attempt to change 'dg'")
+
+ else:
+ self.creating_subvariant_ = s
+
+ return self.creating_subvariant_
+
+ def actualize_action (self, target):
+ assert isinstance(target, basestring)
+ if self.action_:
+ self.action_.actualize ()
+
+ # Return a human-readable representation of this target
+ #
+ # If this target has an action, that's:
+ #
+ # { <action-name>-<self.name>.<self.type> <action-sources>... }
+ #
+ # otherwise, it's:
+ #
+ # { <self.name>.<self.type> }
+ #
+ def str(self):
+ a = self.action()
+
+ name_dot_type = self.name_ + "." + self.type_
+
+ if a:
+ action_name = a.action_name()
+ ss = [ s.str() for s in a.sources()]
+
+ return "{ %s-%s %s}" % (action_name, name_dot_type, str(ss))
+ else:
+ return "{ " + name_dot_type + " }"
+
+# private:
+
+ def actual_name (self):
+ if not self.actual_name_:
+ self.actual_name_ = '<' + self.grist() + '>' + os.path.normpath(self.name_)
+
+ return self.actual_name_
+
+ def grist (self):
+ """Helper to 'actual_name', above. Compute unique prefix used to distinguish
+ this target from other targets with the same name which create different
+ file.
+ """
+ # Depending on target, there may be different approaches to generating
+ # unique prefixes. We'll generate prefixes in the form
+ # <one letter approach code> <the actual prefix>
+ path = self.path ()
+
+ if path:
+ # The target will be generated to a known path. Just use the path
+ # for identification, since path is as unique as it can get.
+ return 'p' + path
+
+ else:
+            # The file is either a source, which will be searched for, or not a
+            # file at all. Use the project location to distinguish it.
+ project_location = self.project_.get ('location')
+ path_components = b2.util.path.split(project_location)
+ location_grist = '!'.join (path_components)
+
+ if self.action_:
+ ps = self.action_.properties ()
+ property_grist = ps.as_path ()
+ # 'property_grist' can be empty when 'ps' is an empty
+ # property set.
+ if property_grist:
+ location_grist = location_grist + '/' + property_grist
+
+ return 'l' + location_grist
+
+ def __adjust_name(self, specified_name):
+        """Given the target name specified in the constructor, returns the
+        name which should really be used, by looking at the <tag> properties.
+        The tag properties come in two flavours:
+          - <tag>value,
+          - <tag>@rule-name
+        In the first case, the value is just added to the name.
+        In the second case, the specified rule is called with the specified
+        name, target type and properties and should return the new name.
+        If no <tag> property is specified, or the rule specified by
+        <tag> returns nothing, returns the result of calling
+        'add_prefix_and_suffix'."""
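+        # Illustrative sketch (hypothetical rule, not part of the original
+        # sources): a callable registered through <tag> could look like
+        #
+        #     def my_tag(specified_name, type, prop_set):
+        #         if type == "SHARED_LIB":
+        #             return "my-" + specified_name
+        #         return None   # fall back to the default naming
+        #
+        # Returning None (or an empty value) makes the code below fall back to
+        # add_prefix_and_suffix().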
+ assert isinstance(specified_name, basestring)
+ if self.action_:
+ ps = self.action_.properties()
+ else:
+ ps = property_set.empty()
+
+ # FIXME: I'm not sure how this is used, need to check with
+ # Rene to figure out how to implement
+ #~ We add ourselves to the properties so that any tag rule can get
+ #~ more direct information about the target than just that available
+ #~ through the properties. This is useful in implementing
+ #~ name changes based on the sources of the target. For example to
+ #~ make unique names of object files based on the source file.
+ #~ --grafik
+ #ps = property_set.create(ps.raw() + ["<target>%s" % "XXXX"])
+ #ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
+
+ tag = ps.get("<tag>")
+
+ if tag:
+
+ if len(tag) > 1:
+ get_manager().errors()(
+ """<tag>@rulename is present but is not the only <tag> feature""")
+
+ tag = tag[0]
+ if callable(tag):
+ self.name_ = tag(specified_name, self.type_, ps)
+ else:
+ if not tag[0] == '@':
+                    self.manager_.errors()("""The value of the <tag> feature must be '@rule-name'""")
+
+ exported_ps = b2.util.value_to_jam(ps, methods=True)
+ self.name_ = b2.util.call_jam_function(
+ tag[1:], specified_name, self.type_, exported_ps)
+ if self.name_:
+ self.name_ = self.name_[0]
+
+ # If there's no tag or the tag rule returned nothing.
+ if not tag or not self.name_:
+ self.name_ = add_prefix_and_suffix(specified_name, self.type_, ps)
+
+ def actualize_no_scanner(self):
+ name = self.actual_name()
+
+ # Do anything only on the first invocation
+ if not self.made_:
+ self.made_[name] = True
+
+ if self.action_:
+ # For non-derived target, we don't care if there
+ # are several virtual targets that refer to the same name.
+ # One case when this is unavoidable is when file name is
+ # main.cpp and two targets have types CPP (for compiling)
+ # and MOCCABLE_CPP (for conversion to H via Qt tools).
+ self.virtual_targets().register_actual_name(name, self)
+
+ for i in self.dependencies_:
+ self.manager_.engine().add_dependency(name, i.actualize())
+
+ self.actualize_location(name)
+ self.actualize_action(name)
+
+ return name
+
+@bjam_signature((["specified_name"], ["type"], ["property_set"]))
+def add_prefix_and_suffix(specified_name, type, property_set):
+    """Appends the prefix and suffix appropriate to the 'type'/'property_set'
+    combination to the specified name and returns the result."""
+
+ property_set = b2.util.jam_to_value_maybe(property_set)
+
+ suffix = ""
+ if type:
+ suffix = b2.build.type.generated_target_suffix(type, property_set)
+
+ # Handle suffixes for which no leading dot is desired. Those are
+ # specified by enclosing them in <...>. Needed by python so it
+ # can create "_d.so" extensions, for example.
+ if get_grist(suffix):
+ suffix = ungrist(suffix)
+ elif suffix:
+ suffix = "." + suffix
+
+ prefix = ""
+ if type:
+ prefix = b2.build.type.generated_target_prefix(type, property_set)
+
+ if specified_name.startswith(prefix):
+ prefix = ""
+
+ if not prefix:
+ prefix = ""
+ if not suffix:
+ suffix = ""
+ return prefix + specified_name + suffix
+
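+# Illustrative behaviour (hypothetical type registrations, not part of the
+# original sources): with EXE registered to use the suffix "exe" and LIB to
+# use the prefix "lib" and suffix "a", one would get roughly
+#
+#     add_prefix_and_suffix("hello", "EXE", ps)  # -> "hello.exe"
+#     add_prefix_and_suffix("hello", "LIB", ps)  # -> "libhello.a"
+#
+# A suffix registered in angle brackets, e.g. "<_d.so>", is attached without
+# the leading dot, giving "ext_d.so" for the name "ext".
+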
+
+class FileTarget (AbstractFileTarget):
+ """ File target with explicitly known location.
+
+    The file path is determined as
+        - the value passed to the 'set_path' method, if any;
+        - for derived files, the project's build dir, joined with components
+          that describe the action's properties. If the free properties
+          are not equal to the project's reference properties,
+          an element with the name of the main target is added;
+        - for source files, the project's source dir.
+
+    The file suffix is
+        - the value passed to the 'suffix' method, if any, or
+        - the suffix which corresponds to the target's type.
+ """
+ def __init__ (self, name, type, project, action = None, path=None, exact=False):
+ assert isinstance(type, basestring) or type is None
+ assert action is None or isinstance(action, Action)
+ assert isinstance(exact, (int, bool))
+ AbstractFileTarget.__init__ (self, name, type, project, action, exact)
+
+ self.path_ = path
+
+ def __str__(self):
+ if self.type_:
+ return self.name_ + "." + self.type_
+ else:
+ return self.name_
+
+ def clone_with_different_type(self, new_type):
+ assert isinstance(new_type, basestring)
+ return FileTarget(self.name_, new_type, self.project_,
+ self.action_, self.path_, exact=True)
+
+ def actualize_location (self, target):
+ assert isinstance(target, basestring)
+ engine = self.project_.manager_.engine ()
+
+ if self.action_:
+ # This is a derived file.
+ path = self.path ()
+ engine.set_target_variable (target, 'LOCATE', path)
+
+ # Make sure the path exists.
+ engine.add_dependency (target, path)
+ common.mkdir(engine, path)
+
+ # It's possible that the target name includes a directory
+ # too, for example when installing headers. Create that
+ # directory.
+ d = os.path.dirname(get_value(target))
+ if d:
+ d = os.path.join(path, d)
+ engine.add_dependency(target, d)
+ common.mkdir(engine, d)
+
+            # For a real file target, we create a fake target that
+            # depends on the real target. This allows us to run
+            #
+            #    bjam hello.o
+            #
+            # without trying to guess the name of the real target.
+            # Note that the fake target has no directory name and a special
+            # grist <e>.
+            #
+            # First, this means that "bjam hello.o" will build all
+            # known hello.o targets.
+            # Second, the <e> grist makes sure this target won't be confused
+            # with other targets. For example, if we have a subdir 'test'
+            # with a target 'test' in it that includes a 'test.o' file,
+            # then the target for the directory will be just 'test', the
+            # target for test.o will be <ptest/bin/gcc/debug>test.o, and the
+            # target we create below will be <e>test.o.
+ engine.add_dependency("<e>%s" % get_value(target), target)
+
+            # Allow bjam <path-to-file>/<file> to work. This won't catch all
+            # possible ways to refer to the path (relative/absolute, extra ".",
+            # various ".."), but should help in obvious cases.
+ engine.add_dependency("<e>%s" % (os.path.join(path, get_value(target))), target)
+
+ else:
+ # This is a source file.
+ engine.set_target_variable (target, 'SEARCH', self.project_.get ('source-location'))
+
+
+ def path (self):
+ """ Returns the directory for this target.
+ """
+ if not self.path_:
+ if self.action_:
+ p = self.action_.properties ()
+ (target_path, relative_to_build_dir) = p.target_path ()
+
+ if relative_to_build_dir:
+ # Indicates that the path is relative to
+ # build dir.
+ target_path = os.path.join (self.project_.build_dir (), target_path)
+
+ # Store the computed path, so that it's not recomputed
+ # any more
+ self.path_ = target_path
+
+ return os.path.normpath(self.path_)
+
+
+class NotFileTarget(AbstractFileTarget):
+
+ def __init__(self, name, project, action):
+ assert isinstance(action, Action)
+ AbstractFileTarget.__init__(self, name, None, project, action)
+
+ def path(self):
+        """Returns None, to indicate that the target path is not known."""
+ return None
+
+ def actualize_location(self, target):
+ assert isinstance(target, basestring)
+ bjam.call("NOTFILE", target)
+ bjam.call("ALWAYS", target)
+ bjam.call("NOUPDATE", target)
+
+
+class Action:
+ """ Class which represents an action.
+        Both 'targets' and 'sources' should list instances of 'VirtualTarget'.
+        The action name should name a rule with this prototype:
+            rule action_name ( targets + : sources * : properties * )
+        Targets and sources are passed as actual jam targets. The rule is not
+        expected to establish dependency relationships, but should do
+        everything else.
+ """
+ def __init__ (self, manager, sources, action_name, prop_set):
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(action_name, basestring) or action_name is None
+ assert(isinstance(prop_set, property_set.PropertySet))
+ self.sources_ = sources
+ self.action_name_ = action_name
+ if not prop_set:
+ prop_set = property_set.empty()
+ self.properties_ = prop_set
+        assert all(isinstance(v, VirtualTarget)
+                   for v in prop_set.get('implicit-dependency'))
+
+ self.manager_ = manager
+ self.engine_ = self.manager_.engine ()
+ self.targets_ = []
+
+ # Indicates whether this has been actualized or not.
+ self.actualized_ = False
+
+ self.dependency_only_sources_ = []
+ self.actual_sources_ = []
+
+
+ def add_targets (self, targets):
+ assert is_iterable_typed(targets, VirtualTarget)
+ self.targets_ += targets
+
+
+ def replace_targets(self, old_targets, new_targets):
+ assert is_iterable_typed(old_targets, VirtualTarget)
+ assert is_iterable_typed(new_targets, VirtualTarget)
+ self.targets_ = [t for t in self.targets_ if not t in old_targets] + new_targets
+
+ def targets (self):
+ return self.targets_
+
+ def sources (self):
+ return self.sources_
+
+ def action_name (self):
+ return self.action_name_
+
+ def properties (self):
+ return self.properties_
+
+ def actualize (self):
+ """ Generates actual build instructions.
+ """
+ if self.actualized_:
+ return
+
+ self.actualized_ = True
+
+ ps = self.properties ()
+ properties = self.adjust_properties (ps)
+
+
+ actual_targets = []
+
+ for i in self.targets ():
+ actual_targets.append (i.actualize ())
+
+ self.actualize_sources (self.sources (), properties)
+
+ self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_)
+
+ # FIXME: check the comment below. Was self.action_name_ [1]
+ # Action name can include additional rule arguments, which should not
+ # be passed to 'set-target-variables'.
+ # FIXME: breaking circular dependency
+ import toolset
+ toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties)
+
+ engine = self.manager_.engine ()
+
+        # FIXME: this is supposed to help the --out-xml option, but we don't
+        # implement that now, and anyway, we should handle it in Python,
+        # not by putting variables on bjam-level targets.
+ bjam.call("set-target-variable", actual_targets, ".action", repr(self))
+
+ self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_,
+ properties)
+
+        # Since we set up the creating action here, we also set up
+        # the action for cleaning up.
+ self.manager_.engine ().set_update_action ('common.Clean', 'clean-all',
+ actual_targets)
+
+ return actual_targets
+
+ def actualize_source_type (self, sources, prop_set):
+ """ Helper for 'actualize_sources'.
+ For each passed source, actualizes it with the appropriate scanner.
+ Returns the actualized virtual targets.
+ """
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ result = []
+ for i in sources:
+ scanner = None
+
+# FIXME: what's this?
+# if isinstance (i, str):
+# i = self.manager_.get_object (i)
+
+ if i.type ():
+ scanner = b2.build.type.get_scanner (i.type (), prop_set)
+
+ r = i.actualize (scanner)
+ result.append (r)
+
+ return result
+
+ def actualize_sources (self, sources, prop_set):
+ """ Creates actual jam targets for sources. Initializes two member
+ variables:
+ 'self.actual_sources_' -- sources which are passed to updating action
+ 'self.dependency_only_sources_' -- sources which are made dependencies, but
+ are not used otherwise.
+
+ New values will be *appended* to the variables. They may be non-empty,
+ if caller wants it.
+ """
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ dependencies = self.properties_.get ('<dependency>')
+
+ self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
+ self.actual_sources_ += self.actualize_source_type (sources, prop_set)
+
+ # This is used to help bjam find dependencies in generated headers
+ # in other main targets.
+ # Say:
+ #
+ # make a.h : ....... ;
+ # exe hello : hello.cpp : <implicit-dependency>a.h ;
+ #
+ # However, for bjam to find the dependency the generated target must
+ # be actualized (i.e. have the jam target). In the above case,
+ # if we're building just hello ("bjam hello"), 'a.h' won't be
+ # actualized unless we do it here.
+ implicit = self.properties_.get("<implicit-dependency>")
+
+ for i in implicit:
+ i.actualize()
+
+ def adjust_properties (self, prop_set):
+        """ Determines the real properties to use when building with 'prop_set'.
+            This is the last chance to fix properties, for example to adjust
+            includes so that generated headers are found correctly. The default
+            implementation returns its argument.
+ """
+ assert isinstance(prop_set, property_set.PropertySet)
+ return prop_set
+
+
+class NullAction (Action):
+ """ Action class which does nothing --- it produces the targets with
+ specific properties out of nowhere. It's needed to distinguish virtual
+ targets with different properties that are known to exist, and have no
+ actions which create them.
+ """
+ def __init__ (self, manager, prop_set):
+ assert isinstance(prop_set, property_set.PropertySet)
+ Action.__init__ (self, manager, [], None, prop_set)
+
+ def actualize (self):
+ if not self.actualized_:
+ self.actualized_ = True
+
+ for i in self.targets ():
+ i.actualize ()
+
+class NonScanningAction(Action):
+ """Class which acts exactly like 'action', except that the sources
+ are not scanned for dependencies."""
+
+ def __init__(self, sources, action_name, property_set):
+ #FIXME: should the manager parameter of Action.__init__
+ #be removed? -- Steven Watanabe
+ Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
+
+ def actualize_source_type(self, sources, ps=None):
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(ps, property_set.PropertySet) or ps is None
+ result = []
+ for s in sources:
+ result.append(s.actualize())
+ return result
+
+def traverse (target, include_roots = False, include_sources = False):
+    """ Traverses the dependency graph of 'target' and returns all targets that
+        will be created before this one is created. If the root of some dependency
+        graph is found during traversal, it is either included or not, depending
+        on the value of 'include_roots'. In either case, the root's sources are
+        not traversed.
+ """
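+    # Sketch of the traversal (hypothetical dependency graph, not part of the
+    # original sources): for
+    #
+    #     hello (EXE) <- action <- hello.obj (OBJ) <- action <- hello.cpp
+    #
+    # traverse(hello_exe) returns [hello_exe, hello_obj]; hello.cpp is added
+    # only when include_sources is true, and roots met along the way are
+    # added only when include_roots is true.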
+ assert isinstance(target, VirtualTarget)
+ assert isinstance(include_roots, (int, bool))
+ assert isinstance(include_sources, (int, bool))
+ result = []
+
+ if target.action ():
+ action = target.action ()
+
+ # This includes 'target' as well
+ result += action.targets ()
+
+ for t in action.sources ():
+
+ # FIXME:
+ # TODO: see comment in Manager.register_object ()
+ #if not isinstance (t, VirtualTarget):
+ # t = target.project_.manager_.get_object (t)
+
+ if not t.root ():
+ result += traverse (t, include_roots, include_sources)
+
+ elif include_roots:
+ result.append (t)
+
+ elif include_sources:
+ result.append (target)
+
+ return result
+
+def clone_action (action, new_project, new_action_name, new_properties):
+    """Takes an 'action' instance and creates a new instance of it
+    and of all produced targets. The rule name and properties are set
+    to 'new_action_name' and 'new_properties', if those are specified.
+    Returns the cloned action."""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(action, Action)
+ assert isinstance(new_project, ProjectTarget)
+ assert isinstance(new_action_name, basestring)
+ assert isinstance(new_properties, property_set.PropertySet)
+ if not new_action_name:
+ new_action_name = action.action_name()
+
+ if not new_properties:
+ new_properties = action.properties()
+
+ cloned_action = action.__class__(action.manager_, action.sources(), new_action_name,
+ new_properties)
+
+ cloned_targets = []
+ for target in action.targets():
+
+ n = target.name()
+ # Don't modify the name of the produced targets. Strip the directory f
+ cloned_target = FileTarget(n, target.type(), new_project,
+ cloned_action, exact=True)
+
+ d = target.dependencies()
+ if d:
+ cloned_target.depends(d)
+ cloned_target.root(target.root())
+ cloned_target.creating_subvariant(target.creating_subvariant())
+
+ cloned_targets.append(cloned_target)
+
+ return cloned_action
+
+class Subvariant:
+
+ def __init__ (self, main_target, prop_set, sources, build_properties, sources_usage_requirements, created_targets):
+ """
+ main_target: The instance of MainTarget class
+ prop_set: Properties requested for this target
+ sources:
+ build_properties: Actually used properties
+ sources_usage_requirements: Properties propagated from sources
+ created_targets: Top-level created targets
+ """
+ if __debug__:
+ from .targets import AbstractTarget
+ assert isinstance(main_target, AbstractTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(build_properties, property_set.PropertySet)
+ assert isinstance(sources_usage_requirements, property_set.PropertySet)
+ assert is_iterable_typed(created_targets, VirtualTarget)
+ self.main_target_ = main_target
+ self.properties_ = prop_set
+ self.sources_ = sources
+ self.build_properties_ = build_properties
+ self.sources_usage_requirements_ = sources_usage_requirements
+ self.created_targets_ = created_targets
+
+ self.usage_requirements_ = None
+
+ # Pre-compose the list of other dependency graphs, on which this one
+ # depends
+ deps = build_properties.get('<implicit-dependency>')
+
+ self.other_dg_ = []
+ for d in deps:
+ self.other_dg_.append(d.creating_subvariant ())
+
+ self.other_dg_ = unique (self.other_dg_)
+
+ self.implicit_includes_cache_ = {}
+ self.target_directories_ = None
+
+ def main_target (self):
+ return self.main_target_
+
+ def created_targets (self):
+ return self.created_targets_
+
+ def requested_properties (self):
+ return self.properties_
+
+ def build_properties (self):
+ return self.build_properties_
+
+ def sources_usage_requirements (self):
+ return self.sources_usage_requirements_
+
+ def set_usage_requirements (self, usage_requirements):
+ assert isinstance(usage_requirements, property_set.PropertySet)
+ self.usage_requirements_ = usage_requirements
+
+ def usage_requirements (self):
+ return self.usage_requirements_
+
+ def all_referenced_targets(self, result):
+        """Adds to 'result' all targets referenced by this subvariant,
+        either directly or indirectly, either as sources
+        or as dependency properties. Targets referred to via a
+        dependency property are returned as properties, not targets."""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(result, (VirtualTarget, Property))
+ # Find directly referenced targets.
+ deps = self.build_properties().dependency()
+ all_targets = self.sources_ + deps
+
+ # Find other subvariants.
+ r = []
+ for e in all_targets:
+ if not e in result:
+ result.add(e)
+ if isinstance(e, property.Property):
+ t = e.value
+ else:
+ t = e
+
+ # FIXME: how can this be?
+ cs = t.creating_subvariant()
+ if cs:
+ r.append(cs)
+ r = unique(r)
+ for s in r:
+ if s != self:
+ s.all_referenced_targets(result)
+
+
+ def implicit_includes (self, feature, target_type):
+        """ Returns the properties which specify implicit include paths to
+            generated headers. This traverses all targets in this subvariant
+            and subvariants referred to by <implicit-dependency> properties.
+            For all targets which are of type 'target_type' (or for all targets,
+            if 'target_type' is not specified), the result will contain
+            <$(feature)>path-to-that-target.
+ """
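+        # Illustrative result (hypothetical paths, not part of the original
+        # sources): if this subvariant created a generated header in
+        # "bin/gcc/debug", implicit_includes("include", "H") would return
+        # something like ["<include>bin/gcc/debug"].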
+ assert isinstance(feature, basestring)
+        assert isinstance(target_type, (basestring, type(None)))
+ if not target_type:
+ key = feature
+ else:
+ key = feature + "-" + target_type
+
+
+ result = self.implicit_includes_cache_.get(key)
+ if not result:
+ target_paths = self.all_target_directories(target_type)
+ target_paths = unique(target_paths)
+ result = ["<%s>%s" % (feature, p) for p in target_paths]
+ self.implicit_includes_cache_[key] = result
+
+ return result
+
+ def all_target_directories(self, target_type = None):
+ assert isinstance(target_type, (basestring, type(None)))
+ # TODO: does not appear to use target_type in deciding
+ # if we've computed this already.
+ if not self.target_directories_:
+ self.target_directories_ = self.compute_target_directories(target_type)
+ return self.target_directories_
+
+ def compute_target_directories(self, target_type=None):
+ assert isinstance(target_type, (basestring, type(None)))
+ result = []
+ for t in self.created_targets():
+ if not target_type or b2.build.type.is_derived(t.type(), target_type):
+ result.append(t.path())
+
+ for d in self.other_dg_:
+ result.extend(d.all_target_directories(target_type))
+
+ result = unique(result)
+ return result
diff --git a/src/boost/tools/build/src/build_system.py b/src/boost/tools/build/src/build_system.py
new file mode 100644
index 000000000..babe53eef
--- /dev/null
+++ b/src/boost/tools/build/src/build_system.py
@@ -0,0 +1,682 @@
+# Status: mostly ported. Missing are --out-xml support, 'configure' integration
+# and some FIXMEs.
+# Base revision: 64351
+
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006, 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+import os
+import sys
+import re
+
+import bjam
+
+# set this early on since some of the following modules
+# require looking at the sys.argv
+sys.argv = bjam.variable("ARGV")
+
+
+from b2.build.engine import Engine
+from b2.manager import Manager
+from b2.util.path import glob
+from b2.build import feature, property_set
+import b2.build.virtual_target
+from b2.build.targets import ProjectTarget
+import b2.build.build_request
+from b2.build.errors import ExceptionWithUserContext
+import b2.tools.common
+from b2.build.toolset import using
+
+import b2.build.virtual_target as virtual_target
+import b2.build.build_request as build_request
+
+import b2.util.regex
+
+from b2.manager import get_manager
+from b2.util import cached
+from b2.util import option
+
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+debug_config = False
+
+# Cleaning is tricky. Say the user runs 'bjam --clean foo' where 'foo' is a
+# directory; then we want to clean targets which are in 'foo' as well as those
+# in any child Jamfiles under 'foo', but not those in any unrelated Jamfiles. To
+# achieve this we collect a list of projects under which cleaning is allowed.
+project_targets = []
+
+# Virtual targets obtained when building the main targets referenced on the
+# command line. When running 'bjam --clean main_target' we want to clean only
+# files belonging to that main target, so we need to record which targets are
+# produced for it.
+results_of_main_targets = []
+
+# Was an XML dump requested?
+out_xml = False
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+default_toolset = None
+default_toolset_version = None
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+def command_line_free_features():
+    return get_manager().command_line_free_features()
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+def set_default_toolset(toolset, version=None):
+    global default_toolset, default_toolset_version
+    default_toolset = toolset
+    default_toolset_version = version
+
+
+pre_build_hook = []
+
+def add_pre_build_hook(callable):
+ pre_build_hook.append(callable)
+
+post_build_hook = None
+
+def set_post_build_hook(callable):
+    global post_build_hook
+    post_build_hook = callable
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+def actual_clean_targets(targets):
+
+ # Construct a list of projects explicitly detected as targets on this build
+ # system run. These are the projects under which cleaning is allowed.
+ for t in targets:
+ if isinstance(t, b2.build.targets.ProjectTarget):
+ project_targets.append(t.project_module())
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ targets_to_clean = set()
+ for t in results_of_main_targets:
+ # Do not include roots or sources.
+ targets_to_clean.update(virtual_target.traverse(t))
+
+ to_clean = []
+ for t in get_manager().virtual_targets().all_targets():
+
+ # Remove only derived targets.
+ if t.action():
+ p = t.project()
+ if t in targets_to_clean or should_clean_project(p.project_module()):
+ to_clean.append(t)
+
+ return [t.actualize() for t in to_clean]
+
+_target_id_split = re.compile("(.*)//(.*)")
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can not reuse that code without a
+# project-targets instance.
+#
+def find_target(target_id):
+
+ projects = get_manager().projects()
+ m = _target_id_split.match(target_id)
+ if m:
+ pm = projects.find(m.group(1), ".")
+ else:
+ pm = projects.find(target_id, ".")
+
+ if pm:
+ result = projects.target(pm)
+
+ if m:
+ result = result.find(m.group(2))
+
+ return result
+
+def initialize_config_module(module_name, location=None):
+
+ get_manager().projects().initialize(module_name, location)
+
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' found in 'paths' into the module named
+# 'module_name'. Not finding the requested file may or may not be treated as an
+# error depending on the 'must_find' parameter. Returns a normalized path to the
+# loaded configuration file, or None if no file was loaded.
+#
+def load_config(module_name, filename, paths, must_find=False):
+
+ if debug_config:
+        print "notice: Searching '%s' for '%s' configuration file '%s'." \
+ % (paths, module_name, filename)
+
+ where = None
+ for path in paths:
+ t = os.path.join(path, filename)
+ if os.path.exists(t):
+ where = t
+ break
+
+ if where:
+ where = os.path.realpath(where)
+
+ if debug_config:
+ print "notice: Loading '%s' configuration file '%s' from '%s'." \
+ % (module_name, filename, where)
+
+ # Set source location so that path-constant in config files
+ # with relative paths work. This is of most importance
+ # for project-config.jam, but may be used in other
+ # config files as well.
+        attributes = get_manager().projects().attributes(module_name)
+ attributes.set('source-location', os.path.dirname(where), True)
+ get_manager().projects().load_standalone(module_name, where)
+
+ else:
+        msg = "Configuration file '%s' not found in '%s'." % (filename, paths)
+ if must_find:
+ get_manager().errors()(msg)
+
+ elif debug_config:
+ print msg
+
+ return where
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if
+# specified. If this configuration file is loaded, regular site and user
+# configuration files will not be. If a relative path is specified, file is
+# searched for in the current folder.
+#
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost
+# Build path, in that order. Not loaded in case the test-config configuration
+# file is loaded or the --ignore-site-config command-line option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded or an empty file
+# name is explicitly specified. If the file name has been given explicitly then
+# the file must exist.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
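+# Illustrative invocations (hypothetical paths, not part of the original
+# sources):
+#
+#   bjam --test-config=/tmp/test.jam          # only the test configuration
+#   bjam --ignore-site-config toolset=gcc     # skip site-config.jam
+#   bjam --user-config=my-config.jam          # explicit user configuration
+#   BOOST_BUILD_USER_CONFIG=~/bb.jam bjam     # user configuration via the
+#                                             # environment variable
+#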
+def load_configuration_files():
+
+ # Flag indicating that site configuration should not be loaded.
+ ignore_site_config = "--ignore-site-config" in sys.argv
+
+ initialize_config_module("test-config")
+ test_config = None
+ for a in sys.argv:
+ m = re.match("--test-config=(.*)$", a)
+ if m:
+ test_config = b2.util.unquote(m.group(1))
+ break
+
+ if test_config:
+ where = load_config("test-config", os.path.basename(test_config), [os.path.dirname(test_config)])
+ if where:
+ if debug_config:
+ print "notice: Regular site and user configuration files will"
+ print "notice: be ignored due to the test configuration being loaded."
+
+ user_path = [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH")
+ site_path = ["/etc"] + user_path
+ if os.name in ["nt"]:
+ site_path = [os.getenv("SystemRoot")] + user_path
+
+ if debug_config and not test_config and ignore_site_config:
+ print "notice: Site configuration files will be ignored due to the"
+ print "notice: --ignore-site-config command-line option."
+
+ initialize_config_module("site-config")
+ if not test_config and not ignore_site_config:
+ load_config('site-config', 'site-config.jam', site_path)
+
+ initialize_config_module('user-config')
+ if not test_config:
+
+ # Here, user_config has value of None if nothing is explicitly
+ # specified, and value of '' if user explicitly does not want
+ # to load any user config.
+ user_config = None
+ for a in sys.argv:
+ m = re.match("--user-config=(.*)$", a)
+ if m:
+ user_config = m.group(1)
+ break
+
+ if user_config is None:
+ user_config = os.getenv("BOOST_BUILD_USER_CONFIG")
+
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ user_config = b2.util.unquote(user_config)
+ explicitly_requested = user_config
+
+ if user_config is None:
+ user_config = "user-config.jam"
+
+ if user_config:
+ if explicitly_requested:
+
+ user_config = os.path.abspath(user_config)
+
+ if debug_config:
+ print "notice: Loading explicitly specified user configuration file:"
+ print " " + user_config
+
+ load_config('user-config', os.path.basename(user_config), [os.path.dirname(user_config)], True)
+ else:
+ load_config('user-config', os.path.basename(user_config), user_path)
+ else:
+ if debug_config:
+ print "notice: User configuration file loading explicitly disabled."
+
+    # We look for project-config.jam from "." upward. I am not sure this is a
+    # 100% right decision; we might as well check for it only alongside the
+    # Jamroot file. However:
+    # - We need to load project-config.jam before Jamroot.
+    # - We probably need to load project-config.jam even if there is no Jamroot,
+    #   e.g. to implement automake-style out-of-tree builds.
+ if os.path.exists("project-config.jam"):
+ file = ["project-config.jam"]
+ else:
+ file = b2.util.path.glob_in_parents(".", ["project-config.jam"])
+
+ if file:
+ initialize_config_module('project-config', os.path.dirname(file[0]))
+ load_config('project-config', "project-config.jam", [os.path.dirname(file[0])], True)
+
+ get_manager().projects().end_load()
+
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
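+# Illustrative parse (hypothetical request, not part of the original sources):
+# "--toolset=gcc-9,msvc" yields the entries "gcc-9" and "msvc"; the regex used
+# below then splits "gcc-9" into toolset_version="gcc-9", toolset="gcc" and
+# version="9", while "msvc" gives toolset="msvc" and no version.
+#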
+def process_explicit_toolset_requests():
+
+ extra_properties = []
+
+ option_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^--toolset=(.*)$")
+ for e in option.split(',')]
+ feature_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^toolset=(.*)$")
+ for e in option.split(',')]
+
+ for t in option_toolsets + feature_toolsets:
+
+ # Parse toolset-version/properties.
+ (toolset_version, toolset, version) = re.match("(([^-/]+)-?([^/]+)?)/?.*", t).groups()
+
+ if debug_config:
+ print "notice: [cmdline-cfg] Detected command-line request for '%s': toolset= %s version=%s" \
+ % (toolset_version, toolset, version)
+
+ # If the toolset is not known, configure it now.
+ known = False
+ if toolset in feature.values("toolset"):
+ known = True
+
+ if known and version and not feature.is_subvalue("toolset", toolset, "version", version):
+ known = False
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if not known:
+
+ if debug_config:
+ print "notice: [cmdline-cfg] toolset '%s' not previously configured; attempting to auto-configure now" % toolset_version
+ if version is not None:
+ using(toolset, version)
+ else:
+ using(toolset)
+
+ else:
+
+ if debug_config:
+
+ print "notice: [cmdline-cfg] toolset '%s' already configured" % toolset_version
+
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if not t in sys.argv and not t in feature_toolsets:
+
+            if debug_config:
+                print "notice: [cmdline-cfg] adding toolset=%s to the build request." % t
+            extra_properties.append("toolset=%s" % t)
+
+ return extra_properties
+
+
+
+# Returns True if the given 'project' is equal to, or is a (possibly indirect)
+# child of, any of the projects requested to be cleaned in this build system run.
+# Returns False otherwise. Expects the 'project_targets' list to have already
+# been constructed.
+#
+@cached
+def should_clean_project(project):
+
+ if project in project_targets:
+ return True
+ else:
+
+ parent = get_manager().projects().attribute(project, "parent-module")
+ if parent and parent != "user-config":
+ return should_clean_project(parent)
+ else:
+ return False
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+def main():
+
+ # FIXME: document this option.
+ if "--profiling" in sys.argv:
+ import cProfile
+ r = cProfile.runctx('main_real()', globals(), locals(), "stones.prof")
+
+ import pstats
+ stats = pstats.Stats("stones.prof")
+ stats.strip_dirs()
+ stats.sort_stats('time', 'calls')
+ stats.print_callers(20)
+ return r
+ else:
+ try:
+ return main_real()
+ except ExceptionWithUserContext, e:
+ e.report()
+
+def main_real():
+
+ global debug_config, out_xml
+
+ debug_config = "--debug-configuration" in sys.argv
+ out_xml = any(re.match("^--out-xml=(.*)$", a) for a in sys.argv)
+
+ engine = Engine()
+
+ global_build_dir = option.get("build-dir")
+ manager = Manager(engine, global_build_dir)
+
+ import b2.build.configure as configure
+
+ if "--version" in sys.argv:
+ from b2.build import version
+ version.report()
+ return
+
+ # This module defines types and generator and what not,
+ # and depends on manager's existence
+ import b2.tools.builtin
+
+ b2.tools.common.init(manager)
+
+ load_configuration_files()
+
+ # Load explicitly specified toolset modules.
+ extra_properties = process_explicit_toolset_requests()
+
+ # Load the actual project build script modules. We always load the project
+ # in the current folder so 'use-project' directives have any chance of
+ # being seen. Otherwise, we would not be able to refer to subprojects using
+ # target ids.
+ current_project = None
+ projects = get_manager().projects()
+ if projects.find(".", "."):
+ current_project = projects.target(projects.load("."))
+
+ # Load the default toolset module if no other has already been specified.
+ if not feature.values("toolset"):
+
+ dt = default_toolset
+ dtv = None
+ if default_toolset:
+ dtv = default_toolset_version
+ else:
+ dt = "gcc"
+ if os.name == 'nt':
+ dt = "msvc"
+ # FIXME:
+ #else if [ os.name ] = MACOSX
+ #{
+ # default-toolset = darwin ;
+ #}
+
+ print "warning: No toolsets are configured."
+ print "warning: Configuring default toolset '%s'." % dt
+ print "warning: If the default is wrong, your build may not work correctly."
+ print "warning: Use the \"toolset=xxxxx\" option to override our guess."
+ print "warning: For more configuration options, please consult"
+ print "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
+
+ using(dt, dtv)
+
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ (target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
+
+ # Check that we actually found something to build.
+ if not current_project and not target_ids:
+ get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
+ # FIXME:
+ # EXIT
+
+ # Flags indicating that this build system run has been started in order to
+ # clean existing instead of create new targets. Note that these are not the
+ # final flag values as they may get changed later on due to some special
+ # targets being specified on the command line.
+ clean = "--clean" in sys.argv
+ cleanall = "--clean-all" in sys.argv
+
+ # List of explicitly requested files to build. Any target references read
+ # from the command line parameter not recognized as one of the targets
+ # defined in the loaded Jamfiles will be interpreted as an explicitly
+ # requested file to build. If any such files are explicitly requested then
+ # only those files and the targets they depend on will be built and they
+ # will be searched for among targets that would have been built had there
+ # been no explicitly requested files.
+ explicitly_requested_files = []
+
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ targets = []
+ virtual_targets = []
+ actual_targets = []
+
+ # Process each target specified on the command-line and convert it into
+ # internal Boost Build target objects. Detect special clean target. If no
+ # main Boost Build targets were explicitly requested use the current project
+ # as the target.
+ for id in target_ids:
+ if id == "clean":
+ clean = 1
+ else:
+ t = None
+ if current_project:
+ t = current_project.find(id, no_error=1)
+ else:
+ t = find_target(id)
+
+ if not t:
+ print "notice: could not find main target '%s'" % id
+                print "notice: assuming it is the name of a file to create"
+ explicitly_requested_files.append(id)
+ else:
+ targets.append(t)
+
+ if not targets:
+ targets = [projects.target(projects.module_name("."))]
+
+ # FIXME: put this BACK.
+
+ ## if [ option.get dump-generators : : true ]
+ ## {
+ ## generators.dump ;
+ ## }
+
+
+    # We wish to put config.log in the build directory corresponding
+    # to the Jamroot, so that its location does not differ depending on
+    # the directory where we run the build. The amount of indirection
+    # necessary here is scary.
+ first_project = targets[0].project()
+ first_project_root_location = first_project.get('project-root')
+ first_project_root_module = manager.projects().load(first_project_root_location)
+ first_project_root = manager.projects().target(first_project_root_module)
+ first_build_build_dir = first_project_root.build_dir()
+ configure.set_log_file(os.path.join(first_build_build_dir, "config.log"))
+
+ virtual_targets = []
+
+ global results_of_main_targets
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ # The expansion is being performed as late as possible so that the feature
+ # validation is performed after all necessary modules (including project targets
+ # on the command line) have been loaded.
+ if properties:
+ expanded = []
+ for p in properties:
+ expanded.extend(build_request.convert_command_line_element(p))
+
+ expanded = build_request.expand_no_defaults(expanded)
+ else:
+ expanded = [property_set.empty()]
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
+ for p in expanded:
+ manager.set_command_line_free_features(property_set.create(p.free()))
+
+ for t in targets:
+ try:
+ g = t.generate(p)
+ if not isinstance(t, ProjectTarget):
+ results_of_main_targets.extend(g.targets())
+ virtual_targets.extend(g.targets())
+ except ExceptionWithUserContext, e:
+ e.report()
+ except Exception:
+ raise
+
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in virtual_targets:
+ actual_targets.append(t.actualize())
+
+ j = option.get("jobs")
+ if j:
+ bjam.call("set-variable", 'PARALLELISM', j)
+
+ k = option.get("keep-going", "true", "true")
+ if k in ["on", "yes", "true"]:
+ bjam.call("set-variable", "KEEP_GOING", "1")
+ elif k in ["off", "no", "false"]:
+ bjam.call("set-variable", "KEEP_GOING", "0")
+ else:
+ print "error: Invalid value for the --keep-going option"
+ sys.exit()
+
+    # The 'all' pseudo target is not strictly needed except in the case when we
+    # use it below, but people often assume they always have this target
+    # available and do not declare it themselves before use, which may cause
+    # build failures with an error message about not being able to build the
+    # 'all' target.
+ bjam.call("NOTFILE", "all")
+
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared all that is left is to tell Jam to update
+ # those targets.
+ if explicitly_requested_files:
+ # Note that this case can not be joined with the regular one when only
+ # exact Boost Build targets are requested as here we do not build those
+ # requested targets but only use them to construct the dependency tree
+ # needed to build the explicitly requested files.
+ # FIXME: add $(.out-xml)
+ bjam.call("UPDATE", ["<e>%s" % x for x in explicitly_requested_files])
+ elif cleanall:
+ bjam.call("UPDATE", "clean-all")
+ elif clean:
+ manager.engine().set_update_action("common.Clean", "clean",
+ actual_clean_targets(targets))
+ bjam.call("UPDATE", "clean")
+ else:
+ # FIXME:
+ #configure.print-configure-checks-summary ;
+
+ if pre_build_hook:
+ for h in pre_build_hook:
+ h()
+
+ bjam.call("DEPENDS", "all", actual_targets)
+ ok = bjam.call("UPDATE_NOW", "all") # FIXME: add out-xml
+ if post_build_hook:
+ post_build_hook(ok)
+ # Prevent automatic update of the 'all' target, now that
+ # we have explicitly updated what we wanted.
+ bjam.call("UPDATE")
+
+ if manager.errors().count() == 0:
+ return ["ok"]
+ else:
+ return []
diff --git a/src/boost/tools/build/src/contrib/__init__.py b/src/boost/tools/build/src/contrib/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/__init__.py
diff --git a/src/boost/tools/build/src/contrib/boost.jam b/src/boost/tools/build/src/contrib/boost.jam
new file mode 100644
index 000000000..46a153a51
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/boost.jam
@@ -0,0 +1,308 @@
+# Copyright 2008 - 2013 Roland Schwarz
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Boost library support module.
+#
+# This module allows using the boost library from boost-build projects. The
+# location of a boost source tree or the path to a pre-built version of the
+# library can be configured from either site-config.jam or user-config.jam. If
+# no location is configured the module looks for a BOOST_ROOT environment
+# variable, which should point to a boost source tree. As a last resort it tries
+# to use pre-built libraries from the standard search path of the compiler.
+#
+# If the location to a source tree is known, the module can be configured from
+# the *-config.jam files:
+#
+# using boost : 1.35 : <root>/path-to-boost-root ;
+#
+# If the location to a pre-built version is known:
+#
+# using boost : 1.34
+# : <include>/usr/local/include/boost_1_34
+# <library>/usr/local/lib
+# ;
+#
+# It is legal to configure more than one boost library version in the config
+# files. The version identifier is used to disambiguate between them. The first
+# configured version becomes the default.
+#
+# To use a boost library you need to put a 'use' statement into your Jamfile:
+#
+# import boost ;
+#
+# boost.use-project 1.35 ;
+#
+# If you do not care about a specific version you can just omit the version
+# part, in which case the default is picked up:
+#
+# boost.use-project ;
+#
+# The library can be referenced with the project identifier '/boost'. To
+# reference the program_options you would specify:
+#
+# exe myexe : mysrc.cpp : <library>/boost//program_options ;
+#
+# Note that the requirements are automatically transformed into suitable tags to
+# find the correct pre-built library.
+#
+
+import common ;
+import modules ;
+import numbers ;
+import project ;
+import property-set ;
+import regex ;
+import toolset ;
+
+.boost.auto_config = [ property-set.create <layout>system ] ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Configuration of the boost library to use.
+#
+# This can either be a boost source tree or pre-built libraries. The 'version'
+# parameter must be a valid boost version number, e.g. 1.35, if specifying a
+# pre-built version with versioned layout. It may be a symbolic name, e.g.
+# 'trunk' if specifying a source tree. The options are specified as named
+# parameters (like properties). The following parameters are available:
+#
+# <root>/path-to-boost-root : Specify a source tree.
+# <include>/path-to-include : The include directory to search.
+# <library>/path-to-library : The library directory to search.
+# <layout>system or <layout>versioned : Built library layout.
+# <build-id>my_build_id : The custom build id to use.
+#
+rule init
+(
+ version # Version identifier.
+ : options * # Set the option properties.
+)
+{
+ if $(.boost.$(version))
+ {
+ import errors ;
+ errors.user-error Boost $(version) already configured. ;
+ }
+ else
+ {
+ if $(.debug-configuration)
+ {
+ if ! $(.boost_default)
+ {
+ echo notice\: configuring default boost library $(version) ;
+ }
+ echo notice\: configuring boost library $(version) ;
+ }
+ .boost_default ?= $(version) ; # the first configured is default
+ .boost.$(version) = [ property-set.create $(options) ] ;
+ }
+}
+
+# Use a certain version of the library.
+#
+# The use-project rule causes the module to define a boost project of searchable
+# pre-built boost libraries, or references a source tree of the boost library.
+# If the 'version' parameter is omitted either the configured default (first in
+# config files) is used or an auto configuration will be attempted.
+#
+rule use-project
+(
+ version ? # The version of the library to use.
+)
+{
+ project.push-current [ project.current ] ;
+ version ?= $(.boost_default) ;
+ version ?= auto_config ;
+
+ if $(.initialized)
+ {
+ if $(.initialized) != $(version)
+ {
+ import errors ;
+ errors.user-error Attempt to use $(__name__) with different
+ parameters. ;
+ }
+ }
+ else
+ {
+ if $(.boost.$(version))
+ {
+ local opt = $(.boost.$(version)) ;
+ local root = [ $(opt).get <root> ] ;
+ local inc = [ $(opt).get <include> ] ;
+ local lib = [ $(opt).get <library> ] ;
+
+ if $(.debug-configuration)
+ {
+ echo notice\: using boost library $(version) [ $(opt).raw ] ;
+ }
+
+ .layout = [ $(opt).get <layout> ] ;
+ .layout ?= versioned ;
+ .build_id = [ $(opt).get <build-id> ] ;
+ .version_tag = [ regex.replace $(version) "[*\\/:.\"\' ]" "_" ] ;
+ .initialized = $(version) ;
+
+ if ( $(root) && $(inc) )
+ || ( $(root) && $(lib) )
+ || ( $(lib) && ! $(inc) )
+ || ( ! $(lib) && $(inc) )
+ {
+ import errors ;
+ errors.user-error Ambiguous parameters, use either <root> or
+ <include> with <library>. ;
+ }
+ else if ! $(root) && ! $(inc)
+ {
+ root = [ modules.peek : BOOST_ROOT ] ;
+ }
+
+ local prj = [ project.current ] ;
+ local mod = [ $(prj).project-module ] ;
+
+ if $(root)
+ {
+ modules.call-in $(mod) : use-project boost : $(root) ;
+ }
+ else
+ {
+ project.initialize $(__name__) ;
+ # It is possible to override the setup of the searched libraries
+ # per version. The (unlikely) 0.0.1 tag is meant as an example
+ # template only.
+ switch $(version)
+ {
+ case 0.0.1 : boost_0_0_1 $(inc) $(lib) ;
+ case * : boost_std $(inc) $(lib) ;
+ }
+ }
+ }
+ else
+ {
+ import errors ;
+ errors.user-error Reference to unconfigured boost version. ;
+ }
+ }
+ project.pop-current ;
+}
+
+local rule boost_lib_std ( id : shared-lib-define )
+{
+ lib $(id) : : : : <link>shared:<define>$(shared-lib-define) ;
+}
+
+rule boost_std ( inc ? lib ? )
+{
+# The default definitions for pre-built libraries.
+
+ project boost
+ : usage-requirements <include>$(inc) <define>BOOST_ALL_NO_LIB
+ : requirements <tag>@tag_std <search>$(lib)
+ ;
+
+ alias headers ;
+ boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ;
+ boost_lib_std container : BOOST_CONTAINER_DYN_LINK ;
+ boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ;
+ boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ;
+ boost_lib_std graph : BOOST_GRAPH_DYN_LINK ;
+ boost_lib_std graph_parallel : BOOST_GRAPH_DYN_LINK ;
+ boost_lib_std iostreams : BOOST_IOSTREAMS_DYN_LINK ;
+ boost_lib_std locale : BOOST_LOCALE_DYN_LINK ;
+ boost_lib_std log : BOOST_LOG_DYN_LINK ;
+ boost_lib_std log_setup : BOOST_LOG_SETUP_DYN_LINK ;
+ boost_lib_std math_c99 : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_c99f : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_c99l : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std mpi : BOOST_MPI_DYN_LINK ;
+ boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ;
+ boost_lib_std program_options : BOOST_PROGRAM_OPTIONS_DYN_LINK ;
+ boost_lib_std python : BOOST_PYTHON_DYN_LINK ;
+ boost_lib_std python3 : BOOST_PYTHON_DYN_LINK ;
+ boost_lib_std random : BOOST_RANDOM_DYN_LINK ;
+ boost_lib_std regex : BOOST_REGEX_DYN_LINK ;
+ boost_lib_std serialization : BOOST_SERIALIZATION_DYN_LINK ;
+ boost_lib_std signals : BOOST_SIGNALS_DYN_LINK ;
+ boost_lib_std system : BOOST_SYSTEM_DYN_LINK ;
+ boost_lib_std test_exec_monitor : BOOST_TEST_DYN_LINK ;
+ boost_lib_std thread : BOOST_THREAD_DYN_DLL ;
+ boost_lib_std timer : BOOST_TIMER_DYN_DLL ;
+ boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ;
+ boost_lib_std wave : BOOST_WAVE_DYN_LINK ;
+ boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ;
+}
+
+# Example placeholder for rules defining Boost library project & library targets
+# for a specific Boost library version. Copy under a different name and model it
+# after the boost_std rule. Please note that it is also possible to have a
+# per-version tagging rule in case the tagging algorithm changes between versions.
+#
+rule boost_0_0_1 ( inc ? lib ? )
+{
+ echo "You are trying to use an example placeholder for boost libs." ;
+}
+
+rule tag_std ( name : type ? : property-set )
+{
+ name = boost_$(name) ;
+ if ( [ $(property-set).get <link> ] in static ) &&
+ ( [ $(property-set).get <target-os> ] in windows )
+ {
+ name = lib$(name) ;
+ }
+
+ local result ;
+ if $(.layout) = system
+ {
+ local version = [ MATCH "^([0-9]+)_([0-9]+)" : $(.version_tag) ] ;
+ if $(version[1]) = "1" && [ numbers.less $(version[2]) 39 ]
+ {
+ result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
+ }
+ else
+ {
+ result = [ tag_system $(name) : $(type) : $(property-set) ] ;
+ }
+ }
+ else if $(.layout) = tagged
+ {
+ result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
+ }
+ else if $(.layout) = versioned
+ {
+ result = [ tag_versioned $(name) : $(type) : $(property-set) ] ;
+ }
+ else
+ {
+ import errors ;
+ errors.error Missing layout. ;
+ }
+
+ return $(result) ;
+}
+
+rule tag_system ( name : type ? : property-set )
+{
+ return [ common.format-name <base> -$(.build_id) : $(name) : $(type) :
+ $(property-set) ] ;
+}
+
+rule tag_tagged ( name : type ? : property-set )
+{
+ return [ common.format-name <base> <threading> <runtime> -$(.build_id) :
+ $(name) : $(type) : $(property-set) ] ;
+}
+
+rule tag_versioned ( name : type ? : property-set )
+{
+ return [ common.format-name <base> <toolset> <threading> <runtime>
+ -$(.version_tag) -$(.build_id) : $(name) : $(type) : $(property-set) ] ;
+}
diff --git a/src/boost/tools/build/src/contrib/boost.py b/src/boost/tools/build/src/contrib/boost.py
new file mode 100644
index 000000000..352941e01
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/boost.py
@@ -0,0 +1,280 @@
+# $Id: boost.jam 62249 2010-05-26 19:05:19Z steven_watanabe $
+# Copyright 2008 Roland Schwarz
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Boost library support module.
+#
+# This module allows using the boost library from boost-build projects.
+# The location of a boost source tree or the path to a pre-built
+# version of the library can be configured from either site-config.jam
+# or user-config.jam. If no location is configured the module looks for
+# a BOOST_ROOT environment variable, which should point to a boost source
+# tree. As a last resort it tries to use pre-built libraries from the standard
+# search path of the compiler.
+#
+# If the location to a source tree is known, the module can be configured
+# from the *-config.jam files:
+#
+# using boost : 1.35 : <root>/path-to-boost-root ;
+#
+# If the location to a pre-built version is known:
+#
+# using boost : 1.34
+# : <include>/usr/local/include/boost_1_34
+# <library>/usr/local/lib
+# ;
+#
+# It is legal to configure more than one boost library version in the config
+# files. The version identifier is used to disambiguate between them.
+# The first configured version becomes the default.
+#
+# To use a boost library you need to put a 'use' statement into your
+# Jamfile:
+#
+# import boost ;
+#
+# boost.use-project 1.35 ;
+#
+# If you don't care about a specific version you can just omit the version
+# part, in which case the default is picked up:
+#
+# boost.use-project ;
+#
+# The library can be referenced with the project identifier '/boost'. To
+# reference the program_options library you would specify:
+#
+# exe myexe : mysrc.cpp : <library>/boost//program_options ;
+#
+# Note that the requirements are automatically transformed into suitable
+# tags to find the correct pre-built library.
+#
+
+import re
+
+import bjam
+
+from b2.build import alias, property, property_set, feature
+from b2.manager import get_manager
+from b2.tools import builtin, common
+from b2.util import bjam_signature, regex
+
+
+# TODO: This is currently necessary in Python Port, but was not in Jam.
+feature.feature('layout', ['system', 'versioned', 'tag'], ['optional'])
+feature.feature('root', [], ['optional', 'free'])
+feature.feature('build-id', [], ['optional', 'free'])
+
+__initialized = None
+__boost_auto_config = property_set.create([property.Property('layout', 'system')])
+__boost_configured = {}
+__boost_default = None
+__build_id = None
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+
+# Configuration of the boost library to use.
+#
+# This can either be a boost source tree or
+# pre-built libraries. The 'version' parameter must be a valid boost
+# version number, e.g. 1.35, if specifying a pre-built version with
+# versioned layout. It may be a symbolic name, e.g. 'trunk' if specifying
+# a source tree. The options are specified as named parameters (like
+# properties). The following parameters are available:
+#
+# <root>/path-to-boost-root: Specify a source tree.
+#
+# <include>/path-to-include: The include directory to search.
+#
+# <library>/path-to-library: The library directory to search.
+#
+# <layout>system or <layout>versioned.
+#
+# <build-id>my_build_id: The custom build id to use.
+#
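+# As an illustrative sketch only (the paths, version number and build id below
+# are hypothetical), a pre-built configuration using these named parameters
+# could look like this in user-config.jam:
+#
+#   using boost : 1.39
+#     : <include>/opt/boost_1_39/include
+#       <library>/opt/boost_1_39/lib
+#       <layout>versioned
+#       <build-id>mybuild
+#     ;
+#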
+def init(version, options = None):
+ assert(isinstance(version,list))
+ assert(len(version)==1)
+ version = version[0]
+ if version in __boost_configured:
+ get_manager().errors()("Boost {} already configured.".format(version));
+ else:
+ global __boost_default
+ if debug():
+ if not __boost_default:
+ print "notice: configuring default boost library {}".format(version)
+ print "notice: configuring boost library {}".format(version)
+
+ if not __boost_default:
+ __boost_default = version
+ properties = []
+ for option in options:
+ properties.append(property.create_from_string(option))
+ __boost_configured[ version ] = property_set.PropertySet(properties)
+
+projects = get_manager().projects()
+rules = projects.project_rules()
+
+
+# Use a certain version of the library.
+#
+# The use-project rule causes the module to define a boost project of
+# searchable pre-built boost libraries, or references a source tree
+# of the boost library. If the 'version' parameter is omitted either
+# the configured default (first in config files) is used or an auto
+# configuration will be attempted.
+#
+@bjam_signature(([ "version", "?" ], ))
+def use_project(version = None):
+ projects.push_current( projects.current() )
+ if not version:
+ version = __boost_default
+ if not version:
+ version = "auto_config"
+
+ global __initialized
+ if __initialized:
+ if __initialized != version:
+ get_manager().errors()('Attempt to use {} with different parameters'.format('boost'))
+ else:
+ if version in __boost_configured:
+ opts = __boost_configured[ version ]
+ root = opts.get('<root>' )
+ inc = opts.get('<include>')
+ lib = opts.get('<library>')
+
+ if debug():
+                print "notice: using boost library {} {}".format( version, opts.raw() )
+
+            global __layout
+            global __version_tag
+            global __build_id
+ __layout = opts.get('<layout>')
+ if not __layout:
+ __layout = 'versioned'
+ __build_id = opts.get('<build-id>')
+ __version_tag = re.sub("[*\\/:.\"\' ]", "_", version)
+ __initialized = version
+
+ if ( root and inc ) or \
+ ( root and lib ) or \
+ ( lib and not inc ) or \
+ ( not lib and inc ):
+ get_manager().errors()("Ambiguous parameters, use either <root> or <include> with <library>.")
+ elif not root and not inc:
+ root = bjam.variable("BOOST_ROOT")
+
+ module = projects.current().project_module()
+
+ if root:
+ bjam.call('call-in-module', module, 'use-project', ['boost', root])
+ else:
+ projects.initialize(__name__)
+ if version == '0.0.1':
+ boost_0_0_1( inc, lib )
+ else:
+ boost_std( inc, lib )
+ else:
+ get_manager().errors()("Reference to unconfigured boost version.")
+ projects.pop_current()
+
+
+rules.add_rule( 'boost.use-project', use_project )
+
+def boost_std(inc = None, lib = None):
+ # The default definitions for pre-built libraries.
+ rules.project(
+ ['boost'],
+ ['usage-requirements'] + ['<include>{}'.format(i) for i in inc] + ['<define>BOOST_ALL_NO_LIB'],
+ ['requirements'] + ['<search>{}'.format(l) for l in lib])
+
+ # TODO: There should be a better way to add a Python function into a
+ # project requirements property set.
+ tag_prop_set = property_set.create([property.Property('<tag>', tag_std)])
+ attributes = projects.attributes(projects.current().project_module())
+ attributes.requirements = attributes.requirements.refine(tag_prop_set)
+
+ alias('headers')
+
+ def boost_lib(lib_name, dyn_link_macro):
+ if (isinstance(lib_name,str)):
+ lib_name = [lib_name]
+ builtin.lib(lib_name, usage_requirements=['<link>shared:<define>{}'.format(dyn_link_macro)])
+
+ boost_lib('container' , 'BOOST_CONTAINER_DYN_LINK' )
+ boost_lib('date_time' , 'BOOST_DATE_TIME_DYN_LINK' )
+ boost_lib('filesystem' , 'BOOST_FILE_SYSTEM_DYN_LINK' )
+ boost_lib('graph' , 'BOOST_GRAPH_DYN_LINK' )
+ boost_lib('graph_parallel' , 'BOOST_GRAPH_DYN_LINK' )
+ boost_lib('iostreams' , 'BOOST_IOSTREAMS_DYN_LINK' )
+ boost_lib('locale' , 'BOOST_LOG_DYN_LINK' )
+ boost_lib('log' , 'BOOST_LOG_DYN_LINK' )
+ boost_lib('log_setup' , 'BOOST_LOG_DYN_LINK' )
+ boost_lib('math_tr1' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_tr1f' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_tr1l' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_c99' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_c99f' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_c99l' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('mpi' , 'BOOST_MPI_DYN_LINK' )
+ boost_lib('program_options' , 'BOOST_PROGRAM_OPTIONS_DYN_LINK')
+ boost_lib('python' , 'BOOST_PYTHON_DYN_LINK' )
+ boost_lib('python3' , 'BOOST_PYTHON_DYN_LINK' )
+ boost_lib('random' , 'BOOST_RANDOM_DYN_LINK' )
+ boost_lib('regex' , 'BOOST_REGEX_DYN_LINK' )
+ boost_lib('serialization' , 'BOOST_SERIALIZATION_DYN_LINK' )
+ boost_lib('wserialization' , 'BOOST_SERIALIZATION_DYN_LINK' )
+ boost_lib('signals' , 'BOOST_SIGNALS_DYN_LINK' )
+ boost_lib('system' , 'BOOST_SYSTEM_DYN_LINK' )
+ boost_lib('unit_test_framework' , 'BOOST_TEST_DYN_LINK' )
+ boost_lib('prg_exec_monitor' , 'BOOST_TEST_DYN_LINK' )
+ boost_lib('test_exec_monitor' , 'BOOST_TEST_DYN_LINK' )
+ boost_lib('thread' , 'BOOST_THREAD_DYN_DLL' )
+ boost_lib('wave' , 'BOOST_WAVE_DYN_LINK' )
+
+def boost_0_0_1( inc, lib ):
+    print "You are trying to use an example placeholder for boost libs."
+    # Copy this template to another place (in the file boost.jam)
+    # and define a project and libraries modelled after the
+    # boost_std rule. Please note that it is also possible to have
+    # a per-version tagging rule in case the tagging differs between
+    # versions.
+
+def tag_std(name, type, prop_set):
+ name = 'boost_' + name
+ if 'static' in prop_set.get('<link>') and 'windows' in prop_set.get('<target-os>'):
+ name = 'lib' + name
+ result = None
+
+ if __layout == 'system':
+ versionRe = re.search('^([0-9]+)_([0-9]+)', __version_tag)
+ if versionRe and versionRe.group(1) == '1' and int(versionRe.group(2)) < 39:
+ result = tag_tagged(name, type, prop_set)
+ else:
+ result = tag_system(name, type, prop_set)
+ elif __layout == 'tagged':
+ result = tag_tagged(name, type, prop_set)
+ elif __layout == 'versioned':
+ result = tag_versioned(name, type, prop_set)
+ else:
+ get_manager().errors()("Missing layout")
+ return result
+
+def tag_maybe(param):
+ return ['-{}'.format(param)] if param else []
+
+def tag_system(name, type, prop_set):
+ return common.format_name(['<base>'] + tag_maybe(__build_id), name, type, prop_set)
+
+def tag_tagged(name, type, prop_set):
+ return common.format_name(['<base>', '<threading>', '<runtime>'] + tag_maybe(__build_id), name, type, prop_set)
+
+def tag_versioned(name, type, prop_set):
+ return common.format_name(['<base>', '<toolset>', '<threading>', '<runtime>'] + tag_maybe(__version_tag) + tag_maybe(__build_id),
+ name, type, prop_set)
diff --git a/src/boost/tools/build/src/contrib/modular.jam b/src/boost/tools/build/src/contrib/modular.jam
new file mode 100644
index 000000000..cba517048
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/modular.jam
@@ -0,0 +1,288 @@
+# Copyright Rene Rivera 2015
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import path ;
+import project ;
+import modules ;
+import regex ;
+import type ;
+
+# Add a location, i.e. a directory, in which to search for libraries.
+# The optional 'prefix' indicates which rooted prefixes the new
+# search dir applies to. The prefix defaults to '/'.
+rule add-location ( dir prefix ? : base-dir ? )
+{
+ process-args ;
+
+ prefix ?= "/" ;
+
+ # Dir path of caller to base paths from.
+ caller-module ?= [ CALLER_MODULE ] ;
+ local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+
+ base-dir ?= $(caller-dir) ;
+
+ .search-path-prefix += $(prefix) ;
+ .search-path.$(prefix) += [ path.root [ path.root $(dir) $(base-dir) ] [ path.pwd ] ] ;
+}
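+
+# A minimal usage sketch (the directory names and the "/boost" prefix are
+# illustrative assumptions, not part of this module):
+#
+#   import modular ;
+#   modular.add-location external/libs ;       # searched for "/..." references
+#   modular.add-location boost/libs /boost ;   # searched for "/boost/..." references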
+
+# Declares additional definitions of a modular library target external
+# to the modular library build itself. This makes it possible to externally
+# define modular libraries without modifying the library. The passed in
+# values are added on demand when the named library is first declared.
+rule external (
+ name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ .external.($(name)).sources = $(sources) ;
+ .external.($(name)).requirements = $(requirements) ;
+ .external.($(name)).default-build = $(default-build) ;
+ .external.($(name)).usage-requirements = $(usage-requirements) ;
+}
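+
+# Sketch of use (the library name and usage requirement are illustrative
+# assumptions): attach extra definitions that are only applied once the
+# named library is actually declared:
+#
+#   modular.external /boost/config
+#       : # Sources
+#       : # Requirements
+#       : # Default Build
+#       : # Usage Requirements
+#           <include>include ;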
+
+# Find, and declare, any modular libraries referenced in the target-refs.
+# This will both load the modular libraries, and declare/manufacture
+# the modular libraries as needed.
+rule find ( target-refs + )
+{
+ process-args ;
+
+ local caller-mod = [ CALLER_MODULE ] ;
+ local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+ caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
+
+ local result-refs ;
+ for local target-ref in $(target-refs)
+ {
+ result-refs += [ resolve-reference $(target-ref)
+ : $(caller-mod) $(caller-dir) ] ;
+ }
+
+ return $(result-refs) ;
+}
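+
+# Sketch of use from a Jamfile (the target reference is an illustrative
+# assumption); find both loads the referenced modular library and returns
+# the reference for use as a source:
+#
+#   import modular ;
+#   exe app : app.cpp [ modular.find /boost/regex//boost_regex ] ;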
+
+##############################################################################
+
+local rule resolve-reference ( target-ref : caller-mod caller-dir ? )
+{
+ # ECHO %%% modular.resolve-target-ref $(target-ref) :: $(caller-mod) $(caller-dir) ;
+ if ! $(caller-dir)
+ {
+ caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+ caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
+ }
+ local result-ref = $(target-ref) ;
+ local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ;
+ # if ! ( $(ref) in $(.target-refs) )
+ {
+ # .target-refs += $(ref) ;
+ local search-prefix ;
+ local search-sub ;
+ for local prefix in $(.search-path-prefix)
+ {
+ if ! $(search-prefix)
+ {
+ local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ;
+ search-prefix = $(search-match[1]) ;
+ search-sub = $(search-match[2]) ;
+ }
+ }
+
+ if $(search-prefix)
+ {
+ local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ;
+ found = $(found[1]) ;
+ if $(found)
+ {
+ local lib-ref = [ regex.split $(search-sub) / ] ;
+ lib-ref = $(search-prefix)/$(lib-ref[1]) ;
+ local lib-path = [ path.relative-to $(caller-dir) $(found) ] ;
+ define-library $(lib-ref) $(caller-mod) : $(lib-path) ;
+ }
+ }
+ }
+ return $(result-ref) ;
+}
+
+local rule define-library ( name caller-module ? : root )
+{
+ # ECHO ~~~ modular.library $(name) $(caller-module) :: $(root) :: $(depends) ;
+
+ process-args ;
+
+ # Dir path of caller to base paths from.
+ caller-module ?= [ CALLER_MODULE ] ;
+ local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+
+ # Find the various parts of the library.
+ local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ;
+ local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ;
+ lib-contents = $(lib-contents:D=) ;
+
+    # "include" dir for the library.
+ local include-dir ;
+ if "include" in $(lib-contents)
+ {
+ include-dir = $(root)/include ;
+ }
+
+ # If it has a build dir, i.e. it has targets to build,
+ # we root the project at the build dir to make it easy
+ # to refer to the build targets. This mirrors the regular
+ # Boost organization of the project aliases.
+ if "build" in $(lib-contents)
+ {
+ root = $(root)/build ;
+ build-dir = "." ;
+ }
+
+ # Shadow target declarations so that we can alter build targets
+ # to work in the standalone modular structure.
+ local lib-location = [ path.root [ path.make $(root) ] $(caller-dir) ] ;
+ local lib-module-name = [ project.module-name $(lib-location) ] ;
+ local modular-rules = [ RULENAMES modular-rules ] ;
+ IMPORT modular-rules : $(modular-rules) : $(lib-module-name) : $(modular-rules) ;
+
+ # Load/create/declare library project.
+ local lib-module = [ project.find $(root) : $(caller-dir) ] ;
+ if ! $(lib-module)
+ {
+ # If the find was unable to load the project we synthesize it.
+ lib-module = [ project.load $(lib-location) : synthesize ] ;
+ }
+ local lib-target = [ project.target $(lib-module) ] ;
+ if ! [ modules.peek $(lib-module) : __library__ ]
+ {
+ modules.poke $(lib-module) : __library__ : $(name) ;
+ for local type in [ modules.peek type : .types ]
+ {
+ main-rule-name = [ type.type-to-rule-name $(type) ] ;
+ IMPORT modular-rules : main-target-rule : $(lib-module-name) : $(main-rule-name) ;
+ }
+ }
+
+ # Declare project alternate ID.
+ modules.call-in $(caller-module) : use-project $(name) : $(root) ;
+
+ # Create a "library" target that has basic usage info if needed.
+ if ! [ $(lib-target).has-alternative-for-target library ]
+ {
+ include-dir = [ path.relative-to $(root) $(include-dir) ] ;
+
+ project.push-current $(lib-target) ;
+
+ # Declare the library alias.
+ modules.call-in $(lib-module) : library
+ : # Sources
+ : # Requirements
+ : # Default Build
+ : # Usage Requirements
+ <include>$(include-dir)
+ ;
+
+ project.pop-current ;
+ }
+}
+
+local rule process-args ( )
+{
+ if ! $(.did-process-args)
+ {
+ .did-process-args = yes ;
+ local argv = [ modules.peek : ARGV ] ;
+ local dirs = [ MATCH ^--modular-search-dir=(.*)$ : $(argv) ] ;
+ for local dir in $(dirs)
+ {
+ add-location $(dir) : [ path.pwd ] ;
+ }
+ }
+}
+
+rule apply-external (
+ mod : field : values * )
+{
+ local result ;
+ local name = [ modules.peek $(mod) : __library__ ] ;
+ values += $(.external.($(name)).$(field)) ;
+ for local value in $(values)
+ {
+ result += [ resolve-reference $(value) : $(mod) ] ;
+ }
+ return $(result) ;
+}
+
+module modular-rules
+{
+ import type ;
+ import targets ;
+ import builtin ;
+ import alias ;
+
+ # Avoids any form of installation for Boost modules.
+ rule boost-install ( libraries * ) { }
+
+ # Generic typed target rule to pre-process main target
+ # declarations to make them work within the standalone
+ # modular structure.
+ rule main-target-rule (
+ name : sources * : requirements * : default-build * :
+ usage-requirements * )
+ {
+ local mod = [ CALLER_MODULE ] ;
+
+ # ECHO @@@ [[$(mod)]] modular-rules.main-target-rule $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
+
+ # First discover the required target type based on the exact alias used to
+ # invoke this rule.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = $(bt[4]) ;
+ local target-type = [ type.type-from-rule-name $(rulename) ] ;
+ return [ targets.create-typed-target $(target-type) : [ project.current ] :
+ $(name) : $(sources) : $(requirements) : $(default-build) :
+ $(usage-requirements) ] ;
+ }
+
+ rule lib ( names + : sources * : requirements * : default-build * :
+ usage-requirements * )
+ {
+ local mod = [ CALLER_MODULE ] ;
+ requirements += <use>library ;
+ usage-requirements += <use>library ;
+
+ # ECHO @@@ [[$(mod)]] modular-rules.lib $(names) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
+ return [ builtin.lib $(names) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ;
+ }
+
+ rule alias ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+ {
+ local mod = [ CALLER_MODULE ] ;
+
+ # ECHO @@@ [[$(mod)]] modular-rules.alias $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
+ return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ;
+ }
+
+ rule library ( name ? : sources * : requirements * : default-build * :
+ usage-requirements * )
+ {
+ import modular ;
+
+ local mod = [ CALLER_MODULE ] ;
+ sources = [ modular.apply-external $(mod) : sources : $(sources) ] ;
+ requirements = [ modular.apply-external $(mod) : requirements : $(requirements) ] ;
+ default-build = [ modular.apply-external $(mod) : default-build : $(default-build) ] ;
+ usage-requirements = [ modular.apply-external $(mod) : usage-requirements : $(usage-requirements) ] ;
+
+ name ?= library ;
+
+ # ECHO @@@ [[$(mod)]] modular-rules.library $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
+ return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ;
+ }
+}
+
diff --git a/src/boost/tools/build/src/contrib/tntnet.jam b/src/boost/tools/build/src/contrib/tntnet.jam
new file mode 100644
index 000000000..0bd0ae559
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/tntnet.jam
@@ -0,0 +1,208 @@
+# Copyright 2008 Eduardo Gurgel
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+
+# Support for creating components for the Tntnet web application
+# server (http://tntnet.org)
+#
+# Example:
+#
+# using tntnet : /usr ;
+# lib index : index.png index.js index.css index.ecpp otherclass.cpp
+#     /tntnet//tntnet /tntnet//cxxtools ;
+#
+#
+
+import modules ;
+import feature ;
+import errors ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+import type ;
+
+type.register ECPP : ecpp ;
+type.register JPEG : jpeg ;
+type.register JPG : jpg ;
+type.register PNG : png ;
+type.register JS : js ;
+type.register CSS : css ;
+type.register GIF : gif ;
+
+project.initialize $(__name__) ;
+project tntnet ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+# Initializes the Tntnet support module. The 'prefix' parameter
+# tells where Tntnet is installed.
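+#
+# For example (the installation paths below are illustrative):
+#
+#   using tntnet : /usr ;
+#   using tntnet : /opt/tntnet : /opt/tntnet/bin : /opt/tntnet/include : /opt/tntnet/lib ;
+#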
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? )
+{
+ project.push-current $(.project) ;
+
+    # Pre-build the paths up front so that reinitialization with different
+    # values can be detected below.
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ if $(.initialized)
+ {
+ if $(prefix) != $(.prefix)
+ {
+ errors.error
+                "Attempt to reinitialize Tntnet with a different installation prefix" ;
+ }
+ if $(inc_prefix) != $(.incprefix)
+ {
+ errors.error
+                "Attempt to reinitialize Tntnet with a different include path" ;
+ }
+ if $(lib_prefix) != $(.libprefix)
+ {
+ errors.error
+                "Attempt to reinitialize Tntnet with a different library path" ;
+ }
+ if $(bin_prefix) != $(.binprefix)
+ {
+ errors.error
+                "Attempt to reinitialize Tntnet with a different bin path" ;
+ }
+ }
+ else
+ {
+ .initialized = true ;
+ .prefix = $(prefix) ;
+
+ # Setup prefixes for include, binaries and libs.
+ .incprefix = $(.prefix)/include ;
+ .libprefix = $(.prefix)/lib ;
+ .binprefix = $(.prefix)/bin ;
+
+ # Generates cpp files from ecpp files using "ecppc" tool
+ generators.register-standard tntnet.ecpp : ECPP : CPP ;
+ # Generates cpp files from jpeg files using "ecppc" tool
+ generators.register-standard tntnet.jpeg : JPEG : CPP ;
+ # Generates cpp files from jpg files using "ecppc" tool
+ generators.register-standard tntnet.jpg : JPG : CPP ;
+ # Generates cpp files from png files using "ecppc" tool
+ generators.register-standard tntnet.png : PNG : CPP ;
+ # Generates cpp files from js files using "ecppc" tool
+ generators.register-standard tntnet.js : JS : CPP ;
+ # Generates cpp files from gif files using "ecppc" tool
+ generators.register-standard tntnet.gif : GIF : CPP ;
+ # Generates cpp files from css files using "ecppc" tool
+ generators.register-standard tntnet.css : CSS : CPP ;
+ # Scanner for ecpp includes
+ type.set-scanner ECPP : ecpp-scanner ;
+
+
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <dll-path>$(.libprefix)
+ <threading>multi
+ <allow>tntnet ;
+ lib cxxtools : $(main)
+ :
+ :
+ :
+ <include>$(.incprefix)/cxxtools
+            $(usage-requirements)
+ ;
+ lib tntnet : $(main)
+ :
+ :
+ :
+ <include>$(.incprefix)/tntnet
+            $(usage-requirements)
+ ;
+
+ }
+ project.pop-current ;
+
+}
+
+rule directory
+{
+ return $(.prefix) ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+# Get <include> from current toolset.
+flags tntnet.ecpp INCLUDES <include> ;
+
+actions ecpp
+{
+ $(.binprefix)/ecppc -I " $(INCLUDES) " -o $(<) $(>)
+}
+
+actions jpeg
+{
+ $(.binprefix)/ecppc -b -m image/jpeg -o $(<) $(>)
+}
+
+actions jpg
+{
+ $(.binprefix)/ecppc -b -m image/jpeg -o $(<) $(>)
+}
+
+actions js
+{
+ $(.binprefix)/ecppc -b -m application/x-javascript -o $(<) $(>)
+}
+
+actions png
+{
+ $(.binprefix)/ecppc -b -m image/png -o $(<) $(>)
+}
+actions gif
+{
+ $(.binprefix)/ecppc -b -m image/gif -o $(<) $(>)
+}
+actions css
+{
+ $(.binprefix)/ecppc -b -m text/css -o $(<) $(>)
+}
+
+class ecpp-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<%include.*>(.*)</%include>" ;
+ }
+}
+
+scanner.register ecpp-scanner : include ;
diff --git a/src/boost/tools/build/src/contrib/wxFormBuilder.jam b/src/boost/tools/build/src/contrib/wxFormBuilder.jam
new file mode 100644
index 000000000..9ec97ef51
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/wxFormBuilder.jam
@@ -0,0 +1,195 @@
+################################################################################
+#
+# Copyright (c) 2007-2008 Dario Senic, Jurko Gospodnetic.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+################################################################################
+
+################################################################################
+#
+# Boost Build wxFormBuilder generator tool module.
+#
+# wxFormBuilder is a GUI designer tool for the wxWidgets library. It can then
+# generate C++ sources modeling the designed GUI using the wxWidgets library
+# APIs.
+#
+# This module defines a wxFormBuilder project file type and rules needed to
+# generate C++ source files from those projects. With it you can simply list
+# wxFormBuilder projects as sources for some target and Boost Build will
+# automatically convert them to C++ sources and process from there.
+#
+# The wxFormBuilder executable location may be provided as a parameter when
+# configuring this toolset. Otherwise the default wxFormBuilder.exe executable
+# name is used, located in the folder pointed to by the WXFORMBUILDER
+# environment variable.
+#
+# Current limitations:
+#
+# * Works only on Windows.
+# * Works only when run via Boost Jam using the native Windows cmd.exe command
+# interpreter, i.e. the default native Windows Boost Jam build.
+# * Used wxFormBuilder projects need to have their output file names defined
+#   consistently with the target names assumed by this build script. This means
+#   that their target names must use the prefix 'wxFormBuilderGenerated_'
+#   followed by the .fbp project file's base name, and have no output folder
+#   defined.
+#
+################################################################################
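+
+################################################################################
+#
+# Usage sketch (target and project file names below are hypothetical):
+#
+#   using wxFormBuilder ;  # or: using wxFormBuilder : "C:/wxFB/wxFormBuilder.exe" ;
+#   exe myApp : main.cpp myDialogs.fbp ;
+#
+# Here myDialogs.fbp must be set up to generate
+# wxFormBuilderGenerated_myDialogs.cpp/.h files, as described above. Any
+# wxWidgets build requirements for the exe target are omitted.
+#
+################################################################################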
+
+################################################################################
+#
+# Implementation note:
+#
+# Avoiding the limitation on the generated target file names can be done but
+# would require depending on external tools to copy the wxFormBuilder project to
+# a temp location and then modify it in-place to set its target file names. On
+# the other hand wxFormBuilder is expected to add command-line options for
+# choosing the target file names, which will allow us to remove this
+# limitation in a much cleaner way.
+# (23.08.2008.) (Jurko)
+#
+################################################################################
+
+import generators ;
+import os ;
+import path ;
+import toolset ;
+import type ;
+
+
+################################################################################
+#
+# wxFormBuilder.generate()
+# ------------------------
+#
+# Action for processing WX_FORM_BUILDER_PROJECT types.
+#
+################################################################################
+#
+# Implementation notes:
+#
+# wxFormBuilder generated CPP and H files need to be moved to the location
+# where the Boost Build target system expects them so that the generated CPP
+# file can be included into the compile process and so that the clean rule
+# successfully deletes both CPP and H files. We expect wxFormBuilder to generate
+# files in the same location where the provided WX_FORM_BUILDER_PROJECT file is
+# located.
+# (15.05.2007.) (Dario)
+#
+################################################################################
+
+actions generate
+{
+ start "" /wait "$(EXECUTABLE)" /g "$(2)"
+ move "$(1[1]:BSR=$(2:P))" "$(1[1]:P)"
+ move "$(1[2]:BSR=$(2:P))" "$(1[2]:P)"
+}
+
+
+################################################################################
+#
+# wxFormBuilder.init()
+# --------------------
+#
+# Main toolset initialization rule called via the toolset.using rule.
+#
+################################################################################
+
+rule init ( executable ? )
+{
+ if $(.initialized)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice: [wxFormBuilder-cfg] Repeated initialization request
+ (executable \"$(executable:E="")\") detected and ignored. ;
+ }
+ }
+ else
+ {
+ local environmentVariable = WXFORMBUILDER ;
+
+ if $(.debug-configuration)
+ {
+ ECHO notice: [wxFormBuilder-cfg] Configuring wxFormBuilder... ;
+ }
+
+ # Deduce the path to the used wxFormBuilder executable.
+ if ! $(executable)
+ {
+ executable = "wxFormBuilder.exe" ;
+ local executable-path = [ os.environ $(environmentVariable) ] ;
+ if $(executable-path)-is-not-empty
+ {
+ executable = [ path.root $(executable) $(executable-path) ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO notice: [wxFormBuilder-cfg] No wxFormBuilder path
+ configured either explicitly or using the
+ $(environmentVariable) environment variable. ;
+ ECHO notice: [wxFormBuilder-cfg] To avoid complications please
+                update your configuration to include a correct path to the
+ wxFormBuilder executable. ;
+ ECHO notice: [wxFormBuilder-cfg] wxFormBuilder executable will
+ be searched for on the system path. ;
+ }
+ }
+ if $(.debug-configuration)
+ {
+ ECHO notice: [wxFormBuilder-cfg] Will use wxFormBuilder executable
+ \"$(executable)\". ;
+ }
+
+ # Now we are sure we have everything we need to initialize this toolset.
+ .initialized = true ;
+
+ # Store the path to the used wxFormBuilder executable.
+ .executable = $(executable) ;
+
+ # Type registration.
+ type.register WX_FORM_BUILDER_PROJECT : fbp ;
+
+ # Parameters to be forwarded to the action rule.
+ toolset.flags wxFormBuilder.generate EXECUTABLE : $(.executable) ;
+
+ # Generator definition and registration.
+ generators.register-standard wxFormBuilder.generate :
+ WX_FORM_BUILDER_PROJECT : CPP(wxFormBuilderGenerated_%)
+ H(wxFormBuilderGenerated_%) ;
+ }
+}
+
+
+################################################################################
+#
+# wxFormBuilder.is-initialized()
+# ------------------------------
+#
+# Returns whether this toolset has been initialized.
+#
+################################################################################
+
+rule is-initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+# Global variables for this module.
+.executable = ;
+.initialized = ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
diff --git a/src/boost/tools/build/src/engine/Jambase b/src/boost/tools/build/src/engine/Jambase
new file mode 100644
index 000000000..b2f8ebf31
--- /dev/null
+++ b/src/boost/tools/build/src/engine/Jambase
@@ -0,0 +1,189 @@
+#
+# /+\
+# +\ Copyright 1993, 2000 Christopher Seiwald.
+# \+/
+#
+# This file is part of Jam - see jam.c for Copyright information.
+#
+
+# This file is ALSO:
+# Copyright 2001-2004 David Abrahams.
+# Copyright 2002-2004 Rene Rivera.
+# Copyright 2015 Artur Shepilko.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+if $(NT)
+{
+ SLASH ?= \\ ;
+}
+SLASH ?= / ;
+
+
+# Glob for patterns in the directories starting from the given start directory,
+# up to and including the root of the file-system. We stop globbing as soon as
+# we find at least one match.
+#
+rule find-to-root ( dir : patterns + )
+{
+ local globs = [ GLOB $(dir) : $(patterns) ] ;
+ while ! $(globs) && $(dir:P) != $(dir)
+ {
+ dir = $(dir:P) ;
+ globs = [ GLOB $(dir) : $(patterns) ] ;
+ }
+ return $(globs) ;
+}
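+
+# For example, the following (illustrative) call returns the nearest
+# "boost-build.jam" found at or above the current directory, if any:
+#
+#   local found = [ find-to-root [ PWD ] : boost-build.jam ] ;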
+
+
+# This global will hold the location of the user's boost-build.jam file.
+.boost-build-file = ;
+
+# This global will hold the location of the build system bootstrap file.
+.bootstrap-file = ;
+
+# Remember the value of $(BOOST_BUILD_PATH) supplied to us by the user.
+BOOST_BUILD_PATH.user-value = $(BOOST_BUILD_PATH) ;
+
+# On Unix only, when BOOST_BUILD_PATH is not supplied by the user, set it to a
+# sensible default value. This allows B2 to work without any
+# environment variables, which is good in itself and also required by the Debian
+# Policy.
+if ! $(BOOST_BUILD_PATH) && $(UNIX)
+{
+ BOOST_BUILD_PATH = /usr/share/boost-build ;
+}
+
+
+rule _poke ( module-name ? : variables + : value * )
+{
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+}
+
+
+# This rule can be invoked from an optional user's boost-build.jam file to both
+# indicate where to find the build system files, and to load them. The path
+# indicated is relative to the location of the boost-build.jam file.
+#
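+# A typical boost-build.jam contains just a single call to this rule; the path
+# below is illustrative and is interpreted relative to the boost-build.jam file
+# itself:
+#
+#   boost-build tools/build/src/kernel ;
+#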
+rule boost-build ( dir ? )
+{
+ if $(.bootstrap-file)
+ {
+ ECHO "Error: Illegal attempt to re-bootstrap the build system by invoking" ;
+ ECHO ;
+ ECHO " 'boost-build" $(dir) ";'" ;
+ ECHO ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+
+ # Add the given directory to the path so we can find the build system. If
+    # dir is empty, this has no effect.
+ BOOST_BUILD_PATH = $(dir:R=$(.boost-build-file:D)) $(BOOST_BUILD_PATH) ;
+
+ # We might have just modified the *global* value of BOOST_BUILD_PATH. The
+ # code that loads the rest of B2, in particular the site-config.jam
+ # and user-config.jam configuration files uses os.environ, so we need to
+ # update the value there.
+ _poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+
+ # Try to find the build system bootstrap file 'bootstrap.jam'.
+ local bootstrap-file = [ GLOB $(BOOST_BUILD_PATH) : bootstrap.jam ] ;
+ .bootstrap-file = $(bootstrap-file[1]) ;
+
+    # If there is no bootstrap.jam we can find, exit with an error.
+ if ! $(.bootstrap-file)
+ {
+ ECHO "Unable to load B2: could not find build system." ;
+ ECHO --------------------------------------------------------- ;
+ ECHO "$(.boost-build-file) attempted to load the build system by invoking" ;
+ ECHO ;
+ ECHO " 'boost-build" $(dir) ";'" ;
+ ECHO ;
+ ECHO "but we were unable to find \"bootstrap.jam\" in the specified directory" ;
+ ECHO "or in BOOST_BUILD_PATH (searching "$(BOOST_BUILD_PATH:J=", ")")." ;
+ ECHO ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+
+ if [ MATCH .*(--debug-configuration).* : $(ARGV) ]
+ {
+ ECHO "notice: loading B2 from"
+ [ NORMALIZE_PATH $(.bootstrap-file:D) ] ;
+ }
+
+ # Load the build system, now that we know where to start from.
+ include $(.bootstrap-file) ;
+}
+
+
+{
+ # We attempt to load "boost-build.jam" by searching from the current
+ # invocation directory up to the root of the file-system.
+ #
+ # boost-build.jam is expected to invoke the "boost-build" rule to load the
+ # B2 files.
+
+ local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;
+ local self = [ SELF_PATH ] ;
+ local boost-build-relative = ../../share/boost-build ;
+ local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;
+
+ local boost-build-files =
+ [ find-to-root [ PWD ] : boost-build.jam ]
+ [ GLOB $(self-based-path) : boost-build.jam ]
+ # Another temporary measure so Jam works with B2 v1.
+ [ GLOB $(search-path) : boost-build.jam ] ;
+
+ .boost-build-file = $(boost-build-files[1]) ;
+
+    # If there is no boost-build.jam we can find, exit with an error and
+    # some additional information.
+ if ! $(.boost-build-file)
+ {
+ ECHO "Unable to load B2: could not find \"boost-build.jam\"" ;
+ ECHO --------------------------------------------------------------- ;
+
+ if ! [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]
+ {
+ ECHO "BOOST_ROOT must be set, either in the environment, or " ;
+ ECHO "on the command-line with -sBOOST_ROOT=..., to the root" ;
+ ECHO "of the boost installation." ;
+ ECHO ;
+ }
+
+ ECHO "Attempted search from" [ PWD ] "up to the root" ;
+ ECHO "at" $(self-based-path) ;
+ ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+
+ if [ MATCH .*(--debug-configuration).* : $(ARGV) ]
+ {
+ ECHO "notice: found boost-build.jam at"
+ [ NORMALIZE_PATH $(.boost-build-file) ] ;
+ }
+
+ # Now load the boost-build.jam to get the build system loaded. This
+    # incidentally loads the user's jamfile and attempts to build targets.
+ #
+ # We also set it up so we can tell whether we are loading the new V2 system
+    # or the old V1 system.
+ include $(.boost-build-file) ;
+
+ # Check that, at minimum, the bootstrap file was found.
+ if ! $(.bootstrap-file)
+ {
+ ECHO "Unable to load B2" ;
+ ECHO -------------------------- ;
+ ECHO "\"$(.boost-build-file)\" was found by searching from" [ PWD ] "up to the root" ;
+ ECHO "and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: "$(search-path:J=", ")"." ;
+ ECHO ;
+ ECHO "However, it failed to call the \"boost-build\" rule to indicate" ;
+ ECHO "the location of the build system." ;
+ ECHO ;
+ EXIT "Please consult the documentation at 'http://www.boost.org'." ;
+ }
+}
diff --git a/src/boost/tools/build/src/engine/boost-jam.spec b/src/boost/tools/build/src/engine/boost-jam.spec
new file mode 100644
index 000000000..bc572fc96
--- /dev/null
+++ b/src/boost/tools/build/src/engine/boost-jam.spec
@@ -0,0 +1,64 @@
+Name: boost-jam
+Version: 3.1.19
+Summary: Build tool
+Release: 1
+Source: %{name}-%{version}.tgz
+
+License: Boost Software License, Version 1.0
+Group: Development/Tools
+URL: http://www.boost.org
+Packager: Rene Rivera <grafik@redshift-software.com>
+BuildRoot: /var/tmp/%{name}-%{version}.root
+
+%description
+Boost Jam is a build tool based on FTJam, which in turn is based on
+Perforce Jam. It contains significant improvements made to facilitate
+its use in the Boost Build System, but should be backward compatible
+with Perforce Jam.
+
+Authors:
+ Perforce Jam : Christopher Seiwald
+ FT Jam : David Turner
+ Boost Jam : David Abrahams
+
+Copyright:
+ /+\
+ +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ \+/
+ License is hereby granted to use this software and distribute it
+ freely, as long as this copyright notice is retained and modifications
+ are clearly marked.
+ ALL WARRANTIES ARE HEREBY DISCLAIMED.
+
+Also:
+ Copyright 2001-2006 David Abrahams.
+ Copyright 2002-2006 Rene Rivera.
+ Copyright 2003-2006 Vladimir Prus.
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+%prep
+%setup -n %{name}-%{version}
+
+%build
+LOCATE_TARGET=bin ./build.sh $BOOST_JAM_TOOLSET
+
+%install
+rm -rf $RPM_BUILD_ROOT
+mkdir -p $RPM_BUILD_ROOT%{_bindir}
+mkdir -p $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
+install -m 755 bin/bjam $RPM_BUILD_ROOT%{_bindir}/bjam-%{version}
+ln -sf bjam-%{version} $RPM_BUILD_ROOT%{_bindir}/bjam
+cp -R *.html *.png *.css LICENSE*.txt images jam $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version}
+
+find $RPM_BUILD_ROOT -name CVS -type d -exec rm -r {} \;
+
+%files
+%defattr(-,root,root)
+%attr(755,root,root) /usr/bin/*
+%doc %{_docdir}/%{name}-%{version}
+
+
+%clean
+rm -rf $RPM_BUILD_ROOT
diff --git a/src/boost/tools/build/src/engine/boost-no-inspect b/src/boost/tools/build/src/engine/boost-no-inspect
new file mode 100644
index 000000000..8a06f3a70
--- /dev/null
+++ b/src/boost/tools/build/src/engine/boost-no-inspect
@@ -0,0 +1 @@
+this is really out of our hands, so tell inspect to ignore this directory
\ No newline at end of file
diff --git a/src/boost/tools/build/src/engine/build.bat b/src/boost/tools/build/src/engine/build.bat
new file mode 100644
index 000000000..cd6cefac8
--- /dev/null
+++ b/src/boost/tools/build/src/engine/build.bat
@@ -0,0 +1,194 @@
+@ECHO OFF
+
+REM ~ Copyright 2002-2007 Rene Rivera.
+REM ~ Distributed under the Boost Software License, Version 1.0.
+REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+setlocal
+goto Start
+
+
+:Set_Error
+color 00
+goto :eof
+
+
+:Clear_Error
+ver >nul
+goto :eof
+
+
+:Error_Print
+REM Output an error message and set the errorlevel to indicate failure.
+setlocal
+ECHO ###
+ECHO ### %1
+ECHO ###
+ECHO ### You can specify the toolset as the argument, i.e.:
+ECHO ### .\build.bat msvc
+ECHO ###
+ECHO ### Toolsets supported by this script are: borland, como, gcc,
+ECHO ### gcc-nocygwin, intel-win32, metrowerks, mingw,
+ECHO ### vc11, vc12, vc14, vc141, vc142
+ECHO ###
+ECHO ### If you have Visual Studio 2017 installed you will need to either update
+ECHO ### the Visual Studio 2017 installer or run from the VS 2017 Command Prompt
+ECHO ### as we were unable to detect your toolset installation.
+ECHO ###
+call :Set_Error
+endlocal
+goto :eof
+
+
+:Test_Option
+REM Tests whether the given string is in the form of an option: "--*"
+call :Clear_Error
+setlocal
+set test=%1
+if not defined test (
+ call :Set_Error
+ goto Test_Option_End
+)
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if not "-" == "%test:~1,1%" call :Set_Error
+:Test_Option_End
+endlocal
+goto :eof
+
+
+:Test_Empty
+REM Tests whether the given string is not empty
+call :Clear_Error
+setlocal
+set test=%1
+if not defined test (
+ call :Clear_Error
+ goto Test_Empty_End
+)
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if not "" == "%test%" call :Set_Error
+:Test_Empty_End
+endlocal
+goto :eof
+
+
+:Guess_Toolset
+set local
+REM Try and guess the toolset to bootstrap the build with...
+REM Sets B2_TOOLSET to the first found toolset.
+REM May also set B2_TOOLSET_ROOT to the
+REM location of the found toolset.
+
+call :Clear_Error
+call :Test_Empty "%ProgramFiles%"
+if not errorlevel 1 set "ProgramFiles=C:\Program Files"
+
+REM Visual Studio is by default installed to %ProgramFiles% on 32-bit machines and
+REM %ProgramFiles(x86)% on 64-bit machines. Making a common variable for both.
+call :Clear_Error
+call :Test_Empty "%ProgramFiles(x86)%"
+if errorlevel 1 (
+ set "VS_ProgramFiles=%ProgramFiles(x86)%"
+) else (
+ set "VS_ProgramFiles=%ProgramFiles%"
+)
+
+call guess_toolset.bat
+if errorlevel 1 (
+ call :Error_Print "Could not find a suitable toolset.")
+goto :eof
+
+endlocal
+goto :eof
+
+
+:Start
+set B2_TOOLSET=
+set B2_BUILD_ARGS=
+
+REM If no arguments guess the toolset;
+REM or if first argument is an option guess the toolset;
+REM otherwise the argument is the toolset to use.
+call :Clear_Error
+call :Test_Empty %1
+if not errorlevel 1 (
+ call :Guess_Toolset
+ if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
+)
+
+call :Clear_Error
+call :Test_Option %1
+if not errorlevel 1 (
+ call :Guess_Toolset
+ if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
+)
+
+call :Clear_Error
+set B2_TOOLSET=%1
+shift
+goto Setup_Toolset
+
+
+:Setup_Toolset
+REM Setup the toolset command and options. This bit of code
+REM needs to be flexible enough to handle both when
+REM the toolset was guessed at and found, or when the toolset
+REM was indicated in the command arguments.
+REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way
+REM because in BAT variables are substituted only once during a single
+REM command. A complete "if ... else ..."
+REM is a single command, even though it's in multiple lines here.
+:Setup_Args
+call :Clear_Error
+call :Test_Empty %1
+if not errorlevel 1 goto Config_Toolset
+call :Clear_Error
+call :Test_Option %1
+if errorlevel 1 (
+ set B2_BUILD_ARGS=%B2_BUILD_ARGS% %1
+ shift
+ goto Setup_Args
+)
+:Config_Toolset
+call config_toolset.bat
+if "_%_known_%_" == "__" (
+ call :Error_Print "Unknown toolset: %B2_TOOLSET%"
+)
+if errorlevel 1 goto Finish
+
+echo ###
+echo ### Using '%B2_TOOLSET%' toolset.
+echo ###
+
+set B2_SOURCES=
+set B2_SOURCES=%B2_SOURCES% builtins.cpp class.cpp
+set B2_SOURCES=%B2_SOURCES% command.cpp compile.cpp constants.cpp cwd.cpp
+set B2_SOURCES=%B2_SOURCES% debug.cpp debugger.cpp
+set B2_SOURCES=%B2_SOURCES% execcmd.cpp execnt.cpp filent.cpp filesys.cpp frames.cpp function.cpp
+set B2_SOURCES=%B2_SOURCES% glob.cpp hash.cpp hcache.cpp hdrmacro.cpp headers.cpp jam.cpp
+set B2_SOURCES=%B2_SOURCES% jambase.cpp jamgram.cpp lists.cpp make.cpp make1.cpp md5.cpp mem.cpp modules.cpp
+set B2_SOURCES=%B2_SOURCES% native.cpp object.cpp option.cpp output.cpp parse.cpp pathnt.cpp
+set B2_SOURCES=%B2_SOURCES% pathsys.cpp regexp.cpp rules.cpp scan.cpp search.cpp jam_strings.cpp
+set B2_SOURCES=%B2_SOURCES% subst.cpp sysinfo.cpp timestamp.cpp variable.cpp w32_getreg.cpp
+set B2_SOURCES=%B2_SOURCES% modules/order.cpp
+set B2_SOURCES=%B2_SOURCES% modules/path.cpp
+set B2_SOURCES=%B2_SOURCES% modules/property-set.cpp
+set B2_SOURCES=%B2_SOURCES% modules/regex.cpp
+set B2_SOURCES=%B2_SOURCES% modules/sequence.cpp
+set B2_SOURCES=%B2_SOURCES% modules/set.cpp
+
+set B2_CXXFLAGS=%B2_CXXFLAGS% -DNDEBUG
+
+@echo ON
+%B2_CXX% %CXXFLAGS% %B2_CXXFLAGS% %B2_SOURCES% %B2_CXX_LINK%
+dir *.exe
+copy /b .\b2.exe .\bjam.exe
+
+:Finish
+@exit /b %ERRORLEVEL%
diff --git a/src/boost/tools/build/src/engine/build.sh b/src/boost/tools/build/src/engine/build.sh
new file mode 100755
index 000000000..41af3715e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/build.sh
@@ -0,0 +1,496 @@
+#!/bin/sh
+
+#~ Copyright 2002-2019 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or copy at
+#~ http://www.boost.org/LICENSE_1_0.txt)
+
+# Reset the toolset.
+B2_TOOLSET=
+B2_OS=
+
+# Run a command, and echo before doing so. Also checks the exit status and quits
+# if there was an error.
+echo_run ()
+{
+ echo "> $@"
+ $@
+ r=$?
+ if test $r -ne 0 ; then
+ exit $r
+ fi
+}
+
+# Print an error message, and exit with a status of 1.
+error_exit ()
+{
+ echo "
+${@}
+
+You can specify the toolset as the argument, i.e.:
+ ./build.sh gcc
+
+Toolsets supported by this script are:
+ acc, clang, como, gcc, intel-darwin, intel-linux, kcc, kylix, mipspro,
+ pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp
+
+For any toolset you can override the path to the compiler with the CXX
+environment variable. You can also use additional flags for the compiler
+with the CXXFLAGS environment variable.
+
+A special toolset, cxx, is available which is used as a fallback when a more
+specific toolset is not found and the cxx command is detected. The 'cxx'
+toolset will use the CXX, CXXFLAGS, and LIBS environment variables, if present.
+
+Similarly, the cross-cxx toolset is available for cross-compiling by using the
+BUILD_CXX, BUILD_CXXFLAGS, and BUILD_LDFLAGS environment variables to compile
+binaries that will be executed on the build system. This allows CXX etc. to be
+set for cross-compilers to be propagated to subprocesses.
+" 1>&2
+ exit 1
+}
+
+# Check that a command is in the PATH.
+test_path ()
+{
+ if `command -v command 1>/dev/null 2>/dev/null`; then
+ command -v $1 1>/dev/null 2>/dev/null
+ else
+ hash $1 1>/dev/null 2>/dev/null
+ fi
+}
+
+# Check that the OS name, as returned by "uname", is as given.
+test_uname ()
+{
+ if test_path uname; then
+ test `uname` = $*
+ fi
+}
+
+# Check that the given command runs.
+test_exec ()
+{
+ "$*" 1>/dev/null 2>/dev/null
+}
+
+# Check that the compiler can do C++11.
+test_cxx11 ()
+{
+ if ! test $NO_CXX11_CHECK ; then
+ case $1 in
+ gcc) ( ${CXX:=g++} -x c++ -std=c++11 check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ intel-darwin) ( ${CXX:=icc} -xc++ check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ intel-linux) ( ${CXX:=icc} -xc++ check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ vacpp) ( ${CXX:=xlC_r} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ xlcpp) ( ${CXX:=xlC_r} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ como) ( ${CXX:=como} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ kcc) ( ${CXX:=KCC} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ kylix) ( ${CXX:=bc++} -tC -q check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ mipspro) ( ${CXX:=CC} -FE:template_in_elf_section -ptused check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ pathscale) ( ${CXX:=pathCC} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ pgi) ( ${CXX:=pgc++} -std=c++11 check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ sun*) ( ${CXX:=CC} -std=c++11 check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ clang*) ( ${CXX:=clang++} -x c++ -std=c++11 check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ tru64cxx) ( ${CXX:=cc} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ acc) ( ${CXX:=aCC} -AA check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ qcc) ( ${CXX:=QCC} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ cxx) ( ${CXX:=cxx} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ cross-cxx) ( ${CXX:=cxx} check_cxx11.cpp && rm -f a.out ) 1>/dev/null 2>/dev/null ;;
+ *) test "0" = "1" ;;
+ esac
+ else
+ test $NO_CXX11_CHECK
+ fi
+}
+
+# Try and guess the toolset to bootstrap the build with...
+guess_toolset ()
+{
+ if test_uname Darwin && test_cxx11 clang ; then B2_TOOLSET=clang
+ elif test_uname IRIX && test_cxx11 mipspro ; then B2_TOOLSET=mipspro
+ elif test_uname IRIX64 && test_cxx11 mipspro ; then B2_TOOLSET=mipspro
+ elif test_uname OSF1 && test_cxx11 tru64cxx ; then B2_TOOLSET=tru64cxx
+ elif test_uname QNX && test_path QCC && test_cxx11 qcc ; then B2_TOOLSET=qcc
+ elif test_uname Linux && test_path xlC_r ; then
+ if /usr/bin/lscpu | grep Byte | grep Little > /dev/null 2>&1 ; then
+ # Little endian linux
+ B2_TOOLSET=xlcpp
+ else
+            # Big endian linux
+ B2_TOOLSET=vacpp
+ fi
+ elif test_uname AIX && test_path xlC_r && test_cxx11 vacpp ; then B2_TOOLSET=vacpp
+ elif test_uname FreeBSD && test_path freebsd-version && test_path clang++ && test_cxx11 clang ; then B2_TOOLSET=clang
+ elif test_path g++ && test_cxx11 gcc ; then B2_TOOLSET=gcc
+ elif test_path clang++ && test_cxx11 clang ; then B2_TOOLSET=clang
+ elif test_path icc && test_cxx11 intel-linux ; then B2_TOOLSET=intel-linux
+ elif test -r /opt/intel/cc/9.0/bin/iccvars.sh && test_cxx11 intel-linux ; then
+ B2_TOOLSET=intel-linux
+ B2_TOOLSET_ROOT=/opt/intel/cc/9.0
+ elif test -r /opt/intel_cc_80/bin/iccvars.sh && test_cxx11 intel-linux ; then
+ B2_TOOLSET=intel-linux
+ B2_TOOLSET_ROOT=/opt/intel_cc_80
+ elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh && test_cxx11 intel-linux ; then
+ B2_TOOLSET=intel-linux
+ B2_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
+ elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh && test_cxx11 intel-linux ; then
+ B2_TOOLSET=intel-linux
+ B2_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
+ elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh && test_cxx11 intel-linux ; then
+ B2_TOOLSET=intel-linux
+ B2_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
+ elif test_path pgc++ && test_cxx11 pgi ; then B2_TOOLSET=pgi
+ elif test_path pathCC && test_cxx11 pathscale ; then B2_TOOLSET=pathscale
+ elif test_path como && test_cxx11 como ; then B2_TOOLSET=como
+ elif test_path KCC && test_cxx11 kcc ; then B2_TOOLSET=kcc
+ elif test_path bc++ && test_cxx11 kylix ; then B2_TOOLSET=kylix
+ elif test_path aCC && test_cxx11 acc ; then B2_TOOLSET=acc
+ elif test_uname HP-UX ; then B2_TOOLSET=acc
+ elif test -r /opt/SUNWspro/bin/cc && test_cxx11 sunpro ; then
+ B2_TOOLSET=sunpro
+ B2_TOOLSET_ROOT=/opt/SUNWspro/
+    # Test for some common compiler commands as the default fallback.
+ elif test_path $CXX ; then B2_TOOLSET=cxx
+ elif test_path cxx ; then
+ B2_TOOLSET=cxx
+ CXX=cxx
+ elif test_path cpp ; then
+ B2_TOOLSET=cxx
+ CXX=cpp
+ elif test_path CC ; then
+ B2_TOOLSET=cxx
+ CXX=CC
+ fi
+ if test "$B2_TOOLSET" = "" ; then
+ error_exit "Could not find a suitable toolset."
+ fi
+}
+
+check_debug_build ()
+{
+ while test $# -gt 0
+ do
+ case "$1" in
+ --debug) return 0 ;;
+ esac
+ shift
+ done
+ return 1
+}
+
+# The one option we support in the invocation
+# is the name of the toolset to force building
+# with.
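+#
+# For example:
+#   ./build.sh               # guess a toolset and build the engine
+#   ./build.sh gcc --debug   # force gcc and build a debug engine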
+case "$1" in
+ --guess-toolset) NO_CXX11_CHECK=1 ; guess_toolset ; echo "$B2_TOOLSET" ; exit 1 ;;
+ -*) guess_toolset ;;
+ ?*) B2_TOOLSET=$1 ; shift ;;
+ *) guess_toolset ;;
+esac
+
+# We need a C++11 compiler. Check here and given some feedback about it.
+if ! test_cxx11 $B2_TOOLSET ; then
+ error_exit "
+A C++11 capable compiler is required for building the B2 engine.
+Toolset '$B2_TOOLSET' does not appear to support C++11.
+
+** Note, the C++11 capable compiler is _only_ required for building the B2
+** engine. The B2 build system allows for using any C++ level and any other
+** supported language and resource in your projects.
+"
+fi
+
+case $B2_TOOLSET in
+
+ gcc)
+ CXX=${CXX:=g++}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ # Check whether it's MinGW GCC, which has Windows headers and none of POSIX ones.
+ machine=$(${CXX} -dumpmachine 2>/dev/null)
+ if test $? -ne 0 ; then
+ echo "B2_TOOLSET is gcc, but the 'gcc' command cannot be executed."
+ echo "Make sure 'gcc' is in PATH, or use a different toolset."
+ exit 1
+ fi
+ case $machine in
+ *mingw*)
+ # MinGW insists that its bin directory be in PATH.
+ if test -r ${B2_TOOLSET_ROOT}bin/gcc ; then
+ export PATH=${B2_TOOLSET_ROOT}bin:$PATH
+ fi
+ B2_CXX="${CXX} -x c++ -std=c++11"
+ B2_CXXFLAGS_RELEASE="-O2 -s"
+ B2_CXXFLAGS_DEBUG="-O0 -g"
+ B2_OS="NT"
+ ;;
+
+ *cygwin*)
+ B2_CXX="${CXX} -x c++ -std=gnu++11"
+ B2_CXXFLAGS_RELEASE="-O2 -s"
+ B2_CXXFLAGS_DEBUG="-O0 -g"
+ ;;
+
+ *)
+ B2_CXX="${CXX} -x c++ -std=c++11"
+ B2_CXXFLAGS_RELEASE="-O2 -s"
+ B2_CXXFLAGS_DEBUG="-O0 -g"
+ esac
+ ;;
+
+ intel-darwin)
+ CXX=${CXX:=icc}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX} -xc++"
+ B2_CXXFLAGS_RELEASE="-O3 -s"
+ B2_CXXFLAGS_DEBUG="-O0 -g -p"
+ ;;
+
+ intel-linux)
+ CXX=${CXX:=icc}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ test_path ${CXX} >/dev/null 2>&1
+ if test $? ; then
+        if test $? -eq 0 ; then
+ B2_TOOLSET_ROOT=`echo ${CXX}| sed -e 's/bin.*\/icc//'`
+ # probably the most widespread
+ ARCH=intel64
+ else
+            echo "No Intel compiler in the current path"
+            echo "Looking in a few old places for legacy reasons"
+ if test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
+ B2_TOOLSET_ROOT=/opt/intel/cc/9.0/
+ elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
+ B2_TOOLSET_ROOT=/opt/intel_cc_80/
+ elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
+ B2_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
+ elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
+ B2_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
+ elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
+ B2_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
+ fi
+ fi
+ if test -r ${B2_TOOLSET_ROOT}bin/iccvars.sh ; then
+ # iccvars does not change LD_RUN_PATH. We adjust LD_RUN_PATH here in
+ # order not to have to rely on ld.so.conf knowing the icc library
+ # directory. We do this before running iccvars.sh in order to allow a
+ # user to add modifications to LD_RUN_PATH in iccvars.sh.
+ if test -z "${LD_RUN_PATH}"; then
+ LD_RUN_PATH="${B2_TOOLSET_ROOT}lib"
+ else
+ LD_RUN_PATH="${B2_TOOLSET_ROOT}lib:${LD_RUN_PATH}"
+ fi
+ export LD_RUN_PATH
+ . ${B2_TOOLSET_ROOT}bin/iccvars.sh $ARCH
+ fi
+ B2_CXX="${CXX} -xc++"
+ B2_CXXFLAGS_RELEASE="-O3 -s"
+ B2_CXXFLAGS_DEBUG="-O0 -g -p"
+ ;;
+
+ vacpp)
+ CXX=${CXX:=xlC_r}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=-qversion}
+ B2_CXX="${CXX}"
+ B2_CXXFLAGS_RELEASE="-O3 -s -qstrict -qinline"
+ B2_CXXFLAGS_DEBUG="-g -qNOOPTimize -qnoinline -pg"
+ ;;
+
+ xlcpp)
+ CXX=${CXX:=xlC_r}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=-qversion}
+ B2_CXX="${CXX}"
+ B2_CXXFLAGS_RELEASE="-s -O3 -qstrict -qinline"
+ B2_CXXFLAGS_DEBUG="-g -qNOOPTimize -qnoinline -pg"
+ ;;
+
+ como)
+ CXX=${CXX:=como}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX}"
+ B2_CXXFLAGS_RELEASE="-O3 --inlining"
+ B2_CXXFLAGS_DEBUG="-O0 -g --no_inlining --long_long"
+ ;;
+
+ kcc)
+ CXX=${CXX:=KCC}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="KCC"
+ B2_CXXFLAGS_RELEASE="+K2 -s"
+ B2_CXXFLAGS_DEBUG="+K0 -g"
+ ;;
+
+ kylix)
+ CXX=${CXX:=bc++}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="bc++ -tC -q"
+ B2_CXXFLAGS_RELEASE="-O2 -vi -w-inl -s"
+ B2_CXXFLAGS_DEBUG="-Od -v -vi-"
+ ;;
+
+ mipspro)
+ CXX=${CXX:=CC}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX} -FE:template_in_elf_section -ptused"
+ B2_CXXFLAGS_RELEASE="-Ofast -g0 \"-INLINE:none\" -s"
+ B2_CXXFLAGS_DEBUG="-O0 -INLINE -g"
+ ;;
+
+ pathscale)
+ CXX=${CXX:=pathCC}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX}"
+ B2_CXXFLAGS_RELEASE="-O3 -inline -s"
+ B2_CXXFLAGS_DEBUG="-O0 -noinline -ggdb"
+ ;;
+
+ pgi)
+ CXX=${CXX:=pgc++}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX} -std=c++11"
+ B2_CXXFLAGS_RELEASE="-fast -s"
+ B2_CXXFLAGS_DEBUG="-O0 -gopt"
+ ;;
+
+ sun*)
+ CXX=${CXX:=CC}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=-V}
+ if test -z "${B2_TOOLSET_ROOT}" -a -r /opt/SUNWspro/bin/CC ; then
+ B2_TOOLSET_ROOT=/opt/SUNWspro/
+ fi
+ if test -r "${B2_TOOLSET_ROOT}/bin/CC" ; then
+ PATH=${B2_TOOLSET_ROOT}bin:${PATH}
+ export PATH
+ fi
+ B2_CXX="${CXX} -std=c++11"
+ B2_CXXFLAGS_RELEASE="-xO4 -s"
+ B2_CXXFLAGS_DEBUG="-g"
+ ;;
+
+ clang*)
+ CXX=${CXX:=clang++}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX} -x c++ -std=c++11"
+ B2_TOOLSET=clang
+ B2_CXXFLAGS_RELEASE="-O3 -s"
+ B2_CXXFLAGS_DEBUG="-O0 -fno-inline -g"
+ ;;
+
+ tru64cxx)
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="cc"
+ B2_CXXFLAGS_RELEASE="-O5 -inline speed -s"
+ B2_CXXFLAGS_DEBUG="-O0 -pg -g"
+ ;;
+
+ acc)
+ CXX=${CXX:=aCC}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX} -AA"
+ B2_CXXFLAGS_RELEASE="-O3 -s"
+ B2_CXXFLAGS_DEBUG="+d -g"
+ ;;
+
+ qcc)
+ CXX=${CXX:=QCC}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX}"
+ B2_CXXFLAGS_RELEASE="-O3 -Wc,-finline-functions"
+        B2_CXXFLAGS_DEBUG="-O0 -Wc,-fno-inline -gstabs+"
+ ;;
+
+ cxx)
+ CXX=${CXX:=cxx}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX}"
+ ;;
+
+ cross-cxx)
+ CXX=${BUILD_CXX:=cxx}
+ CXXFLAGS=${BUILD_CXXFLAGS}
+ CXX_VERSION_OPT=${CXX_VERSION_OPT:=--version}
+ B2_CXX="${CXX}"
+ ;;
+
+ *)
+ error_exit "Unknown toolset: $B2_TOOLSET"
+ ;;
+esac
+
+echo "
+###
+###
+### Using '$B2_TOOLSET' toolset.
+###
+###
+"
+echo_run ${CXX} ${CXX_VERSION_OPT}
+echo "
+###
+###
+"
+B2_SOURCES="\
+ builtins.cpp \
+ class.cpp \
+ command.cpp \
+ compile.cpp \
+ constants.cpp \
+ cwd.cpp \
+ debug.cpp \
+ debugger.cpp \
+ execcmd.cpp \
+ filesys.cpp \
+ frames.cpp \
+ function.cpp \
+ glob.cpp\
+ hash.cpp \
+ hcache.cpp \
+ hdrmacro.cpp \
+ headers.cpp \
+ jam.cpp \
+ jambase.cpp \
+ jamgram.cpp \
+ lists.cpp \
+ make.cpp \
+ make1.cpp \
+ md5.cpp \
+ mem.cpp \
+ modules.cpp \
+ native.cpp \
+ object.cpp \
+ option.cpp \
+ output.cpp \
+ parse.cpp \
+ pathsys.cpp \
+ regexp.cpp \
+ rules.cpp \
+ scan.cpp \
+ search.cpp \
+ jam_strings.cpp \
+ subst.cpp \
+ sysinfo.cpp \
+ timestamp.cpp \
+ variable.cpp \
+ w32_getreg.cpp \
+ modules/order.cpp \
+ modules/path.cpp \
+ modules/property-set.cpp \
+ modules/regex.cpp \
+ modules/sequence.cpp \
+ modules/set.cpp \
+ "
+case $B2_OS in
+ NT)
+ B2_SOURCES="${B2_SOURCES} execnt.cpp filent.cpp pathnt.cpp"
+ ;;
+
+ *)
+ B2_SOURCES="${B2_SOURCES} execunix.cpp fileunix.cpp pathunix.cpp"
+ ;;
+esac
+
+if check_debug_build "$@" ; then B2_CXXFLAGS="${B2_CXXFLAGS_DEBUG}"
+else B2_CXXFLAGS="${B2_CXXFLAGS_RELEASE} -DNDEBUG"
+fi
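+# Illustrative invocation (a sketch, not part of the upstream script): with one
+# of the toolsets above, a debug build might be requested as
+#
+#     CXX=clang++ ./build.sh clang --debug
+#
+# in which case check_debug_build selects B2_CXXFLAGS_DEBUG; otherwise the
+# release flags plus -DNDEBUG are used.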
+echo_run ${B2_CXX} ${CXXFLAGS} ${B2_CXXFLAGS} ${B2_SOURCES} -o b2
+echo_run cp b2 bjam
diff --git a/src/boost/tools/build/src/engine/build_vms.com b/src/boost/tools/build/src/engine/build_vms.com
new file mode 100644
index 000000000..6f73512d0
--- /dev/null
+++ b/src/boost/tools/build/src/engine/build_vms.com
@@ -0,0 +1,153 @@
+$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson.
+$ !
+$ ! 8-APR-2004 Boris Gubenko
+$ ! Miscellaneous improvements.
+$ !
+$ ! 20-JAN-2015 Artur Shepilko
+$ ! Adapt for jam 3.1.19
+$ !
+$ ! Distributed under the Boost Software License, Version 1.0.
+$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+$ !
+$ ! bootstrap build script for Jam
+$ !
+$ THIS_FACILITY = "BUILDJAM"
+$
+$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'")
+$ save_verify = f$verify(verify)
+$
+$ SAY := WRITE SYS$OUTPUT
+$ !
+$ ON WARNING THEN CONTINUE
+$ ON ERROR THEN GOTO EXIT
+$
+$ BOOST_JAM_TOOLSET = "vmsdecc"
+$ BOOST_JAM_CC = "CC"
+$ BJAM_UPDATE = ""
+$
+$ ARGS = F$EDIT("''p1' ''p2' ''p3' ''p4'","TRIM,LOWERCASE")
+$ ARGS_LEN = F$LENGTH(ARGS)
+$
+$ IF F$LOCATE("--update", ARGS) .NE. F$LENGTH(ARGS) THEN BJAM_UPDATE = "update"
+$ IF BJAM_UPDATE .EQS. "update" -
+ .AND. F$SEARCH("[.bootstrap_vms]jam0.exe") .EQS. "" THEN BJAM_UPDATE = ""
+$
+$ IF BJAM_UPDATE .NES. "update"
+$ THEN
+$ GOSUB CLEAN
+$
+$ SAY "I|Creating bootstrap directory..."
+$ CREATE /DIR [.bootstrap_vms]
+$
+$ !------------------
+$ ! NOTE: Assume jamgram and jambase have been generated (true for fresh release).
+$ ! Otherwise these need to be re-generated manually.
+$ !------------------
+$
+$ SAY "I|Building bootstrap jam..."
+$ !
+$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC " + -
+ "/PREFIX_LIBRARY_ENTRIES=(ALL_ENTRIES) " + -
+ "/WARNING=DISABLE=(LONGEXTERN)" + -
+ "/OBJ=[.bootstrap_vms] "
+$
+$ CC_INCLUDE=""
+$
+$ SAY "I|Using compile flags: ", CC_FLAGS
+$
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE command.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE compile.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE constants.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE debug.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execcmd.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE frames.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE function.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE glob.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hash.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hdrmacro.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE headers.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jam.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jambase.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jamgram.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE lists.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make1.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE object.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE option.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE output.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE parse.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathsys.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE regexp.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE rules.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE scan.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE search.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE subst.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE timestamp.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE variable.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE modules.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE strings.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filesys.c
+$
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execvms.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathvms.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filevms.c
+$
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE builtins.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE class.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE cwd.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE native.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE md5.c
+$
+$ CC_INCLUDE = "/INCLUDE=(""./modules"")"
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]set.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]path.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]regex.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]property-set.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]sequence.c
+$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]order.c
+$
+$ LIB /CREATE [.bootstrap_vms]jam0.olb [.bootstrap_vms]*.obj
+$ LINK /EXEC=[.bootstrap_vms]jam0.exe -
+ [.bootstrap_vms]jam0.olb/INCLUDE=JAM/LIB
+$
+$ IF F$SEARCH("[.bootstrap_vms]*.obj") .NES. "" THEN -
+ DELETE /NOCONF /NOLOG [.bootstrap_vms]*.obj;*, *.olb;*
+$ ENDIF
+$
+$ IF F$SEARCH("[.bootstrap_vms]jam0.exe") .NES. ""
+$ THEN
+$ IF BJAM_UPDATE .NES. "update"
+$ THEN
+$ SAY "I|Cleaning previous build..."
+$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' clean
+$ ENDIF
+$
+$ SAY "I|Building Boost.Jam..."
+$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS'
+$ ENDIF
+$
+$
+$EXIT:
+$ sts = $STATUS
+$ exit 'sts' + (0 * f$verify(save_verify))
+
+
+$CLEAN: !GOSUB
+$ !
+$ IF F$SEARCH("[.bootstrap_vms]*.*") .NES. ""
+$ THEN
+$ SAY "I|Cleaning previous bootstrap files..."
+$ !
+$ SET FILE /PROT=(W:RWED) [.bootstrap_vms]*.*;*
+$ DELETE /NOCONF /NOLOG [.bootstrap_vms]*.*;*
+$ ENDIF
+$ !
+$ IF F$SEARCH("bootstrap_vms.dir") .NES. ""
+$ THEN
+$ SAY "I|Removing previous bootstrap directory..."
+$ !
+$ SET FILE /PROT=(W:RWED) bootstrap_vms.dir
+$ DELETE /NOCONF /NOLOG bootstrap_vms.dir;
+$ ENDIF
+$ !
+$ RETURN
diff --git a/src/boost/tools/build/src/engine/builtins.cpp b/src/boost/tools/build/src/engine/builtins.cpp
new file mode 100644
index 000000000..eb6af8c82
--- /dev/null
+++ b/src/boost/tools/build/src/engine/builtins.cpp
@@ -0,0 +1,2728 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "builtins.h"
+
+#include "compile.h"
+#include "constants.h"
+#include "cwd.h"
+#include "debugger.h"
+#include "filesys.h"
+#include "frames.h"
+#include "hash.h"
+#include "hdrmacro.h"
+#include "lists.h"
+#include "make.h"
+#include "md5.h"
+#include "native.h"
+#include "object.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "rules.h"
+#include "jam_strings.h"
+#include "subst.h"
+#include "timestamp.h"
+#include "variable.h"
+#include "output.h"
+
+#include <ctype.h>
+
+#ifdef OS_NT
+#include <windows.h>
+#ifndef FSCTL_GET_REPARSE_POINT
+/* MinGW's version of windows.h is missing this, so we need
+ * to include winioctl.h directly
+ */
+#include <winioctl.h>
+#endif
+
+/* With VC8 (VS2005) these are not defined:
+ * FSCTL_GET_REPARSE_POINT (expects WINVER >= 0x0500 _WIN32_WINNT >= 0x0500 )
+ * IO_REPARSE_TAG_SYMLINK (is part of a separate Driver SDK)
+ * So define them explicitly to their expected values.
+ */
+#ifndef FSCTL_GET_REPARSE_POINT
+# define FSCTL_GET_REPARSE_POINT 0x000900a8
+#endif
+#ifndef IO_REPARSE_TAG_SYMLINK
+# define IO_REPARSE_TAG_SYMLINK (0xA000000CL)
+#endif
+
+#include <io.h>
+#if !defined(__BORLANDC__)
+#define dup _dup
+#define dup2 _dup2
+#define open _open
+#define close _close
+#endif /* __BORLANDC__ */
+#endif /* OS_NT */
+
+#if defined(USE_EXECUNIX)
+# include <sys/types.h>
+# include <sys/wait.h>
+#elif defined(OS_VMS)
+# include <wait.h>
+#else
+/*
+ * NT does not have wait() and associated macros and uses the system() return
+ * value instead. Status code groups are documented at:
+ * http://msdn.microsoft.com/en-gb/library/ff565436.aspx
+ */
+# define WIFEXITED(w) (((w) & 0XFFFFFF00) == 0)
+# define WEXITSTATUS(w)(w)
+#endif
+
+/*
+ * builtins.c - builtin jam rules
+ *
+ * External routines:
+ * load_builtins() - define builtin rules
+ * unknown_rule() - reports an unknown rule occurrence to the
+ * user and exits
+ *
+ * Internal routines:
+ * append_if_exists() - if file exists, append it to the list
+ * builtin_calc() - CALC rule
+ * builtin_delete_module() - DELETE_MODULE ( MODULE ? )
+ * builtin_depends() - DEPENDS/INCLUDES rule
+ * builtin_echo() - ECHO rule
+ * builtin_exit() - EXIT rule
+ * builtin_export() - EXPORT ( MODULE ? : RULES * )
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
+ * builtin_glob() - GLOB rule
+ *      builtin_glob_recursive() - GLOB-RECURSIVELY rule
+ *      builtin_hdrmacro() - HDRMACRO rule
+ * builtin_import() - IMPORT rule
+ * builtin_match() - MATCH rule, regexp matching
+ * builtin_rebuilds() - REBUILDS rule
+ * builtin_rulenames() - RULENAMES ( MODULE ? )
+ * builtin_split_by_characters() - splits the given string into tokens
+ * builtin_varnames() - VARNAMES ( MODULE ? )
+ * get_source_line() - get a frame's file and line number
+ * information
+ */
+
+
+/*
+ * compile_builtin() - define builtin rules
+ */
+
+#define P0 (PARSE *)0
+#define C0 (OBJECT *)0
+
+#if defined( OS_NT ) || defined( OS_CYGWIN )
+ LIST * builtin_system_registry ( FRAME *, int );
+ LIST * builtin_system_registry_names( FRAME *, int );
+#endif
+
+int glob( char const * s, char const * c );
+
+void backtrace ( FRAME * );
+void backtrace_line ( FRAME * );
+void print_source_line( FRAME * );
+
+
+RULE * bind_builtin( char const * name_, LIST * (* f)( FRAME *, int flags ),
+ int flags, char const * * args )
+{
+ FUNCTION * func;
+ RULE * result;
+ OBJECT * name = object_new( name_ );
+
+ func = function_builtin( f, flags, args );
+
+ result = new_rule_body( root_module(), name, func, 1 );
+
+ function_free( func );
+
+ object_free( name );
+
+ return result;
+}
+
+
+RULE * duplicate_rule( char const * name_, RULE * other )
+{
+ OBJECT * name = object_new( name_ );
+ RULE * result = import_rule( other, root_module(), name );
+ object_free( name );
+ return result;
+}
+
+
+/*
+ * load_builtins() - define builtin rules
+ */
+
+void load_builtins()
+{
+ duplicate_rule( "Always",
+ bind_builtin( "ALWAYS",
+ builtin_flags, T_FLAG_TOUCHED, 0 ) );
+
+ duplicate_rule( "Depends",
+ bind_builtin( "DEPENDS",
+ builtin_depends, 0, 0 ) );
+
+ duplicate_rule( "echo",
+ duplicate_rule( "Echo",
+ bind_builtin( "ECHO",
+ builtin_echo, 0, 0 ) ) );
+
+ {
+ char const * args[] = { "message", "*", ":", "result-value", "?", 0 };
+ duplicate_rule( "exit",
+ duplicate_rule( "Exit",
+ bind_builtin( "EXIT",
+ builtin_exit, 0, args ) ) );
+ }
+
+ {
+ char const * args[] = { "directories", "*", ":", "patterns", "*", ":",
+ "case-insensitive", "?", 0 };
+ duplicate_rule( "Glob",
+ bind_builtin( "GLOB", builtin_glob, 0, args ) );
+ }
+
+ {
+ char const * args[] = { "patterns", "*", 0 };
+ bind_builtin( "GLOB-RECURSIVELY",
+ builtin_glob_recursive, 0, args );
+ }
+
+ duplicate_rule( "Includes",
+ bind_builtin( "INCLUDES",
+ builtin_depends, 1, 0 ) );
+
+ {
+ char const * args[] = { "targets", "*", ":", "targets-to-rebuild", "*",
+ 0 };
+ bind_builtin( "REBUILDS",
+ builtin_rebuilds, 0, args );
+ }
+
+ duplicate_rule( "Leaves",
+ bind_builtin( "LEAVES",
+ builtin_flags, T_FLAG_LEAVES, 0 ) );
+
+ duplicate_rule( "Match",
+ bind_builtin( "MATCH",
+ builtin_match, 0, 0 ) );
+
+ {
+ char const * args[] = { "string", ":", "delimiters", 0 };
+ bind_builtin( "SPLIT_BY_CHARACTERS",
+ builtin_split_by_characters, 0, args );
+ }
+
+ duplicate_rule( "NoCare",
+ bind_builtin( "NOCARE",
+ builtin_flags, T_FLAG_NOCARE, 0 ) );
+
+ duplicate_rule( "NOTIME",
+ duplicate_rule( "NotFile",
+ bind_builtin( "NOTFILE",
+ builtin_flags, T_FLAG_NOTFILE, 0 ) ) );
+
+ duplicate_rule( "NoUpdate",
+ bind_builtin( "NOUPDATE",
+ builtin_flags, T_FLAG_NOUPDATE, 0 ) );
+
+ duplicate_rule( "Temporary",
+ bind_builtin( "TEMPORARY",
+ builtin_flags, T_FLAG_TEMP, 0 ) );
+
+ bind_builtin( "ISFILE",
+ builtin_flags, T_FLAG_ISFILE, 0 );
+
+ duplicate_rule( "HdrMacro",
+ bind_builtin( "HDRMACRO",
+ builtin_hdrmacro, 0, 0 ) );
+
+ /* FAIL_EXPECTED is used to indicate that the result of a target build
+ * action should be inverted (ok <=> fail); this can be useful when
+ * performing test runs from Jamfiles.
+ */
+ bind_builtin( "FAIL_EXPECTED",
+ builtin_flags, T_FLAG_FAIL_EXPECTED, 0 );
+
+ bind_builtin( "RMOLD",
+ builtin_flags, T_FLAG_RMOLD, 0 );
+
+ {
+ char const * args[] = { "targets", "*", 0 };
+ bind_builtin( "UPDATE",
+ builtin_update, 0, args );
+ }
+
+ {
+ char const * args[] = { "targets", "*",
+ ":", "log", "?",
+ ":", "ignore-minus-n", "?",
+ ":", "ignore-minus-q", "?", 0 };
+ bind_builtin( "UPDATE_NOW",
+ builtin_update_now, 0, args );
+ }
+
+ {
+ char const * args[] = { "string", "pattern", "replacements", "+", 0 };
+ duplicate_rule( "subst",
+ bind_builtin( "SUBST",
+ builtin_subst, 0, args ) );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "RULENAMES",
+ builtin_rulenames, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "VARNAMES",
+ builtin_varnames, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "DELETE_MODULE",
+ builtin_delete_module, 0, args );
+ }
+
+ {
+ char const * args[] = { "source_module", "?",
+ ":", "source_rules", "*",
+ ":", "target_module", "?",
+ ":", "target_rules", "*",
+ ":", "localize", "?", 0 };
+ bind_builtin( "IMPORT",
+ builtin_import, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", ":", "rules", "*", 0 };
+ bind_builtin( "EXPORT",
+ builtin_export, 0, args );
+ }
+
+ {
+ char const * args[] = { "levels", "?", 0 };
+ bind_builtin( "CALLER_MODULE",
+ builtin_caller_module, 0, args );
+ }
+
+ {
+ char const * args[] = { "levels", "?", 0 };
+ bind_builtin( "BACKTRACE",
+ builtin_backtrace, 0, args );
+ }
+
+ {
+ char const * args[] = { 0 };
+ bind_builtin( "PWD",
+ builtin_pwd, 0, args );
+ }
+
+ {
+ char const * args[] = { "modules_to_import", "+",
+ ":", "target_module", "?", 0 };
+ bind_builtin( "IMPORT_MODULE",
+ builtin_import_module, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "IMPORTED_MODULES",
+ builtin_imported_modules, 0, args );
+ }
+
+ {
+ char const * args[] = { "instance_module", ":", "class_module", 0 };
+ bind_builtin( "INSTANCE",
+ builtin_instance, 0, args );
+ }
+
+ {
+ char const * args[] = { "sequence", "*", 0 };
+ bind_builtin( "SORT",
+ builtin_sort, 0, args );
+ }
+
+ {
+ char const * args[] = { "path_parts", "*", 0 };
+ bind_builtin( "NORMALIZE_PATH",
+ builtin_normalize_path, 0, args );
+ }
+
+ {
+ char const * args[] = { "args", "*", 0 };
+ bind_builtin( "CALC",
+ builtin_calc, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", ":", "rule", 0 };
+ bind_builtin( "NATIVE_RULE",
+ builtin_native_rule, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", ":", "rule", ":", "version", 0 };
+ bind_builtin( "HAS_NATIVE_RULE",
+ builtin_has_native_rule, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "*", 0 };
+ bind_builtin( "USER_MODULE",
+ builtin_user_module, 0, args );
+ }
+
+ {
+ char const * args[] = { 0 };
+ bind_builtin( "NEAREST_USER_LOCATION",
+ builtin_nearest_user_location, 0, args );
+ }
+
+ {
+ char const * args[] = { "file", 0 };
+ bind_builtin( "CHECK_IF_FILE",
+ builtin_check_if_file, 0, args );
+ }
+
+#ifdef HAVE_PYTHON
+ {
+ char const * args[] = { "python-module",
+ ":", "function",
+ ":", "jam-module",
+ ":", "rule-name", 0 };
+ bind_builtin( "PYTHON_IMPORT_RULE",
+ builtin_python_import_rule, 0, args );
+ }
+#endif
+
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ {
+ char const * args[] = { "key_path", ":", "data", "?", 0 };
+ bind_builtin( "W32_GETREG",
+ builtin_system_registry, 0, args );
+ }
+
+ {
+ char const * args[] = { "key_path", ":", "result-type", 0 };
+ bind_builtin( "W32_GETREGNAMES",
+ builtin_system_registry_names, 0, args );
+ }
+# endif
+
+ {
+ char const * args[] = { "command", ":", "*", 0 };
+ duplicate_rule( "SHELL",
+ bind_builtin( "COMMAND",
+ builtin_shell, 0, args ) );
+ }
+
+ {
+ char const * args[] = { "string", 0 };
+ bind_builtin( "MD5",
+ builtin_md5, 0, args );
+ }
+
+ {
+ char const * args[] = { "name", ":", "mode", 0 };
+ bind_builtin( "FILE_OPEN",
+ builtin_file_open, 0, args );
+ }
+
+ {
+ char const * args[] = { "string", ":", "width", 0 };
+ bind_builtin( "PAD",
+ builtin_pad, 0, args );
+ }
+
+ {
+ char const * args[] = { "targets", "*", 0 };
+ bind_builtin( "PRECIOUS",
+ builtin_precious, 0, args );
+ }
+
+ {
+ char const * args [] = { 0 };
+ bind_builtin( "SELF_PATH", builtin_self_path, 0, args );
+ }
+
+ {
+ char const * args [] = { "path", 0 };
+ bind_builtin( "MAKEDIR", builtin_makedir, 0, args );
+ }
+
+ {
+ const char * args [] = { "path", 0 };
+ bind_builtin( "READLINK", builtin_readlink, 0, args );
+ }
+
+ {
+ char const * args[] = { "archives", "*",
+ ":", "member-patterns", "*",
+ ":", "case-insensitive", "?",
+ ":", "symbol-patterns", "*", 0 };
+ bind_builtin( "GLOB_ARCHIVE", builtin_glob_archive, 0, args );
+ }
+
+#ifdef JAM_DEBUGGER
+
+ {
+ const char * args[] = { "list", "*", 0 };
+ bind_builtin("__DEBUG_PRINT_HELPER__", builtin_debug_print_helper, 0, args);
+ }
+
+#endif
+
+ /* Initialize builtin modules. */
+ init_set();
+ init_path();
+ init_regex();
+ init_property_set();
+ init_sequence();
+ init_order();
+}
+
+
+/*
+ * builtin_calc() - CALC rule
+ *
+ * Performs simple mathematical operations on two arguments.
+ */
+
+LIST * builtin_calc( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+
+ LIST * result = L0;
+ long lhs_value;
+ long rhs_value;
+ long result_value;
+ char buffer[ 16 ];
+ char const * lhs;
+ char const * op;
+ char const * rhs;
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+
+ if ( iter == end ) return L0;
+ lhs = object_str( list_item( iter ) );
+
+ iter = list_next( iter );
+ if ( iter == end ) return L0;
+ op = object_str( list_item( iter ) );
+
+ iter = list_next( iter );
+ if ( iter == end ) return L0;
+ rhs = object_str( list_item( iter ) );
+
+ lhs_value = atoi( lhs );
+ rhs_value = atoi( rhs );
+
+ if ( !strcmp( "+", op ) )
+ result_value = lhs_value + rhs_value;
+ else if ( !strcmp( "-", op ) )
+ result_value = lhs_value - rhs_value;
+ else
+ return L0;
+
+ sprintf( buffer, "%ld", result_value );
+ result = list_push_back( result, object_new( buffer ) );
+ return result;
+}
+
+
+/*
+ * builtin_depends() - DEPENDS/INCLUDES rule
+ *
+ * The DEPENDS/INCLUDES builtin rule appends each of the listed sources to the
+ * dependency/includes list of each of the listed targets. It binds both the
+ * targets and sources as TARGETs.
+ */
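+/* Illustrative Jamfile usage (a sketch, not part of the engine source;
+ * "app", "main.cpp" and "config.h" are hypothetical targets):
+ *
+ *     DEPENDS app : main.o util.o ;    # app must be rebuilt after its objects
+ *     INCLUDES main.cpp : config.h ;   # whatever depends on main.cpp also
+ *                                      # depends on config.h
+ */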
+
+LIST * builtin_depends( FRAME * frame, int flags )
+{
+ LIST * const targets = lol_get( frame->args, 0 );
+ LIST * const sources = lol_get( frame->args, 1 );
+
+ LISTITER iter = list_begin( targets );
+ LISTITER end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+
+ if ( flags )
+ target_include_many( t, sources );
+ else
+ t->depends = targetlist( t->depends, sources );
+ }
+
+ /* Enter reverse links */
+ iter = list_begin( sources );
+ end = list_end( sources );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const s = bindtarget( list_item( iter ) );
+ if ( flags )
+ {
+ LISTITER t_iter = list_begin( targets );
+ LISTITER const t_end = list_end( targets );
+ for ( ; t_iter != t_end; t_iter = list_next( t_iter ) )
+ s->dependants = targetentry( s->dependants, bindtarget(
+ list_item( t_iter ) )->includes );
+ }
+ else
+ s->dependants = targetlist( s->dependants, targets );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_rebuilds() - REBUILDS rule
+ *
+ * Appends each of the rebuild-targets listed in its second argument to the
+ * rebuilds list for each of the targets listed in its first argument.
+ */
+
+LIST * builtin_rebuilds( FRAME * frame, int flags )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * rebuilds = lol_get( frame->args, 1 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+ t->rebuilds = targetlist( t->rebuilds, rebuilds );
+ }
+ return L0;
+}
+
+
+/*
+ * builtin_echo() - ECHO rule
+ *
+ * Echoes the targets to the user. No other actions are taken.
+ */
+
+LIST * builtin_echo( FRAME * frame, int flags )
+{
+ list_print( lol_get( frame->args, 0 ) );
+ out_printf( "\n" );
+ out_flush();
+ return L0;
+}
+
+
+/*
+ * builtin_exit() - EXIT rule
+ *
+ * Echoes the targets to the user and exits the program with a failure status.
+ */
+
+LIST * builtin_exit( FRAME * frame, int flags )
+{
+ LIST * const code = lol_get( frame->args, 1 );
+ list_print( lol_get( frame->args, 0 ) );
+ out_printf( "\n" );
+ if ( !list_empty( code ) )
+ {
+ int status = atoi( object_str( list_front( code ) ) );
+#ifdef OS_VMS
+ switch( status )
+ {
+ case 0:
+ status = EXITOK;
+ break;
+ case 1:
+ status = EXITBAD;
+ break;
+ }
+#endif
+ exit( status );
+ }
+ else
+ exit( EXITBAD ); /* yeech */
+ return L0;
+}
+
+
+/*
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
+ *
+ * Marks the target with the appropriate flag, for use by make0(). It binds each
+ * target as a TARGET.
+ */
+
+LIST * builtin_flags( FRAME * frame, int flags )
+{
+ LIST * const targets = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ bindtarget( list_item( iter ) )->flags |= flags;
+ return L0;
+}
+
+
+/*
+ * builtin_glob() - GLOB rule
+ */
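+/* Illustrative Jamfile usage (a sketch; "src" is a hypothetical directory):
+ *
+ *     local sources = [ GLOB src : *.cpp ] ;
+ *     local any     = [ GLOB src : *.cpp : true ] ;  # force case-insensitive match
+ */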
+
+struct globbing
+{
+ LIST * patterns;
+ LIST * results;
+ LIST * case_insensitive;
+};
+
+
+static void downcase_inplace( char * p )
+{
+ for ( ; *p; ++p )
+ *p = tolower( *p );
+}
+
+
+static void builtin_glob_back( void * closure, OBJECT * file, int status,
+ timestamp const * const time )
+{
+ PROFILE_ENTER( BUILTIN_GLOB_BACK );
+
+ struct globbing * const globbing = (struct globbing *)closure;
+ PATHNAME f;
+ string buf[ 1 ];
+ LISTITER iter;
+ LISTITER end;
+
+ /* Null out directory for matching. We wish we had file_dirscan() pass up a
+ * PATHNAME.
+ */
+ path_parse( object_str( file ), &f );
+ f.f_dir.len = 0;
+
+ /* For globbing, we unconditionally ignore current and parent directory
+ * items. Since these items always exist, there is no reason why a caller of
+ * GLOB would want to see them. We could also change file_dirscan(), but
+ * then paths with embedded "." and ".." would not work anywhere.
+ */
+ if ( !strcmp( f.f_base.ptr, "." ) || !strcmp( f.f_base.ptr, ".." ) )
+ {
+ PROFILE_EXIT( BUILTIN_GLOB_BACK );
+ return;
+ }
+
+ string_new( buf );
+ path_build( &f, buf );
+
+ if ( globbing->case_insensitive )
+ downcase_inplace( buf->value );
+
+ iter = list_begin( globbing->patterns );
+ end = list_end( globbing->patterns );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( !glob( object_str( list_item( iter ) ), buf->value ) )
+ {
+ globbing->results = list_push_back( globbing->results, object_copy(
+ file ) );
+ break;
+ }
+ }
+
+ string_free( buf );
+
+ PROFILE_EXIT( BUILTIN_GLOB_BACK );
+}
+
+
+static LIST * downcase_list( LIST * in )
+{
+ LIST * result = L0;
+ LISTITER iter = list_begin( in );
+ LISTITER const end = list_end( in );
+
+ string s[ 1 ];
+ string_new( s );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ string_append( s, object_str( list_item( iter ) ) );
+ downcase_inplace( s->value );
+ result = list_push_back( result, object_new( s->value ) );
+ string_truncate( s, 0 );
+ }
+
+ string_free( s );
+ return result;
+}
+
+
+LIST * builtin_glob( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LIST * const r = lol_get( frame->args, 1 );
+
+ LISTITER iter;
+ LISTITER end;
+ struct globbing globbing;
+
+ globbing.results = L0;
+ globbing.patterns = r;
+
+ globbing.case_insensitive =
+# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS )
+ l; /* Always case-insensitive if any files can be found. */
+# else
+ lol_get( frame->args, 2 );
+# endif
+
+ if ( globbing.case_insensitive )
+ globbing.patterns = downcase_list( r );
+
+ iter = list_begin( l );
+ end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ file_dirscan( list_item( iter ), builtin_glob_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ list_free( globbing.patterns );
+
+ return globbing.results;
+}
+
+
+static int has_wildcards( char const * const str )
+{
+ return str[ strcspn( str, "[]*?" ) ] ? 1 : 0;
+}
+
+
+/*
+ * append_if_exists() - if file exists, append it to the list
+ */
+
+static LIST * append_if_exists( LIST * list, OBJECT * file )
+{
+ file_info_t * info = file_query( file );
+ return info
+ ? list_push_back( list, object_copy( info->name ) )
+ : list ;
+}
+
+
+LIST * glob1( OBJECT * dirname, OBJECT * pattern )
+{
+ LIST * const plist = list_new( object_copy( pattern ) );
+ struct globbing globbing;
+
+ globbing.results = L0;
+ globbing.patterns = plist;
+
+ globbing.case_insensitive
+# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS )
+ = plist; /* always case-insensitive if any files can be found */
+# else
+ = L0;
+# endif
+
+ if ( globbing.case_insensitive )
+ globbing.patterns = downcase_list( plist );
+
+ file_dirscan( dirname, builtin_glob_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ list_free( globbing.patterns );
+
+ list_free( plist );
+
+ return globbing.results;
+}
+
+
+LIST * glob_recursive( char const * pattern )
+{
+ LIST * result = L0;
+
+ /* Check if there are metacharacters in the pattern. */
+ if ( !has_wildcards( pattern ) )
+ {
+ /* No metacharacters. Check if the path exists. */
+ OBJECT * const p = object_new( pattern );
+ result = append_if_exists( result, p );
+ object_free( p );
+ }
+ else
+ {
+ /* Have metacharacters in the pattern. Split into dir/name. */
+ PATHNAME path[ 1 ];
+ path_parse( pattern, path );
+
+ if ( path->f_dir.ptr )
+ {
+ LIST * dirs = L0;
+ string dirname[ 1 ];
+ string basename[ 1 ];
+ string_new( dirname );
+ string_new( basename );
+
+ string_append_range( dirname, path->f_dir.ptr,
+ path->f_dir.ptr + path->f_dir.len );
+
+ path->f_grist.ptr = 0;
+ path->f_grist.len = 0;
+ path->f_dir.ptr = 0;
+ path->f_dir.len = 0;
+ path_build( path, basename );
+
+ dirs = has_wildcards( dirname->value )
+ ? glob_recursive( dirname->value )
+ : list_push_back( dirs, object_new( dirname->value ) );
+
+ if ( has_wildcards( basename->value ) )
+ {
+ OBJECT * const b = object_new( basename->value );
+ LISTITER iter = list_begin( dirs );
+ LISTITER const end = list_end( dirs );
+ for ( ; iter != end; iter = list_next( iter ) )
+ result = list_append( result, glob1( list_item( iter ), b )
+ );
+ object_free( b );
+ }
+ else
+ {
+ LISTITER iter = list_begin( dirs );
+ LISTITER const end = list_end( dirs );
+ string file_string[ 1 ];
+ string_new( file_string );
+
+ /* No wildcard in basename. */
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ OBJECT * p;
+ path->f_dir.ptr = object_str( list_item( iter ) );
+ path->f_dir.len = strlen( object_str( list_item( iter ) ) );
+ path_build( path, file_string );
+
+ p = object_new( file_string->value );
+
+ result = append_if_exists( result, p );
+
+ object_free( p );
+
+ string_truncate( file_string, 0 );
+ }
+
+ string_free( file_string );
+ }
+
+ string_free( dirname );
+ string_free( basename );
+
+ list_free( dirs );
+ }
+ else
+ {
+ /* No directory, just a pattern. */
+ OBJECT * const p = object_new( pattern );
+ result = list_append( result, glob1( constant_dot, p ) );
+ object_free( p );
+ }
+ }
+
+ return result;
+}
+
+
+/*
+ * builtin_glob_recursive() - GLOB-RECURSIVELY rule
+ */
+
+LIST * builtin_glob_recursive( FRAME * frame, int flags )
+{
+ LIST * result = L0;
+ LIST * const l = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ result = list_append( result, glob_recursive( object_str( list_item(
+ iter ) ) ) );
+ return result;
+}
+
+
+/*
+ * builtin_match() - MATCH rule, regexp matching
+ */
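+/* Illustrative Jamfile usage (a sketch, not part of the engine source):
+ *
+ *     local parts = [ MATCH "([^.]*)[.](.*)" : main.cpp ] ;   # parts = main cpp
+ */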
+
+LIST * builtin_match( FRAME * frame, int flags )
+{
+ LIST * l;
+ LIST * r;
+ LIST * result = L0;
+ LISTITER l_iter;
+ LISTITER l_end;
+ LISTITER r_iter;
+ LISTITER r_end;
+
+ string buf[ 1 ];
+ string_new( buf );
+
+ /* For each pattern */
+
+ l = lol_get( frame->args, 0 );
+ l_iter = list_begin( l );
+ l_end = list_end( l );
+ for ( ; l_iter != l_end; l_iter = list_next( l_iter ) )
+ {
+ /* Result is cached and intentionally never freed. */
+ regexp * re = regex_compile( list_item( l_iter ) );
+
+ /* For each string to match against. */
+ r = lol_get( frame->args, 1 );
+ r_iter = list_begin( r );
+ r_end = list_end( r );
+ for ( ; r_iter != r_end; r_iter = list_next( r_iter ) )
+ {
+ if ( regexec( re, object_str( list_item( r_iter ) ) ) )
+ {
+ int i;
+ int top;
+
+ /* Find highest parameter */
+
+ for ( top = NSUBEXP; top-- > 1; )
+ if ( re->startp[ top ] )
+ break;
+
+ /* And add all parameters up to highest onto list. */
+ /* Must have parameters to have results! */
+ for ( i = 1; i <= top; ++i )
+ {
+ string_append_range( buf, re->startp[ i ], re->endp[ i ] );
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ }
+
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * builtin_split_by_characters() - splits the given string into tokens
+ */
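+/* Illustrative Jamfile usage (a sketch, not part of the engine source):
+ *
+ *     local words = [ SPLIT_BY_CHARACTERS "a,b;c" : ",;" ] ;   # words = a b c
+ */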
+
+LIST * builtin_split_by_characters( FRAME * frame, int flags )
+{
+ LIST * l1 = lol_get( frame->args, 0 );
+ LIST * l2 = lol_get( frame->args, 1 );
+
+ LIST * result = L0;
+
+ string buf[ 1 ];
+
+ char const * delimiters = object_str( list_front( l2 ) );
+ char * t;
+
+ string_copy( buf, object_str( list_front( l1 ) ) );
+
+ t = strtok( buf->value, delimiters );
+ while ( t )
+ {
+ result = list_push_back( result, object_new( t ) );
+ t = strtok( NULL, delimiters );
+ }
+
+ string_free( buf );
+
+ return result;
+}
+
+
+/*
+ * builtin_hdrmacro() - HDRMACRO rule, scans targets for header macro definitions
+ */
+
+LIST * builtin_hdrmacro( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+
+ /* Scan file for header filename macro definitions. */
+ if ( DEBUG_HEADER )
+ out_printf( "scanning '%s' for header file macro definitions\n",
+ object_str( list_item( iter ) ) );
+
+ macro_headers( t );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_rulenames() - RULENAMES ( MODULE ? )
+ *
+ * Returns a list of the non-local rule names in the given MODULE. If MODULE is
+ * not supplied, returns the list of rule names in the global module.
+ */
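+/* Illustrative Jamfile usage (a sketch; "util" is a hypothetical module name):
+ *
+ *     local names = [ RULENAMES util ] ;   # exported rule names of module util
+ *     local all   = [ RULENAMES ] ;        # rule names of the global module
+ */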
+
+static void add_rule_name( void * r_, void * result_ )
+{
+ RULE * const r = (RULE *)r_;
+ LIST * * const result = (LIST * *)result_;
+ if ( r->exported )
+ *result = list_push_back( *result, object_copy( r->name ) );
+}
+
+
+LIST * builtin_rulenames( FRAME * frame, int flags )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * const source_module = bindmodule( list_empty( arg0 )
+ ? 0
+ : list_front( arg0 ) );
+
+ if ( source_module->rules )
+ hashenumerate( source_module->rules, add_rule_name, &result );
+ return result;
+}
+
+
+/*
+ * builtin_varnames() - VARNAMES ( MODULE ? )
+ *
+ * Returns a list of the variable names in the given MODULE. If MODULE is not
+ * supplied, returns the list of variable names in the global module.
+ */
+
+/* Helper function for builtin_varnames(), below. Used with hashenumerate(); it
+ * appends the key of each element to the list.
+ */
+static void add_hash_key( void * np, void * result_ )
+{
+ LIST * * result = (LIST * *)result_;
+ *result = list_push_back( *result, object_copy( *(OBJECT * *)np ) );
+}
+
+
+LIST * builtin_varnames( FRAME * frame, int flags )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * source_module = bindmodule( list_empty( arg0 )
+ ? 0
+ : list_front( arg0 ) );
+
+ struct hash * const vars = source_module->variables;
+ if ( vars )
+ hashenumerate( vars, add_hash_key, &result );
+ return result;
+}
+
+
+/*
+ * builtin_delete_module() - DELETE_MODULE ( MODULE ? )
+ *
+ * Clears all rules and variables from the given module.
+ */
+
+LIST * builtin_delete_module( FRAME * frame, int flags )
+{
+ LIST * const arg0 = lol_get( frame->args, 0 );
+ module_t * const source_module = bindmodule( list_empty( arg0 ) ? 0 :
+ list_front( arg0 ) );
+ delete_module( source_module );
+ return L0;
+}
+
+
+/*
+ * unknown_rule() - reports an unknown rule occurrence to the user and exits
+ */
+
+void unknown_rule( FRAME * frame, char const * key, module_t * module,
+ OBJECT * rule_name )
+{
+ backtrace_line( frame->prev );
+ if ( key )
+ out_printf("%s error", key);
+ else
+ out_printf("ERROR");
+ out_printf( ": rule \"%s\" unknown in ", object_str( rule_name ) );
+ if ( module->name )
+ out_printf( "module \"%s\".\n", object_str( module->name ) );
+ else
+ out_printf( "root module.\n" );
+ backtrace( frame->prev );
+ exit( EXITBAD );
+}
+
+
+/*
+ * builtin_import() - IMPORT rule
+ *
+ * IMPORT
+ * (
+ * SOURCE_MODULE ? :
+ * SOURCE_RULES * :
+ * TARGET_MODULE ? :
+ * TARGET_RULES * :
+ * LOCALIZE ?
+ * )
+ *
+ * Imports rules from the SOURCE_MODULE into the TARGET_MODULE as local rules.
+ * If either SOURCE_MODULE or TARGET_MODULE is not supplied, it refers to the
+ * global module. SOURCE_RULES specifies which rules from the SOURCE_MODULE to
+ * import; TARGET_RULES specifies the names to give those rules in
+ * TARGET_MODULE. If SOURCE_RULES contains a name that does not correspond to
+ * a rule in SOURCE_MODULE, or if it contains a different number of items than
+ * TARGET_RULES, an error is issued. If LOCALIZE is specified, the rules will be
+ * executed in TARGET_MODULE, with corresponding access to its module local
+ * variables.
+ */
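+/* Illustrative Jamfile usage (a sketch; "util" and "print" are hypothetical
+ * module/rule names): import util.print into the global module under the name
+ * "say", executing it in the target module's context:
+ *
+ *     IMPORT util : print : : say : true ;
+ */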
+
+LIST * builtin_import( FRAME * frame, int flags )
+{
+ LIST * source_module_list = lol_get( frame->args, 0 );
+ LIST * source_rules = lol_get( frame->args, 1 );
+ LIST * target_module_list = lol_get( frame->args, 2 );
+ LIST * target_rules = lol_get( frame->args, 3 );
+ LIST * localize = lol_get( frame->args, 4 );
+
+ module_t * target_module = bindmodule( list_empty( target_module_list )
+ ? 0
+ : list_front( target_module_list ) );
+ module_t * source_module = bindmodule( list_empty( source_module_list )
+ ? 0
+ : list_front( source_module_list ) );
+
+ LISTITER source_iter = list_begin( source_rules );
+ LISTITER const source_end = list_end( source_rules );
+ LISTITER target_iter = list_begin( target_rules );
+ LISTITER const target_end = list_end( target_rules );
+
+ for ( ;
+ source_iter != source_end && target_iter != target_end;
+ source_iter = list_next( source_iter ),
+ target_iter = list_next( target_iter ) )
+ {
+ RULE * r = nullptr;
+ RULE * imported = nullptr;
+
+ if ( !source_module->rules || !(r = (RULE *)hash_find(
+ source_module->rules, list_item( source_iter ) ) ) )
+ {
+ unknown_rule( frame, "IMPORT", source_module, list_item( source_iter
+ ) );
+ }
+
+ imported = import_rule( r, target_module, list_item( target_iter ) );
+ if ( !list_empty( localize ) )
+ rule_localize( imported, target_module );
+ /* This rule is really part of some other module. Just refer to it here,
+ * but do not let it out.
+ */
+ imported->exported = 0;
+ }
+
+ if ( source_iter != source_end || target_iter != target_end )
+ {
+ backtrace_line( frame->prev );
+ out_printf( "import error: length of source and target rule name lists "
+ "don't match!\n" );
+ out_printf( " source: " );
+ list_print( source_rules );
+ out_printf( "\n target: " );
+ list_print( target_rules );
+ out_printf( "\n" );
+ backtrace( frame->prev );
+ exit( EXITBAD );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_export() - EXPORT ( MODULE ? : RULES * )
+ *
+ * The EXPORT rule marks RULES from the given MODULE as non-local (and thus
+ * exportable). If an element of RULES does not name a rule in MODULE, an error
+ * is issued.
+ */
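+/* Illustrative Jamfile usage (a sketch; "util" and "print" are hypothetical):
+ *
+ *     EXPORT util : print ;   # make util.print importable by other modules
+ */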
+
+LIST * builtin_export( FRAME * frame, int flags )
+{
+ LIST * const module_list = lol_get( frame->args, 0 );
+ LIST * const rules = lol_get( frame->args, 1 );
+ module_t * const m = bindmodule( list_empty( module_list ) ? 0 : list_front(
+ module_list ) );
+
+ LISTITER iter = list_begin( rules );
+ LISTITER const end = list_end( rules );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ RULE * r = nullptr;
+ if ( !m->rules || !( r = (RULE *)hash_find( m->rules, list_item( iter )
+ ) ) )
+ {
+ unknown_rule( frame, "EXPORT", m, list_item( iter ) );
+ }
+ r->exported = 1;
+ }
+ return L0;
+}
+
+
+/*
+ * get_source_line() - get a frame's file and line number information
+ *
+ * This is the execution traceback information to be displayed in debug
+ * output or an error backtrace.
+ */
+
+static void get_source_line( FRAME * frame, char const * * file, int * line )
+{
+ if ( frame->file )
+ {
+ char const * f = object_str( frame->file );
+ int l = frame->line;
+ if ( !strcmp( f, "+" ) )
+ {
+ f = "jambase.c";
+ l += 3;
+ }
+ *file = f;
+ *line = l;
+ }
+ else
+ {
+ *file = "(builtin)";
+ *line = -1;
+ }
+}
+
+
+void print_source_line( FRAME * frame )
+{
+ char const * file;
+ int line;
+ get_source_line( frame, &file, &line );
+ if ( line < 0 )
+ out_printf( "(builtin):" );
+ else
+ out_printf( "%s:%d:", file, line );
+}
+
+
+/*
+ * backtrace_line() - print a single line of error backtrace for the given
+ * frame.
+ */
+
+void backtrace_line( FRAME * frame )
+{
+ if ( frame == 0 )
+ {
+ out_printf( "(no frame):" );
+ }
+ else
+ {
+ print_source_line( frame );
+ out_printf( " in %s\n", frame->rulename );
+ }
+}
+
+
+/*
+ * backtrace() - Print the entire backtrace from the given frame to the Jambase
+ * which invoked it.
+ */
+
+void backtrace( FRAME * frame )
+{
+ if ( !frame ) return;
+ while ( ( frame = frame->prev ) )
+ backtrace_line( frame );
+}
+
+
+/*
+ * builtin_backtrace() - A Jam version of the backtrace function, taking an
+ * optional number of levels and returning a list of quadruples, FILENAME LINE
+ * MODULE RULENAME, describing each frame. Note that the module name is always
+ * followed by a period.
+ */
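+/* Illustrative Jamfile usage (a sketch, not part of the engine source):
+ *
+ *     local bt = [ BACKTRACE 1 ] ;   # e.g. bt = Jamfile.jam 12 mymodule. myrule
+ */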
+
+LIST * builtin_backtrace( FRAME * frame, int flags )
+{
+ LIST * const levels_arg = lol_get( frame->args, 0 );
+ int levels = list_empty( levels_arg )
+ ? (int)( (unsigned int)(-1) >> 1 )
+ : atoi( object_str( list_front( levels_arg ) ) );
+
+ LIST * result = L0;
+ for ( ; ( frame = frame->prev ) && levels; --levels )
+ {
+ char const * file;
+ int line;
+ char buf[ 32 ];
+ string module_name[ 1 ];
+ get_source_line( frame, &file, &line );
+ sprintf( buf, "%d", line );
+ string_new( module_name );
+ if ( frame->module->name )
+ {
+ string_append( module_name, object_str( frame->module->name ) );
+ string_append( module_name, "." );
+ }
+ result = list_push_back( result, object_new( file ) );
+ result = list_push_back( result, object_new( buf ) );
+ result = list_push_back( result, object_new( module_name->value ) );
+ result = list_push_back( result, object_new( frame->rulename ) );
+ string_free( module_name );
+ }
+ return result;
+}
+
+
+/*
+ * builtin_caller_module() - CALLER_MODULE ( levels ? )
+ *
+ * If levels is not supplied, returns the name of the module of the rule which
+ * called the one calling this one. If levels is supplied, it is interpreted as
+ * an integer specifying a number of additional levels of call stack to traverse
+ * in order to locate the module in question. If no such module exists, returns
+ * the empty list. Also returns the empty list when the module in question is
+ * the global module. This rule is needed for implementing module import
+ * behavior.
+ */
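+/* Illustrative Jamfile usage (a sketch; "who-called" is a hypothetical rule):
+ *
+ *     rule who-called ( ) { return [ CALLER_MODULE ] ; }
+ *     # a rule in module "a" calling who-called gets "a" back; the empty list
+ *     # is returned when that caller lives in the global module.
+ */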
+
+LIST * builtin_caller_module( FRAME * frame, int flags )
+{
+ LIST * const levels_arg = lol_get( frame->args, 0 );
+ int const levels = list_empty( levels_arg )
+ ? 0
+ : atoi( object_str( list_front( levels_arg ) ) );
+
+ int i;
+ for ( i = 0; ( i < levels + 2 ) && frame->prev; ++i )
+ frame = frame->prev;
+
+ return frame->module == root_module()
+ ? L0
+ : list_new( object_copy( frame->module->name ) );
+}
+
+
+/*
+ * Return the current working directory.
+ *
+ * Usage: pwd = [ PWD ] ;
+ */
+
+LIST * builtin_pwd( FRAME * frame, int flags )
+{
+ return list_new( object_copy( cwd() ) );
+}
+
+
+/*
+ * Adds targets to the list of targets that jam will attempt to update.
+ */
+
+LIST * builtin_update( FRAME * frame, int flags )
+{
+ LIST * result = list_copy( targets_to_update() );
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( arg1 ), end = list_end( arg1 );
+ clear_targets_to_update();
+ for ( ; iter != end; iter = list_next( iter ) )
+ mark_target_for_updating( object_copy( list_item( iter ) ) );
+ return result;
+}
+
+extern int anyhow;
+int last_update_now_status;
+
+/* Takes a list of target names and immediately updates them.
+ *
+ * Parameters:
+ * 1. Target list.
+ * 2. Optional file descriptor (converted to a string) for a log file where all
+ * the related build output should be redirected.
+ * 3. If specified, makes the build temporarily disable the -n option, i.e.
+ * forces all needed out-of-date targets to be rebuilt.
+ * 4. If specified, makes the build temporarily disable the -q option, i.e.
+ * forces the build to continue even if one of the targets fails to build.
+ */
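+/* Illustrative Jamfile usage (a sketch; "stage-1" is a hypothetical target and
+ * "12" a descriptor previously obtained, e.g. from FILE_OPEN):
+ *
+ *     local ok = [ UPDATE_NOW stage-1 : 12 : force ] ;   # "ok" on success
+ */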
+LIST * builtin_update_now( FRAME * frame, int flags )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * log = lol_get( frame->args, 1 );
+ LIST * force = lol_get( frame->args, 2 );
+ LIST * continue_ = lol_get( frame->args, 3 );
+ int status;
+ int original_stdout = 0;
+ int original_stderr = 0;
+ int original_noexec = 0;
+ int original_quitquick = 0;
+
+ if ( !list_empty( log ) )
+ {
+ /* Temporarily redirect stdout and stderr to the given log file. */
+ int const fd = atoi( object_str( list_front( log ) ) );
+ original_stdout = dup( 0 );
+ original_stderr = dup( 1 );
+ dup2( fd, 0 );
+ dup2( fd, 1 );
+ }
+
+ if ( !list_empty( force ) )
+ {
+ original_noexec = globs.noexec;
+ globs.noexec = 0;
+ }
+
+ if ( !list_empty( continue_ ) )
+ {
+ original_quitquick = globs.quitquick;
+ globs.quitquick = 0;
+ }
+
+ status = make( targets, anyhow );
+
+ if ( !list_empty( force ) )
+ {
+ globs.noexec = original_noexec;
+ }
+
+ if ( !list_empty( continue_ ) )
+ {
+ globs.quitquick = original_quitquick;
+ }
+
+ if ( !list_empty( log ) )
+ {
+ /* Flush whatever stdio might have buffered, while descriptors 0 and 1
+ * still refer to the log file.
+ */
+ out_flush( );
+ err_flush( );
+ dup2( original_stdout, 0 );
+ dup2( original_stderr, 1 );
+ close( original_stdout );
+ close( original_stderr );
+ }
+
+ last_update_now_status = status;
+
+ return status ? L0 : list_new( object_copy( constant_ok ) );
+}
+
+
+LIST * builtin_import_module( FRAME * frame, int flags )
+{
+ LIST * const arg1 = lol_get( frame->args, 0 );
+ LIST * const arg2 = lol_get( frame->args, 1 );
+ module_t * const m = list_empty( arg2 )
+ ? root_module()
+ : bindmodule( list_front( arg2 ) );
+ import_module( arg1, m );
+ return L0;
+}
+
+
+LIST * builtin_imported_modules( FRAME * frame, int flags )
+{
+ LIST * const arg0 = lol_get( frame->args, 0 );
+ OBJECT * const module = list_empty( arg0 ) ? 0 : list_front( arg0 );
+ return imported_modules( bindmodule( module ) );
+}
+
+
+LIST * builtin_instance( FRAME * frame, int flags )
+{
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LIST * arg2 = lol_get( frame->args, 1 );
+ module_t * const instance = bindmodule( list_front( arg1 ) );
+ module_t * const class_module = bindmodule( list_front( arg2 ) );
+ instance->class_module = class_module;
+ module_set_fixed_variables( instance, class_module->num_fixed_variables );
+ return L0;
+}
+
+
+LIST * builtin_sort( FRAME * frame, int flags )
+{
+ return list_sort( lol_get( frame->args, 0 ) );
+}
+
+
+LIST * builtin_normalize_path( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+
+ /* First, we iterate over all '/'-separated elements, starting from the end
+ * of string. If we see a '..', we remove a preceding path element. If we
+ * see '.', we remove it. Removal is done by overwriting data using '\1'
+ * characters. After the whole string has been processed, we do a second
+ * pass, removing any inserted '\1' characters.
+ */
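+ /* For illustration (a sketch, not part of the engine source):
+ *     [ NORMALIZE_PATH a b .. c ]  yields "a/c", while
+ *     [ NORMALIZE_PATH / a .. .. ] yields an empty list, because the rooted
+ *     path escapes the root.
+ */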
+
+ string in[ 1 ];
+ string out[ 1 ];
+ /* Last character of the part of string still to be processed. */
+ char * end;
+ /* Working pointer. */
+ char * current;
+ /* Number of '..' elements seen and not processed yet. */
+ int dotdots = 0;
+ int rooted = 0;
+ OBJECT * result = 0;
+ LISTITER arg_iter = list_begin( arg );
+ LISTITER arg_end = list_end( arg );
+
+ /* Make a copy of input: we should not change it. Prepend a '/' before it as
+ * a guard for the algorithm later on and remember whether it was originally
+ * rooted or not.
+ */
+ string_new( in );
+ string_push_back( in, '/' );
+ for ( ; arg_iter != arg_end; arg_iter = list_next( arg_iter ) )
+ {
+ if ( object_str( list_item( arg_iter ) )[ 0 ] != '\0' )
+ {
+ if ( in->size == 1 )
+ rooted = ( object_str( list_item( arg_iter ) )[ 0 ] == '/' ) ||
+ ( object_str( list_item( arg_iter ) )[ 0 ] == '\\' );
+ else
+ string_append( in, "/" );
+ string_append( in, object_str( list_item( arg_iter ) ) );
+ }
+ }
+
+ /* Convert \ into /. On Windows, paths using / and \ are equivalent, and we
+ * want this function to obtain a canonic representation.
+ */
+ for ( current = in->value, end = in->value + in->size;
+ current < end; ++current )
+ if ( *current == '\\' )
+ *current = '/';
+
+ /* Now we remove any extra path elements by overwriting them with '\1'
+ * characters and count how many more unused '..' path elements there are
+ * remaining. Note that each remaining path element will always start with
+ * a '/' character.
+ */
+ for ( end = in->value + in->size - 1; end >= in->value; )
+ {
+ /* Set 'current' to the next occurrence of '/', which always exists. */
+ for ( current = end; *current != '/'; --current );
+
+ if ( current == end )
+ {
+ /* Found a trailing or duplicate '/'. Remove it. */
+ *current = '\1';
+ }
+ else if ( ( end - current == 1 ) && ( *( current + 1 ) == '.' ) )
+ {
+ /* Found '/.'. Remove them all. */
+ *current = '\1';
+ *(current + 1) = '\1';
+ }
+ else if ( ( end - current == 2 ) && ( *( current + 1 ) == '.' ) &&
+ ( *( current + 2 ) == '.' ) )
+ {
+ /* Found '/..'. Remove them all. */
+ *current = '\1';
+ *(current + 1) = '\1';
+ *(current + 2) = '\1';
+ ++dotdots;
+ }
+ else if ( dotdots )
+ {
+ memset( current, '\1', end - current + 1 );
+ --dotdots;
+ }
+ end = current - 1;
+ }
+
+ string_new( out );
+
+ /* Now we know that we need to add exactly dotdots '..' path elements to the
+ * front and that our string is either empty or has a '/' as its first
+ * significant character. If we have any dotdots remaining then the passed
+ * path must not have been rooted or else it is invalid and we return an empty
+ * list.
+ */
+ if ( dotdots )
+ {
+ if ( rooted )
+ {
+ string_free( out );
+ string_free( in );
+ return L0;
+ }
+ do
+ string_append( out, "/.." );
+ while ( --dotdots );
+ }
+
+ /* Now we actually remove all the path characters marked for removal. */
+ for ( current = in->value; *current; ++current )
+ if ( *current != '\1' )
+ string_push_back( out, *current );
+
+ /* Here we know that our string contains no '\1' characters and is either
+ * empty or has a '/' as its initial character. If the original path was not
+ * rooted and we have a non-empty path we need to drop the initial '/'. If
+ * the original path was rooted and we have an empty path we need to add
+ * back the '/'.
+ */
+ result = object_new( out->size
+ ? out->value + !rooted
+ : ( rooted ? "/" : "." ) );
+
+ string_free( out );
+ string_free( in );
+
+ return list_new( result );
+}
+
+
+LIST * builtin_native_rule( FRAME * frame, int flags )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ LIST * rule_name = lol_get( frame->args, 1 );
+
+ module_t * module = bindmodule( list_front( module_name ) );
+
+ native_rule_t * np;
+ if ( module->native_rules && (np = (native_rule_t *)hash_find(
+ module->native_rules, list_front( rule_name ) ) ) )
+ {
+ new_rule_body( module, np->name, np->procedure, 1 );
+ }
+ else
+ {
+ backtrace_line( frame->prev );
+ out_printf( "error: no native rule \"%s\" defined in module \"%s.\"\n",
+ object_str( list_front( rule_name ) ), object_str( module->name ) );
+ backtrace( frame->prev );
+ exit( EXITBAD );
+ }
+ return L0;
+}
+
+
+LIST * builtin_has_native_rule( FRAME * frame, int flags )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ LIST * rule_name = lol_get( frame->args, 1 );
+ LIST * version = lol_get( frame->args, 2 );
+
+ module_t * module = bindmodule( list_front( module_name ) );
+
+ native_rule_t * np;
+ if ( module->native_rules && (np = (native_rule_t *)hash_find(
+ module->native_rules, list_front( rule_name ) ) ) )
+ {
+ int expected_version = atoi( object_str( list_front( version ) ) );
+ if ( np->version == expected_version )
+ return list_new( object_copy( constant_true ) );
+ }
+ return L0;
+}
+
+
+LIST * builtin_user_module( FRAME * frame, int flags )
+{
+ LIST * const module_name = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( module_name );
+ LISTITER const end = list_end( module_name );
+ for ( ; iter != end; iter = list_next( iter ) )
+ bindmodule( list_item( iter ) )->user_module = 1;
+ return L0;
+}
+
+
+LIST * builtin_nearest_user_location( FRAME * frame, int flags )
+{
+ FRAME * const nearest_user_frame = frame->module->user_module
+ ? frame
+ : frame->prev_user;
+ if ( !nearest_user_frame )
+ return L0;
+
+ {
+ LIST * result = L0;
+ char const * file;
+ int line;
+ char buf[ 32 ];
+
+ get_source_line( nearest_user_frame, &file, &line );
+ sprintf( buf, "%d", line );
+ result = list_push_back( result, object_new( file ) );
+ result = list_push_back( result, object_new( buf ) );
+ return result;
+ }
+}
+
+
+LIST * builtin_check_if_file( FRAME * frame, int flags )
+{
+ LIST * const name = lol_get( frame->args, 0 );
+ return file_is_file( list_front( name ) ) == 1
+ ? list_new( object_copy( constant_true ) )
+ : L0;
+}
+
+
+LIST * builtin_md5( FRAME * frame, int flags )
+{
+ LIST * l = lol_get( frame->args, 0 );
+ char const * s = object_str( list_front( l ) );
+
+ md5_state_t state;
+ md5_byte_t digest[ 16 ];
+ char hex_output[ 16 * 2 + 1 ];
+
+ int di;
+
+ md5_init( &state );
+ md5_append( &state, (md5_byte_t const *)s, strlen( s ) );
+ md5_finish( &state, digest );
+
+ for ( di = 0; di < 16; ++di )
+ sprintf( hex_output + di * 2, "%02x", digest[ di ] );
+
+ return list_new( object_new( hex_output ) );
+}
+
+
+LIST * builtin_file_open( FRAME * frame, int flags )
+{
+ char const * name = object_str( list_front( lol_get( frame->args, 0 ) ) );
+ char const * mode = object_str( list_front( lol_get( frame->args, 1 ) ) );
+ int fd;
+ char buffer[ sizeof( "4294967295" ) ];
+
+ if ( strcmp(mode, "w") == 0 )
+ fd = open( name, O_WRONLY|O_CREAT|O_TRUNC, 0666 );
+ else
+ fd = open( name, O_RDONLY );
+
+ if ( fd != -1 )
+ {
+ sprintf( buffer, "%d", fd );
+ return list_new( object_new( buffer ) );
+ }
+ return L0;
+}
+
+
+LIST * builtin_pad( FRAME * frame, int flags )
+{
+ OBJECT * string = list_front( lol_get( frame->args, 0 ) );
+ char const * width_s = object_str( list_front( lol_get( frame->args, 1 ) ) );
+
+ int current = strlen( object_str( string ) );
+ int desired = atoi( width_s );
+ if ( current >= desired )
+ return list_new( object_copy( string ) );
+ else
+ {
+ char * buffer = (char *)BJAM_MALLOC( desired + 1 );
+ int i;
+ LIST * result;
+
+ strcpy( buffer, object_str( string ) );
+ for ( i = current; i < desired; ++i )
+ buffer[ i ] = ' ';
+ buffer[ desired ] = '\0';
+ result = list_new( object_new( buffer ) );
+ BJAM_FREE( buffer );
+ return result;
+ }
+}
+
+
+LIST * builtin_precious( FRAME * frame, int flags )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ bindtarget( list_item( iter ) )->flags |= T_FLAG_PRECIOUS;
+ return L0;
+}
+
+
+LIST * builtin_self_path( FRAME * frame, int flags )
+{
+ extern char const * saved_argv0;
+ char * p = executable_path( saved_argv0 );
+ if ( p )
+ {
+ LIST * const result = list_new( object_new( p ) );
+ free( p );
+ return result;
+ }
+ return L0;
+}
+
+
+LIST * builtin_makedir( FRAME * frame, int flags )
+{
+ LIST * const path = lol_get( frame->args, 0 );
+ return file_mkdir( object_str( list_front( path ) ) )
+ ? L0
+ : list_new( object_copy( list_front( path ) ) );
+}
+
+LIST *builtin_readlink( FRAME * frame, int flags )
+{
+ const char * path = object_str( list_front( lol_get( frame->args, 0 ) ) );
+#ifdef OS_NT
+
+ /* This struct is declared in ntifs.h which is
+ * part of the Windows Driver Kit.
+ */
+ typedef struct _REPARSE_DATA_BUFFER {
+ ULONG ReparseTag;
+ USHORT ReparseDataLength;
+ USHORT Reserved;
+ union {
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ ULONG Flags;
+ WCHAR PathBuffer[ 1 ];
+ } SymbolicLinkReparseBuffer;
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ WCHAR PathBuffer[ 1 ];
+ } MountPointReparseBuffer;
+ struct {
+ UCHAR DataBuffer[ 1 ];
+ } GenericReparseBuffer;
+ };
+ } REPARSE_DATA_BUFFER;
+
+ HANDLE hLink = CreateFileA( path, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, NULL );
+ DWORD n;
+ union {
+ REPARSE_DATA_BUFFER reparse;
+ char data[MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ } buf;
+ int okay = DeviceIoControl(hLink, FSCTL_GET_REPARSE_POINT, NULL, 0, &buf, sizeof(buf), &n, NULL);
+
+ CloseHandle( hLink );
+
+ if (okay && buf.reparse.ReparseTag == IO_REPARSE_TAG_SYMLINK )
+ {
+ int index = buf.reparse.SymbolicLinkReparseBuffer.SubstituteNameOffset / 2;
+ int length = buf.reparse.SymbolicLinkReparseBuffer.SubstituteNameLength / 2;
+ char cbuf[MAX_PATH + 1];
+ int numchars = WideCharToMultiByte( CP_ACP, 0, buf.reparse.SymbolicLinkReparseBuffer.PathBuffer + index, length, cbuf, sizeof(cbuf), NULL, NULL );
+ if( numchars >= int(sizeof(cbuf)) )
+ {
+ return 0;
+ }
+ cbuf[numchars] = '\0';
+ return list_new( object_new( cbuf ) );
+ }
+ else if( okay && buf.reparse.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT )
+ {
+ int index = buf.reparse.MountPointReparseBuffer.SubstituteNameOffset / 2;
+ int length = buf.reparse.MountPointReparseBuffer.SubstituteNameLength / 2;
+ char cbuf[MAX_PATH + 1];
+ const char * result;
+ int numchars = WideCharToMultiByte( CP_ACP, 0, buf.reparse.MountPointReparseBuffer.PathBuffer + index, length, cbuf, sizeof(cbuf), NULL, NULL );
+ if( numchars >= int(sizeof(cbuf)) )
+ {
+ return 0;
+ }
+ cbuf[numchars] = '\0';
+ /* strip off the leading "\??\" */
+ result = cbuf;
+ if ( cbuf[ 0 ] == '\\' && cbuf[ 1 ] == '?' &&
+ cbuf[ 2 ] == '?' && cbuf[ 3 ] == '\\' &&
+ cbuf[ 4 ] != '\0' && cbuf[ 5 ] == ':' )
+ {
+ result += 4;
+ }
+ return list_new( object_new( result ) );
+ }
+ return 0;
+#else
+ char static_buf[256];
+ char * buf = static_buf;
+ size_t bufsize = 256;
+ LIST * result = 0;
+ while (1) {
+ ssize_t len = readlink( path, buf, bufsize );
+ if ( len < 0 )
+ {
+ break;
+ }
+ else if ( size_t(len) < bufsize )
+ {
+ buf[ len ] = '\0';
+ result = list_new( object_new( buf ) );
+ break;
+ }
+ if ( buf != static_buf )
+ BJAM_FREE( buf );
+ bufsize *= 2;
+ buf = (char *)BJAM_MALLOC( bufsize );
+ }
+
+ if ( buf != static_buf )
+ BJAM_FREE( buf );
+
+ return result;
+#endif
+}
+
+#ifdef JAM_DEBUGGER
+
+LIST *builtin_debug_print_helper( FRAME * frame, int flags )
+{
+ debug_print_result = list_copy( lol_get( frame->args, 0 ) );
+ return L0;
+}
+
+#endif
+
+#ifdef HAVE_PYTHON
+
+LIST * builtin_python_import_rule( FRAME * frame, int flags )
+{
+ static int first_time = 1;
+ char const * python_module = object_str( list_front( lol_get( frame->args,
+ 0 ) ) );
+ char const * python_function = object_str( list_front( lol_get( frame->args,
+ 1 ) ) );
+ OBJECT * jam_module = list_front( lol_get( frame->args, 2 ) );
+ OBJECT * jam_rule = list_front( lol_get( frame->args, 3 ) );
+
+ PyObject * pName;
+ PyObject * pModule;
+ PyObject * pDict;
+ PyObject * pFunc;
+
+ if ( first_time )
+ {
+ /* At the first invocation, we add the value of the global
+ * EXTRA_PYTHONPATH to the sys.path Python variable.
+ */
+ LIST * extra = 0;
+ module_t * outer_module = frame->module;
+ LISTITER iter, end;
+
+ first_time = 0;
+
+ extra = var_get( root_module(), constant_extra_pythonpath );
+
+ iter = list_begin( extra ), end = list_end( extra );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append( buf, "import sys\nsys.path.append(\"" );
+ string_append( buf, object_str( list_item( iter ) ) );
+ string_append( buf, "\")\n" );
+ PyRun_SimpleString( buf->value );
+ string_free( buf );
+ }
+ }
+
+ pName = PyString_FromString( python_module );
+ pModule = PyImport_Import( pName );
+ Py_DECREF( pName );
+
+ if ( pModule != NULL )
+ {
+ pDict = PyModule_GetDict( pModule );
+ pFunc = PyDict_GetItemString( pDict, python_function );
+
+ if ( pFunc && PyCallable_Check( pFunc ) )
+ {
+ module_t * m = bindmodule( jam_module );
+ new_rule_body( m, jam_rule, function_python( pFunc, 0 ), 0 );
+ }
+ else
+ {
+ if ( PyErr_Occurred() )
+ PyErr_Print();
+ err_printf( "Cannot find function \"%s\"\n", python_function );
+ }
+ Py_DECREF( pModule );
+ }
+ else
+ {
+ PyErr_Print();
+ err_printf( "Failed to load \"%s\"\n", python_module );
+ }
+ return L0;
+
+}
+
+#endif /* #ifdef HAVE_PYTHON */
+
+
+void lol_build( LOL * lol, char const * * elements )
+{
+ LIST * l = L0;
+ lol_init( lol );
+
+ while ( elements && *elements )
+ {
+ if ( !strcmp( *elements, ":" ) )
+ {
+ lol_add( lol, l );
+ l = L0;
+ }
+ else
+ {
+ l = list_push_back( l, object_new( *elements ) );
+ }
+ ++elements;
+ }
+
+ if ( l != L0 )
+ lol_add( lol, l );
+}
+
+
+#ifdef HAVE_PYTHON
+
+static LIST *jam_list_from_string(PyObject *a)
+{
+ return list_new( object_new( PyString_AsString( a ) ) );
+}
+
+static LIST *jam_list_from_sequence(PyObject *a)
+{
+ LIST * l = 0;
+
+ int i = 0;
+ int s = PySequence_Size( a );
+
+ for ( ; i < s; ++i )
+ {
+ /* PySequence_GetItem returns new reference. */
+ PyObject * e = PySequence_GetItem( a, i );
+ char * s = PyString_AsString( e );
+ if ( !s )
+ {
+ /* try to get the repr() on the object */
+ PyObject *repr = PyObject_Repr(e);
+ if (repr)
+ {
+ const char *str = PyString_AsString(repr);
+ PyErr_Format(PyExc_TypeError, "expecting type <str> got %s", str);
+ }
+ /* fall back to a dumb error */
+ else
+ {
+ PyErr_BadArgument();
+ }
+ return NULL;
+ }
+ l = list_push_back( l, object_new( s ) );
+ Py_DECREF( e );
+ }
+
+ return l;
+}
+
+static void make_jam_arguments_from_python(FRAME* inner, PyObject *args)
+{
+ int i;
+ int size;
+
+ /* Build up the list of arg lists. */
+ frame_init( inner );
+ inner->prev = 0;
+ inner->prev_user = 0;
+ inner->module = bindmodule( constant_python_interface );
+
+ size = PyTuple_Size( args );
+ for (i = 0 ; i < size; ++i)
+ {
+ PyObject * a = PyTuple_GetItem( args, i );
+ if ( PyString_Check( a ) )
+ {
+ lol_add( inner->args, jam_list_from_string(a) );
+ }
+ else if ( PySequence_Check( a ) )
+ {
+ lol_add( inner->args, jam_list_from_sequence(a) );
+ }
+ }
+}
+
+
+/*
+ * Calls the bjam rule specified by name passed in 'args'. The name is looked up
+ * in the context of bjam's 'python_interface' module. Returns the list of
+ * strings returned by the rule.
+ */
+
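+/* For illustration, assuming the extension module is exposed to Python under
+ * the name 'bjam' (rule and file names below are hypothetical), a call might
+ * look like:
+ *
+ *     result = bjam.call("some-rule", "a.cpp", ["b.cpp", "c.cpp"])
+ *
+ * The first argument names the rule; each remaining string or sequence of
+ * strings becomes one Jam argument list.
+ */
+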
+PyObject * bjam_call( PyObject * self, PyObject * args )
+{
+ FRAME inner[ 1 ];
+ LIST * result;
+ PARSE * p;
+ OBJECT * rulename;
+ PyObject *args_proper;
+
+ /* PyTuple_GetItem returns borrowed reference. */
+ rulename = object_new( PyString_AsString( PyTuple_GetItem( args, 0 ) ) );
+
+ args_proper = PyTuple_GetSlice(args, 1, PyTuple_Size(args));
+ make_jam_arguments_from_python (inner, args_proper);
+ if ( PyErr_Occurred() )
+ {
+ return NULL;
+ }
+ Py_DECREF(args_proper);
+
+ result = evaluate_rule( bindrule( rulename, inner->module), rulename, inner );
+ object_free( rulename );
+
+ frame_free( inner );
+
+ /* Convert the bjam list into a Python list result. */
+ {
+ PyObject * const pyResult = PyList_New( list_length( result ) );
+ int i = 0;
+ LISTITER iter = list_begin( result );
+ LISTITER const end = list_end( result );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ PyList_SetItem( pyResult, i, PyString_FromString( object_str(
+ list_item( iter ) ) ) );
+ i += 1;
+ }
+ list_free( result );
+ return pyResult;
+ }
+}
+
+
+/*
+ * Accepts four arguments:
+ * - module name
+ * - rule name
+ * - Python callable
+ * - (optional) bjam language function signature.
+ * Creates a bjam rule with the specified name in the specified module, which
+ * will invoke the Python callable.
+ */
+
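+/* A minimal sketch of the Python side, with hypothetical names:
+ *
+ *     def do_link(targets, sources):
+ *         return []
+ *     bjam.import_rule("my-module", "do-link", do_link)
+ *
+ * After this, invoking the rule do-link in module my-module dispatches to the
+ * Python callable.
+ */
+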
+PyObject * bjam_import_rule( PyObject * self, PyObject * args )
+{
+ char * module;
+ char * rule;
+ PyObject * func;
+ PyObject * bjam_signature = NULL;
+ module_t * m;
+ RULE * r;
+ OBJECT * module_name;
+ OBJECT * rule_name;
+
+ if ( !PyArg_ParseTuple( args, "ssO|O:import_rule",
+ &module, &rule, &func, &bjam_signature ) )
+ return NULL;
+
+ if ( !PyCallable_Check( func ) )
+ {
+ PyErr_SetString( PyExc_RuntimeError, "Non-callable object passed to "
+ "bjam.import_rule" );
+ return NULL;
+ }
+
+ module_name = *module ? object_new( module ) : 0;
+ m = bindmodule( module_name );
+ if ( module_name )
+ object_free( module_name );
+ rule_name = object_new( rule );
+ new_rule_body( m, rule_name, function_python( func, bjam_signature ), 0 );
+ object_free( rule_name );
+
+ Py_INCREF( Py_None );
+ return Py_None;
+}
+
+
+/*
+ * Accepts four arguments:
+ * - an action name
+ * - an action body
+ * - a list of variables that will be bound inside the action
+ * - integer flags.
+ * Defines an action on bjam side.
+ */
+
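+/* A sketch with hypothetical names, assuming the 'bjam' Python module:
+ *
+ *     bjam.define_action("my.compile", "cc -c -o $(<) $(>)", ["CFLAGS"], 0)
+ *
+ * This registers an action named my.compile whose body may reference the
+ * bound CFLAGS variable.
+ */
+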
+PyObject * bjam_define_action( PyObject * self, PyObject * args )
+{
+ char * name;
+ char * body;
+ module_t * m;
+ PyObject * bindlist_python;
+ int flags;
+ LIST * bindlist = L0;
+ int n;
+ int i;
+ OBJECT * name_str;
+ FUNCTION * body_func;
+
+ if ( !PyArg_ParseTuple( args, "ssO!i:define_action", &name, &body,
+ &PyList_Type, &bindlist_python, &flags ) )
+ return NULL;
+
+ n = PyList_Size( bindlist_python );
+ for ( i = 0; i < n; ++i )
+ {
+ PyObject * next = PyList_GetItem( bindlist_python, i );
+ if ( !PyString_Check( next ) )
+ {
+ PyErr_SetString( PyExc_RuntimeError, "bind list has non-string "
+ "type" );
+ return NULL;
+ }
+ bindlist = list_push_back( bindlist, object_new( PyString_AsString( next
+ ) ) );
+ }
+
+ name_str = object_new( name );
+ body_func = function_compile_actions( body, constant_builtin, -1 );
+ new_rule_actions( root_module(), name_str, body_func, bindlist, flags );
+ function_free( body_func );
+ object_free( name_str );
+
+ Py_INCREF( Py_None );
+ return Py_None;
+}
+
+
+/*
+ * Returns the value of a variable in root Jam module.
+ */
+
+PyObject * bjam_variable( PyObject * self, PyObject * args )
+{
+ char * name;
+ LIST * value;
+ PyObject * result;
+ int i;
+ OBJECT * varname;
+ LISTITER iter;
+ LISTITER end;
+
+ if ( !PyArg_ParseTuple( args, "s", &name ) )
+ return NULL;
+
+ varname = object_new( name );
+ value = var_get( root_module(), varname );
+ object_free( varname );
+ iter = list_begin( value );
+ end = list_end( value );
+
+ result = PyList_New( list_length( value ) );
+ for ( i = 0; iter != end; iter = list_next( iter ), ++i )
+ PyList_SetItem( result, i, PyString_FromString( object_str( list_item(
+ iter ) ) ) );
+
+ return result;
+}
+
+
+PyObject * bjam_backtrace( PyObject * self, PyObject * args )
+{
+ PyObject * result = PyList_New( 0 );
+ struct frame * f = frame_before_python_call;
+
+ for ( ; (f = f->prev); )
+ {
+ PyObject * tuple = PyTuple_New( 4 );
+ char const * file;
+ int line;
+ char buf[ 32 ];
+ string module_name[ 1 ];
+
+ get_source_line( f, &file, &line );
+ sprintf( buf, "%d", line );
+ string_new( module_name );
+ if ( f->module->name )
+ {
+ string_append( module_name, object_str( f->module->name ) );
+ string_append( module_name, "." );
+ }
+
+ /* PyTuple_SetItem steals reference. */
+ PyTuple_SetItem( tuple, 0, PyString_FromString( file ) );
+ PyTuple_SetItem( tuple, 1, PyString_FromString( buf ) );
+ PyTuple_SetItem( tuple, 2, PyString_FromString( module_name->value ) );
+ PyTuple_SetItem( tuple, 3, PyString_FromString( f->rulename ) );
+
+ string_free( module_name );
+
+ PyList_Append( result, tuple );
+ Py_DECREF( tuple );
+ }
+ return result;
+}
+
+PyObject * bjam_caller( PyObject * self, PyObject * args )
+{
+ return PyString_FromString( frame_before_python_call->prev->module->name ?
+ object_str( frame_before_python_call->prev->module->name ) : "" );
+}
+
+#endif /* #ifdef HAVE_PYTHON */
+
+
+#ifdef HAVE_POPEN
+
+#if defined(_MSC_VER) || defined(__BORLANDC__) || defined(__MINGW64__) || defined(__MINGW32__)
+ #undef popen
+ #define popen windows_popen_wrapper
+ #undef pclose
+ #define pclose _pclose
+
+ /*
+ * This wrapper is a workaround for a funny _popen() feature on Windows
+ * where it eats external quotes in some cases. The bug seems to be related
+ * to the quote stripping functionality used by the Windows cmd.exe
+ * interpreter when its /S is not specified.
+ *
+ * Cleaned up quote from the cmd.exe help screen as displayed on Windows XP
+ * SP3:
+ *
+ * 1. If all of the following conditions are met, then quote characters on
+ * the command line are preserved:
+ *
+ * - no /S switch
+ * - exactly two quote characters
+ * - no special characters between the two quote characters, where
+ * special is one of: &<>()@^|
+ * - there are one or more whitespace characters between the two quote
+ * characters
+ * - the string between the two quote characters is the name of an
+ * executable file.
+ *
+ * 2. Otherwise, old behavior is to see if the first character is a quote
+ * character and if so, strip the leading character and remove the last
+ * quote character on the command line, preserving any text after the
+ * last quote character.
+ *
+ * This causes some commands containing quotes not to be executed correctly.
+ * For example:
+ *
+ * "\Long folder name\aaa.exe" --name="Jurko" --no-surname
+ *
+ * would get its outermost quotes stripped and would be executed as:
+ *
+ * \Long folder name\aaa.exe" --name="Jurko --no-surname
+ *
+ * which would report an error about '\Long' not being a valid command.
+ *
+ * cmd.exe help seems to indicate it would be enough to add an extra space
+ * character in front of the command to avoid this but this does not work,
+ * most likely due to the shell first stripping all leading whitespace
+ * characters from the command.
+ *
+ * The solution implemented here is to quote the whole command in case it
+ * contains any quote characters. Note though that this will not work correctly
+ * should Windows ever 'fix' this feature.
+ * (03.06.2008.) (Jurko)
+ */
+ static FILE * windows_popen_wrapper( char const * command,
+ char const * mode )
+ {
+ int const extra_command_quotes_needed = !!strchr( command, '"' );
+ string quoted_command;
+ FILE * result;
+
+ if ( extra_command_quotes_needed )
+ {
+ string_new( &quoted_command );
+ string_append( &quoted_command, "\"" );
+ string_append( &quoted_command, command );
+ string_append( &quoted_command, "\"" );
+ command = quoted_command.value;
+ }
+
+ result = _popen( command, "r" );
+
+ if ( extra_command_quotes_needed )
+ string_free( &quoted_command );
+
+ return result;
+ }
+#endif /* defined(_MSC_VER) || defined(__BORLANDC__) || defined(__MINGW64__) || defined(__MINGW32__) */
+
+
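+/*
+ * builtin_shell() - SHELL rule.
+ *
+ * A sketch of typical Jam-side usage, assuming the rule is bound under the
+ * name SHELL:
+ *
+ *     local output = [ SHELL "uname" : strip-eol ] ;
+ *     local output-and-status = [ SHELL "uname" : exit-status ] ;
+ *
+ * The command output comes first; with exit-status the exit code is appended
+ * as an extra trailing element.
+ */
+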
+LIST * builtin_shell( FRAME * frame, int flags )
+{
+ LIST * command = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ string s;
+ int ret;
+ char buffer[ 1024 ];
+ FILE * p = NULL;
+ int exit_status = -1;
+ int exit_status_opt = 0;
+ int no_output_opt = 0;
+ int strip_eol_opt = 0;
+
+ /* Process the variable args options. */
+ {
+ int a = 1;
+ LIST * arg = lol_get( frame->args, a );
+ for ( ; !list_empty( arg ); arg = lol_get( frame->args, ++a ) )
+ {
+ if ( !strcmp( "exit-status", object_str( list_front( arg ) ) ) )
+ exit_status_opt = 1;
+ else if ( !strcmp( "no-output", object_str( list_front( arg ) ) ) )
+ no_output_opt = 1;
+ else if ( !strcmp("strip-eol", object_str( list_front( arg ) ) ) )
+ strip_eol_opt = 1;
+ }
+ }
+
+ /* The following fflush() call seems to be indicated as a workaround for a
+ * popen() bug on POSIX implementations related to synchronizing input
+ * stream positions for the called and the calling process.
+ */
+ fflush( NULL );
+
+ p = popen( object_str( list_front( command ) ), "r" );
+ if ( p == NULL )
+ return L0;
+
+ string_new( &s );
+
+ while ( ( ret = fread( buffer, sizeof( char ), sizeof( buffer ) - 1, p ) ) >
+ 0 )
+ {
+ buffer[ ret ] = 0;
+ if ( !no_output_opt )
+ {
+ string_append( &s, buffer );
+ }
+
+ /* Explicit EOF check for systems with broken fread */
+ if ( feof( p ) ) break;
+ }
+
+ if ( strip_eol_opt )
+ string_rtrim( &s );
+
+ exit_status = pclose( p );
+
+ /* The command output is returned first. */
+ result = list_new( object_new( s.value ) );
+ string_free( &s );
+
+ /* The command exit result next. */
+ if ( exit_status_opt )
+ {
+ if ( WIFEXITED( exit_status ) )
+ exit_status = WEXITSTATUS( exit_status );
+ else
+ exit_status = -1;
+
+#ifdef OS_VMS
+ /* Harmonize VMS success status with POSIX */
+ if ( exit_status == 1 ) exit_status = EXIT_SUCCESS;
+#endif
+ sprintf( buffer, "%d", exit_status );
+ result = list_push_back( result, object_new( buffer ) );
+ }
+
+ return result;
+}
+
+#else /* #ifdef HAVE_POPEN */
+
+LIST * builtin_shell( FRAME * frame, int flags )
+{
+ return L0;
+}
+
+#endif /* #ifdef HAVE_POPEN */
+
+
+/*
+ * builtin_glob_archive() - GLOB_ARCHIVE rule
+ */
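+
+/* A sketch of the argument layout this rule expects, assuming it is bound
+ * under the name GLOB_ARCHIVE:
+ *
+ *     GLOB_ARCHIVE libfoo.a : "*.o" : : "main" ;
+ *
+ * i.e. archives, member-name patterns, an optional case-insensitivity flag
+ * and optional symbol patterns, matching the lol_get() indices used in
+ * builtin_glob_archive() below.
+ */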
+
+struct globbing2
+{
+ LIST * patterns[ 2 ];
+ LIST * results;
+ LIST * case_insensitive;
+};
+
+
+static void builtin_glob_archive_back( void * closure, OBJECT * member,
+ LIST * symbols, int status, timestamp const * const time )
+{
+ PROFILE_ENTER( BUILTIN_GLOB_ARCHIVE_BACK );
+
+ struct globbing2 * const globbing = (struct globbing2 *)closure;
+ PATHNAME f;
+ string buf[ 1 ];
+ LISTITER iter;
+ LISTITER end;
+ LISTITER iter_symbols;
+ LISTITER end_symbols;
+ int matched = 0;
+
+ /* Match member name.
+ */
+ path_parse( object_str( member ), &f );
+
+ if ( !strcmp( f.f_member.ptr, "" ) )
+ {
+ PROFILE_EXIT( BUILTIN_GLOB_ARCHIVE_BACK );
+ return;
+ }
+
+ string_new( buf );
+ string_append_range( buf, f.f_member.ptr, f.f_member.ptr + f.f_member.len );
+
+ if ( globbing->case_insensitive )
+ downcase_inplace( buf->value );
+
+ /* Glob with member patterns. If not matched, then match symbols.
+ */
+ matched = 0;
+ iter = list_begin( globbing->patterns[ 0 ] );
+ end = list_end( globbing->patterns[ 0 ] );
+ for ( ; !matched && iter != end;
+ iter = list_next( iter ) )
+ {
+ const char * pattern = object_str( list_item( iter ) );
+ int match_exact = ( !has_wildcards( pattern ) );
+ matched = ( match_exact ?
+ ( !strcmp( pattern, buf->value ) ) :
+ ( !glob( pattern, buf->value ) ) );
+ }
+
+
+ /* Glob with symbol patterns, if requested.
+ */
+ iter = list_begin( globbing->patterns[ 1 ] );
+ end = list_end( globbing->patterns[ 1 ] );
+
+ if ( iter != end ) matched = 0;
+
+ for ( ; !matched && iter != end;
+ iter = list_next( iter ) )
+ {
+ const char * pattern = object_str( list_item( iter ) );
+ int match_exact = ( !has_wildcards( pattern ) );
+
+ iter_symbols = list_begin( symbols );
+ end_symbols = list_end( symbols );
+
+ for ( ; !matched && iter_symbols != end_symbols;
+ iter_symbols = list_next( iter_symbols ) )
+ {
+ const char * symbol = object_str( list_item( iter_symbols ) );
+
+ string_copy( buf, symbol );
+ if ( globbing->case_insensitive )
+ downcase_inplace( buf->value );
+
+ matched = ( match_exact ?
+ ( !strcmp( pattern, buf->value ) ) :
+ ( !glob( pattern, buf->value ) ) );
+ }
+ }
+
+ if ( matched )
+ {
+ globbing->results = list_push_back( globbing->results,
+ object_copy( member ) );
+ }
+
+ string_free( buf );
+
+ PROFILE_EXIT( BUILTIN_GLOB_ARCHIVE_BACK );
+}
+
+
+LIST * builtin_glob_archive( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LIST * const r1 = lol_get( frame->args, 1 );
+ LIST * const r3 = lol_get( frame->args, 3 );
+
+ LISTITER iter;
+ LISTITER end;
+ struct globbing2 globbing;
+
+ globbing.results = L0;
+ globbing.patterns[ 0 ] = r1;
+ globbing.patterns[ 1 ] = r3;
+
+ globbing.case_insensitive =
+# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS )
+ l; /* Always case-insensitive. */
+# else
+ lol_get( frame->args, 2 ); // r2
+# endif
+
+ if ( globbing.case_insensitive )
+ {
+ globbing.patterns[ 0 ] = downcase_list( globbing.patterns[ 0 ] );
+ globbing.patterns[ 1 ] = downcase_list( globbing.patterns[ 1 ] );
+ }
+
+ iter = list_begin( l );
+ end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ file_archivescan( list_item( iter ), builtin_glob_archive_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ {
+ list_free( globbing.patterns[ 0 ] );
+ list_free( globbing.patterns[ 1 ] );
+ }
+
+ return globbing.results;
+}
diff --git a/src/boost/tools/build/src/engine/builtins.h b/src/boost/tools/build/src/engine/builtins.h
new file mode 100644
index 000000000..241a0d050
--- /dev/null
+++ b/src/boost/tools/build/src/engine/builtins.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#ifndef JAM_BUILTINS_H
+# define JAM_BUILTINS_H
+
+# include "config.h"
+# include "frames.h"
+
+/*
+ * builtins.h - declarations of the builtin jam rules
+ */
+
+void load_builtins();
+void init_set();
+void init_path();
+void init_regex();
+void init_property_set();
+void init_sequence();
+void init_order();
+
+void property_set_done();
+
+LIST *builtin_calc( FRAME * frame, int flags );
+LIST *builtin_depends( FRAME * frame, int flags );
+LIST *builtin_rebuilds( FRAME * frame, int flags );
+LIST *builtin_echo( FRAME * frame, int flags );
+LIST *builtin_exit( FRAME * frame, int flags );
+LIST *builtin_flags( FRAME * frame, int flags );
+LIST *builtin_glob( FRAME * frame, int flags );
+LIST *builtin_glob_recursive( FRAME * frame, int flags );
+LIST *builtin_subst( FRAME * frame, int flags );
+LIST *builtin_match( FRAME * frame, int flags );
+LIST *builtin_split_by_characters( FRAME * frame, int flags );
+LIST *builtin_hdrmacro( FRAME * frame, int flags );
+LIST *builtin_rulenames( FRAME * frame, int flags );
+LIST *builtin_varnames( FRAME * frame, int flags );
+LIST *builtin_delete_module( FRAME * frame, int flags );
+LIST *builtin_import( FRAME * frame, int flags );
+LIST *builtin_export( FRAME * frame, int flags );
+LIST *builtin_caller_module( FRAME * frame, int flags );
+LIST *builtin_backtrace( FRAME * frame, int flags );
+LIST *builtin_pwd( FRAME * frame, int flags );
+LIST *builtin_update( FRAME * frame, int flags );
+LIST *builtin_update_now( FRAME * frame, int flags );
+LIST *builtin_import_module( FRAME * frame, int flags );
+LIST *builtin_imported_modules( FRAME * frame, int flags );
+LIST *builtin_instance( FRAME * frame, int flags );
+LIST *builtin_sort( FRAME * frame, int flags );
+LIST *builtin_normalize_path( FRAME * frame, int flags );
+LIST *builtin_native_rule( FRAME * frame, int flags );
+LIST *builtin_has_native_rule( FRAME * frame, int flags );
+LIST *builtin_user_module( FRAME * frame, int flags );
+LIST *builtin_nearest_user_location( FRAME * frame, int flags );
+LIST *builtin_check_if_file( FRAME * frame, int flags );
+LIST *builtin_python_import_rule( FRAME * frame, int flags );
+LIST *builtin_shell( FRAME * frame, int flags );
+LIST *builtin_md5( FRAME * frame, int flags );
+LIST *builtin_file_open( FRAME * frame, int flags );
+LIST *builtin_pad( FRAME * frame, int flags );
+LIST *builtin_precious( FRAME * frame, int flags );
+LIST *builtin_self_path( FRAME * frame, int flags );
+LIST *builtin_makedir( FRAME * frame, int flags );
+LIST *builtin_readlink( FRAME * frame, int flags );
+LIST *builtin_glob_archive( FRAME * frame, int flags );
+LIST *builtin_debug_print_helper( FRAME * frame, int flags );
+
+void backtrace( FRAME *frame );
+extern int last_update_now_status;
+
+#endif
diff --git a/src/boost/tools/build/src/engine/bump_version.py b/src/boost/tools/build/src/engine/bump_version.py
new file mode 100644
index 000000000..177142288
--- /dev/null
+++ b/src/boost/tools/build/src/engine/bump_version.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python
+
+# This script is used to bump the bjam version. It takes a single argument, e.g.
+#
+# ./bump_version.py 3.1.9
+#
+# and updates all the necessary files.
+#
+# Copyright 2006 Rene Rivera.
+# Copyright 2005-2006 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+import os
+import os.path
+import re
+import string
+import sys
+
+srcdir = os.path.abspath(os.path.dirname(__file__))
+docdir = os.path.abspath(os.path.join(srcdir, "..", "doc"))
+
+
+def edit(file, *replacements):
+ print(" '%s'..." % file)
+ f = open(file, 'r')
+ text = f.read()
+ f.close()
+ for (source, target) in replacements:
+ text, n = re.compile(source, re.MULTILINE).subn(target, text)
+ assert n > 0
+ f = open(file, 'w')
+ f.write(text)
+ f.close()
+
+
+def make_edits(ver):
+ ver03 = (list(ver) + [0] * 3)[0:3]
+ ver02 = ver03[0:2]
+
+ join = lambda v, s : s.join(str(x) for x in v)
+ dotJoin = lambda v : join(v, ".")
+
+ print("Setting version to %s" % str(ver03))
+
+ edit(os.path.join(srcdir, "boost-jam.spec"),
+ ('^(Version:) .*$', '\\1 %s' % dotJoin(ver03)))
+
+ edit(os.path.join(srcdir, "build.jam"),
+ ('^(_VERSION_ =).* ;$', '\\1 %s ;' % join(ver03, " ")))
+
+ edit(os.path.join(docdir, "bjam.qbk"),
+ ('(\[version).*(\])', '\\1: %s\\2' % dotJoin(ver03)),
+ ('(\[def :version:).*(\])', '\\1 %s\\2' % dotJoin(ver03)))
+
+ edit(os.path.join(srcdir, "patchlevel.h"),
+ ('^(#define VERSION_MAJOR) .*$', '\\1 %s' % ver03[0]),
+ ('^(#define VERSION_MINOR) .*$', '\\1 %s' % ver03[1]),
+ ('^(#define VERSION_PATCH) .*$', '\\1 %s' % ver03[2]),
+ ('^(#define VERSION_MAJOR_SYM) .*$', '\\1 "%02d"' % ver03[0]),
+ ('^(#define VERSION_MINOR_SYM) .*$', '\\1 "%02d"' % ver03[1]),
+ ('^(#define VERSION_PATCH_SYM) .*$', '\\1 "%02d"' % ver03[2]),
+ ('^(#define VERSION) .*$', '\\1 "%s"' % dotJoin(ver)),
+ ('^(#define JAMVERSYM) .*$', '\\1 "JAMVERSION=%s"' % dotJoin(ver02)))
+
+
+def main():
+ if len(sys.argv) < 2:
+ print("Expect new version as argument.")
+ sys.exit(1)
+ if len(sys.argv) > 3:
+ print("Too many arguments.")
+ sys.exit(1)
+
+ version = sys.argv[1].split(".")
+ if len(version) > 3:
+ print("Expect version argument in the format: <MAJOR>.<MINOR>.<PATCH>")
+ sys.exit(1)
+
+ try:
+ version = list(int(x) for x in version)
+ except ValueError:
+ print("Version values must be valid integers.")
+ sys.exit(1)
+
+ while version and version[-1] == 0:
+ version.pop()
+
+ if not version:
+ print("At least one of the version values must be positive.")
+ sys.exit()
+
+ make_edits(version)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/boost/tools/build/src/engine/check_cxx11.cpp b/src/boost/tools/build/src/engine/check_cxx11.cpp
new file mode 100644
index 000000000..f01fcff71
--- /dev/null
+++ b/src/boost/tools/build/src/engine/check_cxx11.cpp
@@ -0,0 +1,21 @@
+/* Copyright 2020 Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+This program is a compile test for support of C++11. If it compiles
+successfully some key parts of C++11 the B2 engine requires are
+available. This is used by the build script to guess and check the
+compiler to build the engine with.
+*/
+
+// Some headers we depend on..
+#include <thread>
+
+
+int main()
+{
+ // Check for basic thread calls.
+ { auto _ = std::thread::hardware_concurrency(); }
+}
diff --git a/src/boost/tools/build/src/engine/class.cpp b/src/boost/tools/build/src/engine/class.cpp
new file mode 100644
index 000000000..2e41e12f8
--- /dev/null
+++ b/src/boost/tools/build/src/engine/class.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright Vladimir Prus 2003.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "class.h"
+
+#include "constants.h"
+#include "frames.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "rules.h"
+#include "jam_strings.h"
+#include "variable.h"
+#include "output.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+
+static struct hash * classes = 0;
+
+
+static void check_defined( LIST * class_names )
+{
+ LISTITER iter = list_begin( class_names );
+ LISTITER const end = list_end( class_names );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( !hash_find( classes, list_item( iter ) ) )
+ {
+ out_printf( "Class %s is not defined\n", object_str( list_item( iter ) )
+ );
+ abort();
+ }
+ }
+}
+
+
+static OBJECT * class_module_name( OBJECT * declared_name )
+{
+ string name[ 1 ];
+ OBJECT * result;
+
+ string_new( name );
+ string_append( name, "class@" );
+ string_append( name, object_str( declared_name ) );
+
+ result = object_new( name->value );
+ string_free( name );
+
+ return result;
+}
+
+
+struct import_base_data
+{
+ OBJECT * base_name;
+ module_t * base_module;
+ module_t * class_module;
+};
+
+
+static void import_base_rule( void * r_, void * d_ )
+{
+ RULE * r = (RULE *)r_;
+ RULE * ir1;
+ RULE * ir2;
+ struct import_base_data * d = (struct import_base_data *)d_;
+ OBJECT * qname;
+
+ string qualified_name[ 1 ];
+ string_new ( qualified_name );
+ string_append ( qualified_name, object_str( d->base_name ) );
+ string_push_back( qualified_name, '.' );
+ string_append ( qualified_name, object_str( r->name ) );
+ qname = object_new( qualified_name->value );
+ string_free( qualified_name );
+
+ ir1 = import_rule( r, d->class_module, r->name );
+ ir2 = import_rule( r, d->class_module, qname );
+
+ object_free( qname );
+
+ /* Copy 'exported' flag. */
+ ir1->exported = ir2->exported = r->exported;
+
+ /* If we are importing a class method, localize it. */
+ if ( ( r->module == d->base_module ) || ( r->module->class_module &&
+ ( r->module->class_module == d->base_module ) ) )
+ {
+ rule_localize( ir1, d->class_module );
+ rule_localize( ir2, d->class_module );
+ }
+}
+
+
+/*
+ * For each exported rule 'n', declared in class module for base, imports that
+ * rule in 'class' as 'n' and as 'base.n'. Imported rules are localized and
+ * marked as exported.
+ */
+
+static void import_base_rules( module_t * class_, OBJECT * base )
+{
+ OBJECT * module_name = class_module_name( base );
+ module_t * base_module = bindmodule( module_name );
+ LIST * imported;
+ struct import_base_data d;
+ d.base_name = base;
+ d.base_module = base_module;
+ d.class_module = class_;
+ object_free( module_name );
+
+ if ( base_module->rules )
+ hashenumerate( base_module->rules, import_base_rule, &d );
+
+ imported = imported_modules( base_module );
+ import_module( imported, class_ );
+ list_free( imported );
+}
+
+
+OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame )
+{
+ OBJECT * name = class_module_name( list_front( xname ) );
+ OBJECT * * pp;
+ module_t * class_module = 0;
+ int found;
+
+ if ( !classes )
+ classes = hashinit( sizeof( OBJECT * ), "classes" );
+
+ pp = (OBJECT * *)hash_insert( classes, list_front( xname ), &found );
+ if ( !found )
+ {
+ *pp = object_copy( list_front( xname ) );
+ }
+ else
+ {
+ out_printf( "Class %s already defined\n", object_str( list_front( xname ) )
+ );
+ abort();
+ }
+ check_defined( bases );
+
+ class_module = bindmodule( name );
+
+ {
+ /*
+ Initialize variables that B2 inserts in every object.
+ We want to avoid creating the object's hash if it isn't needed.
+ */
+ int num = class_module->num_fixed_variables;
+ module_add_fixed_var( class_module, constant_name, &num );
+ module_add_fixed_var( class_module, constant_class, &num );
+ module_set_fixed_variables( class_module, num );
+ }
+
+ var_set( class_module, constant_name, xname, VAR_SET );
+ var_set( class_module, constant_bases, bases, VAR_SET );
+
+ {
+ LISTITER iter = list_begin( bases );
+ LISTITER const end = list_end( bases );
+ for ( ; iter != end; iter = list_next( iter ) )
+ import_base_rules( class_module, list_item( iter ) );
+ }
+
+ return name;
+}
+
+
+static void free_class( void * xclass, void * data )
+{
+ object_free( *(OBJECT * *)xclass );
+}
+
+
+void class_done( void )
+{
+ if ( classes )
+ {
+ hashenumerate( classes, free_class, (void *)0 );
+ hashdone( classes );
+ classes = 0;
+ }
+}
diff --git a/src/boost/tools/build/src/engine/class.h b/src/boost/tools/build/src/engine/class.h
new file mode 100644
index 000000000..ee76e80c7
--- /dev/null
+++ b/src/boost/tools/build/src/engine/class.h
@@ -0,0 +1,15 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#ifndef CLASS_H_VP_2003_08_01
+#define CLASS_H_VP_2003_08_01
+
+#include "config.h"
+#include "lists.h"
+#include "frames.h"
+
+OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame );
+void class_done( void );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/command.cpp b/src/boost/tools/build/src/engine/command.cpp
new file mode 100644
index 000000000..31141fc97
--- /dev/null
+++ b/src/boost/tools/build/src/engine/command.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * command.c - maintain lists of commands
+ */
+
+#include "jam.h"
+#include "command.h"
+
+#include "lists.h"
+#include "rules.h"
+
+#include <assert.h>
+
+
+/*
+ * cmdlist_append_cmd
+ */
+CMDLIST * cmdlist_append_cmd( CMDLIST * l, CMD * cmd )
+{
+ CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) );
+ result->iscmd = 1;
+ result->next = l;
+ result->impl.cmd = cmd;
+ return result;
+}
+
+CMDLIST * cmdlist_append_target( CMDLIST * l, TARGET * t )
+{
+ CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) );
+ result->iscmd = 0;
+ result->next = l;
+ result->impl.t = t;
+ return result;
+}
+
+void cmdlist_free( CMDLIST * l )
+{
+ while ( l )
+ {
+ CMDLIST * tmp = l->next;
+ BJAM_FREE( l );
+ l = tmp;
+ }
+}
+
+/*
+ * cmd_new() - return a new CMD.
+ */
+
+CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell )
+{
+ CMD * cmd = (CMD *)BJAM_MALLOC( sizeof( CMD ) );
+ FRAME frame[ 1 ];
+
+ assert( cmd );
+ cmd->rule = rule;
+ cmd->shell = shell;
+ cmd->next = 0;
+ cmd->noop = 0;
+ cmd->asynccnt = 1;
+ cmd->status = 0;
+ cmd->lock = NULL;
+ cmd->unlock = NULL;
+
+ lol_init( &cmd->args );
+ lol_add( &cmd->args, targets );
+ lol_add( &cmd->args, sources );
+ string_new( cmd->buf );
+
+ frame_init( frame );
+ frame->module = rule->module;
+ lol_init( frame->args );
+ lol_add( frame->args, list_copy( targets ) );
+ lol_add( frame->args, list_copy( sources ) );
+ function_run_actions( rule->actions->command, frame, stack_global(),
+ cmd->buf );
+ frame_free( frame );
+
+ return cmd;
+}
+
+
+/*
+ * cmd_free() - free a CMD
+ */
+
+void cmd_free( CMD * cmd )
+{
+ cmdlist_free( cmd->next );
+ lol_free( &cmd->args );
+ list_free( cmd->shell );
+ string_free( cmd->buf );
+ freetargets( cmd->unlock );
+ BJAM_FREE( (void *)cmd );
+}
+
+
+/*
+ * cmd_release_targets_and_shell()
+ *
+ * Makes the CMD release its hold on its targets & shell lists and forget
+ * about them. Useful in case caller still has references to those lists and
+ * wants to reuse them after freeing the CMD object.
+ */
+
+void cmd_release_targets_and_shell( CMD * cmd )
+{
+ cmd->args.list[ 0 ] = L0; /* targets */
+ cmd->shell = L0; /* shell */
+}
diff --git a/src/boost/tools/build/src/engine/command.h b/src/boost/tools/build/src/engine/command.h
new file mode 100644
index 000000000..0b968de58
--- /dev/null
+++ b/src/boost/tools/build/src/engine/command.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 1994 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * command.h - the CMD structure and routines to manipulate them
+ *
+ * Both ACTION and CMD contain a rule, targets, and sources. An
+ * ACTION describes a rule to be applied to the given targets and
+ * sources; a CMD is what actually gets executed by the shell. The
+ * differences are due to:
+ *
+ * ACTIONS must be combined if 'actions together' is given.
+ * ACTIONS must be split if 'actions piecemeal' is given.
+ * ACTIONS must have current sources omitted for 'actions updated'.
+ *
+ * The CMD datatype holds a single command that is to be executed
+ * against a target, and they can chain together to represent the
+ * full collection of commands used to update a target.
+ *
+ * Structures:
+ *
+ * CMD - an action, ready to be formatted into a buffer and executed.
+ *
+ * External routines:
+ *
+ * cmd_new() - return a new CMD or 0 if too many args.
+ * cmd_free() - delete CMD and its parts.
+ * cmd_next() - walk the CMD chain.
+ * cmd_release_targets_and_shell() - CMD forgets about its targets & shell.
+ */
+
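+/* A minimal lifetime sketch based on the routines above:
+ *
+ *     CMD * cmd = cmd_new( rule, targets, sources, shell );
+ *     ...format and execute cmd->buf...
+ *     cmd_free( cmd );
+ *
+ * with cmd_release_targets_and_shell( cmd ) called first if the caller wants
+ * to keep ownership of the targets and shell lists.
+ */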
+
+/*
+ * CMD - an action, ready to be formatted into a buffer and executed.
+ */
+
+#ifndef COMMAND_SW20111118_H
+#define COMMAND_SW20111118_H
+
+#include "config.h"
+#include "lists.h"
+#include "rules.h"
+#include "jam_strings.h"
+
+
+typedef struct _cmd CMD;
+
+/*
+ * A list whose elements are either TARGETS or CMDS.
+ * CMDLIST is used only by CMD. A TARGET means that
+ * the CMD is the last updating action required to
+ * build the target. A CMD is the next CMD required
+ * to build the same target. (Note that a single action
+ * can update more than one target, so the CMDs form
+ * a DAG, not a straight linear list.)
+ */
+typedef struct _cmdlist {
+ struct _cmdlist * next;
+ union {
+ CMD * cmd;
+ TARGET * t;
+ } impl;
+ char iscmd;
+} CMDLIST;
+
+CMDLIST * cmdlist_append_cmd( CMDLIST *, CMD * );
+CMDLIST * cmdlist_append_target( CMDLIST *, TARGET * );
+void cmdlist_free( CMDLIST * );
+
+struct _cmd
+{
+ CMDLIST * next;
+ RULE * rule; /* rule->actions contains shell script */
+ LIST * shell; /* $(JAMSHELL) value */
+ LOL args; /* LISTs for $(<), $(>) */
+ string buf[ 1 ]; /* actual commands */
+ int noop; /* no-op commands should be faked instead of executed */
+ int asynccnt; /* number of outstanding dependencies */
+ TARGETS * lock; /* semaphores that are required by this cmd. */
+ TARGETS * unlock; /* semaphores that are released when this cmd finishes. */
+ char status; /* the command status */
+};
+
+CMD * cmd_new
+(
+ RULE * rule, /* rule (referenced) */
+ LIST * targets, /* $(<) (ownership transferred) */
+ LIST * sources, /* $(>) (ownership transferred) */
+ LIST * shell /* $(JAMSHELL) (ownership transferred) */
+);
+
+void cmd_release_targets_and_shell( CMD * );
+
+void cmd_free( CMD * );
+
+#define cmd_next( c ) ((c)->next)
+
+#endif
diff --git a/src/boost/tools/build/src/engine/compile.cpp b/src/boost/tools/build/src/engine/compile.cpp
new file mode 100644
index 000000000..030d2e186
--- /dev/null
+++ b/src/boost/tools/build/src/engine/compile.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * compile.c - compile parsed jam statements
+ *
+ * External routines:
+ * evaluate_rule() - execute a rule invocation
+ *
+ * Internal routines:
+ * debug_compile() - printf with indent to show rule expansion
+ */
+
+#include "jam.h"
+#include "compile.h"
+
+#include "builtins.h"
+#include "class.h"
+#include "constants.h"
+#include "hash.h"
+#include "hdrmacro.h"
+#include "make.h"
+#include "modules.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "jam_strings.h"
+#include "variable.h"
+#include "output.h"
+
+#include <assert.h>
+#include <stdarg.h>
+#include <string.h>
+
+
+static void debug_compile( int which, char const * s, FRAME * );
+
+/* Internal functions from builtins.c */
+void backtrace( FRAME * );
+void backtrace_line( FRAME * );
+void print_source_line( FRAME * );
+void unknown_rule( FRAME *, char const * key, module_t *, OBJECT * rule_name );
+
+
+/*
+ * evaluate_rule() - execute a rule invocation
+ */
+
+LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * frame )
+{
+ LIST * result = L0;
+ profile_frame prof[ 1 ];
+ module_t * prev_module = frame->module;
+
+ if ( DEBUG_COMPILE )
+ {
+ /* Try hard to indicate in which module the rule is going to execute. */
+ char buf[ 256 ] = "";
+ if ( rule->module->name )
+ {
+ strncat( buf, object_str( rule->module->name ), sizeof( buf ) -
+ 1 );
+ strncat( buf, ".", sizeof( buf ) - 1 );
+ if ( strncmp( buf, object_str( rule->name ), strlen( buf ) ) == 0 )
+ {
+ buf[ 0 ] = 0;
+ }
+ }
+ strncat( buf, object_str( rule->name ), sizeof( buf ) - 1 );
+ debug_compile( 1, buf, frame );
+
+ lol_print( frame->args );
+ out_printf( "\n" );
+ }
+
+ if ( rule->procedure && rule->module != prev_module )
+ {
+ /* Propagate current module to nested rule invocations. */
+ frame->module = rule->module;
+ }
+
+ /* Record current rule name in frame. */
+ if ( rule->procedure )
+ {
+ frame->rulename = object_str( rulename );
+ /* And enter record profile info. */
+ if ( DEBUG_PROFILE )
+ profile_enter( function_rulename( rule->procedure ), prof );
+ }
+
+ /* Check traditional targets $(<) and sources $(>). */
+ if ( !rule->actions && !rule->procedure )
+ unknown_rule( frame, NULL, frame->module, rulename );
+
+ /* If this rule will be executed for updating the targets then construct the
+ * action for make().
+ */
+ if ( rule->actions )
+ {
+ TARGETS * t;
+
+ /* The action is associated with this instance of this rule. */
+ ACTION * const action = (ACTION *)BJAM_MALLOC( sizeof( ACTION ) );
+ memset( (char *)action, '\0', sizeof( *action ) );
+
+ action->rule = rule;
+ action->targets = targetlist( (TARGETS *)0, lol_get( frame->args, 0 ) );
+ action->sources = targetlist( (TARGETS *)0, lol_get( frame->args, 1 ) );
+ action->refs = 1;
+
+ /* If we have a group of targets all being built using the same action
+ * and any of these targets is updated, then we have to consider them
+ * all to be out-dated. We do this by adding a REBUILDS in both directions
+ * between the first target and all the other targets.
+ */
+ if ( action->targets )
+ {
+ TARGET * const t0 = action->targets->target;
+ for ( t = action->targets->next; t; t = t->next )
+ {
+ t->target->rebuilds = targetentry( t->target->rebuilds, t0 );
+ t0->rebuilds = targetentry( t0->rebuilds, t->target );
+ }
+ }
+
+ /* Append this action to the actions of each target. */
+ for ( t = action->targets; t; t = t->next )
+ t->target->actions = actionlist( t->target->actions, action );
+
+ action_free( action );
+ }
+
+ /* Now recursively compile any parse tree associated with this rule.
+ * function_refer()/function_free() call pair added to ensure the rule does
+ * not get freed while in use.
+ */
+ if ( rule->procedure )
+ {
+ FUNCTION * const function = rule->procedure;
+ function_refer( function );
+ result = function_run( function, frame, stack_global() );
+ function_free( function );
+ }
+
+ if ( DEBUG_PROFILE && rule->procedure )
+ profile_exit( prof );
+
+ if ( DEBUG_COMPILE )
+ debug_compile( -1, 0, frame );
+
+ return result;
+}
+
+
+/*
+ * Call the given rule with the specified parameters. The parameters should be
+ * of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule'
+ * in that frame for the called rule is prepared inside 'call_rule'.
+ *
+ * This function is useful when a builtin rule (in C) wants to call another rule
+ * which might be implemented in Jam.
+ */
+
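+/* For example (a sketch with hypothetical arguments), a builtin might invoke
+ * a Jam-side rule as:
+ *
+ *     LIST * r = call_rule( rulename, frame,
+ *         list_new( object_new( "a.cpp" ) ),
+ *         list_new( object_new( "b.cpp" ) ),
+ *         (LIST *)0 );
+ *
+ * passing one LIST * per argument list and terminating with a null pointer.
+ */
+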
+LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... )
+{
+ va_list va;
+ LIST * result;
+
+ FRAME inner[ 1 ];
+ frame_init( inner );
+ inner->prev = caller_frame;
+ inner->prev_user = caller_frame->module->user_module
+ ? caller_frame
+ : caller_frame->prev_user;
+ inner->module = caller_frame->module;
+
+ va_start( va, caller_frame );
+ for ( ; ; )
+ {
+ LIST * const l = va_arg( va, LIST * );
+ if ( !l )
+ break;
+ lol_add( inner->args, l );
+ }
+ va_end( va );
+
+ result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner );
+
+ frame_free( inner );
+
+ return result;
+}
+
+
+/*
+ * debug_compile() - printf with indent to show rule expansion
+ */
+
+static void debug_compile( int which, char const * s, FRAME * frame )
+{
+ static int level = 0;
+ static char indent[ 36 ] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|";
+
+ if ( which >= 0 )
+ {
+ int i;
+
+ print_source_line( frame );
+
+ i = ( level + 1 ) * 2;
+ while ( i > 35 )
+ {
+ out_puts( indent );
+ i -= 35;
+ }
+
+ out_printf( "%*.*s ", i, i, indent );
+ }
+
+ if ( s )
+ out_printf( "%s ", s );
+
+ level += which;
+}
diff --git a/src/boost/tools/build/src/engine/compile.h b/src/boost/tools/build/src/engine/compile.h
new file mode 100644
index 000000000..97370a92d
--- /dev/null
+++ b/src/boost/tools/build/src/engine/compile.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * compile.h - compile parsed jam statements
+ */
+
+#ifndef COMPILE_DWA20011022_H
+#define COMPILE_DWA20011022_H
+
+#include "config.h"
+#include "frames.h"
+#include "lists.h"
+#include "object.h"
+#include "rules.h"
+
+void compile_builtins();
+
+LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * );
+LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... );
+
+/* Flags for compile_set(), etc */
+
+#define ASSIGN_SET 0x00 /* = assign variable */
+#define ASSIGN_APPEND 0x01 /* += append variable */
+#define ASSIGN_DEFAULT 0x02 /* set only if unset */
+
+/* Flags for compile_setexec() */
+
+#define EXEC_UPDATED 0x01 /* executes updated */
+#define EXEC_TOGETHER 0x02 /* executes together */
+#define EXEC_IGNORE 0x04 /* executes ignore */
+#define EXEC_QUIETLY 0x08 /* executes quietly */
+#define EXEC_PIECEMEAL 0x10 /* executes piecemeal */
+#define EXEC_EXISTING 0x20 /* executes existing */
+
+/* Conditions for compile_if() */
+
+#define EXPR_NOT 0 /* ! cond */
+#define EXPR_AND 1 /* cond && cond */
+#define EXPR_OR 2 /* cond || cond */
+#define EXPR_EXISTS 3 /* arg */
+#define EXPR_EQUALS 4 /* arg = arg */
+#define EXPR_NOTEQ 5 /* arg != arg */
+#define EXPR_LESS 6 /* arg < arg */
+#define EXPR_LESSEQ 7 /* arg <= arg */
+#define EXPR_MORE 8 /* arg > arg */
+#define EXPR_MOREEQ 9 /* arg >= arg */
+#define EXPR_IN 10 /* arg in arg */
+
+#endif
diff --git a/src/boost/tools/build/src/engine/config.h b/src/boost/tools/build/src/engine/config.h
new file mode 100644
index 000000000..9ff147d8e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/config.h
@@ -0,0 +1,34 @@
+#ifndef B2_CONFIG_H
+#define B2_CONFIG_H
+
+/*
+Copyright 2002-2018 Rene Rivera.
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#define OPT_HEADER_CACHE_EXT 1
+#define OPT_GRAPH_DEBUG_EXT 1
+#define OPT_SEMAPHORE 1
+#define OPT_AT_FILES 1
+#define OPT_DEBUG_PROFILE 1
+#define JAM_DEBUGGER 1
+#define OPT_FIX_TARGET_VARIABLES_EXT 1
+#define OPT_IMPROVED_PATIENCE_EXT 1
+
+// Autodetect various operating systems..
+
+#if defined(_WIN32) || defined(_WIN64) || \
+ defined(__WIN32__) || defined(__TOS_WIN__) || \
+ defined(__WINDOWS__)
+ #define NT 1
+#endif
+
+#if defined(__VMS) || defined(__VMS_VER)
+ #if !defined(VMS)
+ #define VMS 1
+ #endif
+#endif
+
+#endif
diff --git a/src/boost/tools/build/src/engine/config_toolset.bat b/src/boost/tools/build/src/engine/config_toolset.bat
new file mode 100644
index 000000000..f3c2f1ebe
--- /dev/null
+++ b/src/boost/tools/build/src/engine/config_toolset.bat
@@ -0,0 +1,209 @@
+@ECHO OFF
+
+REM ~ Copyright 2002-2018 Rene Rivera.
+REM ~ Distributed under the Boost Software License, Version 1.0.
+REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+:Start
+REM Setup the toolset command and options.
+if "_%B2_TOOLSET%_" == "_msvc_" call :Config_MSVC
+if "_%B2_TOOLSET%_" == "_vc11_" call :Config_VC11
+if "_%B2_TOOLSET%_" == "_vc12_" call :Config_VC12
+if "_%B2_TOOLSET%_" == "_vc14_" call :Config_VC14
+if "_%B2_TOOLSET%_" == "_vc141_" call :Config_VC141
+if "_%B2_TOOLSET%_" == "_vc142_" call :Config_VC142
+if "_%B2_TOOLSET%_" == "_borland_" call :Config_BORLAND
+if "_%B2_TOOLSET%_" == "_como_" call :Config_COMO
+if "_%B2_TOOLSET%_" == "_gcc_" call :Config_GCC
+if "_%B2_TOOLSET%_" == "_gcc-nocygwin_" call :Config_GCC_NOCYGWIN
+if "_%B2_TOOLSET%_" == "_intel-win32_" call :Config_INTEL_WIN32
+if "_%B2_TOOLSET%_" == "_mingw_" call :Config_MINGW
+exit /b %errorlevel%
+
+:Call_If_Exists
+ECHO Call_If_Exists %*
+if EXIST %1 call %*
+goto :eof
+
+:Config_MSVC
+if not defined CXX ( set "CXX=cl" )
+if NOT "_%MSVCDir%_" == "__" (
+ set "B2_TOOLSET_ROOT=%MSVCDir%\"
+ )
+call :Call_If_Exists "%B2_TOOLSET_ROOT%bin\VCVARS32.BAT"
+if not "_%B2_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
+ )
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_VC11
+if not defined CXX ( set "CXX=cl" )
+if NOT "_%VS110COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS%
+if NOT "_%B2_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_VC12
+if not defined CXX ( set "CXX=cl" )
+if NOT "_%VS120COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\"
+ )
+
+if "_%B2_ARCH%_" == "__" set B2_ARCH=x86
+set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
+
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS%
+if NOT "_%B2_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_VC14
+if not defined CXX ( set "CXX=cl" )
+if "_%B2_TOOLSET_ROOT%_" == "__" (
+ if NOT "_%VS140COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\"
+ ))
+
+if "_%B2_ARCH%_" == "__" set B2_ARCH=x86
+set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
+
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS%
+if NOT "_%B2_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_VC141
+if not defined CXX ( set "CXX=cl" )
+call vswhere_usability_wrapper.cmd
+REM Reset ERRORLEVEL since from now on it's all based on ENV vars
+ver > nul 2> nul
+if "_%B2_TOOLSET_ROOT%_" == "__" (
+ if NOT "_%VS150COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET_ROOT=%VS150COMNTOOLS%..\..\VC\"
+ ))
+
+if "_%B2_ARCH%_" == "__" set B2_ARCH=x86
+set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
+
+REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
+pushd %CD%
+if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
+popd
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_VC142
+if not defined CXX ( set "CXX=cl" )
+call vswhere_usability_wrapper.cmd
+REM Reset ERRORLEVEL since from now on it's all based on ENV vars
+ver > nul 2> nul
+if "_%B2_TOOLSET_ROOT%_" == "__" (
+ if NOT "_%VS160COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET_ROOT=%VS160COMNTOOLS%..\..\VC\"
+ ))
+
+if "_%B2_ARCH%_" == "__" set B2_ARCH=x86
+set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
+
+REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
+pushd %CD%
+if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
+popd
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_VCUNK
+if NOT "_%B2_TOOLSET%_" == "_vcunk_" goto Skip_VCUNK
+call vswhere_usability_wrapper.cmd
+REM Reset ERRORLEVEL since from now on it's all based on ENV vars
+ver > nul 2> nul
+if "_%B2_TOOLSET_ROOT%_" == "__" (
+ if NOT "_%VSUNKCOMNTOOLS%_" == "__" (
+ set "B2_TOOLSET_ROOT=%VSUNKCOMNTOOLS%..\..\VC\"
+ ))
+
+if "_%B2_ARCH%_" == "__" set B2_ARCH=x86
+set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH%
+
+REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
+pushd %CD%
+if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS%
+popd
+set "B2_CXX=%CXX% /nologo /Zi /MT /TP /Feb2 /wd4996 /Ox /GL"
+set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib"
+set "_known_=1"
+goto :eof
+
+:Config_BORLAND
+if not defined CXX ( set "CXX=bcc32" )
+if "_%B2_TOOLSET_ROOT%_" == "__" (
+ call guess_toolset.bat test_path bcc32.exe )
+if "_%B2_TOOLSET_ROOT%_" == "__" (
+ if not errorlevel 1 (
+ set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
+ ) )
+if not "_%B2_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%B2_TOOLSET_ROOT%Bin;%PATH%"
+ )
+set "B2_CXX=%CXX% -tC -P -O2 -w- -I"%B2_TOOLSET_ROOT%Include" -L"%B2_TOOLSET_ROOT%Lib" -Nd -eb2"
+set "_known_=1"
+goto :eof
+
+:Config_COMO
+if not defined CXX ( set "CXX=como" )
+set "B2_CXX=%CXX% --inlining -o b2.exe"
+set "_known_=1"
+goto :eof
+
+:Config_GCC
+if not defined CXX ( set "CXX=g++" )
+set "B2_CXX=%CXX% -x c++ -std=c++11 -s -O3 -o b2.exe"
+set "_known_=1"
+goto :eof
+
+:Config_GCC_NOCYGWIN
+if not defined CXX ( set "CXX=g++" )
+set "B2_CXX=%CXX% -x c++ -std=c++11 -s -O3 -mno-cygwin -o b2.exe"
+set "_known_=1"
+goto :eof
+
+:Config_INTEL_WIN32
+if not defined CXX ( set "CXX=icl" )
+set "B2_CXX=%CXX% /nologo /MT /O2 /Ob2 /Gy /GF /GA /GB /Feb2"
+set "_known_=1"
+goto :eof
+
+:Config_MINGW
+if not defined CXX ( set "CXX=g++" )
+if not "_%B2_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%"
+ )
+for /F "delims=" %%I in ("%CXX%") do set "PATH=%PATH%;%%~dpI"
+set "B2_CXX=%CXX% -x c++ -std=c++11 -s -O3 -o b2.exe"
+set "_known_=1"
+goto :eof
diff --git a/src/boost/tools/build/src/engine/constants.cpp b/src/boost/tools/build/src/engine/constants.cpp
new file mode 100644
index 000000000..ce4e3d7e4
--- /dev/null
+++ b/src/boost/tools/build/src/engine/constants.cpp
@@ -0,0 +1,192 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * constants.c - constant objects
+ *
+ * External functions:
+ *
+ * constants_init() - initialize constants
+ * constants_done() - free constants
+ *
+ */
+
+#include "constants.h"
+
+
+void constants_init( void )
+{
+ constant_empty = object_new( "" );
+ constant_dot = object_new( "." );
+ constant_plus = object_new( "+" );
+ constant_star = object_new( "*" );
+ constant_question_mark = object_new( "?" );
+ constant_ok = object_new( "ok" );
+ constant_true = object_new( "true" );
+ constant_name = object_new( "__name__" );
+ constant_bases = object_new( "__bases__" );
+ constant_class = object_new( "__class__" );
+ constant_typecheck = object_new( ".typecheck" );
+ constant_builtin = object_new( "(builtin)" );
+ constant_HCACHEFILE = object_new( "HCACHEFILE" );
+ constant_HCACHEMAXAGE = object_new( "HCACHEMAXAGE" );
+ constant_HDRSCAN = object_new( "HDRSCAN" );
+ constant_HDRRULE = object_new( "HDRRULE" );
+ constant_BINDRULE = object_new( "BINDRULE" );
+ constant_LOCATE = object_new( "LOCATE" );
+ constant_SEARCH = object_new( "SEARCH" );
+ constant_JAM_SEMAPHORE = object_new( "JAM_SEMAPHORE" );
+ constant_TIMING_RULE = object_new( "__TIMING_RULE__" );
+ constant_ACTION_RULE = object_new( "__ACTION_RULE__" );
+ constant_JAMSHELL = object_new( "JAMSHELL" );
+ constant_TMPDIR = object_new( "TMPDIR" );
+ constant_TMPNAME = object_new( "TMPNAME" );
+ constant_TMPFILE = object_new( "TMPFILE" );
+ constant_STDOUT = object_new( "STDOUT" );
+ constant_STDERR = object_new( "STDERR" );
+ constant_JAMDATE = object_new( "JAMDATE" );
+ constant_JAM_TIMESTAMP_RESOLUTION = object_new( "JAM_TIMESTAMP_RESOLUTION" );
+ constant_JAM_VERSION = object_new( "JAM_VERSION" );
+ constant_JAMUNAME = object_new( "JAMUNAME" );
+ constant_ENVIRON = object_new( ".ENVIRON" );
+ constant_ARGV = object_new( "ARGV" );
+ constant_all = object_new( "all" );
+ constant_PARALLELISM = object_new( "PARALLELISM" );
+ constant_KEEP_GOING = object_new( "KEEP_GOING" );
+ constant_other = object_new( "[OTHER]" );
+ constant_total = object_new( "[TOTAL]" );
+ constant_FILE_DIRSCAN = object_new( "FILE_DIRSCAN" );
+ constant_MAIN = object_new( "MAIN" );
+ constant_MAIN_MAKE = object_new( "MAIN_MAKE" );
+ constant_MAKE_MAKE0 = object_new( "MAKE_MAKE0" );
+ constant_MAKE_MAKE1 = object_new( "MAKE_MAKE1" );
+ constant_MAKE_MAKE0SORT = object_new( "MAKE_MAKE0SORT" );
+ constant_BINDMODULE = object_new( "BINDMODULE" );
+ constant_IMPORT_MODULE = object_new( "IMPORT_MODULE" );
+ constant_BUILTIN_GLOB_BACK = object_new( "BUILTIN_GLOB_BACK" );
+ constant_timestamp = object_new( "timestamp" );
+ constant_python = object_new("__python__");
+ constant_python_interface = object_new( "python_interface" );
+ constant_extra_pythonpath = object_new( "EXTRA_PYTHONPATH" );
+ constant_MAIN_PYTHON = object_new( "MAIN_PYTHON" );
+ constant_BUILTIN_GLOB_ARCHIVE_BACK= object_new( "BUILTIN_GLOB_ARCHIVE_BACK" );
+ constant_FILE_ARCHIVESCAN = object_new( "FILE_ARCHIVESCAN" );
+}
+
+void constants_done( void )
+{
+ object_free( constant_empty );
+ object_free( constant_dot );
+ object_free( constant_plus );
+ object_free( constant_star );
+ object_free( constant_question_mark );
+ object_free( constant_ok );
+ object_free( constant_true );
+ object_free( constant_name );
+ object_free( constant_bases );
+ object_free( constant_class );
+ object_free( constant_typecheck );
+ object_free( constant_builtin );
+ object_free( constant_HCACHEFILE );
+ object_free( constant_HCACHEMAXAGE );
+ object_free( constant_HDRSCAN );
+ object_free( constant_HDRRULE );
+ object_free( constant_BINDRULE );
+ object_free( constant_LOCATE );
+ object_free( constant_SEARCH );
+ object_free( constant_JAM_SEMAPHORE );
+ object_free( constant_TIMING_RULE );
+ object_free( constant_ACTION_RULE );
+ object_free( constant_JAMSHELL );
+ object_free( constant_TMPDIR );
+ object_free( constant_TMPNAME );
+ object_free( constant_TMPFILE );
+ object_free( constant_STDOUT );
+ object_free( constant_STDERR );
+ object_free( constant_JAMDATE );
+ object_free( constant_JAM_TIMESTAMP_RESOLUTION );
+ object_free( constant_JAM_VERSION );
+ object_free( constant_JAMUNAME );
+ object_free( constant_ENVIRON );
+ object_free( constant_ARGV );
+ object_free( constant_all );
+ object_free( constant_PARALLELISM );
+ object_free( constant_KEEP_GOING );
+ object_free( constant_other );
+ object_free( constant_total );
+ object_free( constant_FILE_DIRSCAN );
+ object_free( constant_MAIN );
+ object_free( constant_MAIN_MAKE );
+ object_free( constant_MAKE_MAKE0 );
+ object_free( constant_MAKE_MAKE1 );
+ object_free( constant_MAKE_MAKE0SORT );
+ object_free( constant_BINDMODULE );
+ object_free( constant_IMPORT_MODULE );
+ object_free( constant_BUILTIN_GLOB_BACK );
+ object_free( constant_timestamp );
+ object_free( constant_python );
+ object_free( constant_python_interface );
+ object_free( constant_extra_pythonpath );
+ object_free( constant_MAIN_PYTHON );
+ object_free( constant_FILE_ARCHIVESCAN );
+ object_free( constant_BUILTIN_GLOB_ARCHIVE_BACK );
+}
+
+OBJECT * constant_empty;
+OBJECT * constant_dot;
+OBJECT * constant_plus;
+OBJECT * constant_star;
+OBJECT * constant_question_mark;
+OBJECT * constant_ok;
+OBJECT * constant_true;
+OBJECT * constant_name;
+OBJECT * constant_bases;
+OBJECT * constant_class;
+OBJECT * constant_typecheck;
+OBJECT * constant_builtin;
+OBJECT * constant_HCACHEFILE;
+OBJECT * constant_HCACHEMAXAGE;
+OBJECT * constant_HDRSCAN;
+OBJECT * constant_HDRRULE;
+OBJECT * constant_BINDRULE;
+OBJECT * constant_LOCATE;
+OBJECT * constant_SEARCH;
+OBJECT * constant_JAM_SEMAPHORE;
+OBJECT * constant_TIMING_RULE;
+OBJECT * constant_ACTION_RULE;
+OBJECT * constant_JAMSHELL;
+OBJECT * constant_TMPDIR;
+OBJECT * constant_TMPNAME;
+OBJECT * constant_TMPFILE;
+OBJECT * constant_STDOUT;
+OBJECT * constant_STDERR;
+OBJECT * constant_JAMDATE;
+OBJECT * constant_JAM_VERSION;
+OBJECT * constant_JAMUNAME;
+OBJECT * constant_ENVIRON;
+OBJECT * constant_ARGV;
+OBJECT * constant_all;
+OBJECT * constant_PARALLELISM;
+OBJECT * constant_KEEP_GOING;
+OBJECT * constant_other;
+OBJECT * constant_total;
+OBJECT * constant_FILE_DIRSCAN;
+OBJECT * constant_MAIN;
+OBJECT * constant_MAIN_MAKE;
+OBJECT * constant_MAKE_MAKE0;
+OBJECT * constant_MAKE_MAKE1;
+OBJECT * constant_MAKE_MAKE0SORT;
+OBJECT * constant_BINDMODULE;
+OBJECT * constant_IMPORT_MODULE;
+OBJECT * constant_BUILTIN_GLOB_BACK;
+OBJECT * constant_timestamp;
+OBJECT * constant_JAM_TIMESTAMP_RESOLUTION;
+OBJECT * constant_python;
+OBJECT * constant_python_interface;
+OBJECT * constant_extra_pythonpath;
+OBJECT * constant_MAIN_PYTHON;
+OBJECT * constant_FILE_ARCHIVESCAN;
+OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK;
diff --git a/src/boost/tools/build/src/engine/constants.h b/src/boost/tools/build/src/engine/constants.h
new file mode 100644
index 000000000..ec112080d
--- /dev/null
+++ b/src/boost/tools/build/src/engine/constants.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * constants.h - constant objects
+ */
+
+#ifndef BOOST_JAM_CONSTANTS_H
+#define BOOST_JAM_CONSTANTS_H
+
+#include "config.h"
+#include "object.h"
+
+void constants_init( void );
+void constants_done( void );
+
+extern OBJECT * constant_empty; /* "" */
+extern OBJECT * constant_dot; /* "." */
+extern OBJECT * constant_plus; /* "+" */
+extern OBJECT * constant_star; /* "*" */
+extern OBJECT * constant_question_mark; /* "?" */
+extern OBJECT * constant_ok; /* "ok" */
+extern OBJECT * constant_true; /* "true" */
+extern OBJECT * constant_name; /* "__name__" */
+extern OBJECT * constant_bases; /* "__bases__" */
+extern OBJECT * constant_class; /* "__class__" */
+extern OBJECT * constant_typecheck; /* ".typecheck" */
+extern OBJECT * constant_builtin; /* "(builtin)" */
+extern OBJECT * constant_HCACHEFILE; /* "HCACHEFILE" */
+extern OBJECT * constant_HCACHEMAXAGE; /* "HCACHEMAXAGE" */
+extern OBJECT * constant_HDRSCAN; /* "HDRSCAN" */
+extern OBJECT * constant_HDRRULE; /* "HDRRULE" */
+extern OBJECT * constant_BINDRULE; /* "BINDRULE" */
+extern OBJECT * constant_LOCATE; /* "LOCATE" */
+extern OBJECT * constant_SEARCH; /* "SEARCH" */
+extern OBJECT * constant_JAM_SEMAPHORE; /* "JAM_SEMAPHORE" */
+extern OBJECT * constant_TIMING_RULE; /* "__TIMING_RULE__" */
+extern OBJECT * constant_ACTION_RULE; /* "__ACTION_RULE__" */
+extern OBJECT * constant_JAMSHELL; /* "JAMSHELL" */
+extern OBJECT * constant_TMPDIR; /* "TMPDIR" */
+extern OBJECT * constant_TMPNAME; /* "TMPNAME" */
+extern OBJECT * constant_TMPFILE; /* "TMPFILE" */
+extern OBJECT * constant_STDOUT; /* "STDOUT" */
+extern OBJECT * constant_STDERR; /* "STDERR" */
+extern OBJECT * constant_JAMDATE; /* "JAMDATE" */
+extern OBJECT * constant_JAM_TIMESTAMP_RESOLUTION; /* "JAM_TIMESTAMP_RESOLUTION" */
+extern OBJECT * constant_JAM_VERSION; /* "JAM_VERSION" */
+extern OBJECT * constant_JAMUNAME; /* "JAMUNAME" */
+extern OBJECT * constant_ENVIRON; /* ".ENVIRON" */
+extern OBJECT * constant_ARGV; /* "ARGV" */
+extern OBJECT * constant_all; /* "all" */
+extern OBJECT * constant_PARALLELISM; /* "PARALLELISM" */
+extern OBJECT * constant_KEEP_GOING; /* "KEEP_GOING" */
+extern OBJECT * constant_other; /* "[OTHER]" */
+extern OBJECT * constant_total; /* "[TOTAL]" */
+extern OBJECT * constant_FILE_DIRSCAN; /* "FILE_DIRSCAN" */
+extern OBJECT * constant_MAIN; /* "MAIN" */
+extern OBJECT * constant_MAIN_MAKE; /* "MAIN_MAKE" */
+extern OBJECT * constant_MAKE_MAKE0; /* "MAKE_MAKE0" */
+extern OBJECT * constant_MAKE_MAKE1; /* "MAKE_MAKE1" */
+extern OBJECT * constant_MAKE_MAKE0SORT; /* "MAKE_MAKE0SORT" */
+extern OBJECT * constant_BINDMODULE; /* "BINDMODULE" */
+extern OBJECT * constant_IMPORT_MODULE; /* "IMPORT_MODULE" */
+extern OBJECT * constant_BUILTIN_GLOB_BACK; /* "BUILTIN_GLOB_BACK" */
+extern OBJECT * constant_timestamp; /* "timestamp" */
+extern OBJECT * constant_python; /* "__python__" */
+extern OBJECT * constant_python_interface; /* "python_interface" */
+extern OBJECT * constant_extra_pythonpath; /* "EXTRA_PYTHONPATH" */
+extern OBJECT * constant_MAIN_PYTHON; /* "MAIN_PYTHON" */
+extern OBJECT * constant_FILE_ARCHIVESCAN; /* "FILE_ARCHIVESCAN" */
+extern OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK; /* "BUILTIN_GLOB_ARCHIVE_BACK" */
+
+#endif
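
A minimal sketch of how these interned constants are meant to be consumed (hypothetical code; it assumes only the declarations above plus object_str() from object.h, and it will not link outside the engine):

    #include "constants.h"
    #include "object.h"
    #include <stdio.h>

    int main()
    {
        constants_init();          /* interns every constant_* string exactly once */
        /* Each constant is a ready-made OBJECT * for a frequently used name, so
         * the engine never has to rebuild the string at the point of use;
         * object_str() recovers the underlying text. */
        printf( "%s\n", object_str( constant_TMPDIR ) );   /* prints "TMPDIR" */
        constants_done();          /* releases every object created by constants_init() */
        return 0;
    }
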
diff --git a/src/boost/tools/build/src/engine/cwd.cpp b/src/boost/tools/build/src/engine/cwd.cpp
new file mode 100644
index 000000000..1dac07ec2
--- /dev/null
+++ b/src/boost/tools/build/src/engine/cwd.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2002. Vladimir Prus
+ * Copyright 2005. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "cwd.h"
+
+#include "jam.h"
+#include "mem.h"
+#include "pathsys.h"
+
+#include <assert.h>
+#include <errno.h>
+#include <limits.h>
+
+/* MinGW on Windows declares PATH_MAX in limits.h */
+#if defined( NT ) && !defined( __GNUC__ )
+# include <direct.h>
+# define PATH_MAX _MAX_PATH
+#else
+# include <unistd.h>
+# if defined( __COMO__ )
+# include <linux/limits.h>
+# endif
+#endif
+
+#ifndef PATH_MAX
+# define PATH_MAX 1024
+#endif
+
+
+static OBJECT * cwd_;
+
+
+void cwd_init( void )
+{
+ int buffer_size = PATH_MAX;
+ char * cwd_buffer = 0;
+ int error;
+
+ assert( !cwd_ );
+
+ do
+ {
+ char * const buffer = (char *)BJAM_MALLOC_RAW( buffer_size );
+#ifdef OS_VMS
+ /* cwd in POSIX-format */
+ cwd_buffer = getcwd( buffer, buffer_size, 0 );
+#else
+ cwd_buffer = getcwd( buffer, buffer_size );
+#endif
+ error = errno;
+ if ( cwd_buffer )
+ {
+ /* We store the path using its canonical/long/key format. */
+ OBJECT * const cwd = object_new( cwd_buffer );
+ cwd_ = path_as_key( cwd );
+ object_free( cwd );
+ }
+ buffer_size *= 2;
+ BJAM_FREE_RAW( buffer );
+ }
+ while ( !cwd_ && error == ERANGE );
+
+ if ( !cwd_ )
+ {
+ perror( "can not get current working directory" );
+ exit( EXITBAD );
+ }
+}
+
+
+OBJECT * cwd( void )
+{
+ assert( cwd_ );
+ return cwd_;
+}
+
+
+void cwd_done( void )
+{
+ assert( cwd_ );
+ object_free( cwd_ );
+ cwd_ = NULL;
+}
diff --git a/src/boost/tools/build/src/engine/cwd.h b/src/boost/tools/build/src/engine/cwd.h
new file mode 100644
index 000000000..39a66f843
--- /dev/null
+++ b/src/boost/tools/build/src/engine/cwd.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2002. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * cwd.h - manages the current working folder information
+ */
+
+#ifndef CWD_H
+#define CWD_H
+
+#include "config.h"
+#include "object.h"
+
+
+/* cwd() - returns the current working folder */
+OBJECT * cwd( void );
+
+/* cwd_init() - initialize the cwd module functionality
+ *
+ * The current working folder cannot change in Boost Jam, so this function
+ * gets the current working folder information from the OS and stores it
+ * internally.
+ *
+ * Expected to be called at program startup before the program's current
+ * working folder has been changed
+ */
+void cwd_init( void );
+
+/* cwd_done() - cleans up the cwd module functionality */
+void cwd_done( void );
+
+#endif
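
A brief usage sketch for the three functions declared above (hypothetical; it assumes the engine's object.h and, like the engine itself, never frees the OBJECT returned by cwd() because the module owns it):

    #include "cwd.h"
    #include "object.h"
    #include <stdio.h>

    int main()
    {
        cwd_init();                /* query the OS once, before anything calls chdir() */
        OBJECT * dir = cwd();      /* cached value, cheap to call repeatedly */
        printf( "running in %s\n", object_str( dir ) );
        cwd_done();                /* releases the cached OBJECT */
        return 0;
    }
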
diff --git a/src/boost/tools/build/src/engine/debian/changelog b/src/boost/tools/build/src/engine/debian/changelog
new file mode 100644
index 000000000..29084289c
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debian/changelog
@@ -0,0 +1,72 @@
+bjam (3.1.12-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Rene Rivera <grafik@redshift-software.com> Sat, 01 Oct 2005 00:00:00 +0000
+
+bjam (3.1.11-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Rene Rivera <grafik@redshift-software.com> Sat, 30 Apr 2005 00:00:00 +0000
+
+bjam (3.1.10-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Rene Rivera <grafik@redshift-software.com> Tue, 1 Jun 2004 05:42:35 +0000
+
+bjam (3.1.9-2) unstable; urgency=low
+
+ * Use default value of BOOST_BUILD_PATH if it is not set in the environment.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Wed, 17 Dec 2003 16:44:35 +0300
+
+bjam (3.1.9-1) unstable; urgency=low
+
+ * Implement NATIVE_FILE builtin and several native rules.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Dec 2003 13:15:26 +0300
+
+bjam (3.1.8-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 4 Nov 2003 20:50:43 +0300
+
+bjam (3.1.7-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Thu, 11 Sep 2003 10:45:44 +0400
+
+bjam (3.1.6-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Tue, 1 Jul 2003 09:12:18 +0400
+
+bjam (3.1.5-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Mon, 19 May 2003 14:05:13 +0400
+
+bjam (3.1.3-2) unstable; urgency=low
+
+ * Changed Debian package to be similar to Jam's package.
+
+ -- Vladimir Prus <ghost@cs.msu.su> Thu, 10 Oct 2002 18:43:26 +0400
+
+bjam (3.1.3-1) unstable; urgency=low
+
+ * New upstream release.
+
+ -- Vladimir Prus <ghost@zigzag.lvk.cs.msu.su> Fri, 4 Oct 2002 18:16:54 +0400
+
+bjam (3.1.2-1) unstable; urgency=low
+
+ * Initial Release.
+
+ -- Vladimir Prus <ghost@cs.msu.su> Wed, 14 Aug 2002 14:08:00 +0400
+
diff --git a/src/boost/tools/build/src/engine/debian/control b/src/boost/tools/build/src/engine/debian/control
new file mode 100644
index 000000000..46747d838
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debian/control
@@ -0,0 +1,16 @@
+Source: bjam
+Section: devel
+Priority: optional
+Maintainer: Vladimir Prus <ghost@cs.msu.su>
+Build-Depends: debhelper (>> 3.0.0), docbook-to-man, bison
+Standards-Version: 3.5.2
+
+Package: bjam
+Architecture: any
+Depends: ${shlibs:Depends}
+Description: Build tool
+ Boost.Jam is a portable build tool with its own interpreted language, which
+ allows implementing rather complex logic in a readable way without resorting
+ to external programs. It is a descendant of the Jam/MR tool, modified to suit
+ the needs of B2. In particular, modules and rule parameters were added, as
+ well as several new builtins.
diff --git a/src/boost/tools/build/src/engine/debian/copyright b/src/boost/tools/build/src/engine/debian/copyright
new file mode 100644
index 000000000..f72e4e3a9
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debian/copyright
@@ -0,0 +1,25 @@
+This package was debianized by Vladimir Prus <ghost@cs.msu.su> on
+Wed, 17 July 2002, 19:27:00 +0400.
+
+Copyright:
+
+ /+\
+ +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ \+/
+
+ This is Release 2.4 of Jam/MR, a make-like program.
+
+ License is hereby granted to use this software and distribute it
+ freely, as long as this copyright notice is retained and modifications
+ are clearly marked.
+
+ ALL WARRANTIES ARE HEREBY DISCLAIMED.
+
+Some portions are also:
+
+ Copyright 2001-2006 David Abrahams.
+ Copyright 2002-2006 Rene Rivera.
+ Copyright 2003-2006 Vladimir Prus.
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
diff --git a/src/boost/tools/build/src/engine/debian/jam.man.sgml b/src/boost/tools/build/src/engine/debian/jam.man.sgml
new file mode 100644
index 000000000..1fabfb64c
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debian/jam.man.sgml
@@ -0,0 +1,236 @@
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V4.1//EN" [
+
+<!-- Process this file with docbook-to-man to generate an nroff manual
+ page: `docbook-to-man manpage.sgml > manpage.1'. You may view
+ the manual page with: `docbook-to-man manpage.sgml | nroff -man |
+ less'. A typical entry in a Makefile or Makefile.am is:
+
+manpage.1: manpage.sgml
+ docbook-to-man $< > $@
+ -->
+
+ <!ENTITY dhfirstname "<firstname>Yann</firstname>">
+ <!ENTITY dhsurname "<surname>Dirson</surname>">
+ <!-- Please adjust the date whenever revising the manpage. -->
+ <!ENTITY dhdate "<date>May 23, 2001</date>">
+ <!ENTITY dhemail "<email>dirson@debian.org</email>">
+ <!ENTITY dhusername "Yann Dirson">
+ <!ENTITY dhpackage "jam">
+
+ <!ENTITY debian "<productname>Debian GNU/Linux</productname>">
+ <!ENTITY gnu "<acronym>GNU</acronym>">
+]>
+
+<refentry>
+ <refentryinfo>
+ <address>
+ &dhemail;
+ </address>
+ <author>
+ &dhfirstname;
+ &dhsurname;
+ </author>
+ <copyright>
+ <year>2001</year>
+ <holder>&dhusername;</holder>
+ </copyright>
+ &dhdate;
+ </refentryinfo>
+
+ <refmeta>
+ <refentrytitle>JAM</refentrytitle>
+ <manvolnum>1</manvolnum>
+ </refmeta>
+
+ <refnamediv>
+ <refname>Jam/MR</refname>
+ <refpurpose>Make(1) Redux</refpurpose>
+ </refnamediv>
+
+ <refsynopsisdiv>
+ <cmdsynopsis>
+ <command>jam</command>
+
+ <arg><option>-a</option></arg>
+ <arg><option>-n</option></arg>
+ <arg><option>-v</option></arg>
+
+ <arg><option>-d <replaceable/debug/</option></arg>
+ <arg><option>-f <replaceable/jambase/</option></arg>
+ <arg><option>-j <replaceable/jobs/</option></arg>
+ <arg><option>-o <replaceable/actionsfile/</option></arg>
+ <arg><option>-s <replaceable/var/=<replaceable/value/</option></arg>
+ <arg><option>-t <replaceable/target/</option></arg>
+
+ <arg repeat><option><replaceable/target/</option></arg>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <refsect1>
+ <title>DESCRIPTION</title>
+
+ <para>Jam is a program construction tool, like make(1).</para>
+
+ <para>Jam recursively builds target files from source files, using
+ dependency information and updating actions expressed in the
+ Jambase file, which is written in jam's own interpreted language.
+ The default Jambase is compiled into jam and provides a
+ boilerplate for common use, relying on a user-provided file
+ "Jamfile" to enumerate actual targets and sources.</para>
+ </refsect1>
+
+ <refsect1>
+ <title>OPTIONS</title>
+
+ <variablelist>
+ <varlistentry>
+ <term><option/-a/</term>
+ <listitem>
+ <para>Build all targets anyway, even if they are up-to-date.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d <replaceable/n/</option></term>
+ <listitem>
+ <para>Enable cumulative debugging levels from 1 to
+ <replaceable/n/. Interesting values are:
+
+ <glosslist>
+ <glossentry><glossterm/1/ <glossdef><simpara/Show
+ actions (the default)/</glossdef></glossentry>
+
+ <glossentry><glossterm/2/ <glossdef><simpara/Show
+ "quiet" actions and display all action
+ text/</glossdef></glossentry>
+
+ <glossentry><glossterm/3/ <glossdef><simpara>Show
+ dependency analysis, and target/source
+ timestamps/paths</simpara></glossdef></glossentry>
+
+ <glossentry><glossterm/4/ <glossdef><simpara/Show shell
+ arguments/</glossdef></glossentry>
+
+ <glossentry><glossterm/5/ <glossdef><simpara/Show rule
+ invocations and variable
+ expansions/</glossdef></glossentry>
+
+ <glossentry><glossterm/6/ <glossdef><simpara>Show
+ directory/header file/archive
+ scans</simpara></glossdef></glossentry>
+
+ <glossentry><glossterm/7/ <glossdef><simpara/Show
+ variable settings/</glossdef></glossentry>
+
+ <glossentry><glossterm/8/ <glossdef><simpara/Show
+ variable fetches/</glossdef></glossentry>
+
+ <glossentry><glossterm/9/ <glossdef><simpara/Show
+ variable manipulation, scanner
+ tokens/</glossdef></glossentry>
+ </glosslist>
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d +<replaceable/n/</option></term>
+ <listitem>
+ <para>Enable debugging level <replaceable/n/.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option/-d 0/</term>
+ <listitem>
+ <para>Turn off all debugging levels; only error messages
+ are still printed.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-f <replaceable/jambase/</option></term>
+ <listitem>
+ <para>Read <replaceable/jambase/ instead of using the
+ built-in Jambase. Only one <option/-f/ flag is permitted,
+ but the <replaceable/jambase/ may explicitly include other
+ files.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-j <replaceable/n/</option></term>
+ <listitem>
+ <para>Run up to <replaceable/n/ shell commands concurrently
+ (UNIX and NT only). The default is 1.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option/-n/</term>
+ <listitem>
+ <para>Don't actually execute the updating actions, but do
+ everything else. This changes the debug level default to
+ <option/-d2/.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-o <replaceable/file/</option></term>
+ <listitem>
+ <para>Write the updating actions to the specified file
+ instead of running them (or outputting them, as on the
+ Mac).</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-s <replaceable/var/=<replaceable/value/</option></term>
+ <listitem>
+ <para>Set the variable <replaceable/var/ to
+ <replaceable/value/, overriding both internal variables and
+ variables imported from the environment. </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-t <replaceable/target/</option></term>
+ <listitem>
+ <para>Rebuild <replaceable/target/ and everything that
+ depends on it, even if it is up-to-date.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option/-v/</term>
+ <listitem>
+ <para>Print the version of jam and exit.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+ </refsect1>
+
+ <refsect1>
+ <title>SEE ALSO</title>
+
+ <para>Jam is documented fully in HTML pages available on Debian
+ systems from
+ <filename>/usr/share/doc/jam/Jam.html</filename>.</para>
+ </refsect1>
+
+ <refsect1>
+ <title>AUTHOR</title>
+
+ <para>This manual page was created by &dhusername; &dhemail; from
+ the <filename/Jam.html/ documentation, for the &debian; system
+ (but may be used by others).</para>
+ </refsect1>
+</refentry>
+
+<!-- Keep this comment at the end of the file
+Local variables:
+sgml-omittag:t
+sgml-shorttag:t
+End:
+-->
diff --git a/src/boost/tools/build/src/engine/debian/rules b/src/boost/tools/build/src/engine/debian/rules
new file mode 100755
index 000000000..8538b3572
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debian/rules
@@ -0,0 +1,73 @@
+#!/usr/bin/make -f
+# Sample debian/rules that uses debhelper.
+# GNU copyright 1997 to 1999 by Joey Hess.
+# GNU copyright 2001 by Yann Dirson.
+
+# This is the debian/rules file for packages jam and ftjam
+# It should be usable with both packages without any change
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+
+# This is the debhelper compatibility version to use.
+export DH_COMPAT=3
+
+topdir=$(shell pwd)
+
+jam=bjam
+binname=bjam
+
+build: build-stamp
+build-stamp: debian/jam.1
+ dh_testdir
+
+ ./build.sh
+
+ touch build-stamp
+
+%.1: %.man.sgml
+ /usr/bin/docbook-to-man $< > $@
+
+clean:
+ dh_testdir
+ dh_testroot
+ rm -f build-stamp
+ rm -rf bin.*
+ rm -f jam0 debian/jam.1
+ dh_clean
+
+install: build
+ dh_testdir
+ dh_testroot
+ dh_clean -k
+ dh_installdirs
+
+ install -d ${topdir}/debian/${jam}/usr/bin
+ install -m755 bin.linuxx86/bjam ${topdir}/debian/${jam}/usr/bin/
+ install -d ${topdir}/debian/${jam}/usr/share/man/man1/
+ install -m644 debian/jam.1 ${topdir}/debian/${jam}/usr/share/man/man1/${binname}.1
+
+
+# Build architecture-independent files here.
+binary-indep: build install
+# We have nothing to do by default.
+
+# Build architecture-dependent files here.
+binary-arch: build install
+ dh_testdir
+ dh_testroot
+ dh_installdocs README RELNOTES Jambase *.html
+# dh_installemacsen
+# dh_undocumented
+ dh_installchangelogs
+ dh_strip
+ dh_compress
+ dh_fixperms
+ dh_installdeb
+ dh_shlibdeps
+ dh_gencontrol
+ dh_md5sums
+ dh_builddeb
+
+binary: binary-indep binary-arch
+.PHONY: build clean binary-indep binary-arch binary install configure
diff --git a/src/boost/tools/build/src/engine/debug.cpp b/src/boost/tools/build/src/engine/debug.cpp
new file mode 100644
index 000000000..2a19e072b
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debug.cpp
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2005, 2016. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "debug.h"
+#include "output.h"
+#include "hash.h"
+#include <time.h>
+
+
+static profile_frame * profile_stack = 0;
+static struct hash * profile_hash = 0;
+static profile_info profile_other = { 0 };
+static profile_info profile_total = { 0 };
+
+
+profile_frame * profile_init( OBJECT * rulename, profile_frame * frame )
+{
+ if ( DEBUG_PROFILE ) profile_enter( rulename, frame );
+ return frame;
+}
+
+
+void profile_enter( OBJECT * rulename, profile_frame * frame )
+{
+ if ( DEBUG_PROFILE )
+ {
+ double start = profile_clock();
+ profile_info * p;
+
+ if ( !profile_hash && rulename )
+ profile_hash = hashinit( sizeof( profile_info ), "profile" );
+
+ if ( rulename )
+ {
+ int found;
+ p = (profile_info *)hash_insert( profile_hash, rulename, &found );
+ if ( !found )
+ {
+ p->name = rulename;
+ p->cumulative = 0;
+ p->net = 0;
+ p->num_entries = 0;
+ p->stack_count = 0;
+ p->memory = 0;
+ }
+ }
+ else
+ {
+ p = &profile_other;
+ }
+
+ p->num_entries += 1;
+ p->stack_count += 1;
+
+ frame->info = p;
+
+ frame->caller = profile_stack;
+ profile_stack = frame;
+
+ frame->entry_time = profile_clock();
+ frame->overhead = 0;
+ frame->subrules = 0;
+
+ /* caller pays for the time it takes to play with the hash table */
+ if ( frame->caller )
+ frame->caller->overhead += frame->entry_time - start;
+ }
+}
+
+
+void profile_memory( long mem )
+{
+ if ( DEBUG_PROFILE )
+ if ( profile_stack && profile_stack->info )
+ profile_stack->info->memory += ((double)mem) / 1024;
+}
+
+
+void profile_exit( profile_frame * frame )
+{
+ if ( DEBUG_PROFILE )
+ {
+ /* Cumulative time for this call. */
+ double t = profile_clock() - frame->entry_time - frame->overhead;
+ /* If this rule is already present on the stack, do not add the time for
+ * this instance.
+ */
+ if ( frame->info->stack_count == 1 )
+ frame->info->cumulative += t;
+ /* Net time does not depend on presence of the same rule in call stack.
+ */
+ frame->info->net += t - frame->subrules;
+
+ if ( frame->caller )
+ {
+ /* Caller's cumulative time must account for this overhead. */
+ frame->caller->overhead += frame->overhead;
+ frame->caller->subrules += t;
+ }
+ /* Pop this stack frame. */
+ --frame->info->stack_count;
+ profile_stack = frame->caller;
+ }
+}
+
+
+static void dump_profile_entry( void * p_, void * ignored )
+{
+ profile_info * p = (profile_info *)p_;
+ double mem_each = p->memory / ( p->num_entries ? p->num_entries : 1 );
+ double q = p->net;
+ if (p->num_entries) q /= p->num_entries;
+ if ( !ignored )
+ {
+ profile_total.cumulative += p->net;
+ profile_total.memory += p->memory;
+ }
+ out_printf( "%10ld %12.6f %12.6f %12.8f %10.2f %10.2f %s\n", p->num_entries,
+ p->cumulative, p->net, q, p->memory, mem_each, object_str( p->name ) );
+}
+
+
+void profile_dump()
+{
+ if ( profile_hash )
+ {
+ out_printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--",
+ "--net--", "--each--", "--mem--", "--each--", "--name--" );
+ hashenumerate( profile_hash, dump_profile_entry, 0 );
+ profile_other.name = constant_other;
+ dump_profile_entry( &profile_other, 0 );
+ profile_total.name = constant_total;
+ dump_profile_entry( &profile_total, (void *)1 );
+ }
+}
+
+double profile_clock()
+{
+ return ((double) clock()) / CLOCKS_PER_SEC;
+}
+
+OBJECT * profile_make_local( char const * scope )
+{
+ if ( DEBUG_PROFILE )
+ {
+ return object_new( scope );
+ }
+ else
+ {
+ return 0;
+ }
+}
diff --git a/src/boost/tools/build/src/engine/debug.h b/src/boost/tools/build/src/engine/debug.h
new file mode 100644
index 000000000..d61faf450
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debug.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2005, 2016. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef BJAM_DEBUG_H
+#define BJAM_DEBUG_H
+
+#include "config.h"
+#include "constants.h"
+#include "object.h"
+
+
+typedef struct profile_info
+{
+ /* name of rule being called */
+ OBJECT * name;
+ /* cumulative time spent in rule, in seconds */
+ double cumulative;
+ /* time spent in rule proper, in seconds */
+ double net;
+ /* number of times the rule was entered */
+ unsigned long num_entries;
+ /* number of times this function is present in the call stack */
+ unsigned long stack_count;
+ /* memory allocated by the call, in KiB */
+ double memory;
+} profile_info;
+
+typedef struct profile_frame
+{
+ /* permanent storage where data accumulates */
+ profile_info * info;
+ /* overhead for profiling in this call */
+ double overhead;
+ /* time of last entry to rule */
+ double entry_time;
+ /* stack frame of caller */
+ struct profile_frame * caller;
+ /* time spent in subrules */
+ double subrules;
+} profile_frame;
+
+profile_frame * profile_init( OBJECT * rulename, profile_frame * );
+void profile_enter( OBJECT * rulename, profile_frame * );
+void profile_memory( long mem );
+void profile_exit( profile_frame * );
+void profile_dump();
+double profile_clock();
+
+#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( constant_ ## scope, &PROF_ ## scope )
+#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p )
+
+OBJECT * profile_make_local( char const * );
+#define PROFILE_ENTER_LOCAL( scope ) \
+ static OBJECT * constant_LOCAL_##scope = 0; \
+ if (DEBUG_PROFILE && !constant_LOCAL_##scope) constant_LOCAL_##scope = profile_make_local( #scope ); \
+ PROFILE_ENTER( LOCAL_##scope )
+#define PROFILE_EXIT_LOCAL( scope ) PROFILE_EXIT( LOCAL_##scope )
+
+#endif
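
A rough sketch of how a scope is bracketed with these macros (hypothetical: scan_headers is a made-up function, jam.h is included because that is where debug.cpp picks up DEBUG_PROFILE, and timing is only recorded when profiling is enabled at run time):

    #include "jam.h"
    #include "debug.h"

    static void scan_headers( void )
    {
        /* Declares a profile_frame on the stack and registers it through
         * profile_init(), creating the named profile_info entry on first use. */
        PROFILE_ENTER_LOCAL( scan_headers );

        /* ... work being measured ... */

        /* profile_exit() pops the frame and folds net/cumulative time into the entry. */
        PROFILE_EXIT_LOCAL( scan_headers );
    }
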
diff --git a/src/boost/tools/build/src/engine/debugger.cpp b/src/boost/tools/build/src/engine/debugger.cpp
new file mode 100644
index 000000000..051166699
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debugger.cpp
@@ -0,0 +1,2738 @@
+/*
+ * Copyright 2015 Steven Watanabe
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "debugger.h"
+#include "constants.h"
+#include "jam_strings.h"
+#include "pathsys.h"
+#include "cwd.h"
+#include "function.h"
+#include <assert.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <limits.h>
+#include <signal.h>
+#include <ctype.h>
+
+#ifdef NT
+#include <windows.h>
+#include <io.h>
+#include <fcntl.h>
+#else
+#include <errno.h>
+#include <sys/wait.h>
+#include <unistd.h>
+#endif
+
+#undef debug_on_enter_function
+#undef debug_on_exit_function
+
+struct breakpoint
+{
+ OBJECT * file;
+ OBJECT * bound_file;
+ int line;
+ int status;
+};
+
+#define BREAKPOINT_ENABLED 1
+#define BREAKPOINT_DISABLED 2
+#define BREAKPOINT_DELETED 3
+
+static struct breakpoint * breakpoints;
+static int num_breakpoints;
+static int breakpoints_capacity;
+
+#define DEBUG_NO_CHILD 0
+#define DEBUG_RUN 1
+#define DEBUG_STEP 2
+#define DEBUG_NEXT 3
+#define DEBUG_FINISH 4
+#define DEBUG_STOPPED 5
+
+#define DEBUG_MSG_BREAKPOINT 1
+#define DEBUG_MSG_END_STEPPING 2
+#define DEBUG_MSG_SETUP 3
+#define DEBUG_MSG_DONE 32
+
+static int debug_state;
+static int debug_depth;
+static OBJECT * debug_file;
+static int debug_line;
+static FRAME * debug_frame;
+LIST * debug_print_result;
+static int current_token;
+static int debug_selected_frame_number;
+
+/* Commands are read from this stream. */
+static FILE * command_input;
+/* Where to send output from commands. */
+static FILE * command_output;
+/* Only valid in the parent. Reads command output from the child. */
+static FILE * command_child;
+
+struct command_elem
+{
+ const char * key;
+ void (*command)( int, const char * * );
+};
+
+static struct command_elem * command_array;
+
+static void debug_listen( void );
+static int read_command( void );
+static int is_same_file( OBJECT * file1, OBJECT * file2 );
+static void debug_mi_format_token( void );
+static OBJECT * make_absolute_path( OBJECT * filename );
+
+static void debug_string_write( FILE * out, const char * data )
+{
+ fprintf( out, "%s", data );
+ fputc( '\0', out );
+}
+
+static char * debug_string_read( FILE * in )
+{
+ string buf[ 1 ];
+ int ch;
+ char * result;
+ string_new( buf );
+ while( ( ch = fgetc( in ) ) > 0 )
+ {
+ string_push_back( buf, (char)ch );
+ }
+ result = strdup( buf->value );
+ string_free( buf );
+ return result;
+}
+
+static void debug_object_write( FILE * out, OBJECT * data )
+{
+ debug_string_write( out, object_str( data ) );
+}
+
+static OBJECT * debug_object_read( FILE * in )
+{
+ string buf[ 1 ];
+ int ch;
+ OBJECT * result;
+ string_new( buf );
+ while( ( ch = fgetc( in ) ) > 0 )
+ {
+ string_push_back( buf, (char)ch );
+ }
+ result = object_new( buf->value );
+ string_free( buf );
+ return result;
+}
+
+static void debug_int_write( FILE * out, int i )
+{
+ fprintf( out, "%d", i );
+ fputc( '\0', out );
+}
+
+static int debug_int_read( FILE * in )
+{
+ OBJECT * str = debug_object_read( in );
+ int result = atoi( object_str( str ) );
+ object_free( str );
+ return result;
+}
+
+static void debug_list_write( FILE * out, LIST * l )
+{
+ LISTITER iter = list_begin( l ), end = list_end( l );
+ fprintf( out, "%d\n", list_length( l ) );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ debug_object_write( out, list_item( iter ) );
+ }
+}
+
+static LIST * debug_list_read( FILE * in )
+{
+ int len;
+ int i;
+ int ch;
+ LIST * result = L0;
+ fscanf( in, "%d", &len );
+ ch = fgetc( in );
+ assert( ch == '\n' );
+ for ( i = 0; i < len; ++i )
+ {
+ result = list_push_back( result, debug_object_read( in ) );
+ }
+ return result;
+}
+
+static void debug_lol_write( FILE * out, LOL * lol )
+{
+ int i;
+ debug_int_write( out, lol->count );
+ for ( i = 0; i < lol->count; ++i )
+ {
+ debug_list_write( out, lol_get( lol, i ) );
+ }
+}
+
+static void debug_lol_read( FILE * in, LOL * lol )
+{
+ int count, i;
+ lol_init( lol );
+ count = debug_int_read( in );
+ for ( i = 0; i < count; ++i )
+ {
+ lol_add( lol, debug_list_read( in ) );
+ }
+}
+
+static void debug_format_rulename ( string * out, FRAME * frame )
+{
+ const char * pos = strchr( frame->rulename, '.' );
+ if ( frame->module->class_module && pos )
+ {
+ string_copy( out, object_str( frame->module->name ) );
+ string_push_back( out, '.' );
+ string_append( out, pos + 1 );
+ }
+ else
+ {
+ string_copy( out, frame->rulename );
+ }
+}
+
+static void debug_frame_write( FILE * out, FRAME * frame )
+{
+ string rulename_buffer [ 1 ];
+ OBJECT * fullname = constant_builtin;
+ OBJECT * file = frame->file;
+ if ( file == NULL ) file = constant_builtin;
+ else fullname = make_absolute_path( frame->file );
+ debug_format_rulename( rulename_buffer, frame );
+ debug_object_write( out, file );
+ debug_int_write( out, frame->line );
+ debug_object_write( out, fullname );
+ debug_lol_write( out, frame->args );
+ debug_string_write( out, rulename_buffer->value );
+ object_free( fullname );
+ string_free( rulename_buffer );
+}
+
+/*
+ * The information passed to the debugger for
+ * a frame is slightly different from the FRAME
+ * struct.
+ */
+typedef struct _frame_info
+{
+ OBJECT * file;
+ int line;
+ OBJECT * fullname;
+ LOL args[ 1 ];
+ char * rulename;
+} FRAME_INFO;
+
+static void debug_frame_info_free( FRAME_INFO * frame )
+{
+ object_free( frame->file );
+ object_free( frame->fullname );
+ lol_free( frame->args );
+ free( frame->rulename );
+}
+
+static void debug_frame_read( FILE * in, FRAME_INFO * frame )
+{
+ frame->file = debug_object_read( in );
+ frame->line = debug_int_read( in );
+ frame->fullname = debug_object_read( in );
+ debug_lol_read( in, frame->args );
+ frame->rulename = debug_string_read( in );
+}
+
+static int add_breakpoint( struct breakpoint elem )
+{
+ if ( num_breakpoints == breakpoints_capacity )
+ {
+ int new_capacity = breakpoints_capacity * 2;
+ if ( new_capacity == 0 ) new_capacity = 1;
+ breakpoints = ( struct breakpoint * )realloc( breakpoints, new_capacity * sizeof( struct breakpoint ) );
+ breakpoints_capacity = new_capacity;
+ }
+ breakpoints[ num_breakpoints++ ] = elem;
+ return num_breakpoints;
+}
+
+static int add_line_breakpoint( OBJECT * file, int line )
+{
+ struct breakpoint elem;
+ elem.file = file;
+ elem.bound_file = NULL;
+ elem.line = line;
+ elem.status = BREAKPOINT_ENABLED;
+ return add_breakpoint( elem );
+}
+
+static int add_function_breakpoint( OBJECT * name )
+{
+ struct breakpoint elem;
+ elem.file = name;
+ elem.bound_file = object_copy( name );
+ elem.line = -1;
+ elem.status = BREAKPOINT_ENABLED;
+ return add_breakpoint( elem );
+}
+
+/*
+ * Checks whether there is an active breakpoint at the
+ * specified location. Returns the breakpoint id
+ * or 0 if none is found.
+ */
+static int handle_line_breakpoint( OBJECT * file, int line )
+{
+ int i;
+ if ( file == NULL ) return 0;
+ for ( i = 0; i < num_breakpoints; ++i )
+ {
+ if ( breakpoints[ i ].bound_file == NULL && is_same_file( breakpoints[ i ].file, file ) )
+ {
+ breakpoints[ i ].bound_file = object_copy( file );
+ }
+ if ( breakpoints[ i ].status == BREAKPOINT_ENABLED &&
+ breakpoints[ i ].bound_file != NULL &&
+ object_equal( breakpoints[ i ].bound_file, file ) &&
+ breakpoints[ i ].line == line )
+ {
+ return i + 1;
+ }
+ }
+ return 0;
+}
+
+static int handle_function_breakpoint( OBJECT * name )
+{
+ return handle_line_breakpoint( name, -1 );
+}
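
To make the two breakpoint kinds concrete, a hedged sketch of how the helpers above fit together (hypothetical code: the helpers are static, so this could only live in this file, "Jamfile.jam" and "some-rule" are invented names, and is_same_file() needs cwd_init() to have run already):

    static void breakpoint_example( void )
    {
        /* "file:line" style: the breakpoint takes ownership of the OBJECT and
         * binds bound_file lazily to the first matching file it encounters. */
        int line_bp = add_line_breakpoint( object_new( "Jamfile.jam" ), 12 );

        /* Bare-name style: matched against rule names on function entry. */
        int func_bp = add_function_breakpoint( object_new( "some-rule" ) );

        /* The handlers return the 1-based breakpoint id on a hit, 0 otherwise. */
        OBJECT * file = object_new( "Jamfile.jam" );
        int hit = handle_line_breakpoint( file, 12 );   /* == line_bp in this sketch */
        object_free( file );

        (void)func_bp;
        (void)hit;
    }
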
+
+static OBJECT * make_absolute_path( OBJECT * filename )
+{
+ PATHNAME path1[ 1 ];
+ string buf[ 1 ];
+ OBJECT * result;
+ const char * root = object_str( cwd() );
+ path_parse( object_str( filename ), path1 );
+ path1->f_root.ptr = root;
+ path1->f_root.len = strlen( root );
+ string_new( buf );
+ path_build( path1, buf );
+ result = object_new( buf->value );
+ string_free( buf );
+ return result;
+}
+
+static OBJECT * get_filename( OBJECT * path )
+{
+ PATHNAME path1[ 1 ];
+ string buf[ 1 ];
+ OBJECT * result;
+ path_parse( object_str( path ), path1 );
+ path1->f_dir.ptr = NULL;
+ path1->f_dir.len = 0;
+ string_new( buf );
+ path_build( path1, buf );
+ result = object_new( buf->value );
+ string_free( buf );
+ return result;
+}
+
+static int is_same_file( OBJECT * file1, OBJECT * file2 )
+{
+ OBJECT * absolute1 = make_absolute_path( file1 );
+ OBJECT * absolute2 = make_absolute_path( file2 );
+ OBJECT * norm1 = path_as_key( absolute1 );
+ OBJECT * norm2 = path_as_key( absolute2 );
+ OBJECT * base1 = get_filename( file1 );
+ OBJECT * base2 = get_filename( file2 );
+ OBJECT * normbase1 = path_as_key( base1 );
+ OBJECT * normbase2 = path_as_key( base2 );
+ int result = object_equal( norm1, norm2 ) ||
+ ( object_equal( base1, file1 ) && object_equal( normbase1, normbase2 ) );
+ object_free( absolute1 );
+ object_free( absolute2 );
+ object_free( norm1 );
+ object_free( norm2 );
+ object_free( base1 );
+ object_free( base2 );
+ object_free( normbase1 );
+ object_free( normbase2 );
+ return result;
+}
+
+static void debug_print_source( OBJECT * filename, int line )
+{
+ FILE * file;
+
+ if ( filename == NULL || object_equal( filename, constant_builtin ) )
+ return;
+
+ file = fopen( object_str( filename ), "r" );
+ if ( file )
+ {
+ int ch;
+ int printing = 0;
+ int current_line = 1;
+ if ( line == 1 )
+ {
+ printing = 1;
+ printf( "%d\t", current_line );
+ }
+ while ( ( ch = fgetc( file ) ) != EOF )
+ {
+ if ( printing )
+ fputc( ch, stdout );
+
+ if ( ch == '\n' )
+ {
+ if ( printing )
+ break;
+
+ ++current_line;
+ if ( current_line == line )
+ {
+ printing = 1;
+ printf( "%d\t", current_line );
+ }
+ }
+ }
+ fclose( file );
+ }
+}
+
+static void debug_print_frame_info( FRAME_INFO * frame )
+{
+ OBJECT * file = frame->file;
+ if ( file == NULL ) file = constant_builtin;
+ printf( "%s ", frame->rulename );
+ if ( strcmp( frame->rulename, "module scope" ) != 0 )
+ {
+ printf( "( " );
+ if ( frame->args->count )
+ {
+ lol_print( frame->args );
+ printf( " " );
+ }
+ printf( ") " );
+ }
+ printf( "at %s:%d", object_str( file ), frame->line );
+}
+
+static void debug_mi_print_frame_info( FRAME_INFO * frame )
+{
+ printf( "frame={func=\"%s\",args=[],file=\"%s\",fullname=\"%s\",line=\"%d\"}",
+ frame->rulename,
+ object_str( frame->file ),
+ object_str( frame->fullname ),
+ frame->line );
+}
+
+static void debug_on_breakpoint( int id )
+{
+ fputc( DEBUG_MSG_BREAKPOINT, command_output );
+ debug_int_write( command_output, id );
+ fflush( command_output );
+ debug_listen();
+}
+
+static void debug_end_stepping( void )
+{
+ fputc( DEBUG_MSG_END_STEPPING, command_output );
+ fflush( command_output );
+ debug_listen();
+}
+
+void debug_on_instruction( FRAME * frame, OBJECT * file, int line )
+{
+ int breakpoint_id;
+ assert( debug_is_debugging() );
+ if ( debug_state == DEBUG_NEXT &&
+ ( debug_depth < 0 || ( debug_depth == 0 && debug_line != line ) ) )
+ {
+ debug_file = file;
+ debug_line = line;
+ debug_frame = frame;
+ debug_end_stepping();
+ }
+ else if ( debug_state == DEBUG_STEP && debug_line != line )
+ {
+ debug_file = file;
+ debug_line = line;
+ debug_frame = frame;
+ debug_end_stepping();
+ }
+ else if ( debug_state == DEBUG_FINISH && debug_depth < 0 )
+ {
+ debug_file = file;
+ debug_line = line;
+ debug_frame = frame;
+ debug_end_stepping();
+ }
+ else if ( ( debug_file == NULL || ! object_equal( file, debug_file ) ||
+ line != debug_line || debug_depth != 0 ) &&
+ ( breakpoint_id = handle_line_breakpoint( file, line ) ) )
+ {
+ debug_file = file;
+ debug_line = line;
+ debug_frame = frame;
+ debug_on_breakpoint( breakpoint_id );
+ }
+ else if ( ( debug_state == DEBUG_RUN || debug_state == DEBUG_FINISH ) &&
+ ( debug_depth < 0 || ( debug_depth == 0 && debug_line != line ) ) )
+ {
+ debug_file = NULL;
+ debug_line = 0;
+ }
+}
+
+void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int line )
+{
+ int breakpoint_id;
+ assert( debug_is_debugging() );
+ ++debug_depth;
+ if ( debug_state == DEBUG_STEP && file )
+ {
+ debug_file = file;
+ debug_line = line;
+ debug_frame = frame;
+ debug_end_stepping();
+ }
+ else if ( ( breakpoint_id = handle_function_breakpoint( name ) ) ||
+ ( breakpoint_id = handle_line_breakpoint( file, line ) ) )
+ {
+ debug_file = file;
+ debug_line = line;
+ debug_frame = frame;
+ debug_on_breakpoint( breakpoint_id );
+ }
+}
+
+void debug_on_exit_function( OBJECT * name )
+{
+ assert( debug_is_debugging() );
+ --debug_depth;
+ if ( debug_depth < 0 )
+ {
+ /* The current location is no longer valid
+ after we return from the containing function. */
+ debug_file = NULL;
+ debug_line = 0;
+ }
+}
+
+#if NT
+static HANDLE child_handle;
+static DWORD child_pid;
+#else
+static int child_pid;
+#endif
+
+static void debug_child_continue( int argc, const char * * argv )
+{
+ debug_state = DEBUG_RUN;
+ debug_depth = 0;
+}
+
+static void debug_child_step( int argc, const char * * argv )
+{
+ debug_state = DEBUG_STEP;
+ debug_depth = 0;
+}
+
+static void debug_child_next( int argc, const char * * argv )
+{
+ debug_state = DEBUG_NEXT;
+ debug_depth = 0;
+}
+
+static void debug_child_finish( int argc, const char * * argv )
+{
+ debug_state = DEBUG_FINISH;
+ debug_depth = 0;
+}
+
+static void debug_child_kill( int argc, const char * * argv )
+{
+ exit( 0 );
+}
+
+static int debug_add_breakpoint( const char * name )
+{
+ const char * file_ptr = name;
+ const char * ptr = strrchr( file_ptr, ':' );
+ if ( ptr )
+ {
+ char * end;
+ long line = strtoul( ptr + 1, &end, 10 );
+ if ( line > 0 && line <= INT_MAX && end != ptr + 1 && *end == 0 )
+ {
+ OBJECT * file = object_new_range( file_ptr, ptr - file_ptr );
+ return add_line_breakpoint( file, line );
+ }
+ else
+ {
+ OBJECT * name = object_new( file_ptr );
+ return add_function_breakpoint( name );
+ }
+ }
+ else
+ {
+ OBJECT * name = object_new( file_ptr );
+ return add_function_breakpoint( name );
+ }
+}
+
+static void debug_child_break( int argc, const char * * argv )
+{
+ if ( argc == 2 )
+ {
+ debug_add_breakpoint( argv[ 1 ] );
+ }
+}
+
+static int get_breakpoint_by_name( const char * name )
+{
+ int result;
+ const char * file_ptr = name;
+ const char * ptr = strrchr( file_ptr, ':' );
+ if ( ptr )
+ {
+ char * end;
+ long line = strtoul( ptr + 1, &end, 10 );
+ if ( line > 0 && line <= INT_MAX && end != ptr + 1 && *end == 0 )
+ {
+ OBJECT * file = object_new_range( file_ptr, ptr - file_ptr );
+ result = handle_line_breakpoint( file, line );
+ object_free( file );
+ }
+ else
+ {
+ OBJECT * name = object_new( file_ptr );
+ result = handle_function_breakpoint( name );
+ object_free( name );
+ }
+ }
+ else
+ {
+ OBJECT * name = object_new( file_ptr );
+ result = handle_function_breakpoint( name );
+ object_free( name );
+ }
+ return result;
+}
+
+static void debug_child_disable( int argc, const char * * argv )
+{
+ if ( argc == 2 )
+ {
+ int id = atoi( argv[ 1 ] );
+ if ( id < 1 || id > num_breakpoints )
+ return;
+ --id;
+ if ( breakpoints[ id ].status == BREAKPOINT_DELETED )
+ return;
+ breakpoints[ id ].status = BREAKPOINT_DISABLED;
+ }
+}
+
+static void debug_child_enable( int argc, const char * * argv )
+{
+ if ( argc == 2 )
+ {
+ int id = atoi( argv[ 1 ] );
+ if ( id < 1 || id > num_breakpoints )
+ return;
+ --id;
+ if ( breakpoints[ id ].status == BREAKPOINT_DELETED )
+ return;
+ breakpoints[ id ].status = BREAKPOINT_ENABLED;
+ }
+}
+
+static void debug_child_delete( int argc, const char * * argv )
+{
+ if ( argc == 2 )
+ {
+ int id = atoi( argv[ 1 ] );
+ if ( id < 1 || id > num_breakpoints )
+ return;
+ --id;
+ breakpoints[ id ].status = BREAKPOINT_DELETED;
+ }
+}
+
+static void debug_child_print( int argc, const char * * argv )
+{
+ FRAME * saved_frame;
+ OBJECT * saved_file;
+ int saved_line;
+ string buf[ 1 ];
+ const char * lines[ 2 ];
+ int i;
+ FRAME new_frame = *debug_frame;
+ /* Save the current file/line, since running parse_string
+ * will likely change it.
+ */
+ saved_frame = debug_frame;
+ saved_file = debug_file;
+ saved_line = debug_line;
+ string_new( buf );
+ string_append( buf, "__DEBUG_PRINT_HELPER__" );
+ for ( i = 1; i < argc; ++i )
+ {
+ string_push_back( buf, ' ' );
+ string_append( buf, argv[ i ] );
+ }
+ string_append( buf, " ;\n" );
+ lines[ 0 ] = buf->value;
+ lines[ 1 ] = NULL;
+ parse_string( constant_builtin, lines, &new_frame );
+ string_free( buf );
+ debug_list_write( command_output, debug_print_result );
+ fflush( command_output );
+ debug_frame = saved_frame;
+ debug_file = saved_file;
+ debug_line = saved_line;
+}
+
+static void debug_child_frame( int argc, const char * * argv )
+{
+ if ( argc == 2 )
+ {
+ debug_selected_frame_number = atoi( argv[ 1 ] );
+ }
+ else
+ {
+ assert( !"Wrong number of arguments to frame." );
+ }
+}
+
+static void debug_child_info( int argc, const char * * argv )
+{
+ if ( strcmp( argv[ 1 ], "locals" ) == 0 )
+ {
+ LIST * locals = L0;
+ if ( debug_frame->function )
+ {
+ locals = function_get_variables( (FUNCTION*)debug_frame->function );
+ }
+ debug_list_write( command_output, locals );
+ fflush( command_output );
+ list_free( locals );
+ }
+ else if ( strcmp( argv[ 1 ], "frame" ) == 0 )
+ {
+ int frame_number = debug_selected_frame_number;
+ int i;
+ FRAME base = *debug_frame;
+ FRAME * frame = &base;
+ base.file = debug_file;
+ base.line = debug_line;
+ if ( argc == 3 ) frame_number = atoi( argv[ 2 ] );
+
+ for ( i = 0; i < frame_number; ++i ) frame = frame->prev;
+
+ debug_frame_write( command_output, frame );
+ }
+ else if ( strcmp( argv[ 1 ], "depth" ) == 0 )
+ {
+ int result = 0;
+ FRAME * frame = debug_frame;
+ while ( frame )
+ {
+ frame = frame->prev;
+ ++result;
+ }
+ fprintf( command_output, "%d", result );
+ fputc( '\0', command_output );
+ fflush( command_output );
+ }
+}
+
+/* Commands for the parent. */
+
+#ifdef NT
+
+static int get_module_filename( string * out )
+{
+ DWORD result;
+ string_reserve( out, 256 + 1 );
+ string_truncate( out, 256 );
+ while( ( result = GetModuleFileNameA( NULL, out->value, out->size ) ) == out->size )
+ {
+ string_reserve( out, out->size * 2 + 1);
+ string_truncate( out, out->size * 2 );
+ }
+ if ( result != 0 )
+ {
+ string_truncate( out, result );
+ return 1;
+ }
+ else
+ {
+ return 0;
+ }
+}
+
+#endif
+
+static struct command_elem child_commands[] =
+{
+ { "continue", &debug_child_continue },
+ { "kill", &debug_child_kill },
+ { "step", &debug_child_step },
+ { "next", &debug_child_next },
+ { "finish", &debug_child_finish },
+ { "break", &debug_child_break },
+ { "disable", &debug_child_disable },
+ { "enable", &debug_child_enable },
+ { "delete", &debug_child_delete },
+ { "print", &debug_child_print },
+ { "frame", &debug_child_frame },
+ { "info", &debug_child_info },
+ { NULL, NULL }
+};
+
+static void debug_mi_error( const char * message )
+{
+ debug_mi_format_token();
+ printf( "^error,msg=\"%s\"\n(gdb) \n", message );
+}
+
+static void debug_error_( const char * message )
+{
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ printf( "%s\n", message );
+ }
+ else if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_error( message );
+ }
+}
+
+static const char * debug_format_message( const char * format, va_list vargs )
+{
+ char * buf;
+ int result;
+ int sz = 80;
+ for ( ; ; )
+ {
+ va_list args;
+ buf = (char *)malloc( sz );
+ if ( !buf )
+ return 0;
+ #ifndef va_copy
+ args = vargs;
+ #else
+ va_copy( args, vargs );
+ #endif
+ #if defined(_MSC_VER) && (_MSC_VER <= 1310)
+ result = _vsnprintf( buf, sz, format, args );
+ #else
+ result = vsnprintf( buf, sz, format, args );
+ #endif
+ va_end( args );
+ if ( 0 <= result && result < sz )
+ return buf;
+ free( buf );
+ if ( result < 0 )
+ return 0;
+ sz = result + 1;
+ }
+}
+
+static void debug_error( const char * format, ... )
+{
+ va_list args;
+ const char * msg;
+ va_start( args, format );
+ msg = debug_format_message( format, args );
+ va_end( args );
+ if ( !msg )
+ {
+ debug_error_( "Failed formatting error message." );
+ return;
+ }
+ debug_error_( msg );
+ free( ( void * )msg );
+}
+
+static void debug_parent_child_exited( int pid, int exit_code )
+{
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ printf( "Child %d exited with status %d\n", (int)child_pid, (int)exit_code );
+ }
+ else if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ if ( exit_code == 0 )
+ printf( "*stopped,reason=\"exited-normally\"\n(gdb) \n" );
+ else
+ printf( "*stopped,reason=\"exited\",exit-code=\"%d\"\n(gdb) \n", exit_code );
+ }
+ else
+ {
+ assert( !"Wrong value of debug_interface." );
+ }
+}
+
+#if !NT
+
+static void debug_parent_child_signalled( int pid, int id )
+{
+
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ printf( "Child %d exited on signal %d\n", child_pid, id );
+ }
+ else if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ const char * name = "unknown";
+ const char * meaning = "unknown";
+ switch( id )
+ {
+ case SIGINT: name = "SIGINT"; meaning = "Interrupt"; break;
+ }
+ printf("*stopped,reason=\"exited-signalled\",signal-name=\"%s\",signal-meaning=\"%s\"\n(gdb) \n", name, meaning);
+ }
+ else
+ {
+ assert( !"Wrong value of debug_interface." );
+ }
+}
+
+#endif
+
+static void debug_parent_on_breakpoint( void )
+{
+ FRAME_INFO base;
+ int id;
+ id = debug_int_read( command_child );
+ fprintf( command_output, "info frame\n" );
+ fflush( command_output );
+ debug_frame_read( command_child, &base );
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ printf( "Breakpoint %d, ", id );
+ debug_print_frame_info( &base );
+ printf( "\n" );
+ debug_print_source( base.file, base.line );
+ }
+ else if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ printf( "*stopped,reason=\"breakpoint-hit\",bkptno=\"%d\",disp=\"keep\",", id );
+ debug_mi_print_frame_info( &base );
+ printf( ",thread-id=\"1\",stopped-threads=\"all\"" );
+ printf( "\n(gdb) \n" );
+ }
+ else
+ {
+ assert( !"Wrong value if debug_interface" );
+ }
+ fflush( stdout );
+}
+
+static void debug_parent_on_end_stepping( void )
+{
+ FRAME_INFO base;
+ fprintf( command_output, "info frame\n" );
+ fflush( command_output );
+ debug_frame_read( command_child, &base );
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ debug_print_source( base.file, base.line );
+ }
+ else
+ {
+ printf( "*stopped,reason=\"end-stepping-range\"," );
+ debug_mi_print_frame_info( &base );
+ printf( ",thread-id=\"1\"" );
+ printf( "\n(gdb) \n" );
+ }
+ fflush( stdout );
+}
+
+/* Waits for events from the child. */
+static void debug_parent_wait( int print_message )
+{
+ int ch = fgetc( command_child );
+ if ( ch == DEBUG_MSG_BREAKPOINT )
+ {
+ debug_parent_on_breakpoint();
+ }
+ else if ( ch == DEBUG_MSG_END_STEPPING )
+ {
+ debug_parent_on_end_stepping();
+ }
+ else if ( ch == DEBUG_MSG_SETUP )
+ {
+ /* FIXME: This is handled in the caller, but it would make
+ more sense to handle it here. */
+ return;
+ }
+ else if ( ch == EOF )
+ {
+#if NT
+ WaitForSingleObject( child_handle, INFINITE );
+ if ( print_message )
+ {
+ DWORD exit_code;
+ GetExitCodeProcess( child_handle, &exit_code );
+ debug_parent_child_exited( (int)child_pid, (int)exit_code );
+ }
+ CloseHandle( child_handle );
+#else
+ int status;
+ int pid;
+ while ( ( pid = waitpid( child_pid, &status, 0 ) ) == -1 )
+ if ( errno != EINTR )
+ break;
+ if ( print_message )
+ {
+ if ( WIFEXITED( status ) )
+ debug_parent_child_exited( child_pid, WEXITSTATUS( status ) );
+ else if ( WIFSIGNALED( status ) )
+ debug_parent_child_signalled( child_pid, WTERMSIG( status ) );
+ }
+#endif
+ fclose( command_child );
+ fclose( command_output );
+ debug_state = DEBUG_NO_CHILD;
+ }
+}
+
+/* Prints the message for starting the child. */
+static void debug_parent_run_print( int argc, const char * * argv )
+{
+ int i;
+ extern char const * saved_argv0;
+ char * name = executable_path( saved_argv0 );
+ printf( "Starting program: %s", name );
+ free( name );
+ for ( i = 1; i < argc; ++i )
+ {
+ printf( " %s", argv[ i ] );
+ }
+ printf( "\n" );
+ fflush( stdout );
+}
+
+#if NT
+
+void debug_init_handles( const char * in, const char * out )
+{
+ HANDLE read_handle;
+ int read_fd;
+ HANDLE write_handle;
+ int write_fd;
+
+ sscanf( in, "%p", &read_handle );
+ read_fd = _open_osfhandle( (intptr_t)read_handle, _O_RDONLY );
+ command_input = _fdopen( read_fd, "r" );
+
+ sscanf( out, "%p", &write_handle );
+ write_fd = _open_osfhandle( (intptr_t)write_handle, _O_WRONLY );
+ command_output = _fdopen( write_fd, "w" );
+
+ command_array = child_commands;
+
+ /* Handle the initial setup */
+ /* wake up the parent */
+ fputc( DEBUG_MSG_SETUP, command_output );
+ debug_listen();
+}
+
+static void init_parent_handles( HANDLE out, HANDLE in )
+{
+ command_child = _fdopen( _open_osfhandle( (intptr_t)in, _O_RDONLY ), "r" );
+ command_output = _fdopen( _open_osfhandle( (intptr_t)out, _O_WRONLY ), "w" );
+}
+
+static void debug_parent_copy_breakpoints( void )
+{
+ int i;
+ for ( i = 0; i < num_breakpoints; ++i )
+ {
+ fprintf( command_output, "break %s", object_str( breakpoints[ i ].file ) );
+ if ( breakpoints[ i ].line != -1 )
+ {
+ fprintf( command_output, ":%d", breakpoints[ i ].line );
+ }
+ fprintf( command_output, "\n" );
+
+ switch ( breakpoints[ i ].status )
+ {
+ case BREAKPOINT_ENABLED:
+ break;
+ case BREAKPOINT_DISABLED:
+ fprintf( command_output, "disable %d\n", i + 1 );
+ break;
+ case BREAKPOINT_DELETED:
+ fprintf( command_output, "delete %d\n", i + 1 );
+ break;
+ default:
+ assert( !"Wrong breakpoint status." );
+ }
+ }
+ fflush( command_output );
+}
+
+#endif
+
+static void debug_start_child( int argc, const char * * argv )
+{
+#if NT
+ char buf[ 80 ];
+ HANDLE pipe1[ 2 ];
+ HANDLE pipe2[ 2 ];
+ string self[ 1 ];
+ string command_line[ 1 ];
+ SECURITY_ATTRIBUTES sa = { sizeof( SECURITY_ATTRIBUTES ), NULL, TRUE };
+ PROCESS_INFORMATION pi = { NULL, NULL, 0, 0 };
+ STARTUPINFOA si = { sizeof( STARTUPINFOA ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0 };
+ assert( debug_state == DEBUG_NO_CHILD );
+ if ( ! CreatePipe( &pipe1[ 0 ], &pipe1[ 1 ], &sa, 0 ) )
+ {
+ printf("internal error: CreatePipe:1: 0x%08lx\n", GetLastError());
+ return;
+ }
+ if ( ! CreatePipe( &pipe2[ 0 ], &pipe2[ 1 ], &sa, 0 ) )
+ {
+ printf("internal error: CreatePipe:2: 0x%08lx\n", GetLastError());
+ CloseHandle( pipe1[ 0 ] );
+ CloseHandle( pipe1[ 1 ] );
+ return;
+ }
+ string_new( self );
+ if ( ! get_module_filename( self ) )
+ {
+ printf("internal error\n");
+ CloseHandle( pipe1[ 0 ] );
+ CloseHandle( pipe1[ 1 ] );
+ CloseHandle( pipe2[ 0 ] );
+ CloseHandle( pipe2[ 1 ] );
+ return;
+ }
+ string_copy( command_line, "b2 " );
+ /* Pass the handles as the first and second arguments. */
+ string_append( command_line, debugger_opt );
+ sprintf( buf, "%p", pipe1[ 0 ] );
+ string_append( command_line, buf );
+ string_push_back( command_line, ' ' );
+ string_append( command_line, debugger_opt );
+ sprintf( buf, "%p", pipe2[ 1 ] );
+ string_append( command_line, buf );
+ /* Pass the rest of the command line. */
+ {
+ int i;
+ for ( i = 1; i < argc; ++i )
+ {
+ string_push_back( command_line, ' ' );
+ string_append( command_line, argv[ i ] );
+ }
+ }
+ SetHandleInformation( pipe1[ 1 ], HANDLE_FLAG_INHERIT, 0 );
+ SetHandleInformation( pipe2[ 0 ], HANDLE_FLAG_INHERIT, 0 );
+ if ( ! CreateProcessA(
+ self->value,
+ command_line->value,
+ NULL,
+ NULL,
+ TRUE,
+ 0,
+ NULL,
+ NULL,
+ &si,
+ &pi
+ ) )
+ {
+ printf("internal error\n");
+ CloseHandle( pipe1[ 0 ] );
+ CloseHandle( pipe1[ 1 ] );
+ CloseHandle( pipe2[ 0 ] );
+ CloseHandle( pipe2[ 1 ] );
+ string_free( self );
+ string_free( command_line );
+ return;
+ }
+ child_pid = pi.dwProcessId;
+ child_handle = pi.hProcess;
+ CloseHandle( pi.hThread );
+ CloseHandle( pipe1[ 0 ] );
+ CloseHandle( pipe2[ 1 ] );
+ string_free( self );
+ string_free( command_line );
+
+ debug_state = DEBUG_RUN;
+
+ init_parent_handles( pipe1[ 1 ], pipe2[ 0 ] );
+ debug_parent_wait( 1 );
+ debug_parent_copy_breakpoints();
+ fprintf( command_output, "continue\n" );
+ fflush( command_output );
+#else
+ int pipe1[2];
+ int pipe2[2];
+ int write_fd;
+ int read_fd;
+ int pid;
+ assert( debug_state == DEBUG_NO_CHILD );
+ if (pipe(pipe1) == -1)
+ {
+ printf("internal error: pipe:1: %s\n", strerror(errno));
+ return;
+ }
+ if (pipe(pipe2) == -1)
+ {
+ close( pipe1[ 0 ] );
+ close( pipe1[ 1 ] );
+ printf("internal error: pipe:2: %s\n", strerror(errno));
+ return;
+ }
+
+ pid = fork();
+ if ( pid == -1 )
+ {
+ /* error */
+ close( pipe1[ 0 ] );
+ close( pipe1[ 1 ] );
+ close( pipe2[ 0 ] );
+ close( pipe2[ 1 ] );
+ printf("internal error: fork: %s\n", strerror(errno));
+ return;
+ }
+ else if ( pid == 0 )
+ {
+ /* child */
+ extern const char * saved_argv0;
+ read_fd = pipe1[ 0 ];
+ write_fd = pipe2[ 1 ];
+ close( pipe2[ 0 ] );
+ close( pipe1[ 1 ] );
+ command_array = child_commands;
+ argv[ 0 ] = executable_path( saved_argv0 );
+ debug_child_data.argc = argc;
+ debug_child_data.argv = argv;
+ command_input = fdopen( read_fd, "r" );
+ command_output = fdopen( write_fd, "w" );
+ longjmp( debug_child_data.jmp, 1 );
+ }
+ else
+ {
+ /* parent */
+ read_fd = pipe2[ 0 ];
+ write_fd = pipe1[ 1 ];
+ close( pipe1[ 0 ] );
+ close( pipe2[ 1 ] );
+ command_output = fdopen( write_fd, "w" );
+ command_child = fdopen( read_fd, "r" );
+ child_pid = pid;
+ }
+ debug_state = DEBUG_RUN;
+#endif
+}
+
+static void debug_parent_run( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_RUN )
+ {
+ fprintf( command_output, "kill\n" );
+ fflush( command_output );
+ debug_parent_wait( 1 );
+ }
+ debug_parent_run_print( argc, argv );
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ printf( "=thread-created,id=\"1\",group-id=\"i1\"\n" );
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ }
+ debug_start_child( argc, argv );
+ debug_parent_wait( 1 );
+}
+
+static int debug_parent_forward_nowait( int argc, const char * * argv, int print_message, int require_child )
+{
+ int i;
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ if ( require_child )
+ printf( "The program is not being run.\n" );
+ return 1;
+ }
+ fputs( argv[ 0 ], command_output );
+ for( i = 1; i < argc; ++i )
+ {
+ fputc( ' ', command_output );
+ fputs( argv[ i ], command_output );
+ }
+ fputc( '\n', command_output );
+ fflush( command_output );
+ return 0;
+}
+
+/* FIXME: This function should be eliminated when I finish forwarding all stdout to the parent. */
+static void debug_parent_forward( int argc, const char * * argv, int print_message, int require_child )
+{
+ if ( debug_parent_forward_nowait( argc, argv, print_message, require_child ) != 0 )
+ {
+ return;
+ }
+ debug_parent_wait( print_message );
+}
+
+static void debug_parent_continue( int argc, const char * * argv )
+{
+ if ( argc > 1 )
+ {
+ debug_error( "Too many arguments to continue." );
+ return;
+ }
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ }
+ debug_parent_forward( 1, argv, 1, 1 );
+}
+
+static void debug_parent_kill( int argc, const char * * argv )
+{
+ if ( argc > 1 )
+ {
+ debug_error( "Too many arguments to kill." );
+ return;
+ }
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ fflush( stdout );
+ }
+ debug_parent_forward( 1, argv, 0, 1 );
+}
+
+static void debug_parent_step( int argc, const char * * argv )
+{
+ if ( argc > 1 )
+ {
+ debug_error( "Too many arguments to step." );
+ return;
+ }
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ }
+ debug_parent_forward( 1, argv, 1, 1 );
+}
+
+static void debug_parent_next( int argc, const char * * argv )
+{
+ if ( argc > 1 )
+ {
+ debug_error( "Too many arguments to next." );
+ return;
+ }
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ }
+ debug_parent_forward( 1, argv, 1, 1 );
+}
+
+static void debug_parent_finish( int argc, const char * * argv )
+{
+ if ( argc > 1 )
+ {
+ debug_error( "Too many arguments to finish." );
+ return;
+ }
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ }
+ debug_parent_forward( 1, argv, 1, 1 );
+}
+
+static void debug_parent_break( int argc, const char * * argv )
+{
+ int id;
+ if ( argc < 2 )
+ {
+ debug_error( "Missing argument to break." );
+ return;
+ }
+ else if ( argc > 2 )
+ {
+ debug_error( "Too many arguments to break." );
+ return;
+ }
+ id = debug_add_breakpoint( argv[ 1 ] );
+ debug_parent_forward_nowait( argc, argv, 1, 0 );
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ printf( "Breakpoint %d set at %s\n", id, argv[ 1 ] );
+ }
+ else if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ }
+ else
+ {
+ assert( !"wrong value of debug_interface." );
+ }
+}
+
+int check_breakpoint_fn_args( int argc, const char * * argv )
+{
+ if ( argc < 2 )
+ {
+ debug_error( "Missing argument to %s.", argv[ 0 ] );
+ return 0;
+ }
+ else if ( argc > 2 )
+ {
+ debug_error( "Too many arguments to %s.", argv[ 0 ] );
+ return 0;
+ }
+ else
+ {
+ char * end;
+ long x = strtol( argv[ 1 ], &end, 10 );
+ if ( *end )
+ {
+ debug_error( "Invalid breakpoint number %s.", argv[ 1 ] );
+ return 0;
+ }
+ if ( x < 1 || x > num_breakpoints || breakpoints[ x - 1 ].status == BREAKPOINT_DELETED )
+ {
+ debug_error( "Unknown breakpoint %s.", argv[ 1 ] );
+ return 0;
+ }
+ }
+ return 1;
+}
+
+static void debug_parent_disable( int argc, const char * * argv )
+{
+ if ( ! check_breakpoint_fn_args( argc, argv ) )
+ {
+ return;
+ }
+ debug_child_disable( argc, argv );
+ debug_parent_forward_nowait( 2, argv, 1, 0 );
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ }
+}
+
+static void debug_parent_enable( int argc, const char * * argv )
+{
+ if ( ! check_breakpoint_fn_args( argc, argv ) )
+ {
+ return;
+ }
+ debug_child_enable( argc, argv );
+ debug_parent_forward_nowait( 2, argv, 1, 0 );
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ }
+}
+
+static void debug_parent_delete( int argc, const char * * argv )
+{
+ if ( ! check_breakpoint_fn_args( argc, argv ) )
+ {
+ return;
+ }
+ debug_child_delete( argc, argv );
+ debug_parent_forward_nowait( 2, argv, 1, 0 );
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ }
+}
+
+static void debug_parent_clear( int argc, const char * * argv )
+{
+ char buf[ 16 ];
+ const char * new_args[ 2 ];
+ int id;
+ if ( argc < 2 )
+ {
+ debug_error( "Missing argument to clear." );
+ return;
+ }
+ else if ( argc > 2 )
+ {
+ debug_error( "Too many arguments to clear." );
+ return;
+ }
+ id = get_breakpoint_by_name( argv[ 1 ] );
+ if ( id == 0 )
+ {
+ debug_error( "No breakpoint at %s.", argv[ 1 ] );
+ return;
+ }
+
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ printf( "Deleted breakpoint %d\n", id );
+ }
+
+ sprintf( buf, "%d", id );
+ new_args[ 0 ] = "delete";
+ new_args[ 1 ] = buf;
+ debug_parent_delete( 2, new_args );
+}
+
+static void debug_parent_print( int argc, const char * * argv )
+{
+ LIST * result;
+ if ( debug_parent_forward_nowait( argc, argv, 1, 1 ) != 0 )
+ {
+ return;
+ }
+ result = debug_list_read( command_child );
+
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ {
+ list_print( result );
+ printf( "\n" );
+ }
+ else if ( debug_interface == DEBUG_INTERFACE_MI )
+ {
+ printf( "~\"$1 = " );
+ list_print( result );
+ printf( "\"\n~\"\\n\"\n" );
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ }
+
+ list_free( result );
+}
+
+static void debug_parent_backtrace( int argc, const char * * argv )
+{
+ const char * new_args[ 3 ];
+ OBJECT * depth_str;
+ int depth;
+ int i;
+ FRAME_INFO frame;
+
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_error( "The program is not being run." );
+ return;
+ }
+
+ new_args[ 0 ] = "info";
+ new_args[ 1 ] = "frame";
+
+ fprintf( command_output, "info depth\n" );
+ fflush( command_output );
+ depth_str = debug_object_read( command_child );
+ depth = atoi( object_str( depth_str ) );
+ object_free( depth_str );
+
+ for ( i = 0; i < depth; ++i )
+ {
+ char buf[ 16 ];
+ sprintf( buf, "%d", i );
+ new_args[ 2 ] = buf;
+ debug_parent_forward_nowait( 3, new_args, 0, 0 );
+ debug_frame_read( command_child, &frame );
+ printf( "#%d in ", i );
+ debug_print_frame_info( &frame );
+ printf( "\n" );
+ }
+ fflush( stdout );
+}
+
+static void debug_parent_quit( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_RUN )
+ {
+ fprintf( command_output, "kill\n" );
+ fflush( command_output );
+ debug_parent_wait( 0 );
+ }
+ exit( 0 );
+}
+
+static const char * const help_text[][2] =
+{
+ {
+ "run",
+ "run <args>\n"
+ "Creates a new b2 child process passing <args> on the command line."
+ " Terminates\nthe current child (if any).\n"
+ },
+ {
+ "continue",
+ "continue\nContinue debugging\n"
+ },
+ {
+ "step",
+ "step\nContinue to the next statement\n"
+ },
+ {
+ "next",
+ "next\nContinue to the next line in the current frame\n"
+ },
+ {
+ "finish",
+ "finish\nContinue to the end of the current frame\n"
+ },
+ {
+ "break",
+ "break <location>\n"
+ "Sets a breakpoint at <location>. <location> can be either a the name of a\nfunction or <filename>:<lineno>\n"
+ },
+ {
+ "disable",
+ "disable <breakpoint>\nDisable a breakpoint\n"
+ },
+ {
+ "enable",
+ "enable <breakpoint>\nEnable a breakpoint\n"
+ },
+ {
+ "delete",
+ "delete <breakpoint>\nDelete a breakpoint\n"
+ },
+ {
+ "clear",
+ "clear <location>\nDelete the breakpoint at <location>\n"
+ },
+ {
+ "print",
+ "print <expression>\nDisplay the value of <expression>\n"
+ },
+ {
+ "backtrace",
+ "backtrace\nDisplay the call stack\n"
+ },
+ {
+ "kill",
+ "kill\nTerminate the child\n"
+ },
+ {
+ "quit",
+ "quit\nExit the debugger\n"
+ },
+ {
+ "help",
+ "help\nhelp <command>\nShow help for debugger commands.\n"
+ },
+ { 0, 0 }
+};
+
+static void debug_parent_help( int argc, const char * * argv )
+{
+ if ( argc == 1 )
+ {
+ printf(
+ "run - Start debugging\n"
+ "continue - Continue debugging\n"
+ "step - Continue to the next statement\n"
+ "next - Continue to the next line in the current frame\n"
+ "finish - Continue to the end of the current frame\n"
+ "break - Set a breakpoint\n"
+ "disable - Disable a breakpoint\n"
+ "enable - Enable a breakpoint\n"
+ "delete - Delete a breakpoint\n"
+ "clear - Delete a breakpoint by location\n"
+ );
+ printf(
+ "print - Display an expression\n"
+ "backtrace - Display the call stack\n"
+ "kill - Terminate the child\n"
+ "quit - Exit the debugger\n"
+ "help - Debugger help\n"
+ );
+ }
+ else if ( argc == 2 )
+ {
+ int i;
+ for ( i = 0; help_text[ i ][ 0 ]; ++i )
+ {
+ if ( strcmp( argv[ 1 ], help_text[ i ][ 0 ] ) == 0 )
+ {
+ printf( "%s", help_text[ i ][ 1 ] );
+ return;
+ }
+ }
+ printf( "No command named %s\n", argv[ 1 ] );
+ }
+}
+
+static void debug_mi_break_insert( int argc, const char * * argv );
+static void debug_mi_break_delete( int argc, const char * * argv );
+static void debug_mi_break_disable( int argc, const char * * argv );
+static void debug_mi_break_enable( int argc, const char * * argv );
+static void debug_mi_break_info( int argc, const char * * argv );
+static void debug_mi_break_list( int argc, const char * * argv );
+static void debug_mi_inferior_tty_set( int argc, const char * * argv );
+static void debug_mi_gdb_exit( int argc, const char * * argv );
+static void debug_mi_gdb_set( int argc, const char * * argv );
+static void debug_mi_gdb_show( int argc, const char * * argv );
+static void debug_mi_not_implemented( int argc, const char * * argv );
+static void debug_mi_file_list_exec_source_files( int argc, const char * * argv );
+static void debug_mi_file_list_exec_source_file( int argc, const char * * argv );
+static void debug_mi_thread_info( int argc, const char * * argv );
+static void debug_mi_thread_select( int argc, const char * * argv );
+static void debug_mi_stack_info_frame( int argc, const char * * argv );
+static void debug_mi_stack_select_frame( int argc, const char * * argv );
+static void debug_mi_stack_list_variables( int argc, const char * * argv );
+static void debug_mi_stack_list_locals( int argc, const char * * argv );
+static void debug_mi_stack_list_frames( int argc, const char * * argv );
+static void debug_mi_list_target_features( int argc, const char * * argv );
+static void debug_mi_exec_run( int argc, const char * * argv );
+static void debug_mi_exec_continue( int argc, const char * * argv );
+static void debug_mi_exec_step( int argc, const char * * argv );
+static void debug_mi_exec_next( int argc, const char * * argv );
+static void debug_mi_exec_finish( int argc, const char * * argv );
+static void debug_mi_data_list_register_names( int argc, const char * * argv );
+static void debug_mi_data_evaluate_expression( int argc, const char * * argv );
+static void debug_mi_interpreter_exec( int argc, const char * * argv );
+
+static struct command_elem parent_commands[] =
+{
+ { "run", &debug_parent_run },
+ { "continue", &debug_parent_continue },
+ { "kill", &debug_parent_kill },
+ { "step", &debug_parent_step },
+ { "next", &debug_parent_next },
+ { "finish", &debug_parent_finish },
+ { "break", &debug_parent_break },
+ { "disable", &debug_parent_disable },
+ { "enable", &debug_parent_enable },
+ { "delete", &debug_parent_delete },
+ { "clear", &debug_parent_clear },
+ { "print", &debug_parent_print },
+ { "backtrace", &debug_parent_backtrace },
+ { "quit", &debug_parent_quit },
+ { "help", &debug_parent_help },
+ { "-break-insert", &debug_mi_break_insert },
+ { "-break-delete", &debug_mi_break_delete },
+ { "-break-disable", &debug_mi_break_disable },
+ { "-break-enable", &debug_mi_break_enable },
+ { "-break-info", &debug_mi_break_info },
+ { "-break-list", &debug_mi_break_list },
+ { "-inferior-tty-set", &debug_mi_inferior_tty_set },
+ { "-gdb-exit", &debug_mi_gdb_exit },
+ { "-gdb-set", &debug_mi_gdb_set },
+ { "-gdb-show", &debug_mi_gdb_show },
+ { "-enable-pretty-printing", &debug_mi_not_implemented },
+ { "-file-list-exec-source-files", &debug_mi_file_list_exec_source_files },
+ { "-file-list-exec-source-file", &debug_mi_file_list_exec_source_file },
+ { "-thread-info", &debug_mi_thread_info },
+ { "-thread-select", &debug_mi_thread_select },
+ { "-stack-info-frame", &debug_mi_stack_info_frame },
+ { "-stack-select-frame", &debug_mi_stack_select_frame },
+ { "-stack-list-variables", &debug_mi_stack_list_variables },
+ { "-stack-list-locals", &debug_mi_stack_list_locals },
+ { "-stack-list-frames", &debug_mi_stack_list_frames },
+ { "-list-target-features", &debug_mi_list_target_features },
+ { "-exec-run", &debug_mi_exec_run },
+ { "-exec-continue", &debug_mi_exec_continue },
+ { "-exec-step", &debug_mi_exec_step },
+ { "-exec-next", &debug_mi_exec_next },
+ { "-exec-finish", &debug_mi_exec_finish },
+ { "-data-list-register-names", &debug_mi_data_list_register_names },
+ { "-data-evaluate-expression", &debug_mi_data_evaluate_expression },
+ { "-interpreter-exec", &debug_mi_interpreter_exec },
+ { NULL, NULL }
+};
+
+static void debug_mi_format_token( void )
+{
+ if ( current_token != 0 )
+ {
+ printf( "%d", current_token );
+ }
+}
+
+static void debug_mi_format_breakpoint( int id )
+{
+ struct breakpoint * ptr = &breakpoints[ id - 1 ];
+ printf( "bkpt={" );
+ printf( "number=\"%d\"", id );
+ printf( ",type=\"breakpoint\"" );
+ printf( ",disp=\"keep\"" ); /* FIXME: support temporary breakpoints. */
+ printf( ",enabled=\"%s\"", ptr->status == BREAKPOINT_ENABLED ? "y" : "n" );
+ /* addr */
+ if ( ptr->line == -1 )
+ {
+ printf( ",func=\"%s\"", object_str( ptr->file ) );
+ }
+ else
+ {
+ printf( ",file=\"%s\"", object_str( ptr->file ) );
+ printf( ",line=\"%d\"", ptr->line );
+ printf( ",fullname=\"%s\"", object_str( ptr->file ) );
+ }
+ /* fullname */
+ /* times */
+ // printf( "" );
+ printf( "}" );
+}
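+/* Illustrative record produced by the routine above (actual values depend on
+ * the breakpoint): a function breakpoint is formatted roughly as
+ *   bkpt={number="1",type="breakpoint",disp="keep",enabled="y",func="some-rule"}
+ * while a file:line breakpoint carries file=, line= and fullname= fields instead.
+ */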
+
+static int breakpoint_id_parse( const char * name )
+{
+ int id = atoi( name );
+ if ( id > num_breakpoints || id < 1 || breakpoints[ id - 1 ].status == BREAKPOINT_DELETED )
+ return -1;
+ return id;
+}
+
+static void debug_mi_break_insert( int argc, const char * * argv )
+{
+ const char * inner_argv[ 2 ];
+ // int temporary = 0; /* FIXME: not supported yet */
+ // int hardware = 0; /* unsupported */
+ // int force = 1; /* We don't have global debug information... */
+ int disabled = 0;
+ // int tracepoint = 0; /* unsupported */
+ // int thread_id = 0;
+ // int ignore_count = 0;
+ // const char * condition; /* FIXME: not supported yet */
+ const char * location;
+ int id;
+ for ( --argc, ++argv; argc; --argc, ++argv )
+ {
+ if ( strcmp( *argv, "-t" ) == 0 )
+ {
+ // temporary = 1;
+ }
+ else if ( strcmp( *argv, "-h" ) == 0 )
+ {
+ // hardware = 1;
+ }
+ else if ( strcmp( *argv, "-f" ) == 0 )
+ {
+ // force = 1;
+ }
+ else if ( strcmp( *argv, "-d" ) == 0 )
+ {
+ disabled = 1;
+ }
+ else if ( strcmp( *argv, "-a" ) == 0 )
+ {
+ // tracepoint = 1;
+ }
+ else if ( strcmp( *argv, "-c" ) == 0 )
+ {
+ if ( argc < 2 )
+ {
+ debug_mi_error( "Missing argument for -c." );
+ return;
+ }
+
+ // condition = argv[ 1 ];
+ --argc;
+ ++argv;
+ }
+ else if ( strcmp( *argv, "-i" ) == 0 )
+ {
+ if ( argc < 2 )
+ {
+ debug_mi_error( "Missing argument for -i." );
+ return;
+ }
+
+ // ignore_count = atoi( argv[ 1 ] );
+ --argc;
+ ++argv;
+ }
+ else if ( strcmp( *argv, "-p" ) == 0 )
+ {
+ if ( argc < 2 )
+ {
+ debug_mi_error( "Missing argument for -p." );
+ return;
+ }
+
+ // thread_id = atoi( argv[ 1 ] );
+ --argc;
+ ++argv;
+ }
+ else if ( strcmp( *argv, "--" ) == 0 )
+ {
+ --argc;
+ ++argv;
+ break;
+ }
+ else if ( **argv != '-' )
+ {
+ break;
+ }
+ else
+ {
+ debug_mi_error( "Unknown argument." );
+ return;
+ }
+ }
+ if ( argc > 1 )
+ {
+ debug_mi_error( "Too many arguments for -break-insert." );
+ return;
+ }
+
+ if ( argc == 1 )
+ {
+ location = *argv;
+ }
+ else
+ {
+ debug_mi_error( "Not implemented: -break-insert with no location." );
+ return;
+ }
+ inner_argv[ 0 ] = "break";
+ inner_argv[ 1 ] = location;
+
+ id = debug_add_breakpoint( location );
+ debug_parent_forward_nowait( 2, inner_argv, 1, 0 );
+
+ if ( disabled )
+ {
+ char buf[ 80 ];
+ sprintf( buf, "%d", num_breakpoints );
+ inner_argv[ 0 ] = "disable";
+ inner_argv[ 1 ] = buf;
+ debug_child_disable( 2, inner_argv );
+ debug_parent_forward_nowait( 2, inner_argv, 1, 0 );
+ }
+
+ debug_mi_format_token();
+ printf( "^done," );
+ debug_mi_format_breakpoint( id );
+ printf( "\n(gdb) \n" );
+}
+
+static void debug_mi_break_delete( int argc, const char * * argv )
+{
+ if ( argc < 2 )
+ {
+ debug_mi_error( "Not enough arguments for -break-delete" );
+ return;
+ }
+ for ( --argc, ++argv; argc; --argc, ++argv )
+ {
+ const char * inner_argv[ 2 ];
+ int id = breakpoint_id_parse( *argv );
+ if ( id == -1 )
+ {
+ debug_mi_error( "Not a valid breakpoint" );
+ return;
+ }
+ inner_argv[ 0 ] = "delete";
+ inner_argv[ 1 ] = *argv;
+ debug_parent_delete( 2, inner_argv );
+ }
+}
+
+static void debug_mi_break_enable( int argc, const char * * argv )
+{
+ if ( argc < 2 )
+ {
+ debug_mi_error( "Not enough arguments for -break-enable" );
+ return;
+ }
+ for ( --argc, ++argv; argc; --argc, ++argv )
+ {
+ const char * inner_argv[ 2 ];
+ int id = breakpoint_id_parse( *argv );
+ if ( id == -1 )
+ {
+ debug_mi_error( "Not a valid breakpoint" );
+ return;
+ }
+ inner_argv[ 0 ] = "enable";
+ inner_argv[ 1 ] = *argv;
+ debug_parent_enable( 2, inner_argv );
+ }
+}
+
+static void debug_mi_break_disable( int argc, const char * * argv )
+{
+ if ( argc < 2 )
+ {
+ debug_mi_error( "Not enough arguments for -break-disable" );
+ return;
+ }
+ for ( --argc, ++argv; argc; --argc, ++argv )
+ {
+ const char * inner_argv[ 2 ];
+ int id = breakpoint_id_parse( *argv );
+ if ( id == -1 )
+ {
+ debug_mi_error( "Not a valid breakpoint" );
+ return;
+ }
+ inner_argv[ 0 ] = "disable";
+ inner_argv[ 1 ] = *argv;
+ debug_parent_disable( 2, inner_argv );
+ }
+}
+
+static void debug_mi_format_breakpoint_header_col( int width, int alignment, const char * col_name, const char * colhdr )
+{
+ printf( "{width=\"%d\",alignment=\"%d\",col_name=\"%s\",colhdr=\"%s\"}", width, alignment, col_name, colhdr );
+}
+
+static void debug_mi_format_breakpoint_hdr( void )
+{
+ printf( "hdr=[" );
+ debug_mi_format_breakpoint_header_col( 7, -1, "number", "Num" );
+ printf( "," );
+ debug_mi_format_breakpoint_header_col( 14, -1, "type", "Type" );
+ printf( "," );
+ debug_mi_format_breakpoint_header_col( 4, -1, "disp", "Disp" );
+ printf( "," );
+ debug_mi_format_breakpoint_header_col( 3, -1, "enabled", "Enb" );
+ printf( "," );
+ debug_mi_format_breakpoint_header_col( 10, -1, "addr", "Address" );
+ printf( "," );
+ debug_mi_format_breakpoint_header_col( 40, 2, "what", "What" );
+ printf( "]" );
+}
+
+static void debug_mi_break_info( int argc, const char * * argv )
+{
+ int id;
+ --argc;
+ ++argv;
+ if ( strcmp( *argv, "--" ) == 0 )
+ {
+ --argc;
+ ++argv;
+ }
+ if ( argc < 1 )
+ {
+ debug_mi_error( "Not enough arguments for -break-info" );
+ return;
+ }
+ if ( argc > 1 )
+ {
+ debug_mi_error( "Too many arguments for -break-info" );
+ return;
+ }
+
+ id = breakpoint_id_parse( *argv );
+ if ( id == -1 )
+ {
+ debug_mi_error( "No such breakpoint." );
+ return;
+ }
+
+ printf( "^done,BreakpointTable={"
+ "nr_rows=\"%d\",nr_cols=\"6\",", 1 );
+ debug_mi_format_breakpoint_hdr();
+ printf( ",body=[" );
+ debug_mi_format_breakpoint( id );
+ printf( "]}" );
+ printf("\n(gdb) \n");
+}
+
+static void debug_mi_break_list( int argc, const char * * argv )
+{
+ int number;
+ int i;
+ int first;
+ if ( argc > 2 || ( argc == 2 && strcmp( argv[ 1 ], "--" ) ) )
+ {
+ debug_mi_error( "Too many arguments for -break-list" );
+ return;
+ }
+
+ number = 0;
+ for ( i = 0; i < num_breakpoints; ++i )
+ if ( breakpoints[ i ].status != BREAKPOINT_DELETED )
+ ++number;
+ debug_mi_format_token();
+ printf( "^done,BreakpointTable={"
+ "nr_rows=\"%d\",nr_cols=\"6\",", number );
+ debug_mi_format_breakpoint_hdr();
+ printf( ",body=[" );
+ first = 1;
+ for ( i = 0; i < num_breakpoints; ++i )
+ if ( breakpoints[ i ].status != BREAKPOINT_DELETED )
+ {
+ if ( first ) first = 0;
+ else printf( "," );
+ debug_mi_format_breakpoint( i + 1 );
+ }
+ printf( "]}" );
+ printf("\n(gdb) \n");
+}
+
+static void debug_mi_inferior_tty_set( int argc, const char * * argv )
+{
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+}
+
+static void debug_mi_gdb_exit( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_RUN )
+ {
+ fprintf( command_output, "kill\n" );
+ fflush( command_output );
+ debug_parent_wait( 0 );
+ }
+ debug_mi_format_token();
+ printf( "^exit\n" );
+ exit( EXIT_SUCCESS );
+}
+
+static void debug_mi_gdb_set( int argc, const char * * argv )
+{
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+}
+
+static void debug_mi_gdb_show( int argc, const char * * argv )
+{
+ const char * value = "";
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ value = "(gdb) ";
+ printf( "^done,value=\"%s\"\n(gdb) \n", value );
+}
+
+static void debug_mi_not_implemented( int argc, const char * * argv )
+{
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+}
+
+static void debug_mi_file_list_exec_source_files( int argc, const char * * argv )
+{
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ printf( "^done,files=[]\n(gdb) \n" );
+}
+
+static void debug_mi_file_list_exec_source_file( int argc, const char * * argv )
+{
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ printf( "^error,msg=\"Don't know how to handle this yet\"\n(gdb) \n" );
+}
+
+static void debug_mi_thread_info( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_mi_format_token();
+ printf( "^done,threads=[]\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[] = { "info", "frame" };
+ FRAME_INFO info;
+ debug_parent_forward_nowait( 2, new_args, 0, 0 );
+ debug_frame_read( command_child, &info );
+
+ debug_mi_format_token();
+ printf( "^done,threads=[{id=\"1\"," );
+ debug_mi_print_frame_info( &info );
+ debug_frame_info_free( &info );
+ printf( "}],current-thread-id=\"1\"\n(gdb) \n" );
+ }
+}
+
+static void debug_mi_thread_select( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ /* FIXME: better error handling*/
+ debug_mi_format_token();
+ printf( "^error,msg=\"Thread ID 1 not known\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[] = { "info", "frame" };
+ FRAME_INFO info;
+ debug_parent_forward_nowait( 2, new_args, 0, 0 );
+ debug_frame_read( command_child, &info );
+
+ debug_mi_format_token();
+ printf( "^done,new-thread-id=\"1\"," );
+ debug_mi_print_frame_info( &info );
+ debug_frame_info_free( &info );
+ printf( "\n(gdb) \n" );
+ }
+}
+
+static void debug_mi_stack_select_frame( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[ 2 ];
+ new_args[ 0 ] = "frame";
+ new_args[ 1 ] = argv[ 1 ];
+ debug_parent_forward_nowait( 2, new_args, 0, 0 );
+ debug_mi_format_token();
+ printf( "^done\n(gdb) \n" );
+ }
+}
+
+static void debug_mi_stack_info_frame( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ FRAME_INFO info;
+ fprintf( command_output, "info frame\n" );
+ fflush( command_output );
+ debug_frame_read( command_child, &info );
+ debug_mi_format_token();
+ printf( "^done," );
+ debug_mi_print_frame_info( &info );
+ debug_frame_info_free( &info );
+ printf( "\n(gdb) \n" );
+ }
+}
+
+static void debug_mi_stack_list_variables( int argc, const char * * argv )
+{
+#define DEBUG_PRINT_VARIABLES_NO_VALUES 1
+#define DEBUG_PRINT_VARIABLES_ALL_VALUES 2
+#define DEBUG_PRINT_VARIABLES_SIMPLE_VALUES 3
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ return;
+ }
+ --argc;
+ ++argv;
+ for ( ; argc; --argc, ++argv )
+ {
+ if ( strcmp( *argv, "--thread" ) == 0 )
+ {
+ /* Only one thread. */
+ --argc;
+ ++argv;
+ }
+ else if ( strcmp( *argv, "--no-values" ) == 0 )
+ {
+ // print_values = DEBUG_PRINT_VARIABLES_NO_VALUES;
+ }
+ else if ( strcmp( *argv, "--all-values" ) == 0 )
+ {
+ // print_values = DEBUG_PRINT_VARIABLES_ALL_VALUES;
+ }
+ else if ( strcmp( *argv, "--simple-values" ) == 0 )
+ {
+ // print_values = DEBUG_PRINT_VARIABLES_SIMPLE_VALUES;
+ }
+ else if ( strcmp( *argv, "--" ) == 0 )
+ {
+ --argc;
+ ++argv;
+ break;
+ }
+ else if ( argv[ 0 ][ 0 ] == '-' )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"Unknown argument %s\"\n(gdb) \n", *argv );
+ return;
+ }
+ else
+ {
+ break;
+ }
+ }
+ if ( argc != 0 )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"Too many arguments for -stack-list-variables\"\n(gdb) \n" );
+ return;
+ }
+
+ {
+ LIST * vars;
+ LISTITER iter, end;
+ int first = 1;
+ fprintf( command_output, "info locals\n" );
+ fflush( command_output );
+ vars = debug_list_read( command_child );
+ debug_parent_wait( 0 );
+ debug_mi_format_token();
+ printf( "^done,variables=[" );
+ for ( iter = list_begin( vars ), end = list_end( vars ); iter != end; iter = list_next( iter ) )
+ {
+ OBJECT * varname = list_item( iter );
+ string varbuf[1];
+ const char * new_args[2];
+ if ( first )
+ {
+ first = 0;
+ }
+ else
+ {
+ printf( "," );
+ }
+ printf( "{name=\"%s\",value=\"", object_str( varname ) );
+ fflush( stdout );
+ string_new( varbuf );
+ string_append( varbuf, "$(" );
+ string_append( varbuf, object_str( varname ) );
+ string_append( varbuf, ")" );
+ new_args[ 0 ] = "print";
+ new_args[ 1 ] = varbuf->value;
+ debug_parent_forward( 2, new_args, 0, 0 );
+ string_free( varbuf );
+ printf( "\"}" );
+ }
+ printf( "]\n(gdb) \n" );
+ fflush( stdout );
+ list_free( vars );
+ }
+}
+
+static void debug_mi_stack_list_locals( int argc, const char * * argv )
+{
+#define DEBUG_PRINT_VARIABLES_NO_VALUES 1
+#define DEBUG_PRINT_VARIABLES_ALL_VALUES 2
+#define DEBUG_PRINT_VARIABLES_SIMPLE_VALUES 3
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ return;
+ }
+ --argc;
+ ++argv;
+ for ( ; argc; --argc, ++argv )
+ {
+ if ( strcmp( *argv, "--thread" ) == 0 )
+ {
+ /* Only one thread. */
+ --argc;
+ ++argv;
+ if ( argc == 0 )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"Argument required for --thread.\"" );
+ return;
+ }
+ }
+ else if ( strcmp( *argv, "--no-values" ) == 0 )
+ {
+ // print_values = DEBUG_PRINT_VARIABLES_NO_VALUES;
+ }
+ else if ( strcmp( *argv, "--all-values" ) == 0 )
+ {
+ // print_values = DEBUG_PRINT_VARIABLES_ALL_VALUES;
+ }
+ else if ( strcmp( *argv, "--simple-values" ) == 0 )
+ {
+ // print_values = DEBUG_PRINT_VARIABLES_SIMPLE_VALUES;
+ }
+ else if ( strcmp( *argv, "--" ) == 0 )
+ {
+ --argc;
+ ++argv;
+ break;
+ }
+ else if ( argv[ 0 ][ 0 ] == '-' )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"Unknown argument %s\"\n(gdb) \n", *argv );
+ return;
+ }
+ else
+ {
+ break;
+ }
+ }
+ if ( argc != 0 )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"Too many arguments for -stack-list-variables\"\n(gdb) \n" );
+ return;
+ }
+
+ {
+ LIST * vars;
+ LISTITER iter, end;
+ int first = 1;
+ fprintf( command_output, "info locals\n" );
+ fflush( command_output );
+ vars = debug_list_read( command_child );
+ debug_parent_wait( 0 );
+ debug_mi_format_token();
+ printf( "^done,locals=[" );
+ for ( iter = list_begin( vars ), end = list_end( vars ); iter != end; iter = list_next( iter ) )
+ {
+ OBJECT * varname = list_item( iter );
+ string varbuf[1];
+ const char * new_args[2];
+ if ( first )
+ {
+ first = 0;
+ }
+ else
+ {
+ printf( "," );
+ }
+ printf( "{name=\"%s\",type=\"list\",value=\"", object_str( varname ) );
+ fflush( stdout );
+ string_new( varbuf );
+ string_append( varbuf, "$(" );
+ string_append( varbuf, object_str( varname ) );
+ string_append( varbuf, ")" );
+ new_args[ 0 ] = "print";
+ new_args[ 1 ] = varbuf->value;
+ debug_parent_forward( 2, new_args, 0, 0 );
+ string_free( varbuf );
+ printf( "\"}" );
+ }
+ printf( "]\n(gdb) \n" );
+ fflush( stdout );
+ list_free( vars );
+ }
+}
+
+static void debug_mi_stack_list_frames( int argc, const char * * argv )
+{
+ int depth;
+ int i;
+
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ debug_mi_format_token();
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ return;
+ }
+
+ fprintf( command_output, "info depth\n" );
+ fflush( command_output );
+ depth = debug_int_read( command_child );
+
+ debug_mi_format_token();
+ printf( "^done,stack=[" );
+ for ( i = 0; i < depth; ++i )
+ {
+ FRAME_INFO frame;
+ fprintf( command_output, "info frame %d\n", i );
+ fflush( command_output );
+ if ( i != 0 )
+ {
+ printf( "," );
+ }
+ debug_frame_read( command_child, &frame );
+ debug_mi_print_frame_info( &frame );
+ }
+ printf( "]\n(gdb) \n" );
+ fflush( stdout );
+}
+
+static void debug_mi_list_target_features( int argc, const char * * argv )
+{
+ /* FIXME: implement this for real */
+ debug_mi_format_token();
+ printf( "^done,features=[\"async\"]\n(gdb) \n" );
+}
+
+static void debug_mi_exec_run( int argc, const char * * argv )
+{
+ printf( "=thread-created,id=\"1\",group-id=\"i1\"\n" );
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ debug_start_child( argc, argv );
+ debug_parent_wait( 1 );
+}
+
+static void debug_mi_exec_continue( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[] = { "continue" };
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ debug_parent_forward( 1, new_args, 1, 0 );
+ }
+}
+
+static void debug_mi_exec_step( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[] = { "step" };
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ debug_parent_forward( 1, new_args, 1, 0 );
+ }
+}
+
+static void debug_mi_exec_next( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[] = { "next" };
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ debug_parent_forward( 1, new_args, 1, 0 );
+ }
+}
+
+static void debug_mi_exec_finish( int argc, const char * * argv )
+{
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[] = { "finish" };
+ debug_mi_format_token();
+ printf( "^running\n(gdb) \n" );
+ fflush( stdout );
+ debug_parent_forward( 1, new_args, 1, 0 );
+ }
+}
+
+static void debug_mi_data_list_register_names( int argc, const char * * argv )
+{
+ debug_mi_format_token();
+ printf( "^done,register-names=[]\n(gdb) \n" );
+}
+
+static void debug_mi_data_evaluate_expression( int argc, const char * * argv )
+{
+ if ( argc < 2 )
+ {
+ printf( "^error,msg=\"Not enough arguments for -data-evaluate-expression\"\n(gdb) \n" );
+ return;
+ }
+ if ( debug_state == DEBUG_NO_CHILD )
+ {
+ printf( "^error,msg=\"No child\"\n(gdb) \n" );
+ }
+ else
+ {
+ const char * new_args[ 2 ];
+ debug_mi_format_token();
+ printf( "^done,value=\"" );
+ fflush( stdout );
+ new_args[ 0 ] = "print";
+ new_args[ 1 ] = argv[ 1 ];
+ debug_parent_forward( 2, new_args, 1, 0 );
+ printf( "\"\n(gdb) \n" );
+ }
+}
+
+static int process_command( char * command );
+
+static void debug_mi_interpreter_exec( int argc, const char * * argv )
+{
+ if ( argc < 3 )
+ {
+ debug_mi_error( "Not enough arguments for -interpreter-exec" );
+ return;
+ }
+ process_command( (char *)argv[ 2 ] );
+}
+
+/* The debugger's main loop. */
+int debugger( void )
+{
+ command_array = parent_commands;
+ command_input = stdin;
+ if ( debug_interface == DEBUG_INTERFACE_MI )
+ printf( "=thread-group-added,id=\"i1\"\n(gdb) \n" );
+ while ( 1 )
+ {
+ if ( debug_interface == DEBUG_INTERFACE_CONSOLE )
+ printf("(b2db) ");
+ fflush( stdout );
+ read_command();
+ }
+ return 0;
+}
+
+
+/* Runs the matching command in the current command_array. */
+static int run_command( int argc, const char * * argv )
+{
+ struct command_elem * command;
+ const char * command_name;
+ if ( argc == 0 )
+ {
+ return 1;
+ }
+ command_name = argv[ 0 ];
+ /* Skip the GDB/MI token when choosing the command to run. */
+ while( isdigit( *command_name ) ) ++command_name;
+ current_token = atoi( argv[ 0 ] );
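+ /* Illustrative example (made-up values): an MI input tokenized as
+  * "42-break-insert main" yields current_token == 42 and
+  * command_name == "-break-insert".
+  */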
+ for( command = command_array; command->key; ++command )
+ {
+ if ( strcmp( command->key, command_name ) == 0 )
+ {
+ ( *command->command )( argc, argv );
+ return 1;
+ }
+ }
+ debug_error( "Unknown command: %s", command_name );
+ return 0;
+}
+
+/* Parses a single command into whitespace separated tokens, and runs it. */
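+/* Illustrative example: the line 'break Jamroot.jam:10' is split into the
+ * tokens { "break", "Jamroot.jam:10" }, while a double-quoted token such as
+ * "a b c" is kept as a single argument (escaping inside quotes is not handled
+ * yet, see the FIXME below).
+ */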
+static int process_command( char * line )
+{
+ int result;
+ size_t capacity = 8;
+ char * * buffer = (char **)malloc( capacity * sizeof( char * ) );
+ char * * current = buffer;
+ char * iter = line;
+ char * saved = iter;
+ *current = iter;
+ for ( ; ; )
+ {
+ /* skip spaces */
+ while ( *iter && isspace( *iter ) )
+ {
+ ++iter;
+ }
+ if ( ! *iter )
+ {
+ break;
+ }
+ /* Find the next token */
+ saved = iter;
+ if ( *iter == '\"' )
+ {
+ saved = ++iter;
+ /* FIXME: handle escaping */
+ while ( *iter && *iter != '\"' )
+ {
+ ++iter;
+ }
+ }
+ else
+ {
+ while ( *iter && ! isspace( *iter ) )
+ {
+ ++iter;
+ }
+ }
+ /* resize the buffer if necessary */
+ if ( current == buffer + capacity )
+ {
+ buffer = (char**)realloc( (void *)buffer, capacity * 2 * sizeof( char * ) );
+ current = buffer + capacity;
+ /* Remember the new capacity so later growth checks stay correct. */
+ capacity *= 2;
+ }
+ /* append the token to the buffer */
+ *current++ = saved;
+ /* null terminate the token */
+ if ( *iter )
+ {
+ *iter++ = '\0';
+ }
+ }
+ result = run_command( current - buffer, (const char **)buffer );
+ free( (void *)buffer );
+ return result;
+}
+
+static int read_command( void )
+{
+ int result;
+ int ch;
+ string line[ 1 ];
+ string_new( line );
+ /* HACK: force line to be on the heap. */
+ string_reserve( line, 64 );
+ while( ( ch = fgetc( command_input ) ) != EOF )
+ {
+ if ( ch == '\n' )
+ {
+ break;
+ }
+ else
+ {
+ string_push_back( line, (char)ch );
+ }
+ }
+ result = process_command( line->value );
+ string_free( line );
+ return result;
+}
+
+static void debug_listen( void )
+{
+ debug_state = DEBUG_STOPPED;
+ while ( debug_state == DEBUG_STOPPED )
+ {
+ if ( feof( command_input ) )
+ exit( 1 );
+ fflush(stdout);
+ fflush( command_output );
+ read_command();
+ }
+ debug_selected_frame_number = 0;
+}
+
+struct debug_child_data_t debug_child_data;
+const char debugger_opt[] = "--b2db-internal-debug-handle=";
+int debug_interface;
diff --git a/src/boost/tools/build/src/engine/debugger.h b/src/boost/tools/build/src/engine/debugger.h
new file mode 100644
index 000000000..67956c76a
--- /dev/null
+++ b/src/boost/tools/build/src/engine/debugger.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2015 Steven Watanabe
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef DEBUGGER_SW20150314_H
+#define DEBUGGER_SW20150314_H
+
+#include "config.h"
+#include <setjmp.h>
+#include "object.h"
+#include "frames.h"
+
+#ifdef JAM_DEBUGGER
+
+void debug_on_instruction( FRAME * frame, OBJECT * file, int line );
+void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int line );
+void debug_on_exit_function( OBJECT * name );
+int debugger( void );
+
+struct debug_child_data_t
+{
+ int argc;
+ const char * * argv;
+ jmp_buf jmp;
+};
+
+extern struct debug_child_data_t debug_child_data;
+extern LIST * debug_print_result;
+extern const char debugger_opt[];
+extern int debug_interface;
+
+#define DEBUG_INTERFACE_CONSOLE 1
+#define DEBUG_INTERFACE_MI 2
+#define DEBUG_INTERFACE_CHILD 3
+
+#define debug_is_debugging() ( debug_interface != 0 )
+#define debug_on_enter_function( frame, name, file, line ) \
+ ( debug_is_debugging()? \
+ debug_on_enter_function( frame, name, file, line ) : \
+ (void)0 )
+#define debug_on_exit_function( name ) \
+ ( debug_is_debugging()? \
+ debug_on_exit_function( name ) : \
+ (void)0 )
+
+#if NT
+
+void debug_init_handles( const char * in, const char * out );
+
+#endif
+
+#else
+
+#define debug_on_instruction( frame, file, line ) ( ( void )0 )
+#define debug_on_enter_function( frame, name, file, line ) ( ( void )0 )
+#define debug_on_exit_function( name ) ( ( void )0 )
+#define debug_is_debugging() ( 0 )
+
+#endif
+
+#endif
diff --git a/src/boost/tools/build/src/engine/execcmd.cpp b/src/boost/tools/build/src/engine/execcmd.cpp
new file mode 100644
index 000000000..8e6ec4f50
--- /dev/null
+++ b/src/boost/tools/build/src/engine/execcmd.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2007 Noel Belcourt.
+ *
+ * Utility functions shared between different exec*.c platform specific
+ * implementation modules.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "execcmd.h"
+#include "output.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+
+/* Internal interrupt counter. */
+static int intr;
+
+
+/* Constructs a list of command-line elements using the format specified by the
+ * given shell list.
+ *
+ * Given argv array should have at least MAXARGC + 1 elements.
+ * Slot numbers may be between 0 and 998 (inclusive).
+ *
+ * The constructed argv list will be zero terminated. Character arrays referenced
+ * by the argv structure elements will be either elements from the given shell
+ * list, internal static buffers or the given command string, and should thus
+ * not be considered owned by or released via the argv structure; they should be
+ * considered invalidated by the next argv_from_shell() call.
+ *
+ * Shell list elements:
+ *  - Starting with '%' - represents the command string.
+ *  - Starting with '!' - represents the slot number (increased by one).
+ * - Anything else - used as a literal.
+ * - If no '%' element is found, the command string is appended as an extra.
+ */
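+
+/* Illustrative example (not taken from the original sources): with a shell
+ * list of "/bin/sh" "-c" "%", the command "echo hi" and slot 0, the resulting
+ * argv is { "/bin/sh", "-c", "echo hi", NULL } and the internal jobno buffer
+ * holds "1".
+ */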
+
+void argv_from_shell( char const * * argv, LIST * shell, char const * command,
+ int const slot )
+{
+ static char jobno[ 4 ];
+
+ int i;
+ int gotpercent = 0;
+ LISTITER iter = list_begin( shell );
+ LISTITER end = list_end( shell );
+
+ assert( 0 <= slot );
+ assert( slot < 999 );
+ sprintf( jobno, "%d", slot + 1 );
+
+ for ( i = 0; iter != end && i < MAXARGC; ++i, iter = list_next( iter ) )
+ {
+ switch ( object_str( list_item( iter ) )[ 0 ] )
+ {
+ case '%': argv[ i ] = command; ++gotpercent; break;
+ case '!': argv[ i ] = jobno; break;
+ default : argv[ i ] = object_str( list_item( iter ) );
+ }
+ }
+
+ if ( !gotpercent )
+ argv[ i++ ] = command;
+
+ argv[ i ] = NULL;
+}
+
+
+/* Returns whether the given command string contains lines longer than the given
+ * maximum.
+ */
+int check_cmd_for_too_long_lines( char const * command, size_t max,
+ int * const error_length, int * const error_max_length )
+{
+ while ( *command )
+ {
+ size_t const l = strcspn( command, "\n" );
+ if ( l > max )
+ {
+ *error_length = l;
+ *error_max_length = max;
+ return EXEC_CHECK_LINE_TOO_LONG;
+ }
+ command += l;
+ if ( *command )
+ ++command;
+ }
+ return EXEC_CHECK_OK;
+}
+
+
+/* Checks whether the given shell list is actually a request to execute raw
+ * commands without an external shell.
+ */
+int is_raw_command_request( LIST * shell )
+{
+ return !list_empty( shell ) &&
+ !strcmp( object_str( list_front( shell ) ), "%" ) &&
+ list_next( list_begin( shell ) ) == list_end( shell );
+}
+
+
+/* Returns whether an interrupt has been detected so far. */
+
+int interrupted( void )
+{
+ return intr != 0;
+}
+
+
+/* Internal interrupt handler. */
+
+void onintr( int disp )
+{
+ ++intr;
+ out_printf( "...interrupted\n" );
+}
diff --git a/src/boost/tools/build/src/engine/execcmd.h b/src/boost/tools/build/src/engine/execcmd.h
new file mode 100644
index 000000000..b39e8ae2d
--- /dev/null
+++ b/src/boost/tools/build/src/engine/execcmd.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * execcmd.h - execute a shell script.
+ *
+ * Defines the interface to be implemented in platform specific implementation
+ * modules as well as different shared utility functions prepared in the
+ * execcmd.c module.
+ */
+
+#ifndef EXECCMD_H
+#define EXECCMD_H
+
+#include "config.h"
+#include "lists.h"
+#include "jam_strings.h"
+#include "timestamp.h"
+
+
+typedef struct timing_info
+{
+ double system;
+ double user;
+ timestamp start;
+ timestamp end;
+} timing_info;
+
+typedef void (* ExecCmdCallback)
+(
+ void * const closure,
+ int const status,
+ timing_info const * const,
+ char const * const cmd_stdout,
+ char const * const cmd_stderr,
+ int const cmd_exit_reason
+);
+
+/* Global initialization. Must be called after setting
+ * globs.jobs. May be called multiple times. */
+void exec_init( void );
+/* Global cleanup */
+void exec_done( void );
+
+/* Status codes passed to ExecCmdCallback routines. */
+#define EXEC_CMD_OK 0
+#define EXEC_CMD_FAIL 1
+#define EXEC_CMD_INTR 2
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+);
+
+/* exec_check() return codes. */
+#define EXEC_CHECK_OK 101
+#define EXEC_CHECK_NOOP 102
+#define EXEC_CHECK_LINE_TOO_LONG 103
+#define EXEC_CHECK_TOO_LONG 104
+
+/* Prevents action output from being written
+ * immediately to stdout/stderr.
+ */
+#define EXEC_CMD_QUIET 1
+
+void exec_cmd
+(
+ string const * command,
+ int flags,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+);
+
+void exec_wait();
+
+
+/******************************************************************************
+ * *
+ * Utility functions defined in the execcmd.c module. *
+ * *
+ ******************************************************************************/
+
+/* Constructs a list of command-line elements using the format specified by the
+ * given shell list.
+ */
+void argv_from_shell( char const * * argv, LIST * shell, char const * command,
+ int const slot );
+
+/* Interrupt routine bumping the internal interrupt counter. Needs to be
+ * registered by platform specific exec*.c modules.
+ */
+void onintr( int disp );
+
+/* Returns whether an interrupt has been detected so far. */
+int interrupted( void );
+
+/* Checks whether the given shell list is actually a request to execute raw
+ * commands without an external shell.
+ */
+int is_raw_command_request( LIST * shell );
+
+/* Utility worker for exec_check() checking whether all the given command lines
+ * are under the specified length limit.
+ */
+int check_cmd_for_too_long_lines( char const * command, size_t max,
+ int * const error_length, int * const error_max_length );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/execnt.cpp b/src/boost/tools/build/src/engine/execnt.cpp
new file mode 100644
index 000000000..f8934fd36
--- /dev/null
+++ b/src/boost/tools/build/src/engine/execnt.cpp
@@ -0,0 +1,1370 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2007 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * execnt.c - execute a shell command on Windows NT
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate the actual command. The
+ * default is: cmd.exe /Q/C
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to the slot
+ * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
+ * not include a %, it is tacked on as the last argument.
+ *
+ * Each $(JAMSHELL) placeholder must be specified as a separate individual
+ * element in a jam variable value.
+ *
+ * Do not just set JAMSHELL to cmd.exe - it will not work!
+ *
+ * External routines:
+ * exec_check() - preprocess and validate the command
+ * exec_cmd() - launch an async command execution
+ * exec_wait() - wait for any of the async command processes to terminate
+ *
+ * Internal routines:
+ * filetime_to_seconds() - Windows FILETIME --> number of seconds conversion
+ */
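+
+/* Illustrative sketch only: a Jamfile setting along the lines of
+ *
+ *   JAMSHELL = cmd.exe /Q/C % ;
+ *
+ * (each placeholder given as its own element) makes every action run as
+ * "cmd.exe /Q/C <command>", with '%' replaced by the command (here, the
+ * generated command file) and '!' replaced by the slot number.
+ */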
+
+#include "jam.h"
+#include "output.h"
+#ifdef USE_EXECNT
+#include "execcmd.h"
+
+#include "lists.h"
+#include "output.h"
+#include "pathsys.h"
+#include "string.h"
+
+#include <assert.h>
+#include <ctype.h>
+#include <errno.h>
+#include <time.h>
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <process.h>
+#include <tlhelp32.h>
+#include <versionhelpers.h>
+
+
+/* get the maximum shell command line length according to the OS */
+static int maxline();
+/* valid raw command string length */
+static long raw_command_length( char const * command );
+/* add two 64-bit unsigned numbers, h1l1 and h2l2 */
+static FILETIME add_64(
+ unsigned long h1, unsigned long l1,
+ unsigned long h2, unsigned long l2 );
+/* add two FILETIME values */
+static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 );
+/* negate a FILETIME value */
+static FILETIME negate_FILETIME( FILETIME t );
+/* record the timing info for the process */
+static void record_times( HANDLE const, timing_info * const );
+/* calc the current running time of an *active* process */
+static double running_time( HANDLE const );
+/* terminate the given process, after terminating all its children first */
+static void kill_process_tree( DWORD const processId, HANDLE const );
+/* waits for a command to complete or time out */
+static int try_wait( int const timeoutMillis );
+/* reads any pending output for running commands */
+static void read_output();
+/* checks if a command ran out of time, and kills it */
+static int try_kill_one();
+/* is the first process a parent (direct or indirect) to the second one */
+static int is_parent_child( DWORD const parent, DWORD const child );
+/* close the alert dialog shown for the given process, if any */
+static void close_alert( PROCESS_INFORMATION const * const );
+/* close any alerts hanging around */
+static void close_alerts();
+/* prepare a command file to be executed using an external shell */
+static char const * prepare_command_file( string const * command, int slot );
+/* invoke the actual external process using the given command line */
+static void invoke_cmd( char const * const command, int const slot );
+/* find a free slot in the running commands table */
+static int get_free_cmdtab_slot();
+/* put together the final command string we are to run */
+static void string_new_from_argv( string * result, char const * const * argv );
+/* frees and renews the given string */
+static void string_renew( string * const );
+/* reports the last failed Windows API related error message */
+static void reportWindowsError( char const * const apiName, int slot );
+/* closes a Windows HANDLE and resets its variable to 0. */
+static void closeWinHandle( HANDLE * const handle );
+/* Adds the job index to the list of currently active jobs. */
+static void register_wait( int job_id );
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+/* CreateProcessA() Windows API places a limit of 32768 characters (bytes) on
+ * the allowed command-line length, including a trailing Unicode (2-byte)
+ * nul-terminator character.
+ */
+#define MAX_RAW_COMMAND_LENGTH 32766
+
+ /* Communication buffers size */
+#define IO_BUFFER_SIZE ( 64 * 1024 )
+
+/* We hold handles for pipes used to communicate with child processes in two
+ * element arrays indexed as follows.
+ */
+#define EXECCMD_PIPE_READ 0
+#define EXECCMD_PIPE_WRITE 1
+
+static int intr_installed;
+
+
+/* The list of commands we run. */
+static struct _cmdtab_t
+{
+ /* Temporary command file used to execute the action when needed. */
+ string command_file[ 1 ];
+
+ /* Pipes for communicating with the child process. Parent reads from (0),
+ * child writes to (1).
+ */
+ HANDLE pipe_out[ 2 ];
+ HANDLE pipe_err[ 2 ];
+
+ string buffer_out[ 1 ]; /* buffer to hold stdout, if any */
+ string buffer_err[ 1 ]; /* buffer to hold stderr, if any */
+
+ PROCESS_INFORMATION pi; /* running process information */
+
+ HANDLE wait_handle;
+
+ int flags;
+
+ /* Function called when the command completes. */
+ ExecCmdCallback func;
+
+ /* Opaque data passed back to the 'func' callback. */
+ void * closure;
+} * cmdtab = NULL;
+static int cmdtab_size = 0;
+
+/* A thread-safe single element queue. Used by the worker threads
+ * to signal the main thread that a process is completed.
+ */
+struct
+{
+ int job_index;
+ HANDLE read_okay;
+ HANDLE write_okay;
+} process_queue;
+
+/*
+ * Execution unit tests.
+ */
+
+void execnt_unit_test()
+{
+#if !defined( NDEBUG )
+ /* vc6 preprocessor is broken, so assert with these strings gets confused.
+ * Use a table instead.
+ */
+ {
+ typedef struct test { const char * command; int result; } test;
+ test tests[] = {
+ { "", 0 },
+ { " ", 0 },
+ { "x", 1 },
+ { "\nx", 1 },
+ { "x\n", 1 },
+ { "\nx\n", 1 },
+ { "\nx \n", 2 },
+ { "\nx \n ", 2 },
+ { " \n\t\t\v\r\r\n \t x \v \t\t\r\n\n\n \n\n\v\t", 8 },
+ { "x\ny", -1 },
+ { "x\n\n y", -1 },
+ { "echo x > foo.bar", -1 },
+ { "echo x < foo.bar", -1 },
+ { "echo x | foo.bar", -1 },
+ { "echo x \">\" foo.bar", 18 },
+ { "echo x '<' foo.bar", 18 },
+ { "echo x \"|\" foo.bar", 18 },
+ { "echo x \\\">\\\" foo.bar", -1 },
+ { "echo x \\\"<\\\" foo.bar", -1 },
+ { "echo x \\\"|\\\" foo.bar", -1 },
+ { "\"echo x > foo.bar\"", 18 },
+ { "echo x \"'\"<' foo.bar", -1 },
+ { "echo x \\\\\"<\\\\\" foo.bar", 22 },
+ { "echo x \\x\\\"<\\\\\" foo.bar", -1 },
+ { 0 } };
+ test const * t;
+ for ( t = tests; t->command; ++t )
+ assert( raw_command_length( t->command ) == t->result );
+ }
+
+ {
+ int const length = maxline() + 9;
+ char * const cmd = (char *)BJAM_MALLOC_ATOMIC( length + 1 );
+ memset( cmd, 'x', length );
+ cmd[ length ] = 0;
+ assert( raw_command_length( cmd ) == length );
+ BJAM_FREE( cmd );
+ }
+#endif
+}
+
+/*
+ * exec_init() - global initialization
+ */
+void exec_init( void )
+{
+ if ( globs.jobs > cmdtab_size )
+ {
+ cmdtab = (_cmdtab_t*)BJAM_REALLOC( cmdtab, globs.jobs * sizeof( *cmdtab ) );
+ memset( cmdtab + cmdtab_size, 0, ( globs.jobs - cmdtab_size ) * sizeof( *cmdtab ) );
+ cmdtab_size = globs.jobs;
+ }
+ if ( globs.jobs > MAXIMUM_WAIT_OBJECTS && !process_queue.read_okay )
+ {
+ process_queue.read_okay = CreateEvent( NULL, FALSE, FALSE, NULL );
+ process_queue.write_okay = CreateEvent( NULL, FALSE, TRUE, NULL );
+ }
+}
+
+/*
+ * exec_done - free resources.
+ */
+void exec_done( void )
+{
+ if ( process_queue.read_okay )
+ {
+ CloseHandle( process_queue.read_okay );
+ }
+ if ( process_queue.write_okay )
+ {
+ CloseHandle( process_queue.write_okay );
+ }
+ BJAM_FREE( cmdtab );
+}
+
+/*
+ * exec_check() - preprocess and validate the command
+ */
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+)
+{
+ /* Default shell does nothing when triggered with an empty or a
+ * whitespace-only command so we simply skip running it in that case. We
+ * still pass them on to non-default shells as we do not really know what
+ * they are going to do with such commands.
+ */
+ if ( list_empty( *pShell ) )
+ {
+ char const * s = command->value;
+ while ( isspace( *s ) ) ++s;
+ if ( !*s )
+ return EXEC_CHECK_NOOP;
+ }
+
+ /* Check prerequisites for executing raw commands. */
+ if ( is_raw_command_request( *pShell ) )
+ {
+ long const raw_cmd_length = raw_command_length( command->value );
+ if ( raw_cmd_length < 0 )
+ {
+ /* Invalid characters detected - fallback to default shell. */
+ list_free( *pShell );
+ *pShell = L0;
+ }
+ else if ( raw_cmd_length > MAX_RAW_COMMAND_LENGTH )
+ {
+ *error_length = raw_cmd_length;
+ *error_max_length = MAX_RAW_COMMAND_LENGTH;
+ return EXEC_CHECK_TOO_LONG;
+ }
+ else
+ return raw_cmd_length ? EXEC_CHECK_OK : EXEC_CHECK_NOOP;
+ }
+
+ /* Now we know we are using an external shell. Note that there is no need to
+ * check for too long command strings when using an external shell since we
+ * use a command file and assume no one is going to set up a JAMSHELL format
+ * string longer than a few hundred bytes at most which should be well under
+ * the total command string limit. Should someone actually construct such a
+ * JAMSHELL value it will get reported as an 'invalid parameter'
+ * CreateProcessA() Windows API failure which seems like a good enough
+ * result for such intentional mischief.
+ */
+
+ /* Check for too long command lines. */
+ return check_cmd_for_too_long_lines( command->value, maxline(),
+ error_length, error_max_length );
+}
+
+
+/*
+ * exec_cmd() - launch an async command execution
+ *
+ * We assume exec_check() already verified that the given command can have its
+ * command string constructed as requested.
+ */
+
+void exec_cmd
+(
+ string const * cmd_orig,
+ int flags,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+)
+{
+ int const slot = get_free_cmdtab_slot();
+ int const is_raw_cmd = is_raw_command_request( shell );
+ string cmd_local[ 1 ];
+
+ /* Initialize default shell - anything more than /Q/C is non-portable. */
+ static LIST * default_shell;
+ if ( !default_shell )
+ default_shell = list_new( object_new( "cmd.exe /Q/C" ) );
+
+ /* Specifying no shell means requesting the default shell. */
+ if ( list_empty( shell ) )
+ shell = default_shell;
+
+ if ( DEBUG_EXECCMD )
+ {
+ if ( is_raw_cmd )
+ out_printf( "Executing raw command directly\n" );
+ else
+ {
+ out_printf( "Executing using a command file and the shell: " );
+ list_print( shell );
+ out_printf( "\n" );
+ }
+ }
+
+ /* If we are running a raw command directly - trim its leading whitespaces
+ * as well as any trailing all-whitespace lines but keep any trailing
+ * whitespace in the final/only line containing something other than
+ * whitespace.
+ */
+ if ( is_raw_cmd )
+ {
+ char const * start = cmd_orig->value;
+ char const * p = cmd_orig->value + cmd_orig->size;
+ char const * end = p;
+ while ( isspace( *start ) ) ++start;
+ while ( p > start && isspace( p[ -1 ] ) )
+ if ( *--p == '\n' )
+ end = p;
+ string_new( cmd_local );
+ string_append_range( cmd_local, start, end );
+ assert( long(cmd_local->size) == raw_command_length( cmd_orig->value ) );
+ }
+ /* If we are not running a raw command directly, prepare a command file to
+ * be executed using an external shell and the actual command string using
+ * that command file.
+ */
+ else
+ {
+ char const * const cmd_file = prepare_command_file( cmd_orig, slot );
+ char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */
+ argv_from_shell( argv, shell, cmd_file, slot );
+ string_new_from_argv( cmd_local, argv );
+ }
+
+ /* Catch interrupts whenever commands are running. */
+ if ( !intr_installed )
+ {
+ intr_installed = 1;
+ signal( SIGINT, onintr );
+ }
+
+ cmdtab[ slot ].flags = flags;
+
+ /* Save input data into the selected running commands table slot. */
+ cmdtab[ slot ].func = func;
+ cmdtab[ slot ].closure = closure;
+
+ /* Invoke the actual external process using the constructed command line. */
+ invoke_cmd( cmd_local->value, slot );
+
+ /* Free our local command string copy. */
+ string_free( cmd_local );
+}
+
+
+/*
+ * exec_wait() - wait for any of the async command processes to terminate
+ *
+ * Wait and drive at most one execution completion, while processing the I/O for
+ * all ongoing commands.
+ */
+
+void exec_wait()
+{
+ int i = -1;
+ int exit_reason; /* reason why a command completed */
+
+ /* Wait for a command to complete, while snarfing up any output. */
+ while ( 1 )
+ {
+ /* Check for a complete command, briefly. */
+ i = try_wait( 500 );
+ /* Read in the output of all running commands. */
+ read_output();
+ /* Close out pending debug style dialogs. */
+ close_alerts();
+ /* Process the completed command we found. */
+ if ( i >= 0 ) { exit_reason = EXIT_OK; break; }
+ /* Check if a command ran out of time. */
+ i = try_kill_one();
+ if ( i >= 0 ) { exit_reason = EXIT_TIMEOUT; break; }
+ }
+
+ /* We have a command... process it. */
+ {
+ DWORD exit_code;
+ timing_info time;
+ int rstat;
+
+ /* The time data for the command. */
+ record_times( cmdtab[ i ].pi.hProcess, &time );
+
+ /* Remove the used temporary command file. */
+ if ( cmdtab[ i ].command_file->size )
+ unlink( cmdtab[ i ].command_file->value );
+
+ /* Find out the process exit code. */
+ GetExitCodeProcess( cmdtab[ i ].pi.hProcess, &exit_code );
+
+ /* The disposition of the command. */
+ if ( interrupted() )
+ rstat = EXEC_CMD_INTR;
+ else if ( exit_code )
+ rstat = EXEC_CMD_FAIL;
+ else
+ rstat = EXEC_CMD_OK;
+
+ /* Call the callback, may call back to jam rule land. */
+ (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time,
+ cmdtab[ i ].buffer_out->value, cmdtab[ i ].buffer_err->value,
+ exit_reason );
+
+ /* Clean up our child process tracking data. No need to clear the
+ * temporary command file name as it gets reused.
+ */
+ closeWinHandle( &cmdtab[ i ].pi.hProcess );
+ closeWinHandle( &cmdtab[ i ].pi.hThread );
+ closeWinHandle( &cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ i ].pipe_out[ EXECCMD_PIPE_WRITE ] );
+ closeWinHandle( &cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ i ].pipe_err[ EXECCMD_PIPE_WRITE ] );
+ string_renew( cmdtab[ i ].buffer_out );
+ string_renew( cmdtab[ i ].buffer_err );
+ }
+}
+
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+/*
+ * Invoke the actual external process using the given command line. Track the
+ * process in our running commands table.
+ */
+
+static void invoke_cmd( char const * const command, int const slot )
+{
+ SECURITY_ATTRIBUTES sa = { sizeof( SECURITY_ATTRIBUTES ), 0, 0 };
+ SECURITY_DESCRIPTOR sd;
+ STARTUPINFOA si = { sizeof( STARTUPINFOA ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0 };
+
+ /* Init the security data. */
+ InitializeSecurityDescriptor( &sd, SECURITY_DESCRIPTOR_REVISION );
+ SetSecurityDescriptorDacl( &sd, TRUE, NULL, FALSE );
+ sa.lpSecurityDescriptor = &sd;
+ sa.bInheritHandle = TRUE;
+
+ /* Create output buffers. */
+ string_new( cmdtab[ slot ].buffer_out );
+ string_new( cmdtab[ slot ].buffer_err );
+
+ /* Create pipes for communicating with the child process. */
+ if ( !CreatePipe( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ],
+ &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ], &sa, IO_BUFFER_SIZE ) )
+ {
+ reportWindowsError( "CreatePipe", slot );
+ return;
+ }
+ if ( globs.pipe_action && !CreatePipe( &cmdtab[ slot ].pipe_err[
+ EXECCMD_PIPE_READ ], &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ],
+ &sa, IO_BUFFER_SIZE ) )
+ {
+ reportWindowsError( "CreatePipe", slot );
+ return;
+ }
+
+ /* Set handle inheritance off for the pipe ends the parent reads from. */
+ SetHandleInformation( cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ],
+ HANDLE_FLAG_INHERIT, 0 );
+ if ( globs.pipe_action )
+ SetHandleInformation( cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_READ ],
+ HANDLE_FLAG_INHERIT, 0 );
+
+ /* Hide the child window, if any. */
+ si.dwFlags |= STARTF_USESHOWWINDOW;
+ si.wShowWindow = SW_HIDE;
+
+ /* Redirect the child's output streams to our pipes. */
+ si.dwFlags |= STARTF_USESTDHANDLES;
+ si.hStdOutput = cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ];
+ si.hStdError = globs.pipe_action
+ ? cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ]
+ : cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ];
+
+ /* Let the child inherit stdin, as some commands assume it is available. */
+ si.hStdInput = GetStdHandle( STD_INPUT_HANDLE );
+
+ if ( DEBUG_EXECCMD )
+ out_printf( "Command string for CreateProcessA(): '%s'\n", command );
+
+ /* Run the command by creating a sub-process for it. */
+ if ( !CreateProcessA(
+ NULL , /* application name */
+ (char *)command , /* command line */
+ NULL , /* process attributes */
+ NULL , /* thread attributes */
+ TRUE , /* inherit handles */
+ CREATE_NEW_PROCESS_GROUP, /* create flags */
+ NULL , /* env vars, null inherits env */
+ NULL , /* current dir, null is our current dir */
+ &si , /* startup info */
+ &cmdtab[ slot ].pi ) ) /* child process info, if created */
+ {
+ reportWindowsError( "CreateProcessA", slot );
+ return;
+ }
+
+ register_wait( slot );
+}
+
+
+/*
+ * For more details on Windows cmd.exe shell command-line length limitations see
+ * the following MSDN article:
+ * http://support.microsoft.com/default.aspx?scid=kb;en-us;830473
+ */
+
+static int raw_maxline()
+{
+ if ( IsWindowsVersionOrGreater(5,0,0) ) return 8191; /* XP */
+ if ( IsWindowsVersionOrGreater(4,0,0) ) return 2047; /* NT 4.x */
+ return 996; /* NT 3.5.1 */
+}
+
+static int maxline()
+{
+ static int result;
+ if ( !result ) result = raw_maxline();
+ return result;
+}
+
+
+/*
+ * Closes a Windows HANDLE and resets its variable to 0.
+ */
+
+static void closeWinHandle( HANDLE * const handle )
+{
+ if ( *handle )
+ {
+ CloseHandle( *handle );
+ *handle = 0;
+ }
+}
+
+
+/*
+ * Frees and renews the given string.
+ */
+
+static void string_renew( string * const s )
+{
+ string_free( s );
+ string_new( s );
+}
+
+
+/*
+ * raw_command_length() - valid raw command string length
+ *
+ * Checks whether the given command may be executed as a raw command. If yes,
+ * returns the corresponding command string length. If not, returns -1.
+ *
+ * Rules for constructing raw command strings:
+ * - Command may not contain unquoted shell I/O redirection characters.
+ * - May have at most one command line with non-whitespace content.
+ * - Leading whitespace trimmed.
+ * - Trailing all-whitespace lines trimmed.
+ * - Trailing whitespace on the sole command line kept (may theoretically
+ * affect the executed command).
+ */
+
+static long raw_command_length( char const * command )
+{
+ char const * p;
+ char const * escape = 0;
+ char inquote = 0;
+ char const * newline = 0;
+
+ /* Skip leading whitespace. */
+ while ( isspace( *command ) )
+ ++command;
+
+ p = command;
+
+ /* Look for newlines and unquoted I/O redirection. */
+ do
+ {
+ p += strcspn( p, "\n\"'<>|\\" );
+ switch ( *p )
+ {
+ case '\n':
+ /* If our command contains non-whitespace content split over
+ * multiple lines we can not execute it directly.
+ */
+ newline = p;
+ while ( isspace( *++p ) );
+ if ( *p ) return -1;
+ break;
+
+ case '\\':
+ escape = escape && escape == p - 1 ? 0 : p;
+ ++p;
+ break;
+
+ case '"':
+ case '\'':
+ if ( escape && escape == p - 1 )
+ escape = 0;
+ else if ( inquote == *p )
+ inquote = 0;
+ else if ( !inquote )
+ inquote = *p;
+ ++p;
+ break;
+
+ case '<':
+ case '>':
+ case '|':
+ if ( !inquote )
+ return -1;
+ ++p;
+ break;
+ }
+ }
+ while ( *p );
+
+ /* Return the number of characters the command will occupy. */
+ return ( newline ? newline : p ) - command;
+}
+
+
+/* 64-bit arithmetic helpers. */
+
+/* Compute the carry bit from the addition of two 32-bit unsigned numbers. */
+#define add_carry_bit( a, b ) ((((a) | (b)) >> 31) & (~((a) + (b)) >> 31) & 0x1)
+
+/* Compute the high 32 bits of the addition of two 64-bit unsigned numbers, h1l1
+ * and h2l2.
+ */
+#define add_64_hi( h1, l1, h2, l2 ) ((h1) + (h2) + add_carry_bit(l1, l2))
+
+
+/*
+ * Add two 64-bit unsigned numbers, h1l1 and h2l2.
+ */
+
+static FILETIME add_64
+(
+ unsigned long h1, unsigned long l1,
+ unsigned long h2, unsigned long l2
+)
+{
+ FILETIME result;
+ result.dwLowDateTime = l1 + l2;
+ result.dwHighDateTime = add_64_hi( h1, l1, h2, l2 );
+ return result;
+}
+
+
+static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 )
+{
+ return add_64( t1.dwHighDateTime, t1.dwLowDateTime, t2.dwHighDateTime,
+ t2.dwLowDateTime );
+}
+
+
+static FILETIME negate_FILETIME( FILETIME t )
+{
+ /* 2s complement negation */
+ return add_64( ~t.dwHighDateTime, ~t.dwLowDateTime, 0, 1 );
+}
+
+
+/*
+ * filetime_to_seconds() - Windows FILETIME --> number of seconds conversion
+ */
+
+static double filetime_to_seconds( FILETIME const ft )
+{
+ return ft.dwHighDateTime * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) +
+ ft.dwLowDateTime * 1.0e-7;
+}
+
+
+static void record_times( HANDLE const process, timing_info * const time )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
+ {
+ time->system = filetime_to_seconds( kernel );
+ time->user = filetime_to_seconds( user );
+ timestamp_from_filetime( &time->start, &creation );
+ timestamp_from_filetime( &time->end, &exit );
+ }
+}
+
+
+static char ioBuffer[ IO_BUFFER_SIZE + 1 ];
+
+#define FORWARD_PIPE_NONE 0
+#define FORWARD_PIPE_STDOUT 1
+#define FORWARD_PIPE_STDERR 2
+
+static void read_pipe
+(
+ HANDLE in, /* the pipe to read from */
+ string * out,
+ int forwarding_mode
+)
+{
+ DWORD bytesInBuffer = 0;
+ DWORD bytesAvailable = 0;
+ DWORD i;
+
+ for (;;)
+ {
+ /* check if we have any data to read */
+ if ( !PeekNamedPipe( in, NULL, IO_BUFFER_SIZE, NULL,
+ &bytesAvailable, NULL ) || bytesAvailable == 0 )
+ return;
+
+ /* we only read in the available bytes, to avoid blocking */
+ if ( !ReadFile( in, ioBuffer, bytesAvailable <= IO_BUFFER_SIZE ?
+ bytesAvailable : IO_BUFFER_SIZE, &bytesInBuffer, NULL ) || bytesInBuffer == 0 )
+ return;
+
+ /* Clean up some illegal chars. */
+ for ( i = 0; i < bytesInBuffer; ++i )
+ {
+ if ( ( (unsigned char)ioBuffer[ i ] < 1 ) )
+ ioBuffer[ i ] = '?';
+ }
+ /* Null-terminate. */
+ ioBuffer[ bytesInBuffer ] = '\0';
+ /* Append to the output. */
+ string_append( out, ioBuffer );
+ /* Copy it to our output if appropriate */
+ if ( forwarding_mode == FORWARD_PIPE_STDOUT )
+ out_data( ioBuffer );
+ else if ( forwarding_mode == FORWARD_PIPE_STDERR )
+ err_data( ioBuffer );
+ }
+}
+
+#define EARLY_OUTPUT( cmd ) \
+ ( ! ( cmd.flags & EXEC_CMD_QUIET ) )
+
+#define FORWARD_STDOUT( c ) \
+ ( ( EARLY_OUTPUT( c ) && ( globs.pipe_action != 2 ) ) ? \
+ FORWARD_PIPE_STDOUT : FORWARD_PIPE_NONE )
+#define FORWARD_STDERR( c ) \
+ ( ( EARLY_OUTPUT( c ) && ( globs.pipe_action & 2 ) ) ? \
+ FORWARD_PIPE_STDERR : FORWARD_PIPE_NONE )
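+
+/* A reading of the two macros above (explanatory note, not upstream text):
+ * with EXEC_CMD_QUIET unset, stdout is forwarded as it arrives unless
+ * globs.pipe_action == 2, and stderr is forwarded as it arrives only when
+ * ( globs.pipe_action & 2 ) is non-zero; in all cases the data is also
+ * accumulated in the slot buffers handed to the completion callback.
+ */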
+
+static void read_output()
+{
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ {
+ /* Read stdout data. */
+ if ( cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ] )
+ read_pipe( cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ],
+ cmdtab[ i ].buffer_out, FORWARD_STDOUT( cmdtab[ i ] ) );
+ /* Read stderr data. */
+ if ( cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ] )
+ read_pipe( cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ],
+ cmdtab[ i ].buffer_err, FORWARD_STDERR( cmdtab[ i ] ) );
+ }
+}
+
+static void CALLBACK try_wait_callback( void * data, BOOLEAN is_timeout )
+{
+ struct _cmdtab_t * slot = ( struct _cmdtab_t * )data;
+ WaitForSingleObject( process_queue.write_okay, INFINITE );
+ process_queue.job_index = slot - cmdtab;
+ assert( !is_timeout );
+ SetEvent( process_queue.read_okay );
+ /* Okay. Non-blocking. */
+ UnregisterWait( slot->wait_handle );
+}
+
+static int try_wait_impl( DWORD timeout )
+{
+ int job_index;
+ int res = WaitForSingleObject( process_queue.read_okay, timeout );
+ if ( res != WAIT_OBJECT_0 )
+ return -1;
+ job_index = process_queue.job_index;
+ SetEvent( process_queue.write_okay );
+ return job_index;
+}
+
+static void register_wait( int job_id )
+{
+ if ( globs.jobs > MAXIMUM_WAIT_OBJECTS )
+ {
+ RegisterWaitForSingleObject( &cmdtab[ job_id ].wait_handle,
+ cmdtab[ job_id ].pi.hProcess,
+ &try_wait_callback, &cmdtab[ job_id ], INFINITE,
+ WT_EXECUTEDEFAULT | WT_EXECUTEONLYONCE );
+ }
+}
+
+/*
+ * Waits for a single child process command to complete, or the timeout,
+ * whichever comes first. Returns the index of the completed command in the
+ * cmdtab array, or -1.
+ */
+
+static int try_wait( int const timeoutMillis )
+{
+ if ( globs.jobs <= MAXIMUM_WAIT_OBJECTS )
+ {
+ int i;
+ HANDLE active_handles[ MAXIMUM_WAIT_OBJECTS ];
+ int job_ids[ MAXIMUM_WAIT_OBJECTS ];
+ DWORD num_handles = 0;
+ DWORD wait_api_result;
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ if( cmdtab[ i ].pi.hProcess )
+ {
+ job_ids[ num_handles ] = i;
+ active_handles[ num_handles ] = cmdtab[ i ].pi.hProcess;
+ ++num_handles;
+ }
+ }
+ wait_api_result = WaitForMultipleObjects( num_handles, active_handles, FALSE, timeoutMillis );
+ if ( WAIT_OBJECT_0 <= wait_api_result && wait_api_result < WAIT_OBJECT_0 + globs.jobs )
+ {
+ return job_ids[ wait_api_result - WAIT_OBJECT_0 ];
+ }
+ else
+ {
+ return -1;
+ }
+ }
+ else
+ {
+ return try_wait_impl( timeoutMillis );
+ }
+
+}
+
+
+static int try_kill_one()
+{
+ /* Only need to check if a timeout was specified with the -l option. */
+ if ( globs.timeout > 0 )
+ {
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ {
+ double const t = running_time( cmdtab[ i ].pi.hProcess );
+ if ( t > (double)globs.timeout )
+ {
+ /* The job may have left an alert dialog around, try and get
+ * rid of it before killing the job itself.
+ */
+ close_alert( &cmdtab[ i ].pi );
+ /* We have a "runaway" job, kill it. */
+ kill_process_tree( cmdtab[ i ].pi.dwProcessId,
+ cmdtab[ i ].pi.hProcess );
+ /* And return its running commands table slot. */
+ return i;
+ }
+ }
+ }
+ return -1;
+}
+
+
+static void close_alerts()
+{
+ /* We only attempt this every 5 seconds or so, because it is not a cheap
+ * operation, and we will catch the alerts eventually. This check uses
+ * floats as some compilers define CLOCKS_PER_SEC as a float or double.
+ */
+ if ( ( (float)clock() / (float)( CLOCKS_PER_SEC * 5 ) ) < ( 1.0 / 5.0 ) )
+ {
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ close_alert( &cmdtab[ i ].pi );
+ }
+}
+
+
+/*
+ * Calc the current running time of an *active* process.
+ */
+
+static double running_time( HANDLE const process )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
+ {
+ /* Compute the elapsed time. */
+ FILETIME current;
+ GetSystemTimeAsFileTime( &current );
+ return filetime_to_seconds( add_FILETIME( current,
+ negate_FILETIME( creation ) ) );
+ }
+ return 0.0;
+}
+
+
+/*
+ * Not really optimal, or efficient, but it is easier this way, and it is not
+ * like we are going to be killing thousands, or even tens of processes.
+ */
+
+static void kill_process_tree( DWORD const pid, HANDLE const process )
+{
+ HANDLE const process_snapshot_h = CreateToolhelp32Snapshot(
+ TH32CS_SNAPPROCESS, 0 );
+ if ( INVALID_HANDLE_VALUE != process_snapshot_h )
+ {
+ BOOL ok = TRUE;
+ PROCESSENTRY32 pinfo;
+ pinfo.dwSize = sizeof( PROCESSENTRY32 );
+ for (
+ ok = Process32First( process_snapshot_h, &pinfo );
+ ok == TRUE;
+ ok = Process32Next( process_snapshot_h, &pinfo ) )
+ {
+ if ( pinfo.th32ParentProcessID == pid )
+ {
+ /* Found a child, recurse to kill it and anything else below it.
+ */
+ HANDLE const ph = OpenProcess( PROCESS_ALL_ACCESS, FALSE,
+ pinfo.th32ProcessID );
+ if ( ph )
+ {
+ kill_process_tree( pinfo.th32ProcessID, ph );
+ CloseHandle( ph );
+ }
+ }
+ }
+ CloseHandle( process_snapshot_h );
+ }
+ /* Now that the children are all dead, kill the root. */
+ TerminateProcess( process, -2 );
+}
+
+
+static double creation_time( HANDLE const process )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ return GetProcessTimes( process, &creation, &exit, &kernel, &user )
+ ? filetime_to_seconds( creation )
+ : 0.0;
+}
+
+
+/*
+ * Recursively checks whether the first process is a parent (directly or
+ * indirectly) of the second one. Both processes are passed as process ids, not
+ * handles. The special return value 2 means that the second process is
+ * smss.exe and its parent process is System (the first argument is ignored).
+ */
+
+static int is_parent_child( DWORD const parent, DWORD const child )
+{
+ HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
+
+ if ( !child )
+ return 0;
+ if ( parent == child )
+ return 1;
+
+ process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
+ if ( INVALID_HANDLE_VALUE != process_snapshot_h )
+ {
+ BOOL ok = TRUE;
+ PROCESSENTRY32 pinfo;
+ pinfo.dwSize = sizeof( PROCESSENTRY32 );
+ for (
+ ok = Process32First( process_snapshot_h, &pinfo );
+ ok == TRUE;
+ ok = Process32Next( process_snapshot_h, &pinfo ) )
+ {
+ if ( pinfo.th32ProcessID == child )
+ {
+ /* Unfortunately, process ids are not really unique. There might
+ * be a spurious "parent and child" relationship match between two
+ * unrelated processes if the real parent process of a given
+ * process has exited (while the child kept running as an
+ * "orphan") and the parent's process id has been reused by the
+ * operating system when creating another process.
+ *
+ * Thus an additional check is needed - process creation time.
+ * This check may fail (i.e. return 0) for system processes due
+ * to insufficient privileges, and that is OK.
+ */
+ double tchild = 0.0;
+ double tparent = 0.0;
+ HANDLE const hchild = OpenProcess( PROCESS_QUERY_INFORMATION,
+ FALSE, pinfo.th32ProcessID );
+ CloseHandle( process_snapshot_h );
+
+ /* csrss.exe may display a message box like the following:
+ * xyz.exe - Unable To Locate Component
+ * This application has failed to start because
+ * boost_foo-bar.dll was not found. Re-installing the
+ * application may fix the problem
+ * This actually happens when starting a test process that
+ * depends on a dynamic library which failed to build. We want
+ * to automatically close these message boxes even though
+ * csrss.exe is not our child process. We may depend on the fact
+ * that (in all current versions of Windows) csrss.exe is a
+ * direct child of the smss.exe process, which in turn is a
+ * direct child of the System process, which always has process
+ * id == 4. This check must be performed before comparing
+ * process creation times.
+ */
+
+#ifdef UNICODE // no PROCESSENTRY32A
+ if ( !wcsicmp( pinfo.szExeFile, L"csrss.exe" ) &&
+#else
+ if ( !stricmp( pinfo.szExeFile, "csrss.exe" ) &&
+#endif
+ is_parent_child( parent, pinfo.th32ParentProcessID ) == 2 )
+ return 1;
+
+#ifdef UNICODE // no PROCESSENTRY32A
+ if ( !wcsicmp( pinfo.szExeFile, L"smss.exe" ) &&
+#else
+ if ( !stricmp( pinfo.szExeFile, "smss.exe" ) &&
+#endif
+ ( pinfo.th32ParentProcessID == 4 ) )
+ return 2;
+
+ if ( hchild )
+ {
+ HANDLE hparent = OpenProcess( PROCESS_QUERY_INFORMATION,
+ FALSE, pinfo.th32ParentProcessID );
+ if ( hparent )
+ {
+ tchild = creation_time( hchild );
+ tparent = creation_time( hparent );
+ CloseHandle( hparent );
+ }
+ CloseHandle( hchild );
+ }
+
+ /* Return 0 if one of the following is true:
+ * 1. we failed to read process creation time
+ * 2. child was created before alleged parent
+ */
+ if ( ( tchild == 0.0 ) || ( tparent == 0.0 ) ||
+ ( tchild < tparent ) )
+ return 0;
+
+ return is_parent_child( parent, pinfo.th32ParentProcessID ) & 1;
+ }
+ }
+
+ CloseHandle( process_snapshot_h );
+ }
+
+ return 0;
+}
+
+
+/*
+ * Called by the OS for each topmost window.
+ */
+
+BOOL CALLBACK close_alert_window_enum( HWND hwnd, LPARAM lParam )
+{
+ char buf[ 7 ] = { 0 };
+ PROCESS_INFORMATION const * const pi = (PROCESS_INFORMATION *)lParam;
+ DWORD pid;
+ DWORD tid;
+
+ /* We want to find and close any window that:
+ * 1. is visible and
+ * 2. is a dialog and
+ * 3. is displayed by any of our child processes
+ */
+ if (
+ /* We assume hidden windows do not require user interaction. */
+ !IsWindowVisible( hwnd )
+ /* Failed to read class name; presume it is not a dialog. */
+ || !GetClassNameA( hwnd, buf, sizeof( buf ) )
+ /* All Windows system dialogs use the same Window class name. */
+ || strcmp( buf, "#32770" ) )
+ return TRUE;
+
+ /* GetWindowThreadProcessId() returns 0 on error, otherwise thread id of
+ * the window's message pump thread.
+ */
+ tid = GetWindowThreadProcessId( hwnd, &pid );
+ if ( !tid || !is_parent_child( pi->dwProcessId, pid ) )
+ return TRUE;
+
+ /* Ask real nice. */
+ PostMessageA( hwnd, WM_CLOSE, 0, 0 );
+
+ /* Wait and see if it worked. If not, insist. */
+ if ( WaitForSingleObject( pi->hProcess, 200 ) == WAIT_TIMEOUT )
+ {
+ PostThreadMessageA( tid, WM_QUIT, 0, 0 );
+ WaitForSingleObject( pi->hProcess, 300 );
+ }
+
+ /* Done, we do not want to check any other windows now. */
+ return FALSE;
+}
+
+
+static void close_alert( PROCESS_INFORMATION const * const pi )
+{
+ EnumWindows( &close_alert_window_enum, (LPARAM)pi );
+}
+
+
+/*
+ * Opens a command file in which to store the command for execution by an
+ * external shell. Returns a pointer to a FILE open for writing, or 0 in case
+ * such a file could not be opened. The file name used is stored back in the
+ * corresponding running commands table slot.
+ *
+ * Expects the running commands table slot's command_file attribute to contain
+ * either a zeroed out string object or one prepared previously by this same
+ * function.
+ */
+
+static FILE * open_command_file( int const slot )
+{
+ string * const command_file = cmdtab[ slot ].command_file;
+
+ /* If the temporary command file name has not already been prepared for this
+ * slot number, prepare a new one containing a '##' placeholder that will
+ * be changed later and needs to be located at a fixed distance from the
+ * end.
+ */
+ if ( !command_file->value )
+ {
+ DWORD const procID = GetCurrentProcessId();
+ string const * const tmpdir = path_tmpdir();
+ string_new( command_file );
+ string_reserve( command_file, tmpdir->size + 64 );
+ command_file->size = sprintf( command_file->value,
+ "%s\\jam%lu-%02d-##.bat", tmpdir->value, procID, slot );
+ }
+
+ /* For some reason opening a command file can fail intermittently. But doing
+ * some retries works. Most likely this is due to a previously existing file
+ * of the same name that happens to still be opened by an active virus
+ * scanner. Originally pointed out and fixed by Bronek Kozicki.
+ *
+ * We first try to open several differently named files to avoid having to
+ * wait idly if not absolutely necessary. Our temporary command file names
+ * contain a fixed position place holder we use for generating different
+ * file names.
+ */
+ {
+ char * const index1 = command_file->value + command_file->size - 6;
+ char * const index2 = index1 + 1;
+ int waits_remaining;
+ assert( command_file->value < index1 );
+ assert( index2 + 1 < command_file->value + command_file->size );
+ assert( index2[ 1 ] == '.' );
+ for ( waits_remaining = 3; ; --waits_remaining )
+ {
+ int index;
+ for ( index = 0; index != 20; ++index )
+ {
+ FILE * f;
+ *index1 = '0' + index / 10;
+ *index2 = '0' + index % 10;
+ f = fopen( command_file->value, "w" );
+ if ( f ) return f;
+ }
+ if ( !waits_remaining ) break;
+ Sleep( 250 );
+ }
+ }
+
+ return 0;
+}
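+
+/* Example with hypothetical values: with the temporary directory C:\Temp,
+ * process id 4321 and slot 3, the code above first tries
+ * C:\Temp\jam4321-03-00.bat, then ...-01.bat and so on up to ...-19.bat,
+ * sleeping 250 ms between full passes over the 20 candidate names.
+ */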
+
+
+/*
+ * Prepare a command file to be executed using an external shell.
+ */
+
+static char const * prepare_command_file( string const * command, int slot )
+{
+ FILE * const f = open_command_file( slot );
+ if ( !f )
+ {
+ err_printf( "failed to write command file!\n" );
+ exit( EXITBAD );
+ }
+ fputs( command->value, f );
+ fclose( f );
+ return cmdtab[ slot ].command_file->value;
+}
+
+
+/*
+ * Find a free slot in the running commands table.
+ */
+
+static int get_free_cmdtab_slot()
+{
+ int slot;
+ for ( slot = 0; slot < globs.jobs; ++slot )
+ if ( !cmdtab[ slot ].pi.hProcess )
+ return slot;
+ err_printf( "no slots for child!\n" );
+ exit( EXITBAD );
+}
+
+
+/*
+ * Put together the final command string we are to run.
+ */
+
+static void string_new_from_argv( string * result, char const * const * argv )
+{
+ assert( argv );
+ assert( argv[ 0 ] );
+ string_copy( result, *(argv++) );
+ while ( *argv )
+ {
+ string_push_back( result, ' ' );
+ string_push_back( result, '"' );
+ string_append( result, *(argv++) );
+ string_push_back( result, '"' );
+ }
+}
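+
+/* Example with hypothetical values: for
+ *     argv = { "cmd.exe /Q/C", "C:\Temp\jam4321-03-00.bat", NULL }
+ * the function above produces the single string
+ *     cmd.exe /Q/C "C:\Temp\jam4321-03-00.bat"
+ * i.e. the first element verbatim and every following element quoted.
+ */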
+
+
+/*
+ * Reports the error message for the last failed Windows API call.
+ */
+
+static void reportWindowsError( char const * const apiName, int slot )
+{
+ char * errorMessage;
+ char buf[24];
+ string * err_buf;
+ timing_info time;
+ DWORD const errorCode = GetLastError();
+ DWORD apiResult = FormatMessageA(
+ FORMAT_MESSAGE_ALLOCATE_BUFFER | /* __in DWORD dwFlags */
+ FORMAT_MESSAGE_FROM_SYSTEM |
+ FORMAT_MESSAGE_IGNORE_INSERTS,
+ NULL, /* __in_opt LPCVOID lpSource */
+ errorCode, /* __in DWORD dwMessageId */
+ 0, /* __in DWORD dwLanguageId */
+ (LPSTR)&errorMessage, /* __out LPTSTR lpBuffer */
+ 0, /* __in DWORD nSize */
+ 0 ); /* __in_opt va_list * Arguments */
+
+ /* Build a message as if the process had written to stderr. */
+ if ( globs.pipe_action )
+ err_buf = cmdtab[ slot ].buffer_err;
+ else
+ err_buf = cmdtab[ slot ].buffer_out;
+ string_append( err_buf, apiName );
+ string_append( err_buf, "() Windows API failed: " );
+ sprintf( buf, "%lu", errorCode );
+ string_append( err_buf, buf );
+
+ if ( !apiResult )
+ string_append( err_buf, ".\n" );
+ else
+ {
+ string_append( err_buf, " - " );
+ string_append( err_buf, errorMessage );
+ /* Make sure that the buffer is terminated with a newline */
+ if( err_buf->value[ err_buf->size - 1 ] != '\n' )
+ string_push_back( err_buf, '\n' );
+ LocalFree( errorMessage );
+ }
+
+ /* Since the process didn't actually start, use a blank timing_info. */
+ time.system = 0;
+ time.user = 0;
+ timestamp_current( &time.start );
+ timestamp_current( &time.end );
+
+ /* Invoke the callback with a failure status. */
+ (*cmdtab[ slot ].func)( cmdtab[ slot ].closure, EXEC_CMD_FAIL, &time,
+ cmdtab[ slot ].buffer_out->value, cmdtab[ slot ].buffer_err->value,
+ EXIT_OK );
+
+ /* Clean up any handles that were opened. */
+ closeWinHandle( &cmdtab[ slot ].pi.hProcess );
+ closeWinHandle( &cmdtab[ slot ].pi.hThread );
+ closeWinHandle( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ] );
+ closeWinHandle( &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ] );
+ string_renew( cmdtab[ slot ].buffer_out );
+ string_renew( cmdtab[ slot ].buffer_err );
+}
+
+
+#endif /* USE_EXECNT */
diff --git a/src/boost/tools/build/src/engine/execunix.cpp b/src/boost/tools/build/src/engine/execunix.cpp
new file mode 100644
index 000000000..2740743c7
--- /dev/null
+++ b/src/boost/tools/build/src/engine/execunix.cpp
@@ -0,0 +1,606 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2007 Noel Belcourt.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "execcmd.h"
+
+#include "lists.h"
+#include "output.h"
+#include "jam_strings.h"
+
+#include <errno.h>
+#include <signal.h>
+#include <stdio.h>
+#include <time.h>
+#include <unistd.h> /* vfork(), _exit(), STDOUT_FILENO and such */
+#include <sys/resource.h>
+#include <sys/times.h>
+#include <sys/wait.h>
+#include <poll.h>
+
+#if defined(sun) || defined(__sun)
+ #include <wait.h>
+#endif
+
+#ifdef USE_EXECUNIX
+
+#include <sys/times.h>
+
+#if defined(__APPLE__)
+ #define NO_VFORK
+#endif
+
+#ifdef NO_VFORK
+ #define vfork() fork()
+#endif
+
+
+/*
+ * execunix.c - execute a shell script on UNIX/OS2/AmigaOS
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp(). The
+ * default is: /bin/sh -c
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to the slot
+ * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
+ * not include a %, it is tacked on as the last argument.
+ *
+ * Each word must be an individual element in a jam variable value.
+ *
+ * Do not just set JAMSHELL to /bin/sh - it will not work!
+ *
+ * External routines:
+ * exec_check() - preprocess and validate the command.
+ * exec_cmd() - launch an async command execution.
+ * exec_wait() - wait for any of the async command processes to terminate.
+ */
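+
+/* Illustrative note (not upstream text): with, for example,
+ *     JAMSHELL = /bin/bash -c % ;
+ * the shell list is expanded to argv = { "/bin/bash", "-c", "<command text>" },
+ * and a "!" element, if present, would be replaced by the 1-based slot number.
+ * With JAMSHELL unset the default "/bin/sh -c" shell set up below is used.
+ */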
+
+/* find a free slot in the running commands table */
+static int get_free_cmdtab_slot();
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+static clock_t tps;
+
+/* We hold stdout & stderr child process information in two element arrays
+ * indexed as follows.
+ */
+#define OUT 0
+#define ERR 1
+
+static struct cmdtab_t
+{
+ int pid; /* on win32, a real process handle */
+ int fd[ 2 ]; /* file descriptors for stdout and stderr */
+ FILE * stream[ 2 ]; /* child's stdout and stderr file streams */
+ clock_t start_time; /* start time of child process */
+ int exit_reason; /* termination status */
+ char * buffer[ 2 ]; /* buffers to hold stdout and stderr, if any */
+ int buf_size[ 2 ]; /* buffer sizes in bytes */
+ timestamp start_dt; /* start of command timestamp */
+
+ int flags;
+
+ /* Function called when the command completes. */
+ ExecCmdCallback func;
+
+ /* Opaque data passed back to the 'func' callback. */
+ void * closure;
+} * cmdtab = NULL;
+static int cmdtab_size = 0;
+
+/* Contains both stdin and stdout of all processes.
+ * The length is either globs.jobs or globs.jobs * 2
+ * depending on globs.pipe_action.
+ */
+struct pollfd * wait_fds = NULL;
+#define WAIT_FDS_SIZE ( globs.jobs * ( globs.pipe_action ? 2 : 1 ) )
+#define GET_WAIT_FD( job_idx ) ( wait_fds + ( ( job_idx * ( globs.pipe_action ? 2 : 1 ) ) ) )
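+
+/* Layout example derived from the macros above: with globs.jobs == 4 and
+ * globs.pipe_action set, wait_fds holds 8 pollfd entries and GET_WAIT_FD( 2 )
+ * points at entries 4 (stdout) and 5 (stderr); without pipe_action there is a
+ * single entry per job.
+ */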
+
+/*
+ * exec_init() - global initialization
+ */
+void exec_init( void )
+{
+ int i;
+ if ( globs.jobs > cmdtab_size )
+ {
+ cmdtab = (cmdtab_t*)BJAM_REALLOC( cmdtab, globs.jobs * sizeof( *cmdtab ) );
+ memset( cmdtab + cmdtab_size, 0, ( globs.jobs - cmdtab_size ) * sizeof( *cmdtab ) );
+ wait_fds = (pollfd*)BJAM_REALLOC( wait_fds, WAIT_FDS_SIZE * sizeof ( *wait_fds ) );
+ for ( i = cmdtab_size; i < globs.jobs; ++i )
+ {
+ GET_WAIT_FD( i )[ OUT ].fd = -1;
+ GET_WAIT_FD( i )[ OUT ].events = POLLIN;
+ if ( globs.pipe_action )
+ {
+ GET_WAIT_FD( i )[ ERR ].fd = -1;
+ GET_WAIT_FD( i )[ ERR ].events = POLLIN;
+ }
+ }
+ cmdtab_size = globs.jobs;
+ }
+}
+
+void exec_done( void )
+{
+ BJAM_FREE( cmdtab );
+ BJAM_FREE( wait_fds );
+}
+
+/*
+ * exec_check() - preprocess and validate the command.
+ */
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+)
+{
+ int const is_raw_cmd = is_raw_command_request( *pShell );
+
+ /* We allow empty commands for non-default shells since we do not really
+ * know what they are going to do with such commands.
+ */
+ if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) )
+ return EXEC_CHECK_NOOP;
+
+ return is_raw_cmd
+ ? EXEC_CHECK_OK
+ : check_cmd_for_too_long_lines( command->value, MAXLINE, error_length,
+ error_max_length );
+}
+
+
+/*
+ * exec_cmd() - launch an async command execution.
+ */
+
+/* We hold file descriptors for pipes used to communicate with child processes
+ * in two element arrays indexed as follows.
+ */
+#define EXECCMD_PIPE_READ 0
+#define EXECCMD_PIPE_WRITE 1
+
+void exec_cmd
+(
+ string const * command,
+ int flags,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+)
+{
+ struct sigaction ignore, saveintr, savequit;
+ sigset_t chldmask, savemask;
+
+ int const slot = get_free_cmdtab_slot();
+ int out[ 2 ];
+ int err[ 2 ];
+ char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */
+
+ /* Initialize default shell. */
+ static LIST * default_shell;
+ if ( !default_shell )
+ default_shell = list_push_back( list_new(
+ object_new( "/bin/sh" ) ),
+ object_new( "-c" ) );
+
+ if ( list_empty( shell ) )
+ shell = default_shell;
+
+ /* Formulate argv. If shell was defined, be prepared for % and ! subs.
+ * Otherwise, use stock /bin/sh.
+ */
+ argv_from_shell( argv, shell, command->value, slot );
+
+ if ( DEBUG_EXECCMD )
+ {
+ int i;
+ out_printf( "Using shell: " );
+ list_print( shell );
+ out_printf( "\n" );
+ for ( i = 0; argv[ i ]; ++i )
+ out_printf( " argv[%d] = '%s'\n", i, argv[ i ] );
+ }
+
+ /* Create pipes for collecting child output. */
+ if ( pipe( out ) < 0 || ( globs.pipe_action && pipe( err ) < 0 ) )
+ {
+ perror( "pipe" );
+ exit( EXITBAD );
+ }
+
+ /* Start the command */
+
+ timestamp_current( &cmdtab[ slot ].start_dt );
+
+ if ( 0 < globs.timeout )
+ {
+ /* Handle hung processes by manually tracking elapsed time and signal
+ * process when time limit expires.
+ */
+ struct tms buf;
+ cmdtab[ slot ].start_time = times( &buf );
+
+ /* Make a global, only do this once. */
+ if ( !tps ) tps = sysconf( _SC_CLK_TCK );
+ }
+
+ /* Child does not need the read pipe ends used by the parent. */
+ fcntl( out[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC );
+ if ( globs.pipe_action )
+ fcntl( err[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC );
+
+ /* ignore SIGINT and SIGQUIT */
+ ignore.sa_handler = SIG_IGN;
+ sigemptyset(&ignore.sa_mask);
+ ignore.sa_flags = 0;
+ if (sigaction(SIGINT, &ignore, &saveintr) < 0)
+ return;
+ if (sigaction(SIGQUIT, &ignore, &savequit) < 0)
+ return;
+
+ /* block SIGCHLD */
+ sigemptyset(&chldmask);
+ sigaddset(&chldmask, SIGCHLD);
+ if (sigprocmask(SIG_BLOCK, &chldmask, &savemask) < 0)
+ return;
+
+ if ( ( cmdtab[ slot ].pid = vfork() ) == -1 )
+ {
+ perror( "vfork" );
+ exit( EXITBAD );
+ }
+
+ if ( cmdtab[ slot ].pid == 0 )
+ {
+ /*****************/
+ /* Child process */
+ /*****************/
+ int const pid = getpid();
+
+ /* restore previous signals */
+ sigaction(SIGINT, &saveintr, NULL);
+ sigaction(SIGQUIT, &savequit, NULL);
+ sigprocmask(SIG_SETMASK, &savemask, NULL);
+
+ /* Redirect stdout and stderr to pipes inherited from the parent. */
+ dup2( out[ EXECCMD_PIPE_WRITE ], STDOUT_FILENO );
+ dup2( globs.pipe_action ? err[ EXECCMD_PIPE_WRITE ] :
+ out[ EXECCMD_PIPE_WRITE ], STDERR_FILENO );
+ close( out[ EXECCMD_PIPE_WRITE ] );
+ if ( globs.pipe_action )
+ close( err[ EXECCMD_PIPE_WRITE ] );
+
+ /* Make this process a process group leader so that when we kill it, all
+ * child processes of this process are terminated as well. We use
+ * killpg( pid, SIGKILL ) to kill the process group leader and all its
+ * children.
+ */
+ if ( 0 < globs.timeout )
+ {
+ struct rlimit r_limit;
+ r_limit.rlim_cur = globs.timeout;
+ r_limit.rlim_max = globs.timeout;
+ setrlimit( RLIMIT_CPU, &r_limit );
+ }
+ if (0 != setpgid( pid, pid )) {
+ perror("setpgid(child)");
+ /* exit( EXITBAD ); */
+ }
+ execvp( argv[ 0 ], (char * *)argv );
+ perror( "execvp" );
+ _exit( 127 );
+ }
+
+ /******************/
+ /* Parent process */
+ /******************/
+
+ /* redundant call, ignore return value */
+ setpgid(cmdtab[ slot ].pid, cmdtab[ slot ].pid);
+
+ /* The parent does not need the write pipe ends used by the child. */
+ close( out[ EXECCMD_PIPE_WRITE ] );
+ if ( globs.pipe_action )
+ close( err[ EXECCMD_PIPE_WRITE ] );
+
+ /* Set both pipe read file descriptors to non-blocking. */
+ fcntl( out[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK );
+ if ( globs.pipe_action )
+ fcntl( err[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK );
+
+ /* Parent reads from out[ EXECCMD_PIPE_READ ]. */
+ cmdtab[ slot ].fd[ OUT ] = out[ EXECCMD_PIPE_READ ];
+ cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" );
+ if ( !cmdtab[ slot ].stream[ OUT ] )
+ {
+ perror( "fdopen" );
+ exit( EXITBAD );
+ }
+
+ /* Parent reads from err[ EXECCMD_PIPE_READ ]. */
+ if ( globs.pipe_action )
+ {
+ cmdtab[ slot ].fd[ ERR ] = err[ EXECCMD_PIPE_READ ];
+ cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" );
+ if ( !cmdtab[ slot ].stream[ ERR ] )
+ {
+ perror( "fdopen" );
+ exit( EXITBAD );
+ }
+ }
+
+ GET_WAIT_FD( slot )[ OUT ].fd = out[ EXECCMD_PIPE_READ ];
+ if ( globs.pipe_action )
+ GET_WAIT_FD( slot )[ ERR ].fd = err[ EXECCMD_PIPE_READ ];
+
+ cmdtab[ slot ].flags = flags;
+
+ /* Save input data into the selected running commands table slot. */
+ cmdtab[ slot ].func = func;
+ cmdtab[ slot ].closure = closure;
+
+ /* restore previous signals */
+ sigaction(SIGINT, &saveintr, NULL);
+ sigaction(SIGQUIT, &savequit, NULL);
+ sigprocmask(SIG_SETMASK, &savemask, NULL);
+}
+
+#undef EXECCMD_PIPE_READ
+#undef EXECCMD_PIPE_WRITE
+
+
+/* Returns 1 if file descriptor is closed, or 0 if it is still alive.
+ *
+ * i is index into cmdtab
+ *
+ * s (stream) indexes:
+ * - cmdtab[ i ].stream[ s ]
+ * - cmdtab[ i ].buffer[ s ]
+ * - cmdtab[ i ].fd [ s ]
+ */
+
+static int read_descriptor( int i, int s )
+{
+ int ret;
+ char buffer[ BUFSIZ ];
+
+ while ( 0 < ( ret = fread( buffer, sizeof( char ), BUFSIZ - 1,
+ cmdtab[ i ].stream[ s ] ) ) )
+ {
+ buffer[ ret ] = 0;
+
+ /* Copy it to our output if appropriate */
+ if ( ! ( cmdtab[ i ].flags & EXEC_CMD_QUIET ) )
+ {
+ if ( s == OUT && ( globs.pipe_action != 2 ) )
+ out_data( buffer );
+ else if ( s == ERR && ( globs.pipe_action & 2 ) )
+ err_data( buffer );
+ }
+
+ if ( !cmdtab[ i ].buffer[ s ] )
+ {
+ /* Never been allocated. */
+ if ( globs.max_buf && ret > globs.max_buf )
+ {
+ ret = globs.max_buf;
+ buffer[ ret ] = 0;
+ }
+ cmdtab[ i ].buf_size[ s ] = ret + 1;
+ cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 );
+ memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 );
+ }
+ else
+ {
+ /* Previously allocated. */
+ if ( cmdtab[ i ].buf_size[ s ] < globs.max_buf || !globs.max_buf )
+ {
+ char * tmp = cmdtab[ i ].buffer[ s ];
+ int const old_len = cmdtab[ i ].buf_size[ s ] - 1;
+ int const new_len = old_len + ret + 1;
+ cmdtab[ i ].buf_size[ s ] = new_len;
+ cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( new_len );
+ memcpy( cmdtab[ i ].buffer[ s ], tmp, old_len );
+ memcpy( cmdtab[ i ].buffer[ s ] + old_len, buffer, ret + 1 );
+ BJAM_FREE( tmp );
+ }
+ }
+ }
+
+ /* If the buffer is full, ensure the last buffer char is a newline so that
+ * the jam log contains the command status at the beginning of its own line
+ * instead of appended to the end of the previous output.
+ */
+ if ( globs.max_buf && globs.max_buf <= cmdtab[ i ].buf_size[ s ] )
+ cmdtab[ i ].buffer[ s ][ cmdtab[ i ].buf_size[ s ] - 2 ] = '\n';
+
+ return feof( cmdtab[ i ].stream[ s ] );
+}
+
+
+/*
+ * close_streams() - Close the stream and pipe descriptor.
+ */
+
+static void close_streams( int const i, int const s )
+{
+ fclose( cmdtab[ i ].stream[ s ] );
+ cmdtab[ i ].stream[ s ] = 0;
+
+ close( cmdtab[ i ].fd[ s ] );
+ cmdtab[ i ].fd[ s ] = 0;
+
+ GET_WAIT_FD( i )[ s ].fd = -1;
+}
+
+
+/*
+ * exec_wait() - wait for any of the async command processes to terminate.
+ *
+ * May register more than one terminated child process but will exit as soon as
+ * at least one has been registered.
+ */
+
+void exec_wait()
+{
+ int finished = 0;
+
+ /* Process children that signaled. */
+ while ( !finished )
+ {
+ int i;
+ int select_timeout = globs.timeout;
+
+ /* Check for timeouts:
+ * - kill children that already timed out
+ * - decide how long until the next one times out
+ */
+ if ( globs.timeout > 0 )
+ {
+ struct tms buf;
+ clock_t const current = times( &buf );
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pid )
+ {
+ clock_t const consumed =
+ ( current - cmdtab[ i ].start_time ) / tps;
+ if ( consumed >= globs.timeout )
+ {
+ killpg( cmdtab[ i ].pid, SIGKILL );
+ cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
+ }
+ else if ( globs.timeout - consumed < select_timeout )
+ select_timeout = globs.timeout - consumed;
+ }
+ }
+
+ /* select() will wait for I/O on a descriptor, a signal, or timeout. */
+ {
+ /* disable child termination signals while in select */
+ int ret;
+ int timeout;
+ sigset_t sigmask;
+ sigemptyset(&sigmask);
+ sigaddset(&sigmask, SIGCHLD);
+ sigprocmask(SIG_BLOCK, &sigmask, NULL);
+
+ /* If no timeout is specified, pass -1 (which means no timeout,
+ * wait indefinitely) to poll, to prevent busy-looping.
+ */
+ timeout = select_timeout? select_timeout * 1000 : -1;
+ while ( ( ret = poll( wait_fds, WAIT_FDS_SIZE, timeout ) ) == -1 )
+ if ( errno != EINTR )
+ break;
+ /* restore original signal mask by unblocking sigchld */
+ sigprocmask(SIG_UNBLOCK, &sigmask, NULL);
+ if ( ret <= 0 )
+ continue;
+ }
+
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ int out_done = 0;
+ int err_done = 0;
+ if ( GET_WAIT_FD( i )[ OUT ].revents )
+ out_done = read_descriptor( i, OUT );
+
+ if ( globs.pipe_action && ( GET_WAIT_FD( i )[ ERR ].revents ) )
+ err_done = read_descriptor( i, ERR );
+
+ /* If feof on either descriptor, we are done. */
+ if ( out_done || err_done )
+ {
+ int pid;
+ int status;
+ int rstat;
+ timing_info time_info;
+ struct rusage cmd_usage;
+
+ /* We found a terminated child process - our search is done. */
+ finished = 1;
+
+ /* Close the stream and pipe descriptors. */
+ close_streams( i, OUT );
+ if ( globs.pipe_action )
+ close_streams( i, ERR );
+
+ /* Reap the child and release resources. */
+ while ( ( pid = wait4( cmdtab[ i ].pid, &status, 0, &cmd_usage ) ) == -1 )
+ if ( errno != EINTR )
+ break;
+ if ( pid != cmdtab[ i ].pid )
+ {
+ err_printf( "unknown pid %d with errno = %d\n", pid, errno );
+ exit( EXITBAD );
+ }
+
+ /* Set reason for exit if not timed out. */
+ if ( WIFEXITED( status ) )
+ cmdtab[ i ].exit_reason = WEXITSTATUS( status )
+ ? EXIT_FAIL
+ : EXIT_OK;
+
+ {
+ time_info.system = ( (double)cmd_usage.ru_stime.tv_sec * 1000000.0
+ + (double)cmd_usage.ru_stime.tv_usec ) / 1000000.0;
+ time_info.user = ( (double)cmd_usage.ru_utime.tv_sec * 1000000.0
+ + (double)cmd_usage.ru_utime.tv_usec ) / 1000000.0;
+ timestamp_copy( &time_info.start, &cmdtab[ i ].start_dt );
+ timestamp_current( &time_info.end );
+ }
+
+ /* Drive the completion. */
+ if ( interrupted() )
+ rstat = EXEC_CMD_INTR;
+ else if ( status )
+ rstat = EXEC_CMD_FAIL;
+ else
+ rstat = EXEC_CMD_OK;
+
+ /* Call the callback, may call back to jam rule land. */
+ (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time_info,
+ cmdtab[ i ].buffer[ OUT ], cmdtab[ i ].buffer[ ERR ],
+ cmdtab[ i ].exit_reason );
+
+ /* Clean up the command's running commands table slot. */
+ BJAM_FREE( cmdtab[ i ].buffer[ OUT ] );
+ cmdtab[ i ].buffer[ OUT ] = 0;
+ cmdtab[ i ].buf_size[ OUT ] = 0;
+
+ BJAM_FREE( cmdtab[ i ].buffer[ ERR ] );
+ cmdtab[ i ].buffer[ ERR ] = 0;
+ cmdtab[ i ].buf_size[ ERR ] = 0;
+
+ cmdtab[ i ].pid = 0;
+ cmdtab[ i ].func = 0;
+ cmdtab[ i ].closure = 0;
+ cmdtab[ i ].start_time = 0;
+ }
+ }
+ }
+}
+
+
+/*
+ * Find a free slot in the running commands table.
+ */
+
+static int get_free_cmdtab_slot()
+{
+ int slot;
+ for ( slot = 0; slot < globs.jobs; ++slot )
+ if ( !cmdtab[ slot ].pid )
+ return slot;
+ err_printf( "no slots for child!\n" );
+ exit( EXITBAD );
+}
+
+# endif /* USE_EXECUNIX */
diff --git a/src/boost/tools/build/src/engine/execvms.cpp b/src/boost/tools/build/src/engine/execvms.cpp
new file mode 100644
index 000000000..6c95011c6
--- /dev/null
+++ b/src/boost/tools/build/src/engine/execvms.cpp
@@ -0,0 +1,419 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2015 Artur Shepilko.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+
+/*
+ * execvms.c - execute a shell script, ala VMS.
+ *
+ * The approach is this:
+ *
+ * If the command is a single line, and shorter than WRTLEN (what we believe to
+ * be the maximum line length), we just system() it.
+ *
+ * If the command is multi-line, or longer than WRTLEN, we write the command
+ * block to a temp file, splitting long lines (using "-" at the end of the line
+ * to indicate continuation), and then source that temp file. We use special
+ * logic to make sure we do not continue in the middle of a quoted string.
+ *
+ * 05/04/94 (seiwald) - async multiprocess interface; noop on VMS
+ * 12/20/96 (seiwald) - rewritten to handle multi-line commands well
+ * 01/14/96 (seiwald) - do not put -'s between "'s
+ * 01/19/15 (shepilko)- adapt for jam-3.1.19
+ */
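+
+/* Illustrative sketch only (not upstream text): a two-line command such as
+ *     cc foo.c
+ *     link foo
+ * is written to the temp file as
+ *     $ SET DEFAULT <current working directory>
+ *     $cc foo.c
+ *     $link foo
+ * and a single line longer than WRTLEN is written in WRTLEN-sized chunks
+ * joined with a trailing "-" continuation character, taking care not to split
+ * inside a double-quoted string.
+ */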
+
+#include "jam.h"
+#include "lists.h"
+#include "execcmd.h"
+#include "output.h"
+
+#ifdef OS_VMS
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <times.h>
+#include <unistd.h>
+#include <errno.h>
+
+
+#define WRTLEN 240
+
+#define MIN( a, b ) ((a) < (b) ? (a) : (b))
+
+#define CHAR_DQUOTE '"'
+
+#define VMS_PATH_MAX 1024
+#define VMS_COMMAND_MAX 1024
+
+#define VMS_WARNING 0
+#define VMS_SUCCESS 1
+#define VMS_ERROR 2
+#define VMS_FATAL 4
+
+char commandbuf[ VMS_COMMAND_MAX ] = { 0 };
+
+
+static int get_status(int vms_status);
+static clock_t get_cpu_time();
+
+/*
+ * exec_check() - preprocess and validate the command.
+ */
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+)
+{
+ int const is_raw_cmd = 1;
+
+ /* We allow empty commands for non-default shells since we do not really
+ * know what they are going to do with such commands.
+ */
+ if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) )
+ return EXEC_CHECK_NOOP;
+
+ return is_raw_cmd
+ ? EXEC_CHECK_OK
+ : check_cmd_for_too_long_lines( command->value, MAXLINE, error_length,
+ error_max_length );
+}
+
+
+/*
+ * exec_cmd() - execute system command.
+ */
+
+void exec_cmd
+(
+ string const * command,
+ int flags,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+)
+{
+ char * s;
+ char * e;
+ char * p;
+ int vms_status;
+ int status;
+ int rstat = EXEC_CMD_OK;
+ int exit_reason = EXIT_OK;
+ timing_info time_info;
+ timestamp start_dt;
+ struct tms start_time;
+ struct tms end_time;
+ char * cmd_string = command->value;
+
+
+ /* Start the command */
+
+ timestamp_current( &time_info.start );
+ times( &start_time );
+
+ /* See if command is more than one line discounting leading/trailing white
+ * space.
+ */
+ for ( s = cmd_string; *s && isspace( *s ); ++s );
+
+ e = p = strchr( s, '\n' );
+
+ while ( p && isspace( *p ) )
+ ++p;
+
+ /* If multi line or long, write to com file. Otherwise, exec directly. */
+ if ( ( p && *p ) || ( e - s > WRTLEN ) )
+ {
+ FILE * f;
+
+ /* Create temp file invocation. */
+
+ if ( !*commandbuf )
+ {
+ OBJECT * tmp_filename = 0;
+
+ tmp_filename = path_tmpfile();
+
+
+ /* Get the tmp file name in VMS format. */
+ {
+ string os_filename[ 1 ];
+ string_new( os_filename );
+ path_translate_to_os( object_str( tmp_filename ), os_filename );
+ object_free( tmp_filename );
+ tmp_filename = object_new( os_filename->value );
+ string_free( os_filename );
+ }
+
+ commandbuf[0] = '@';
+ strncat( commandbuf + 1, object_str( tmp_filename ),
+ VMS_COMMAND_MAX - 2);
+ }
+
+
+ /* Open tempfile. */
+ if ( !( f = fopen( commandbuf + 1, "w" ) ) )
+ {
+ printf( "can't open cmd_string file\n" );
+ rstat = EXEC_CMD_FAIL;
+ exit_reason = EXIT_FAIL;
+
+ times( &end_time );
+
+ timestamp_current( &time_info.end );
+ time_info.system = (double)( end_time.tms_cstime -
+ start_time.tms_cstime ) / 100.;
+ time_info.user = (double)( end_time.tms_cutime -
+ start_time.tms_cutime ) / 100.;
+
+ (*func)( closure, rstat, &time_info, "" , "", exit_reason );
+ return;
+ }
+
+
+ /* Running from TMP, so explicitly set default to CWD. */
+ {
+ char * cwd = NULL;
+ int cwd_buf_size = VMS_PATH_MAX;
+
+ while ( !(cwd = getcwd( NULL, cwd_buf_size ) ) /* alloc internally */
+ && errno == ERANGE )
+ {
+ cwd_buf_size += VMS_PATH_MAX;
+ }
+
+ if ( !cwd )
+ {
+ perror( "can not get current working directory" );
+ exit( EXITBAD );
+ }
+
+ fprintf( f, "$ SET DEFAULT %s\n", cwd);
+
+ free( cwd );
+ }
+
+
+ /* For each line of the command. */
+ while ( *cmd_string )
+ {
+ char * s = strchr( cmd_string,'\n' );
+ int len = s ? s + 1 - cmd_string : strlen( cmd_string );
+
+ fputc( '$', f );
+
+ /* For each chunk of a line that needs to be split. */
+ while ( len > 0 )
+ {
+ char * q = cmd_string;
+ char * qe = cmd_string + MIN( len, WRTLEN );
+ char * qq = q;
+ int quote = 0;
+
+ /* Look for matching "s -- expected in the same line. */
+ for ( ; q < qe; ++q )
+ if ( ( *q == CHAR_DQUOTE ) && ( quote = !quote ) )
+ qq = q;
+
+ /* When the chunk needs splitting and ends inside an open quote,
+ * back up to the opening quote and split there.
+ * When the quoted string spans the whole chunk,
+ * pass the string through as a whole.
+ * If no matching quote is found, dump the rest of the command.
+ */
+ if ( len > WRTLEN && quote )
+ {
+ q = qq;
+
+ if ( q == cmd_string )
+ {
+ for ( q = qe; q < ( cmd_string + len )
+ && *q != CHAR_DQUOTE ; ++q) {}
+ q = ( *q == CHAR_DQUOTE) ? ( q + 1 ) : ( cmd_string + len );
+ }
+ }
+
+ fwrite( cmd_string, ( q - cmd_string ), 1, f );
+
+ len -= ( q - cmd_string );
+ cmd_string = q;
+
+ if ( len )
+ {
+ fputc( '-', f );
+ fputc( '\n', f );
+ }
+ }
+ }
+
+ fclose( f );
+
+ if ( DEBUG_EXECCMD )
+ {
+ FILE * f;
+ char buf[ WRTLEN + 1 ] = { 0 };
+
+ if ( (f = fopen( commandbuf + 1, "r" ) ) )
+ {
+ int nbytes;
+ printf( "Command file: %s\n", commandbuf + 1 );
+
+ do
+ {
+ nbytes = fread( buf, sizeof( buf[0] ), sizeof( buf ) - 1, f );
+
+ if ( nbytes ) fwrite(buf, sizeof( buf[0] ), nbytes, stdout);
+ }
+ while ( !feof(f) );
+
+ fclose(f);
+ }
+ }
+
+ /* Execute command file */
+ vms_status = system( commandbuf );
+ status = get_status( vms_status );
+
+ unlink( commandbuf + 1 );
+ }
+ else
+ {
+ /* Execute a single-line command. Strip the trailing newline before execing.
+ * TODO: Calling via popen() and capturing the output may be better here.
+ */
+ if ( e ) *e = 0;
+
+ status = VMS_SUCCESS; /* success on empty command */
+ if ( *s )
+ {
+ vms_status = system( s );
+ status = get_status( vms_status );
+ }
+ }
+
+
+ times( &end_time );
+
+ timestamp_current( &time_info.end );
+ time_info.system = (double)( end_time.tms_cstime -
+ start_time.tms_cstime ) / 100.;
+ time_info.user = (double)( end_time.tms_cutime -
+ start_time.tms_cutime ) / 100.;
+
+
+ /* Fail for error or fatal error. OK on OK, warning or info exit. */
+ if ( ( status == VMS_ERROR ) || ( status == VMS_FATAL ) )
+ {
+ rstat = EXEC_CMD_FAIL;
+ exit_reason = EXIT_FAIL;
+ }
+
+ (*func)( closure, rstat, &time_info, "" , "", exit_reason );
+}
+
+
+void exec_wait()
+{
+ return;
+}
+
+
+/* get_status() - returns status of the VMS command execution.
+ - Map VMS status to its severity (lower 3-bits)
+ - W-DCL-IVVERB is returned on unrecognized command -- map to general ERROR
+*/
+int get_status( int vms_status )
+{
+#define VMS_STATUS_DCL_IVVERB 0x00038090
+
+ int status;
+
+ switch (vms_status)
+ {
+ case VMS_STATUS_DCL_IVVERB:
+ status = VMS_ERROR;
+ break;
+
+ default:
+ status = vms_status & 0x07; /* $SEVERITY bits */
+ }
+
+ return status;
+}
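+
+/* Example derived from the mapping above: get_status( 1 ) yields VMS_SUCCESS
+ * (SS$_NORMAL has severity 1), get_status( 0x00038090 ) yields VMS_ERROR (the
+ * DCL-W-IVVERB special case), and any status with severity 2 or 4 makes
+ * exec_cmd() report EXEC_CMD_FAIL.
+ */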
+
+
+#define __NEW_STARLET 1
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <time.h>
+#include <ssdef.h>
+#include <stsdef.h>
+#include <jpidef.h>
+#include <efndef.h>
+#include <iosbdef.h>
+#include <iledef.h>
+#include <lib$routines.h>
+#include <starlet.h>
+
+
+/*
+ * get_cpu_time() - returns CPU time in CLOCKS_PER_SEC since process start.
+ * on error returns (clock_t)-1.
+ *
+ * Intended to emulate (system + user) result of *NIX times(), if CRTL times()
+ * is not available.
+ * However, this accounts only for the current process. To account for child
+ * processes, they need to be spawned/forked directly via exec().
+ * Moreover, child processes should be running a C main program or a program
+ * that calls VAXC$CRTL_INIT or DECC$CRTL_INIT.
+ */
+
+clock_t get_cpu_time()
+{
+ clock_t result = (clock_t) 0;
+
+ IOSB iosb;
+ int status;
+ long cputime = 0;
+
+
+ ILE3 jpi_items[] = {
+ { sizeof( cputime ), JPI$_CPUTIM, &cputime, NULL }, /* longword int, 10ms */
+ { 0 },
+ };
+
+ status = sys$getjpiw (EFN$C_ENF, 0, 0, jpi_items, &iosb, 0, 0);
+
+ if ( !$VMS_STATUS_SUCCESS( status ) )
+ {
+ lib$signal( status );
+
+ result = (clock_t) -1;
+ return result;
+ }
+
+
+ result = ( cputime / 100 ) * CLOCKS_PER_SEC;
+
+ return result;
+}
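+
+/* Example derived from the code above: JPI$_CPUTIM reports 10 ms ticks, so a
+ * reading of 250 (2.5 s of CPU time) yields ( 250 / 100 ) * CLOCKS_PER_SEC,
+ * i.e. 2 * CLOCKS_PER_SEC; the integer division truncates to whole seconds.
+ */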
+
+
+# endif /* VMS */
+
diff --git a/src/boost/tools/build/src/engine/filent.cpp b/src/boost/tools/build/src/engine/filent.cpp
new file mode 100644
index 000000000..af89d5ef2
--- /dev/null
+++ b/src/boost/tools/build/src/engine/filent.cpp
@@ -0,0 +1,517 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filent.c - scan directories and archives on NT
+ *
+ * External routines:
+ * file_archscan() - scan an archive for files
+ * file_mkdir() - create a directory
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * External routines called only via routines in filesys.c:
+ * file_collect_dir_content_() - collects directory content information
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ * file_query_() - query information about a path from the OS
+ * file_collect_archive_content_() - collects information about archive members
+ * file_archivescan_() - OS specific file_archivescan() implementation
+ */
+
+#include "jam.h"
+#ifdef OS_NT
+#include "filesys.h"
+
+#include "object.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+#include "output.h"
+
+#ifdef __BORLANDC__
+# undef FILENAME /* cpp namespace collision */
+#endif
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+
+#include <assert.h>
+#include <ctype.h>
+#include <direct.h>
+#include <io.h>
+
+
+int file_collect_archive_content_( file_archive_info_t * const archive );
+
+/*
+ * file_collect_dir_content_() - collects directory content information
+ */
+
+int file_collect_dir_content_( file_info_t * const d )
+{
+ PATHNAME f;
+ string pathspec[ 1 ];
+ string pathname[ 1 ];
+ LIST * files = L0;
+ int d_length;
+
+ assert( d );
+ assert( d->is_dir );
+ assert( list_empty( d->files ) );
+
+ d_length = strlen( object_str( d->name ) );
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+ f.f_dir.ptr = object_str( d->name );
+ f.f_dir.len = d_length;
+
+ /* Prepare file search specification for the FindXXX() Windows API. */
+ if ( !d_length )
+ string_copy( pathspec, ".\\*" );
+ else
+ {
+ /* We cannot simply assume the given folder name will never include its
+ * trailing path separator, as otherwise we would not support the Windows
+ * root folder specified without its drive letter, i.e. '\'.
+ */
+ char const trailingChar = object_str( d->name )[ d_length - 1 ] ;
+ string_copy( pathspec, object_str( d->name ) );
+ if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) )
+ string_append( pathspec, "\\" );
+ string_append( pathspec, "*" );
+ }
+
+ /* The following code for collecting information about all files in a folder
+ * needs to be kept synchronized with how the file_query() operation is
+ * implemented (collects information about a single file).
+ */
+ {
+ /* FIXME: Avoid duplicate FindXXX Windows API calls here and in the code
+ * determining a normalized path.
+ */
+ WIN32_FIND_DATAA finfo;
+ HANDLE const findHandle = FindFirstFileA( pathspec->value, &finfo );
+ if ( findHandle == INVALID_HANDLE_VALUE )
+ {
+ string_free( pathspec );
+ return -1;
+ }
+
+ string_new( pathname );
+ do
+ {
+ OBJECT * pathname_obj;
+
+ f.f_base.ptr = finfo.cFileName;
+ f.f_base.len = strlen( finfo.cFileName );
+
+ string_truncate( pathname, 0 );
+ path_build( &f, pathname );
+
+ pathname_obj = object_new( pathname->value );
+ path_register_key( pathname_obj );
+ files = list_push_back( files, pathname_obj );
+ {
+ int found;
+ file_info_t * const ff = file_info( pathname_obj, &found );
+ ff->is_dir = finfo.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY;
+ ff->is_file = !ff->is_dir;
+ ff->exists = 1;
+ timestamp_from_filetime( &ff->time, &finfo.ftLastWriteTime );
+ // Use the timestamp of the link target, not the link itself
+ // (i.e. stat instead of lstat)
+ if ( finfo.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT )
+ {
+ HANDLE hLink = CreateFileA( pathname->value, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL );
+ BY_HANDLE_FILE_INFORMATION target_finfo[ 1 ];
+ if ( hLink != INVALID_HANDLE_VALUE && GetFileInformationByHandle( hLink, target_finfo ) )
+ {
+ ff->is_file = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 0 : 1;
+ ff->is_dir = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 1 : 0;
+ timestamp_from_filetime( &ff->time, &target_finfo->ftLastWriteTime );
+ }
+ }
+ }
+ }
+ while ( FindNextFileA( findHandle, &finfo ) );
+
+ FindClose( findHandle );
+ }
+
+ string_free( pathname );
+ string_free( pathspec );
+
+ d->files = files;
+ return 0;
+}
+
+
+/*
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ */
+
+void file_dirscan_( file_info_t * const d, scanback func, void * closure )
+{
+ assert( d );
+ assert( d->is_dir );
+
+ /* Special case \ or d:\ : enter it */
+ {
+ char const * const name = object_str( d->name );
+ if ( name[ 0 ] == '\\' && !name[ 1 ] )
+ {
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+ }
+ else if ( name[ 0 ] && name[ 1 ] == ':' && name[ 2 ] && !name[ 3 ] )
+ {
+ /* We have just entered a 3-letter drive name spelling (with a
+ * trailing slash), into the hash table. Now enter its two-letter
+ * variant, without the trailing slash, so that if we try to check
+ * whether "c:" exists, we hit it.
+ *
+ * Jam core has workarounds for that. Given:
+ * x = c:\whatever\foo ;
+ * p = $(x:D) ;
+ * p2 = $(p:D) ;
+ * There will be no trailing slash in $(p), but there will be one in
+ * $(p2). But, that seems rather fragile.
+ */
+ OBJECT * const dir_no_slash = object_new_range( name, 2 );
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+ (*func)( closure, dir_no_slash, 1 /* stat()'ed */, &d->time );
+ object_free( dir_no_slash );
+ }
+ }
+}
+
+
+/*
+ * file_mkdir() - create a directory
+ */
+
+int file_mkdir( char const * const path )
+{
+ return _mkdir( path );
+}
+
+
+/*
+ * file_query_() - query information about a path from the OS
+ *
+ * The following code for collecting information about a single file needs to be
+ * kept synchronized with how the file_collect_dir_content_() operation is
+ * implemented (collects information about all files in a folder).
+ */
+
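+/*
+ * try_file_query_root() - query a root path ("", "\" or "X:"/"X:\") directly
+ *
+ * Returns 1 if the path is a root and has been queried, or 0 otherwise, in
+ * which case the generic file_query_() logic below handles it.
+ */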
+int try_file_query_root( file_info_t * const info )
+{
+ WIN32_FILE_ATTRIBUTE_DATA fileData;
+ char buf[ 4 ];
+ char const * const pathstr = object_str( info->name );
+ if ( !pathstr[ 0 ] )
+ {
+ buf[ 0 ] = '.';
+ buf[ 1 ] = 0;
+ }
+ else if ( pathstr[ 0 ] == '\\' && ! pathstr[ 1 ] )
+ {
+ buf[ 0 ] = '\\';
+ buf[ 1 ] = '\0';
+ }
+ else if ( pathstr[ 1 ] == ':' )
+ {
+ if ( !pathstr[ 2 ] || ( pathstr[ 2 ] == '\\' && !pathstr[ 3 ] ) )
+ {
+ buf[ 0 ] = pathstr[ 0 ];
+ buf[ 1 ] = ':';
+ buf[ 2 ] = '\\';
+ buf[ 3 ] = '\0';
+ }
+ else
+ {
+ return 0;
+ }
+ }
+ else
+ {
+ return 0;
+ }
+
+ /* We have a root path */
+ if ( !GetFileAttributesExA( buf, GetFileExInfoStandard, &fileData ) )
+ {
+ info->is_dir = 0;
+ info->is_file = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ }
+ else
+ {
+ info->is_dir = fileData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY;
+ info->is_file = !info->is_dir;
+ info->exists = 1;
+ timestamp_from_filetime( &info->time, &fileData.ftLastWriteTime );
+ }
+ return 1;
+}
+
+void file_query_( file_info_t * const info )
+{
+ char const * const pathstr = object_str( info->name );
+ const char * dir;
+ OBJECT * parent;
+ file_info_t * parent_info;
+
+ if ( try_file_query_root( info ) )
+ return;
+
+ if ( ( dir = strrchr( pathstr, '\\' ) ) )
+ {
+ parent = object_new_range( pathstr, dir - pathstr );
+ }
+ else
+ {
+ parent = object_copy( constant_empty );
+ }
+ parent_info = file_query( parent );
+ object_free( parent );
+ if ( !parent_info || !parent_info->is_dir )
+ {
+ info->is_dir = 0;
+ info->is_file = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ }
+ else
+ {
+ info->is_dir = 0;
+ info->is_file = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
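+        /* Scanning the parent directory fills in the file_info cache entries
+         * for everything it contains (see file_collect_dir_content_()), so if
+         * this path exists its info gets populated as a side effect.
+         */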
+ if ( list_empty( parent_info->files ) )
+ file_collect_dir_content_( parent_info );
+ }
+}
+
+
+/*
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * Returns the minimum file modification timestamp resolution supported by this
+ * Boost Jam implementation. File modification timestamp changes of less than
+ * the returned value might not be recognized.
+ *
+ * Does not take into consideration any OS or file system related restrictions.
+ *
+ * Return value 0 indicates that any value supported by the OS is also supported
+ * here.
+ */
+
+void file_supported_fmt_resolution( timestamp * const t )
+{
+    /* On Windows we support the full file modification timestamp resolution
+     * provided by the OS itself (FILETIME values have 100-nanosecond
+     * granularity), so no additional coarsening is applied here.
+     */
+ timestamp_init( t, 0, 0 );
+}
+
+
+/*
+ * file_archscan() - scan an archive for files
+ */
+
+/* Straight from SunOS */
+
+#define ARMAG "!<arch>\n"
+#define SARMAG 8
+
+#define ARFMAG "`\n"
+
+struct ar_hdr
+{
+ char ar_name[ 16 ];
+ char ar_date[ 12 ];
+ char ar_uid[ 6 ];
+ char ar_gid[ 6 ];
+ char ar_mode[ 8 ];
+ char ar_size[ 10 ];
+ char ar_fmag[ 2 ];
+};
+
+#define SARFMAG 2
+#define SARHDR sizeof( struct ar_hdr )
+
+void file_archscan( char const * arch, scanback func, void * closure )
+{
+ OBJECT * path = object_new( arch );
+ file_archive_info_t * archive = file_archive_query( path );
+
+ object_free( path );
+
+ if ( filelist_empty( archive->members ) )
+ {
+ if ( file_collect_archive_content_( archive ) < 0 )
+ return;
+ }
+
+ /* Report the collected archive content. */
+ {
+ FILELISTITER iter = filelist_begin( archive->members );
+ FILELISTITER const end = filelist_end( archive->members );
+ char buf[ MAXJPATH ];
+
+ for ( ; iter != end ; iter = filelist_next( iter ) )
+ {
+ file_info_t * member_file = filelist_item( iter );
+
+ /* Construct member path: 'archive-path(member-name)'
+ */
+ sprintf( buf, "%s(%s)",
+ object_str( archive->file->name ),
+ object_str( member_file->name ) );
+ {
+ OBJECT * const member = object_new( buf );
+ (*func)( closure, member, 1 /* time valid */, &member_file->time );
+ object_free( member );
+ }
+ }
+ }
+}
+
+
+/*
+ * file_archivescan_() - OS specific file_archivescan() implementation
+ */
+
+void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
+ void * closure )
+{
+}
+
+
+/*
+ * file_collect_archive_content_() - collects information about archive members
+ */
+
+int file_collect_archive_content_( file_archive_info_t * const archive )
+{
+ struct ar_hdr ar_hdr;
+ char * string_table = 0;
+ char buf[ MAXJPATH ];
+ long offset;
+ const char * path = object_str( archive->file->name );
+ int const fd = open( path , O_RDONLY | O_BINARY, 0 );
+
+ if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
+
+ if ( fd < 0 )
+ return -1;
+
+ if ( read( fd, buf, SARMAG ) != SARMAG || strncmp( ARMAG, buf, SARMAG ) )
+ {
+ close( fd );
+ return -1;
+ }
+
+ offset = SARMAG;
+
+ if ( DEBUG_BINDSCAN )
+ out_printf( "scan archive %s\n", path );
+
+ while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
+ !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) )
+ {
+ long lar_date;
+ long lar_size;
+ char * name = 0;
+ char * endname;
+
+ sscanf( ar_hdr.ar_date, "%ld", &lar_date );
+ sscanf( ar_hdr.ar_size, "%ld", &lar_size );
+
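+        /* Archive members are padded to an even number of bytes, so round the
+         * size up accordingly when advancing through the file.
+         */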
+ lar_size = ( lar_size + 1 ) & ~1;
+
+ if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] == '/' )
+ {
+ /* This is the "string table" entry of the symbol table, holding
+ * filename strings longer than 15 characters, i.e. those that do
+ * not fit into ar_name.
+ */
+ string_table = (char*)BJAM_MALLOC_ATOMIC( lar_size + 1 );
+ if ( read( fd, string_table, lar_size ) != lar_size )
+ out_printf( "error reading string table\n" );
+ string_table[ lar_size ] = '\0';
+ offset += SARHDR + lar_size;
+ continue;
+ }
+ else if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] != ' ' )
+ {
+ /* Long filenames are recognized by "/nnnn" where nnnn is the
+ * string's offset in the string table represented in ASCII
+ * decimals.
+ */
+ name = string_table + atoi( ar_hdr.ar_name + 1 );
+ for ( endname = name; *endname && *endname != '\n'; ++endname );
+ }
+ else
+ {
+ /* normal name */
+ name = ar_hdr.ar_name;
+ endname = name + sizeof( ar_hdr.ar_name );
+ }
+
+ /* strip trailing white-space, slashes, and backslashes */
+
+ while ( endname-- > name )
+ if ( !isspace( *endname ) && ( *endname != '\\' ) && ( *endname !=
+ '/' ) )
+ break;
+ *++endname = 0;
+
+ /* strip leading directory names, an NT specialty */
+ {
+ char * c;
+ if ( (c = strrchr( name, '/' )) != nullptr )
+ name = c + 1;
+ if ( (c = strrchr( name, '\\' )) != nullptr )
+ name = c + 1;
+ }
+
+ sprintf( buf, "%.*s", int(endname - name), name );
+
+ if ( strcmp( buf, "") != 0 )
+ {
+ file_info_t * member = 0;
+
+ /* NT static libraries appear to store the objects in a sequence
+ * reverse to the order in which they were inserted.
+             * Here we reverse the stored sequence by pushing members to the
+             * front of the member file list to restore the intended order.
+ */
+ archive->members = filelist_push_front( archive->members, object_new( buf ) );
+ member = filelist_front( archive->members );
+ member->is_file = 1;
+ member->is_dir = 0;
+ member->exists = 0;
+ timestamp_init( &member->time, (time_t)lar_date, 0 );
+ }
+
+ offset += SARHDR + lar_size;
+ lseek( fd, offset, 0 );
+ }
+
+ close( fd );
+
+ return 0;
+}
+
+#endif /* OS_NT */
diff --git a/src/boost/tools/build/src/engine/filesys.cpp b/src/boost/tools/build/src/engine/filesys.cpp
new file mode 100644
index 000000000..c4b0756dc
--- /dev/null
+++ b/src/boost/tools/build/src/engine/filesys.cpp
@@ -0,0 +1,711 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filesys.c - OS independent file system manipulation support
+ *
+ * External routines:
+ * file_build1() - construct a path string based on PATHNAME information
+ * file_dirscan() - scan a directory for files
+ * file_done() - module cleanup called on shutdown
+ * file_info() - return cached information about a path
+ * file_is_file() - return whether a path identifies an existing file
+ * file_query() - get cached information about a path, query the OS if
+ * needed
+ * file_remove_atexit() - schedule a path to be removed on program exit
+ * file_time() - get a file timestamp
+ *
+ * External routines - utilities for OS specific module implementations:
+ * file_query_posix_() - query information about a path using POSIX stat()
+ *
+ * Internal routines:
+ * file_dirscan_impl() - no-profiling worker for file_dirscan()
+ */
+
+
+#include "jam.h"
+#include "filesys.h"
+
+#include "lists.h"
+#include "object.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+#include "output.h"
+
+#include <assert.h>
+#include <sys/stat.h>
+
+
+/* Internal OS specific implementation details - have names ending with an
+ * underscore and are expected to be implemented in an OS specific fileXXX.c
+ * module.
+ */
+void file_dirscan_( file_info_t * const dir, scanback func, void * closure );
+int file_collect_dir_content_( file_info_t * const dir );
+void file_query_( file_info_t * const );
+
+void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
+ void * closure );
+int file_collect_archive_content_( file_archive_info_t * const archive );
+void file_archive_query_( file_archive_info_t * const );
+
+static void file_archivescan_impl( OBJECT * path, archive_scanback func,
+ void * closure );
+static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure );
+static void free_file_archive_info( void * xarchive, void * data );
+static void free_file_info( void * xfile, void * data );
+
+static void remove_files_atexit( void );
+
+
+static struct hash * filecache_hash;
+static struct hash * archivecache_hash;
+
+
+/*
+ * file_archive_info() - return cached information about an archive
+ *
+ * Returns a default initialized structure containing only the queried file's info
+ * in case this is the first time this file system entity has been
+ * referenced.
+ */
+
+file_archive_info_t * file_archive_info( OBJECT * const path, int * found )
+{
+ OBJECT * const path_key = path_as_key( path );
+ file_archive_info_t * archive;
+
+ if ( !archivecache_hash )
+ archivecache_hash = hashinit( sizeof( file_archive_info_t ),
+ "file_archive_info" );
+
+ archive = (file_archive_info_t *)hash_insert( archivecache_hash, path_key,
+ found );
+
+ if ( !*found )
+ {
+ archive->name = path_key;
+ archive->file = 0;
+ archive->members = FL0;
+ }
+ else
+ object_free( path_key );
+
+ return archive;
+}
+
+
+/*
+ * file_archive_query() - get cached information about an archive file path
+ *
+ * Returns 0 in case querying the OS about the given path fails, e.g. because
+ * the path does not reference an existing file system object.
+ */
+
+file_archive_info_t * file_archive_query( OBJECT * const path )
+{
+ int found;
+ file_archive_info_t * const archive = file_archive_info( path, &found );
+ file_info_t * file = file_query( path );
+
+ if ( !( file && file->is_file ) )
+ {
+ return 0;
+ }
+
+ archive->file = file;
+
+
+ return archive;
+}
+
+
+
+/*
+ * file_archivescan() - scan an archive for members
+ */
+
+void file_archivescan( OBJECT * path, archive_scanback func, void * closure )
+{
+ PROFILE_ENTER( FILE_ARCHIVESCAN );
+ file_archivescan_impl( path, func, closure );
+ PROFILE_EXIT( FILE_ARCHIVESCAN );
+}
+
+
+/*
+ * file_build1() - construct a path string based on PATHNAME information
+ */
+
+void file_build1( PATHNAME * const f, string * file )
+{
+ if ( DEBUG_SEARCH )
+ {
+ out_printf( "build file: " );
+ if ( f->f_root.len )
+ out_printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr );
+ if ( f->f_dir.len )
+ out_printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr );
+ if ( f->f_base.len )
+ out_printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr );
+ out_printf( "\n" );
+ }
+
+ /* Start with the grist. If the current grist is not surrounded by <>'s, add
+ * them.
+ */
+ if ( f->f_grist.len )
+ {
+ if ( f->f_grist.ptr[ 0 ] != '<' )
+ string_push_back( file, '<' );
+ string_append_range(
+ file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len );
+ if ( file->value[ file->size - 1 ] != '>' )
+ string_push_back( file, '>' );
+ }
+}
+
+
+/*
+ * file_dirscan() - scan a directory for files
+ */
+
+void file_dirscan( OBJECT * dir, scanback func, void * closure )
+{
+ PROFILE_ENTER( FILE_DIRSCAN );
+ file_dirscan_impl( dir, func, closure );
+ PROFILE_EXIT( FILE_DIRSCAN );
+}
+
+
+/*
+ * file_done() - module cleanup called on shutdown
+ */
+
+void file_done()
+{
+ remove_files_atexit();
+ if ( filecache_hash )
+ {
+ hashenumerate( filecache_hash, free_file_info, (void *)0 );
+ hashdone( filecache_hash );
+ }
+
+ if ( archivecache_hash )
+ {
+ hashenumerate( archivecache_hash, free_file_archive_info, (void *)0 );
+ hashdone( archivecache_hash );
+ }
+}
+
+
+/*
+ * file_info() - return cached information about a path
+ *
+ * Returns a default initialized structure containing only the path's normalized
+ * name in case this is the first time this file system entity has been
+ * referenced.
+ */
+
+file_info_t * file_info( OBJECT * const path, int * found )
+{
+ OBJECT * const path_key = path_as_key( path );
+ file_info_t * finfo;
+
+ if ( !filecache_hash )
+ filecache_hash = hashinit( sizeof( file_info_t ), "file_info" );
+
+ finfo = (file_info_t *)hash_insert( filecache_hash, path_key, found );
+ if ( !*found )
+ {
+ finfo->name = path_key;
+ finfo->files = L0;
+ }
+ else
+ object_free( path_key );
+
+ return finfo;
+}
+
+
+/*
+ * file_is_file() - return whether a path identifies an existing file
+ */
+
+int file_is_file( OBJECT * const path )
+{
+ file_info_t const * const ff = file_query( path );
+ return ff ? ff->is_file : -1;
+}
+
+
+/*
+ * file_time() - get a file timestamp
+ */
+
+int file_time( OBJECT * const path, timestamp * const time )
+{
+ file_info_t const * const ff = file_query( path );
+ if ( !ff ) return -1;
+ timestamp_copy( time, &ff->time );
+ return 0;
+}
+
+
+/*
+ * file_query() - get cached information about a path, query the OS if needed
+ *
+ * Returns 0 in case querying the OS about the given path fails, e.g. because
+ * the path does not reference an existing file system object.
+ */
+
+file_info_t * file_query( OBJECT * const path )
+{
+ /* FIXME: Add tracking for disappearing files (i.e. those that can not be
+ * detected by stat() even though they had been detected successfully
+ * before) and see how they should be handled in the rest of Boost Jam code.
+ * Possibly allow Jamfiles to specify some files as 'volatile' which would
+ * make Boost Jam avoid caching information about those files and instead
+ * ask the OS about them every time.
+ */
+ int found;
+ file_info_t * const ff = file_info( path, &found );
+ if ( !found )
+ {
+ file_query_( ff );
+ if ( ff->exists )
+ {
+ /* Set the path's timestamp to 1 in case it is 0 or undetected to avoid
+ * confusion with non-existing paths.
+ */
+ if ( timestamp_empty( &ff->time ) )
+ timestamp_init( &ff->time, 1, 0 );
+ }
+ }
+ if ( !ff->exists )
+ {
+ return 0;
+ }
+ return ff;
+}
+
+#ifndef OS_NT
+
+/*
+ * file_query_posix_() - query information about a path using POSIX stat()
+ *
+ * Fallback file_query_() implementation for OS specific modules.
+ *
+ * Note that the Windows POSIX stat() function implementation suffers from
+ * several issues:
+ * * Does not support file timestamps with resolution finer than 1 second,
+ * meaning it can not be used to detect file timestamp changes of less than
+ * 1 second. One possible consequence is that some fast-paced touch commands
+ * (such as those done by Boost Build's internal testing system if it does
+ * not do some extra waiting) will not be detected correctly by the build
+ * system.
+ * * Returns file modification times automatically adjusted for daylight
+ * savings time even though daylight savings time should have nothing to do
+ * with internal time representation.
+ */
+
+void file_query_posix_( file_info_t * const info )
+{
+ struct stat statbuf;
+ char const * const pathstr = object_str( info->name );
+ char const * const pathspec = *pathstr ? pathstr : ".";
+
+ if ( stat( pathspec, &statbuf ) < 0 )
+ {
+ info->is_file = 0;
+ info->is_dir = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ }
+ else
+ {
+ info->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
+ info->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
+ info->exists = 1;
+#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809
+#if defined(OS_MACOSX)
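+        /* macOS exposes the nanosecond part via st_mtimespec rather than the
+         * POSIX st_mtim member used below.
+         */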
+ timestamp_init( &info->time, statbuf.st_mtimespec.tv_sec, statbuf.st_mtimespec.tv_nsec );
+#else
+ timestamp_init( &info->time, statbuf.st_mtim.tv_sec, statbuf.st_mtim.tv_nsec );
+#endif
+#else
+ timestamp_init( &info->time, statbuf.st_mtime, 0 );
+#endif
+ }
+}
+
+/*
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * Returns the minimum file modification timestamp resolution supported by this
+ * Boost Jam implementation. File modification timestamp changes of less than
+ * the returned value might not be recognized.
+ *
+ * Does not take into consideration any OS or file system related restrictions.
+ *
+ * Return value 0 indicates that any value supported by the OS is also supported
+ * here.
+ */
+
+void file_supported_fmt_resolution( timestamp * const t )
+{
+#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809
+ timestamp_init( t, 0, 1 );
+#else
+ /* The current implementation does not support file modification timestamp
+ * resolution of less than one second.
+ */
+ timestamp_init( t, 1, 0 );
+#endif
+}
+
+#endif
+
+
+/*
+ * file_remove_atexit() - schedule a path to be removed on program exit
+ */
+
+static LIST * files_to_remove = L0;
+
+void file_remove_atexit( OBJECT * const path )
+{
+ files_to_remove = list_push_back( files_to_remove, object_copy( path ) );
+}
+
+
+/*
+ * file_archivescan_impl() - no-profiling worker for file_archivescan()
+ */
+
+static void file_archivescan_impl( OBJECT * path, archive_scanback func, void * closure )
+{
+ file_archive_info_t * const archive = file_archive_query( path );
+ if ( !archive || !archive->file->is_file )
+ return;
+
+ /* Lazy collect the archive content information. */
+ if ( filelist_empty( archive->members ) )
+ {
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", object_str( archive->file->name ) );
+ if ( file_collect_archive_content_( archive ) < 0 )
+ return;
+ }
+
+ /* OS specific part of the file_archivescan operation. */
+ file_archivescan_( archive, func, closure );
+
+ /* Report the collected archive content. */
+ {
+ FILELISTITER iter = filelist_begin( archive->members );
+ FILELISTITER const end = filelist_end( archive->members );
+ char buf[ MAXJPATH ];
+
+ for ( ; iter != end ; iter = filelist_next( iter ) )
+ {
+ file_info_t * member_file = filelist_item( iter );
+ LIST * symbols = member_file->files;
+
+ /* Construct member path: 'archive-path(member-name)'
+ */
+ sprintf( buf, "%s(%s)",
+ object_str( archive->file->name ),
+ object_str( member_file->name ) );
+
+ {
+ OBJECT * const member = object_new( buf );
+ (*func)( closure, member, symbols, 1, &member_file->time );
+ object_free( member );
+ }
+ }
+ }
+}
+
+
+/*
+ * file_dirscan_impl() - no-profiling worker for file_dirscan()
+ */
+
+static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure )
+{
+ file_info_t * const d = file_query( dir );
+ if ( !d || !d->is_dir )
+ return;
+
+ /* Lazy collect the directory content information. */
+ if ( list_empty( d->files ) )
+ {
+ if ( DEBUG_BINDSCAN )
+ out_printf( "scan directory %s\n", object_str( d->name ) );
+ if ( file_collect_dir_content_( d ) < 0 )
+ return;
+ }
+
+ /* OS specific part of the file_dirscan operation. */
+ file_dirscan_( d, func, closure );
+
+ /* Report the collected directory content. */
+ {
+ LISTITER iter = list_begin( d->files );
+ LISTITER const end = list_end( d->files );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ OBJECT * const path = list_item( iter );
+ file_info_t const * const ffq = file_query( path );
+ /* Using a file name read from a file_info_t structure allows OS
+ * specific implementations to store some kind of a normalized file
+ * name there. Using such a normalized file name then allows us to
+ * correctly recognize different file paths actually identifying the
+ * same file. For instance, an implementation may:
+ * - convert all file names internally to lower case on a case
+ * insensitive file system
+ *      - convert NTFS paths to their long path variants, as on that file
+ *        system each entity may have both a long and a short path variant,
+ *        allowing many different path strings to identify the same file.
+ */
+ (*func)( closure, ffq->name, 1 /* stat()'ed */, &ffq->time );
+ }
+ }
+}
+
+
+static void free_file_archive_info( void * xarchive, void * data )
+{
+ file_archive_info_t * const archive = (file_archive_info_t *)xarchive;
+
+ if ( archive ) filelist_free( archive->members );
+}
+
+
+static void free_file_info( void * xfile, void * data )
+{
+ file_info_t * const file = (file_info_t *)xfile;
+ object_free( file->name );
+ list_free( file->files );
+}
+
+
+static void remove_files_atexit( void )
+{
+ LISTITER iter = list_begin( files_to_remove );
+ LISTITER const end = list_end( files_to_remove );
+ for ( ; iter != end; iter = list_next( iter ) )
+ remove( object_str( list_item( iter ) ) );
+ list_free( files_to_remove );
+ files_to_remove = L0;
+}
+
+
+/*
+ * FILELIST linked-list implementation
+ */
+
+FILELIST * filelist_new( OBJECT * path )
+{
+ FILELIST * list = (FILELIST *)BJAM_MALLOC( sizeof( FILELIST ) );
+
+ memset( list, 0, sizeof( *list ) );
+ list->size = 0;
+ list->head = 0;
+ list->tail = 0;
+
+ return filelist_push_back( list, path );
+}
+
+FILELIST * filelist_push_back( FILELIST * list, OBJECT * path )
+{
+ FILEITEM * item;
+ file_info_t * file;
+
+ /* Lazy initialization
+ */
+ if ( filelist_empty( list ) )
+ {
+ list = filelist_new( path );
+ return list;
+ }
+
+
+ item = (FILEITEM *)BJAM_MALLOC( sizeof( FILEITEM ) );
+ memset( item, 0, sizeof( *item ) );
+ item->value = (file_info_t *)BJAM_MALLOC( sizeof( file_info_t ) );
+
+ file = item->value;
+ memset( file, 0, sizeof( *file ) );
+
+ file->name = path;
+ file->files = L0;
+
+ if ( list->tail )
+ {
+ list->tail->next = item;
+ }
+ else
+ {
+ list->head = item;
+ }
+ list->tail = item;
+ list->size++;
+
+ return list;
+}
+
+FILELIST * filelist_push_front( FILELIST * list, OBJECT * path )
+{
+ FILEITEM * item;
+ file_info_t * file;
+
+ /* Lazy initialization
+ */
+ if ( filelist_empty( list ) )
+ {
+ list = filelist_new( path );
+ return list;
+ }
+
+
+ item = (FILEITEM *)BJAM_MALLOC( sizeof( FILEITEM ) );
+ memset( item, 0, sizeof( *item ) );
+ item->value = (file_info_t *)BJAM_MALLOC( sizeof( file_info_t ) );
+
+ file = item->value;
+ memset( file, 0, sizeof( *file ) );
+
+ file->name = path;
+ file->files = L0;
+
+ if ( list->head )
+ {
+ item->next = list->head;
+ }
+ else
+ {
+ list->tail = item;
+ }
+ list->head = item;
+ list->size++;
+
+ return list;
+}
+
+
+FILELIST * filelist_pop_front( FILELIST * list )
+{
+ FILEITEM * item;
+
+ if ( filelist_empty( list ) ) return list;
+
+ item = list->head;
+
+ if ( item )
+ {
+ if ( item->value ) free_file_info( item->value, 0 );
+
+ list->head = item->next;
+ list->size--;
+ if ( !list->size ) list->tail = list->head;
+
+#ifdef BJAM_NO_MEM_CACHE
+ BJAM_FREE( item );
+#endif
+ }
+
+ return list;
+}
+
+int filelist_length( FILELIST * list )
+{
+ int result = 0;
+ if ( !filelist_empty( list ) ) result = list->size;
+
+ return result;
+}
+
+void filelist_free( FILELIST * list )
+{
+ if ( filelist_empty( list ) ) return;
+
+ while ( filelist_length( list ) ) filelist_pop_front( list );
+
+#ifdef BJAM_NO_MEM_CACHE
+ BJAM_FREE( list );
+#endif
+}
+
+int filelist_empty( FILELIST * list )
+{
+ return ( list == FL0 );
+}
+
+
+FILELISTITER filelist_begin( FILELIST * list )
+{
+ if ( filelist_empty( list )
+ || list->head == 0 ) return (FILELISTITER)0;
+
+ return &list->head->value;
+}
+
+
+FILELISTITER filelist_end( FILELIST * list )
+{
+ return (FILELISTITER)0;
+}
+
+
+FILELISTITER filelist_next( FILELISTITER iter )
+{
+ if ( iter )
+ {
+        /* Given FILEITEM.value is defined as the first member of the FILEITEM
+         * structure and FILELISTITER == &FILEITEM.value, the enclosing item is
+         * simply (FILEITEM *)FILELISTITER.
+         */
+ FILEITEM * item = (FILEITEM *)iter;
+ iter = ( item->next ? &item->next->value : (FILELISTITER)0 );
+ }
+
+ return iter;
+}
+
+
+file_info_t * filelist_item( FILELISTITER it )
+{
+ file_info_t * result = (file_info_t *)0;
+
+ if ( it )
+ {
+ result = (file_info_t *)*it;
+ }
+
+ return result;
+}
+
+
+file_info_t * filelist_front( FILELIST * list )
+{
+ if ( filelist_empty( list )
+ || list->head == 0 ) return (file_info_t *)0;
+
+ return list->head->value;
+}
+
+
+file_info_t * filelist_back( FILELIST * list )
+{
+ if ( filelist_empty( list )
+ || list->tail == 0 ) return (file_info_t *)0;
+
+ return list->tail->value;
+}
diff --git a/src/boost/tools/build/src/engine/filesys.h b/src/boost/tools/build/src/engine/filesys.h
new file mode 100644
index 000000000..48ea6d171
--- /dev/null
+++ b/src/boost/tools/build/src/engine/filesys.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filesys.h - OS specific file routines
+ */
+
+#ifndef FILESYS_DWA20011025_H
+#define FILESYS_DWA20011025_H
+
+#include "config.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "pathsys.h"
+#include "timestamp.h"
+
+
+typedef struct file_info_t
+{
+ OBJECT * name;
+ char is_file;
+ char is_dir;
+ char exists;
+ timestamp time;
+ LIST * files;
+} file_info_t;
+
+typedef struct file_item FILEITEM;
+struct file_item
+{
+    file_info_t * value; /* must stay the first member: &value doubles as the FILEITEM address */
+ FILEITEM * next;
+};
+
+typedef struct file_list
+{
+ FILEITEM * head;
+ FILEITEM * tail;
+ int size;
+} FILELIST;
+
+typedef file_info_t * * FILELISTITER; /* also &FILEITEM equivalent */
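+/* Since FILEITEM.value is its first member, a FILELISTITER (a pointer to that
+ * member) can be cast back to the owning FILEITEM to advance the iteration;
+ * see filelist_next() in filesys.cpp.
+ */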
+
+
+typedef struct file_archive_info_t
+{
+ OBJECT * name;
+ file_info_t * file;
+ FILELIST * members;
+} file_archive_info_t;
+
+
+typedef void (*archive_scanback)( void * closure, OBJECT * path, LIST * symbols,
+ int found, timestamp const * const );
+typedef void (*scanback)( void * closure, OBJECT * path, int found,
+ timestamp const * const );
+
+
+void file_archscan( char const * arch, scanback func, void * closure );
+void file_archivescan( OBJECT * path, archive_scanback func, void * closure );
+void file_build1( PATHNAME * const f, string * file ) ;
+void file_dirscan( OBJECT * dir, scanback func, void * closure );
+file_info_t * file_info( OBJECT * const path, int * found );
+int file_is_file( OBJECT * const path );
+int file_mkdir( char const * const path );
+file_info_t * file_query( OBJECT * const path );
+void file_remove_atexit( OBJECT * const path );
+void file_supported_fmt_resolution( timestamp * const );
+int file_time( OBJECT * const path, timestamp * const );
+
+
+/* Archive/library file support */
+file_archive_info_t * file_archive_info( OBJECT * const path, int * found );
+file_archive_info_t * file_archive_query( OBJECT * const path );
+
+/* FILELIST linked-list */
+FILELIST * filelist_new( OBJECT * path );
+FILELIST * filelist_push_back( FILELIST * list, OBJECT * path );
+FILELIST * filelist_push_front( FILELIST * list, OBJECT * path );
+FILELIST * filelist_pop_front( FILELIST * list );
+int filelist_length( FILELIST * list );
+void filelist_free( FILELIST * list );
+
+FILELISTITER filelist_begin( FILELIST * list );
+FILELISTITER filelist_end( FILELIST * list );
+FILELISTITER filelist_next( FILELISTITER it );
+file_info_t * filelist_item( FILELISTITER it );
+file_info_t * filelist_front( FILELIST * list );
+file_info_t * filelist_back( FILELIST * list );
+
+int filelist_empty( FILELIST * list );
+
+#define FL0 ((FILELIST *)0)
+
+
+/* Internal utility worker functions. */
+void file_query_posix_( file_info_t * const );
+
+void file_done();
+
+#endif
diff --git a/src/boost/tools/build/src/engine/fileunix.cpp b/src/boost/tools/build/src/engine/fileunix.cpp
new file mode 100644
index 000000000..4c1b6adf3
--- /dev/null
+++ b/src/boost/tools/build/src/engine/fileunix.cpp
@@ -0,0 +1,527 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS
+ *
+ * External routines:
+ * file_archscan() - scan an archive for files
+ * file_mkdir() - create a directory
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * External routines called only via routines in filesys.c:
+ * file_collect_dir_content_() - collects directory content information
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ * file_query_() - query information about a path from the OS
+ * file_collect_archive_content_() - collects information about archive members
+ * file_archivescan_() - OS specific file_archivescan() implementation
+ */
+
+#include "jam.h"
+#ifdef USE_FILEUNIX
+#include "filesys.h"
+
+#include "object.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+#include "output.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <sys/stat.h> /* needed for mkdir() */
+
+#if defined( sun ) || defined( __sun ) || defined( linux )
+# include <unistd.h> /* needed for read and close prototype */
+#endif
+
+#if defined( OS_SEQUENT ) || \
+ defined( OS_DGUX ) || \
+ defined( OS_SCO ) || \
+ defined( OS_ISC )
+# define PORTAR 1
+#endif
+
+#if defined( OS_RHAPSODY ) || defined( OS_MACOSX ) || defined( OS_NEXT )
+# include <sys/dir.h>
+# include <unistd.h> /* need unistd for rhapsody's proper lseek */
+# define STRUCT_DIRENT struct direct
+#else
+# include <dirent.h>
+# define STRUCT_DIRENT struct dirent
+#endif
+
+#ifdef OS_COHERENT
+# include <arcoff.h>
+# define HAVE_AR
+#endif
+
+#if defined( OS_MVS ) || defined( OS_INTERIX )
+#define ARMAG "!<arch>\n"
+#define SARMAG 8
+#define ARFMAG "`\n"
+#define HAVE_AR
+
+struct ar_hdr /* archive file member header - printable ascii */
+{
+ char ar_name[ 16 ]; /* file member name - `/' terminated */
+ char ar_date[ 12 ]; /* file member date - decimal */
+ char ar_uid[ 6 ]; /* file member user id - decimal */
+ char ar_gid[ 6 ]; /* file member group id - decimal */
+ char ar_mode[ 8 ]; /* file member mode - octal */
+ char ar_size[ 10 ]; /* file member size - decimal */
+ char ar_fmag[ 2 ]; /* ARFMAG - string to end header */
+};
+#endif
+
+#if defined( OS_QNX ) || \
+ defined( OS_BEOS ) || \
+ defined( OS_HAIKU ) || \
+ defined( OS_MPEIX )
+# define NO_AR
+# define HAVE_AR
+#endif
+
+#ifndef HAVE_AR
+# ifdef OS_AIX
+/* Define these for AIX to get the definitions for both small and big archive
+ * file format variants.
+ */
+# define __AR_SMALL__
+# define __AR_BIG__
+# endif
+# include <ar.h>
+#endif
+
+
+/*
+ * file_collect_dir_content_() - collects directory content information
+ */
+
+int file_collect_dir_content_( file_info_t * const d )
+{
+ LIST * files = L0;
+ PATHNAME f;
+ int n;
+ STRUCT_DIRENT ** namelist;
+ STRUCT_DIRENT * dirent;
+ string path[ 1 ];
+ char const * dirstr;
+
+ assert( d );
+ assert( d->is_dir );
+ assert( list_empty( d->files ) );
+
+ dirstr = object_str( d->name );
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+ f.f_dir.ptr = dirstr;
+ f.f_dir.len = strlen( dirstr );
+
+ if ( !*dirstr ) dirstr = ".";
+
+ if ( -1 == ( n = scandir( dirstr, &namelist, NULL, alphasort ) ) )
+ return -1;
+
+ string_new( path );
+ while ( n-- )
+ {
+ OBJECT * name;
+ dirent = namelist[ n ];
+ f.f_base.ptr = dirent->d_name
+ #ifdef old_sinix
+ - 2 /* Broken structure definition on sinix. */
+ #endif
+ ;
+ f.f_base.len = strlen( f.f_base.ptr );
+
+ string_truncate( path, 0 );
+ path_build( &f, path );
+ name = object_new( path->value );
+ /* Immediately stat the file to preserve invariants. */
+ if ( file_query( name ) )
+ files = list_push_back( files, name );
+ else
+ object_free( name );
+ free( dirent );
+ }
+ string_free( path );
+
+ free( namelist );
+
+ d->files = files;
+ return 0;
+}
+
+
+/*
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ */
+
+void file_dirscan_( file_info_t * const d, scanback func, void * closure )
+{
+ assert( d );
+ assert( d->is_dir );
+
+ /* Special case / : enter it */
+ if ( !strcmp( object_str( d->name ), "/" ) )
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+}
+
+
+/*
+ * file_mkdir() - create a directory
+ */
+
+int file_mkdir( char const * const path )
+{
+ /* Explicit cast to remove const modifiers and avoid related compiler
+ * warnings displayed when using the intel compiler.
+ */
+ return mkdir( (char *)path, 0777 );
+}
+
+
+/*
+ * file_query_() - query information about a path from the OS
+ */
+
+void file_query_( file_info_t * const info )
+{
+ file_query_posix_( info );
+}
+
+
+int file_collect_archive_content_( file_archive_info_t * const archive );
+
+/*
+ * file_archscan() - scan an archive for files
+ */
+void file_archscan( char const * arch, scanback func, void * closure )
+{
+ OBJECT * path = object_new( arch );
+ file_archive_info_t * archive = file_archive_query( path );
+
+ object_free( path );
+
+ if ( filelist_empty( archive->members ) )
+ {
+ if ( file_collect_archive_content_( archive ) < 0 )
+ return;
+ }
+
+ /* Report the collected archive content. */
+ {
+ FILELISTITER iter = filelist_begin( archive->members );
+ FILELISTITER const end = filelist_end( archive->members );
+ char buf[ MAXJPATH ];
+
+ for ( ; iter != end ; iter = filelist_next( iter ) )
+ {
+ file_info_t * member_file = filelist_item( iter );
+
+ /* Construct member path: 'archive-path(member-name)'
+ */
+ sprintf( buf, "%s(%s)",
+ object_str( archive->file->name ),
+ object_str( member_file->name ) );
+ {
+ OBJECT * const member = object_new( buf );
+ (*func)( closure, member, 1 /* time valid */, &member_file->time );
+ object_free( member );
+ }
+ }
+ }
+}
+
+
+/*
+ * file_archivescan_() - OS specific file_archivescan() implementation
+ */
+
+void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
+ void * closure )
+{
+}
+
+
+/*
+ * file_collect_archive_content_() - collects information about archive members
+ */
+
+#ifndef AIAMAG /* God-fearing UNIX */
+
+#define SARFMAG 2
+#define SARHDR sizeof( struct ar_hdr )
+
+int file_collect_archive_content_( file_archive_info_t * const archive )
+{
+#ifndef NO_AR
+ struct ar_hdr ar_hdr;
+ char * string_table = 0;
+ char buf[ MAXJPATH ];
+ long offset;
+ int fd;
+ const char * path = object_str( archive->file->name );
+
+ if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
+
+ if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 )
+ return -1;
+
+ if ( read( fd, buf, SARMAG ) != SARMAG ||
+ strncmp( ARMAG, buf, SARMAG ) )
+ {
+ close( fd );
+ return -1;
+ }
+
+ offset = SARMAG;
+
+ if ( DEBUG_BINDSCAN )
+ out_printf( "scan archive %s\n", path );
+
+ while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
+ !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG )
+#ifdef ARFZMAG
+ /* OSF also has a compressed format */
+ && memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG )
+#endif
+ ) )
+ {
+ char lar_name_[ 257 ];
+ char * lar_name = lar_name_ + 1;
+ long lar_date;
+ long lar_size;
+ long lar_offset;
+ char * c;
+ char * src;
+ char * dest;
+
+ size_t ar_hdr_name_size = sizeof( ar_hdr.ar_name ); // Workaround for sizeof strncpy warning.
+ strncpy( lar_name, ar_hdr.ar_name, ar_hdr_name_size );
+
+ sscanf( ar_hdr.ar_date, "%ld", &lar_date );
+ sscanf( ar_hdr.ar_size, "%ld", &lar_size );
+
+ if ( ar_hdr.ar_name[ 0 ] == '/' )
+ {
+ if ( ar_hdr.ar_name[ 1 ] == '/' )
+ {
+ /* This is the "string table" entry of the symbol table, holding
+ * filename strings longer than 15 characters, i.e. those that
+ * do not fit into ar_name.
+ */
+ string_table = (char *)BJAM_MALLOC_ATOMIC( lar_size );
+ lseek( fd, offset + SARHDR, 0 );
+ if ( read( fd, string_table, lar_size ) != lar_size )
+ out_printf("error reading string table\n");
+ }
+ else if ( string_table && ar_hdr.ar_name[ 1 ] != ' ' )
+ {
+ /* Long filenames are recognized by "/nnnn" where nnnn is the
+ * offset of the string in the string table represented in ASCII
+ * decimals.
+ */
+ dest = lar_name;
+ lar_offset = atoi( lar_name + 1 );
+ src = &string_table[ lar_offset ];
+ while ( *src != '/' )
+ *dest++ = *src++;
+ *dest = '/';
+ }
+ }
+
+ c = lar_name - 1;
+ while ( ( *++c != ' ' ) && ( *c != '/' ) );
+ *c = '\0';
+
+ if ( DEBUG_BINDSCAN )
+ out_printf( "archive name %s found\n", lar_name );
+
+ sprintf( buf, "%s", lar_name );
+
+ if ( strcmp( buf, "") != 0 )
+ {
+ file_info_t * member = 0;
+
+ archive->members = filelist_push_back( archive->members, object_new( buf ) );
+ member = filelist_back( archive->members );
+ member->is_file = 1;
+ member->is_dir = 0;
+ member->exists = 0;
+ timestamp_init( &member->time, (time_t)lar_date, 0 );
+ }
+
+ offset += SARHDR + ( ( lar_size + 1 ) & ~1 );
+ lseek( fd, offset, 0 );
+ }
+
+ if ( string_table )
+ BJAM_FREE( string_table );
+
+ close( fd );
+#endif /* NO_AR */
+
+ return 0;
+}
+
+#else /* AIAMAG - RS6000 AIX */
+
+static void collect_archive_content_small( int fd, file_archive_info_t * const archive )
+{
+ struct fl_hdr fl_hdr;
+
+ struct {
+ struct ar_hdr hdr;
+ char pad[ 256 ];
+ } ar_hdr ;
+
+ char buf[ MAXJPATH ];
+ long offset;
+ const char * path = object_str( archive->file->name );
+
+ if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ )
+ return;
+
+ sscanf( fl_hdr.fl_fstmoff, "%ld", &offset );
+
+ if ( DEBUG_BINDSCAN )
+ out_printf( "scan archive %s\n", path );
+
+ while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 &&
+ read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= (int)sizeof( ar_hdr.hdr ) )
+ {
+ long lar_date;
+ int lar_namlen;
+
+ sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
+ sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date );
+ sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset );
+
+ if ( !lar_namlen )
+ continue;
+
+ ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
+
+ sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name );
+
+ if ( strcmp( buf, "") != 0 )
+ {
+ file_info_t * member = 0;
+
+ archive->members = filelist_push_back( archive->members, object_new( buf ) );
+ member = filelist_back( archive->members );
+ member->is_file = 1;
+ member->is_dir = 0;
+ member->exists = 0;
+ timestamp_init( &member->time, (time_t)lar_date, 0 );
+ }
+ }
+}
+
+/* Check for OS versions supporting the big variant. */
+#ifdef AR_HSZ_BIG
+
+static void collect_archive_content_big( int fd, file_archive_info_t * const archive )
+{
+ struct fl_hdr_big fl_hdr;
+
+ struct {
+ struct ar_hdr_big hdr;
+ char pad[ 256 ];
+ } ar_hdr ;
+
+ char buf[ MAXJPATH ];
+ long long offset;
+ const char * path = object_str( archive->file->name );
+
+ if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG ) != FL_HSZ_BIG )
+ return;
+
+ sscanf( fl_hdr.fl_fstmoff, "%lld", &offset );
+
+ if ( DEBUG_BINDSCAN )
+ out_printf( "scan archive %s\n", path );
+
+ while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 &&
+ read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) )
+ {
+ long lar_date;
+ int lar_namlen;
+
+ sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
+ sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date );
+ sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset );
+
+ if ( !lar_namlen )
+ continue;
+
+ ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
+
+ sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name );
+
+ if ( strcmp( buf, "") != 0 )
+ {
+ file_info_t * member = 0;
+
+ archive->members = filelist_push_back( archive->members, object_new( buf ) );
+ member = filelist_back( archive->members );
+ member->is_file = 1;
+ member->is_dir = 0;
+ member->exists = 0;
+ timestamp_init( &member->time, (time_t)lar_date, 0 );
+ }
+ }
+}
+
+#endif /* AR_HSZ_BIG */
+
+int file_collect_archive_content_( file_archive_info_t * const archive )
+{
+ int fd;
+ char fl_magic[ SAIAMAG ];
+ const char * path = object_str( archive->file->name );
+
+ if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
+
+ if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 )
+ return -1;
+
+ if ( read( fd, fl_magic, SAIAMAG ) != SAIAMAG ||
+ lseek( fd, 0, SEEK_SET ) == -1 )
+ {
+ close( fd );
+ return -1;
+ }
+
+ if ( !strncmp( AIAMAG, fl_magic, SAIAMAG ) )
+ {
+ /* read small variant */
+ collect_archive_content_small( fd, archive );
+ }
+#ifdef AR_HSZ_BIG
+ else if ( !strncmp( AIAMAGBIG, fl_magic, SAIAMAG ) )
+ {
+ /* read big variant */
+ collect_archive_content_big( fd, archive );
+ }
+#endif
+
+ close( fd );
+
+ return 0;
+}
+
+#endif /* AIAMAG - RS6000 AIX */
+
+#endif /* USE_FILEUNIX */
diff --git a/src/boost/tools/build/src/engine/filevms.cpp b/src/boost/tools/build/src/engine/filevms.cpp
new file mode 100644
index 000000000..ab038d065
--- /dev/null
+++ b/src/boost/tools/build/src/engine/filevms.cpp
@@ -0,0 +1,440 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Copyright 2015 Artur Shepilko.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+
+#include "jam.h"
+#include "filesys.h"
+
+#include "object.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+
+
+#ifdef OS_VMS
+
+/*
+ * filevms.c - manipulate file names and scan directories on VMS.
+ *
+ * This implementation is based on POSIX-style path manipulation.
+ *
+ * The VMS CRTL directly supports both POSIX- and native VMS-style path
+ * expressions, with the POSIX-to-VMS path translation performed internally by
+ * the same set of functions. For the most part such processing is transparent,
+ * with a few differences mainly related to file versions (in POSIX mode only
+ * the most recent version is visible).
+ *
+ * This should allow us to re-use the fileunix.c implementation,
+ * excluding archive/library member processing.
+ *
+ * Thus in jam-files the path references can also remain POSIX/UNIX-style on all
+ * levels EXCEPT in actions scope, where these must be translated to the native
+ * VMS-style. This approach is somewhat similar to jam CYGWIN handling.
+ *
+ *
+ * External routines:
+ * file_archscan() - scan an archive for files
+ * file_mkdir() - create a directory
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * External routines called only via routines in filesys.c:
+ * file_collect_dir_content_() - collects directory content information
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ * file_query_() - query information about a path from the OS
+ * file_collect_archive_content_() - collects information about archive members
+ * file_archivescan_() - OS specific file_archivescan() implementation
+ */
+
+#include <assert.h>
+#include <stdio.h>
+
+#include <sys/stat.h> /* needed for mkdir() */
+#include <unistd.h> /* needed for read and close prototype */
+
+#include <dirent.h>
+#define STRUCT_DIRENT struct dirent
+
+
+void path_translate_to_os_( char const * f, string * file );
+
+/*
+ * file_collect_dir_content_() - collects directory content information
+ */
+
+int file_collect_dir_content_( file_info_t * const d )
+{
+ LIST * files = L0;
+ PATHNAME f;
+ DIR * dd;
+ STRUCT_DIRENT * dirent;
+ string path[ 1 ];
+ char const * dirstr;
+
+ assert( d );
+ assert( d->is_dir );
+ assert( list_empty( d->files ) );
+
+ dirstr = object_str( d->name );
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+ f.f_dir.ptr = dirstr;
+ f.f_dir.len = strlen( dirstr );
+
+ if ( !*dirstr ) dirstr = ".";
+
+ if ( !( dd = opendir( dirstr ) ) )
+ return -1;
+
+ string_new( path );
+ while ( ( dirent = readdir( dd ) ) )
+ {
+ OBJECT * name;
+ f.f_base.ptr = dirent->d_name
+ #ifdef old_sinix
+ - 2 /* Broken structure definition on sinix. */
+ #endif
+ ;
+ f.f_base.len = strlen( f.f_base.ptr );
+
+ string_truncate( path, 0 );
+ path_build( &f, path );
+ name = object_new( path->value );
+ /* Immediately stat the file to preserve invariants. */
+ if ( file_query( name ) )
+ files = list_push_back( files, name );
+ else
+ object_free( name );
+ }
+ string_free( path );
+
+ closedir( dd );
+
+ d->files = files;
+ return 0;
+}
+
+
+/*
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ */
+
+void file_dirscan_( file_info_t * const d, scanback func, void * closure )
+{
+ assert( d );
+ assert( d->is_dir );
+
+ /* Special case / : enter it */
+ if ( !strcmp( object_str( d->name ), "/" ) )
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+}
+
+
+/*
+ * file_mkdir() - create a directory
+ */
+
+int file_mkdir( char const * const path )
+{
+ /* Explicit cast to remove const modifiers and avoid related compiler
+ * warnings displayed when using the intel compiler.
+ */
+ return mkdir( (char *)path, 0777 );
+}
+
+
+/*
+ * file_query_() - query information about a path from the OS
+ */
+
+void file_query_( file_info_t * const info )
+{
+ file_query_posix_( info );
+}
+
+
+/*------------------------------------------------------------------------------
+* VMS-specific processing:
+*
+*/
+
+#include <descrip.h>
+#include <lbrdef.h>
+#include <credef.h>
+#include <mhddef.h>
+#include <lhidef.h>
+#include <lib$routines.h>
+#include <starlet.h>
+
+/* Supply missing prototypes for lbr$-routines*/
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+int lbr$set_module(
+ void **,
+ unsigned long *,
+ struct dsc$descriptor_s *,
+ unsigned short *,
+ void * );
+
+int lbr$open( void **,
+ struct dsc$descriptor_s *,
+ void *,
+ void *,
+ void *,
+ void *,
+ void * );
+
+int lbr$ini_control(
+ void **,
+ unsigned long *,
+ unsigned long *,
+ void * );
+
+int lbr$get_index(
+ void **,
+ unsigned long * const,
+ int (*func)( struct dsc$descriptor_s *, unsigned long *),
+ void * );
+
+int lbr$search(
+ void **,
+ unsigned long * const,
+ unsigned short *,
+ int (*func)( struct dsc$descriptor_s *, unsigned long *),
+ unsigned long *);
+
+int lbr$close(
+ void ** );
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+
+static void
+file_cvttime(
+ unsigned int *curtime,
+ time_t *unixtime )
+{
+ static const size_t divisor = 10000000;
+ static unsigned int bastim[2] = { 0x4BEB4000, 0x007C9567 }; /* 1/1/1970 */
+ int delta[2], remainder;
+
+ lib$subx( curtime, bastim, delta );
+ lib$ediv( &divisor, delta, unixtime, &remainder );
+}
+
+
+static void downcase_inplace( char * p )
+{
+ for ( ; *p; ++p )
+ *p = tolower( *p );
+}
+
+
+static file_archive_info_t * m_archive = NULL;
+static file_info_t * m_member_found = NULL;
+static void * m_lbr_context = NULL;
+static unsigned short * m_rfa_found = NULL;
+static const unsigned long LBR_MODINDEX_NUM = 1,
+ LBR_SYMINDEX_NUM = 2; /* GST:global symbol table */
+
+
+static unsigned int set_archive_symbol( struct dsc$descriptor_s *symbol,
+ unsigned long *rfa )
+{
+ file_info_t * member = m_member_found;
+ char buf[ MAXJPATH ] = { 0 };
+
+ strncpy(buf, symbol->dsc$a_pointer, symbol->dsc$w_length);
+ buf[ symbol->dsc$w_length ] = 0;
+
+ member->files = list_push_back( member->files, object_new( buf ) );
+
+ return ( 1 ); /* continue */
+}
+
+
+static unsigned int set_archive_member( struct dsc$descriptor_s *module,
+ unsigned long *rfa )
+{
+ file_archive_info_t * archive = m_archive;
+
+ static struct dsc$descriptor_s bufdsc =
+ {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
+
+ struct mhddef *mhd;
+ char filename[128] = { 0 };
+ char buf[ MAXJPATH ] = { 0 };
+
+ int status;
+ time_t library_date;
+
+ register int i;
+ register char *p;
+
+ bufdsc.dsc$a_pointer = filename;
+ bufdsc.dsc$w_length = sizeof( filename );
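+    /* lbr$set_module() returns the module header (mhddef) in the supplied
+     * buffer; its mhd$l_datim field provides the member's insertion date used
+     * below.
+     */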
+ status = lbr$set_module( &m_lbr_context, rfa, &bufdsc,
+ &bufdsc.dsc$w_length, NULL );
+
+ if ( !(status & 1) )
+ return ( 1 ); /* continue */
+
+ mhd = (struct mhddef *)filename;
+
+ file_cvttime( &mhd->mhd$l_datim, &library_date );
+
+ /* strncpy( filename, module->dsc$a_pointer, module->dsc$w_length );
+ */
+ for ( i = 0, p = module->dsc$a_pointer; i < module->dsc$w_length; ++i, ++p )
+ filename[ i ] = *p;
+
+ filename[ i ] = '\0';
+
+ if ( strcmp( filename, "" ) != 0 )
+ {
+ file_info_t * member = 0;
+
+ /* Construct member's filename as lowercase "module.obj" */
+ sprintf( buf, "%s.obj", filename );
+ downcase_inplace( buf );
+ archive->members = filelist_push_back( archive->members, object_new( buf ) );
+
+ member = filelist_back( archive->members );
+ member->is_file = 1;
+ member->is_dir = 0;
+ member->exists = 0;
+ timestamp_init( &member->time, (time_t)library_date, 0 );
+
+ m_member_found = member;
+ m_rfa_found = rfa;
+ status = lbr$search(&m_lbr_context, &LBR_SYMINDEX_NUM, m_rfa_found, set_archive_symbol, NULL);
+ }
+
+ return ( 1 ); /* continue */
+}
+
+
+
+void file_archscan( char const * arch, scanback func, void * closure )
+{
+ OBJECT * path = object_new( arch );
+ file_archive_info_t * archive = file_archive_query( path );
+
+ object_free( path );
+
+ if ( filelist_empty( archive->members ) )
+ {
+ if ( DEBUG_BINDSCAN )
+ out_printf( "scan archive %s\n", object_str( archive->file->name ) );
+
+ if ( file_collect_archive_content_( archive ) < 0 )
+ return;
+ }
+
+ /* Report the collected archive content. */
+ {
+ FILELISTITER iter = filelist_begin( archive->members );
+ FILELISTITER const end = filelist_end( archive->members );
+ char buf[ MAXJPATH ];
+
+ for ( ; iter != end ; iter = filelist_next( iter ) )
+ {
+ file_info_t * member_file = filelist_item( iter );
+ LIST * symbols = member_file->files;
+
+ /* Construct member path: 'archive-path(member-name)'
+ */
+ sprintf( buf, "%s(%s)",
+ object_str( archive->file->name ),
+ object_str( member_file->name ) );
+ {
+ OBJECT * const member = object_new( buf );
+ (*func)( closure, member, 1 /* time valid */, &member_file->time );
+ object_free( member );
+ }
+ }
+ }
+}
+
+
+/*
+ * file_archivescan_() - OS specific file_archivescan() implementation
+ */
+void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
+ void * closure )
+{
+}
+
+
+/*
+ * file_collect_archive_content_() - collects information about archive members
+ */
+
+int file_collect_archive_content_( file_archive_info_t * const archive )
+{
+ unsigned short rfa[3];
+
+ static struct dsc$descriptor_s library =
+ {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL};
+
+ unsigned long lfunc = LBR$C_READ;
+ unsigned long typ = LBR$C_TYP_UNK;
+
+ register int status;
+ string buf[ 1 ];
+ char vmspath[ MAXJPATH ] = { 0 };
+
+ m_archive = archive;
+
+ if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members );
+
+ /* Translate path to VMS
+ */
+ string_new( buf );
+ path_translate_to_os_( object_str( archive->file->name ), buf );
+ strcpy( vmspath, buf->value );
+ string_free( buf );
+
+
+ status = lbr$ini_control( &m_lbr_context, &lfunc, &typ, NULL );
+ if ( !( status & 1 ) )
+ return -1;
+
+ library.dsc$a_pointer = vmspath;
+ library.dsc$w_length = strlen( vmspath );
+
+ status = lbr$open( &m_lbr_context, &library, NULL, NULL, NULL, NULL, NULL );
+ if ( !( status & 1 ) )
+ return -1;
+
+ /* Scan main index for modules.
+ * For each module search symbol-index to collect module's symbols.
+ */
+ status = lbr$get_index( &m_lbr_context, &LBR_MODINDEX_NUM, set_archive_member, NULL );
+
+ if ( !( status & 1 ) )
+ return -1;
+
+
+ (void) lbr$close( &m_lbr_context );
+
+ return 0;
+}
+
+#endif /* OS_VMS */
+
diff --git a/src/boost/tools/build/src/engine/frames.cpp b/src/boost/tools/build/src/engine/frames.cpp
new file mode 100644
index 000000000..0491c5c32
--- /dev/null
+++ b/src/boost/tools/build/src/engine/frames.cpp
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "frames.h"
+
+
+FRAME * frame_before_python_call;
+
+
+void frame_init( FRAME * frame )
+{
+ frame->prev = 0;
+ frame->prev_user = 0;
+ lol_init( frame->args );
+ frame->module = root_module();
+ frame->rulename = "module scope";
+ frame->file = 0;
+ frame->line = -1;
+}
+
+
+void frame_free( FRAME * frame )
+{
+ lol_free( frame->args );
+}
diff --git a/src/boost/tools/build/src/engine/frames.h b/src/boost/tools/build/src/engine/frames.h
new file mode 100644
index 000000000..c134d9c79
--- /dev/null
+++ b/src/boost/tools/build/src/engine/frames.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef FRAMES_DWA20011021_H
+#define FRAMES_DWA20011021_H
+
+#include "config.h"
+#include "lists.h"
+#include "modules.h"
+#include "object.h"
+
+
+typedef struct frame FRAME;
+
+struct frame
+{
+ FRAME * prev;
+ FRAME * prev_user; /* The nearest enclosing frame for which
+ module->user_module is true. */
+ LOL args[ 1 ];
+ module_t * module;
+ OBJECT * file;
+ int line;
+ char const * rulename;
+#ifdef JAM_DEBUGGER
+ void * function;
+#endif
+};
+
+
+/* When a call into Python is in progress, this variable points to the bjam
+ * frame that was current at the moment of the call. When the call completes,
+ * the variable is not defined. Furthermore, if Jam calls Python which calls Jam
+ * and so on, this variable only keeps the most recent Jam frame.
+ */
+extern FRAME * frame_before_python_call;
+
+
+void frame_init( FRAME * );
+void frame_free( FRAME * );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/function.cpp b/src/boost/tools/build/src/engine/function.cpp
new file mode 100644
index 000000000..71f9b119a
--- /dev/null
+++ b/src/boost/tools/build/src/engine/function.cpp
@@ -0,0 +1,5341 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ * Copyright 2016 Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "function.h"
+
+#include "class.h"
+#include "compile.h"
+#include "constants.h"
+#include "debugger.h"
+#include "filesys.h"
+#include "frames.h"
+#include "lists.h"
+#include "mem.h"
+#include "pathsys.h"
+#include "rules.h"
+#include "search.h"
+#include "variable.h"
+#include "output.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+/*
+#define FUNCTION_DEBUG_PROFILE
+*/
+
+#ifndef FUNCTION_DEBUG_PROFILE
+#undef PROFILE_ENTER_LOCAL
+#define PROFILE_ENTER_LOCAL(x) while (false)
+#undef PROFILE_EXIT_LOCAL
+#define PROFILE_EXIT_LOCAL(x)
+#endif
+
+int glob( char const * s, char const * c );
+void backtrace( FRAME * );
+void backtrace_line( FRAME * );
+
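+/* Op-codes for the byte-code interpreter that executes compiled Jam functions.
+ * Each instruction pairs one of these codes with a single integer argument
+ * (see struct instruction below).
+ */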
+#define INSTR_PUSH_EMPTY 0
+#define INSTR_PUSH_CONSTANT 1
+#define INSTR_PUSH_ARG 2
+#define INSTR_PUSH_VAR 3
+#define INSTR_PUSH_VAR_FIXED 57
+#define INSTR_PUSH_GROUP 4
+#define INSTR_PUSH_RESULT 5
+#define INSTR_PUSH_APPEND 6
+#define INSTR_SWAP 7
+
+#define INSTR_JUMP_EMPTY 8
+#define INSTR_JUMP_NOT_EMPTY 9
+
+#define INSTR_JUMP 10
+#define INSTR_JUMP_LT 11
+#define INSTR_JUMP_LE 12
+#define INSTR_JUMP_GT 13
+#define INSTR_JUMP_GE 14
+#define INSTR_JUMP_EQ 15
+#define INSTR_JUMP_NE 16
+#define INSTR_JUMP_IN 17
+#define INSTR_JUMP_NOT_IN 18
+
+#define INSTR_JUMP_NOT_GLOB 19
+
+#define INSTR_FOR_INIT 56
+#define INSTR_FOR_LOOP 20
+
+#define INSTR_SET_RESULT 21
+#define INSTR_RETURN 22
+#define INSTR_POP 23
+
+#define INSTR_PUSH_LOCAL 24
+#define INSTR_POP_LOCAL 25
+#define INSTR_SET 26
+#define INSTR_APPEND 27
+#define INSTR_DEFAULT 28
+
+#define INSTR_PUSH_LOCAL_FIXED 58
+#define INSTR_POP_LOCAL_FIXED 59
+#define INSTR_SET_FIXED 60
+#define INSTR_APPEND_FIXED 61
+#define INSTR_DEFAULT_FIXED 62
+
+#define INSTR_PUSH_LOCAL_GROUP 29
+#define INSTR_POP_LOCAL_GROUP 30
+#define INSTR_SET_GROUP 31
+#define INSTR_APPEND_GROUP 32
+#define INSTR_DEFAULT_GROUP 33
+
+#define INSTR_PUSH_ON 34
+#define INSTR_POP_ON 35
+#define INSTR_SET_ON 36
+#define INSTR_APPEND_ON 37
+#define INSTR_DEFAULT_ON 38
+#define INSTR_GET_ON 65
+
+#define INSTR_CALL_RULE 39
+#define INSTR_CALL_MEMBER_RULE 66
+
+#define INSTR_APPLY_MODIFIERS 40
+#define INSTR_APPLY_INDEX 41
+#define INSTR_APPLY_INDEX_MODIFIERS 42
+#define INSTR_APPLY_MODIFIERS_GROUP 43
+#define INSTR_APPLY_INDEX_GROUP 44
+#define INSTR_APPLY_INDEX_MODIFIERS_GROUP 45
+#define INSTR_COMBINE_STRINGS 46
+#define INSTR_GET_GRIST 64
+
+#define INSTR_INCLUDE 47
+#define INSTR_RULE 48
+#define INSTR_ACTIONS 49
+#define INSTR_PUSH_MODULE 50
+#define INSTR_POP_MODULE 51
+#define INSTR_CLASS 52
+#define INSTR_BIND_MODULE_VARIABLES 63
+
+#define INSTR_APPEND_STRINGS 53
+#define INSTR_WRITE_FILE 54
+#define INSTR_OUTPUT_STRINGS 55
+
+#define INSTR_DEBUG_LINE 67
+#define INSTR_FOR_POP 70
+
+typedef struct instruction
+{
+ unsigned int op_code;
+ int arg;
+} instruction;
+
+typedef struct _subfunction
+{
+ OBJECT * name;
+ FUNCTION * code;
+ int local;
+} SUBFUNCTION;
+
+typedef struct _subaction
+{
+ OBJECT * name;
+ FUNCTION * command;
+ int flags;
+} SUBACTION;
+
+#define FUNCTION_BUILTIN 0
+#define FUNCTION_JAM 1
+
+struct argument
+{
+ int flags;
+#define ARG_ONE 0
+#define ARG_OPTIONAL 1
+#define ARG_PLUS 2
+#define ARG_STAR 3
+#define ARG_VARIADIC 4
+ OBJECT * type_name;
+ OBJECT * arg_name;
+ int index;
+};
+
+struct arg_list
+{
+ int size;
+ struct argument * args;
+};
+
+struct _function
+{
+ int type;
+ int reference_count;
+ OBJECT * rulename;
+ struct arg_list * formal_arguments;
+ int num_formal_arguments;
+};
+
+typedef struct _builtin_function
+{
+ FUNCTION base;
+ LIST * ( * func )( FRAME *, int flags );
+ int flags;
+} BUILTIN_FUNCTION;
+
+typedef struct _jam_function
+{
+ FUNCTION base;
+ int code_size;
+ instruction * code;
+ int num_constants;
+ OBJECT * * constants;
+ int num_subfunctions;
+ SUBFUNCTION * functions;
+ int num_subactions;
+ SUBACTION * actions;
+ FUNCTION * generic;
+ OBJECT * file;
+ int line;
+} JAM_FUNCTION;
+
+
+#ifdef HAVE_PYTHON
+
+#define FUNCTION_PYTHON 2
+
+typedef struct _python_function
+{
+ FUNCTION base;
+ PyObject * python_function;
+} PYTHON_FUNCTION;
+
+static LIST * call_python_function( PYTHON_FUNCTION *, FRAME * );
+
+#endif
+
+
+struct _stack
+{
+ void * data;
+};
+
+static void * stack;
+
+STACK * stack_global()
+{
+ static STACK result;
+ if ( !stack )
+ {
+ int const size = 1 << 21;
+ stack = BJAM_MALLOC( size );
+ result.data = (char *)stack + size;
+ }
+ return &result;
+}
+
+struct list_alignment_helper
+{
+ char ch;
+ LIST * l;
+};
+
+#define LISTPTR_ALIGN_BASE ( sizeof( struct list_alignment_helper ) - sizeof( LIST * ) )
+#define LISTPTR_ALIGN ( ( LISTPTR_ALIGN_BASE > sizeof( LIST * ) ) ? sizeof( LIST * ) : LISTPTR_ALIGN_BASE )
+
+static void check_alignment( STACK * s )
+{
+ assert( (size_t)s->data % LISTPTR_ALIGN == 0 );
+}
+
+void * stack_allocate( STACK * s, int size )
+{
+ check_alignment( s );
+ s->data = (char *)s->data - size;
+ check_alignment( s );
+ return s->data;
+}
+
+void stack_deallocate( STACK * s, int size )
+{
+ check_alignment( s );
+ s->data = (char *)s->data + size;
+ check_alignment( s );
+}
+
+void stack_push( STACK * s, LIST * l )
+{
+ *(LIST * *)stack_allocate( s, sizeof( LIST * ) ) = l;
+}
+
+LIST * stack_pop( STACK * s )
+{
+ LIST * const result = *(LIST * *)s->data;
+ stack_deallocate( s, sizeof( LIST * ) );
+ return result;
+}
+
+LIST * stack_top( STACK * s )
+{
+ check_alignment( s );
+ return *(LIST * *)s->data;
+}
+
+LIST * stack_at( STACK * s, int n )
+{
+ check_alignment( s );
+ return *( (LIST * *)s->data + n );
+}
+
+void stack_set( STACK * s, int n, LIST * value )
+{
+ check_alignment( s );
+ *((LIST * *)s->data + n) = value;
+}
+
+void * stack_get( STACK * s )
+{
+ check_alignment( s );
+ return s->data;
+}
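+
+/* Editor's note -- illustrative sketch only, not part of the upstream
+ * sources: the evaluation stack grows downward and is used strictly LIFO,
+ * so pushes and pops always pair up, e.g.
+ *
+ *   STACK * s = stack_global();
+ *   stack_push( s, first );          // first and second are LIST *
+ *   stack_push( s, second );
+ *   LIST * top = stack_pop( s );     // returns second
+ *   LIST * next = stack_pop( s );    // returns first
+ */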
+
+LIST * frame_get_local( FRAME * frame, int idx )
+{
+ /* The only local variables are the arguments. */
+ return list_copy( lol_get( frame->args, idx ) );
+}
+
+static OBJECT * function_get_constant( JAM_FUNCTION * function, int idx )
+{
+ return function->constants[ idx ];
+}
+
+static LIST * function_get_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx )
+{
+ return list_copy( var_get( frame->module, function->constants[ idx ] ) );
+}
+
+static void function_set_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ var_set( frame->module, function->constants[ idx ], value, VAR_SET );
+}
+
+static LIST * function_swap_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ return var_swap( frame->module, function->constants[ idx ], value );
+}
+
+static void function_append_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ var_set( frame->module, function->constants[ idx ], value, VAR_APPEND );
+}
+
+static void function_default_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ var_set( frame->module, function->constants[ idx ], value, VAR_DEFAULT );
+}
+
+static void function_set_rule( JAM_FUNCTION * function, FRAME * frame,
+ STACK * s, int idx )
+{
+ SUBFUNCTION * sub = function->functions + idx;
+ new_rule_body( frame->module, sub->name, sub->code, !sub->local );
+}
+
+static void function_set_actions( JAM_FUNCTION * function, FRAME * frame,
+ STACK * s, int idx )
+{
+ SUBACTION * sub = function->actions + idx;
+ LIST * bindlist = stack_pop( s );
+ new_rule_actions( frame->module, sub->name, sub->command, bindlist,
+ sub->flags );
+}
+
+
+/*
+ * Returns the argument index if name is "<", ">", or one of "1", "2", ...,
+ * "19"; otherwise returns -1.
+ */
+
+static int get_argument_index( char const * s )
+{
+ if ( s[ 0 ] != '\0')
+ {
+ if ( s[ 1 ] == '\0' )
+ {
+ switch ( s[ 0 ] )
+ {
+ case '<': return 0;
+ case '>': return 1;
+
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ return s[ 0 ] - '1';
+ }
+ }
+ else if ( s[ 0 ] == '1' && s[ 2 ] == '\0' )
+ {
+ switch( s[ 1 ] )
+ {
+ case '0':
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ return s[ 1 ] - '0' + 10 - 1;
+ }
+ }
+ }
+ return -1;
+}
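+
+/* Editor's note -- examples of the mapping above (illustration only, not
+ * part of the upstream sources): "<" -> 0, ">" -> 1, "1" -> 0, "2" -> 1,
+ * "9" -> 8, "10" -> 9, "19" -> 18; anything else, e.g. "20" or "x",
+ * yields -1.
+ */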
+
+static LIST * function_get_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name )
+{
+ int const idx = get_argument_index( object_str( name ) );
+ return idx == -1
+ ? list_copy( var_get( frame->module, name ) )
+ : list_copy( lol_get( frame->args, idx ) );
+}
+
+static void function_set_named_variable( JAM_FUNCTION * function, FRAME * frame,
+ OBJECT * name, LIST * value)
+{
+ var_set( frame->module, name, value, VAR_SET );
+}
+
+static LIST * function_swap_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name, LIST * value )
+{
+ return var_swap( frame->module, name, value );
+}
+
+static void function_append_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name, LIST * value)
+{
+ var_set( frame->module, name, value, VAR_APPEND );
+}
+
+static void function_default_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name, LIST * value )
+{
+ var_set( frame->module, name, value, VAR_DEFAULT );
+}
+
+static LIST * function_call_rule( JAM_FUNCTION * function, FRAME * frame,
+ STACK * s, int n_args, char const * unexpanded, OBJECT * file, int line )
+{
+ FRAME inner[ 1 ];
+ int i;
+ LIST * first = stack_pop( s );
+ LIST * result = L0;
+ OBJECT * rulename;
+ LIST * trailing;
+
+ frame->file = file;
+ frame->line = line;
+
+ if ( list_empty( first ) )
+ {
+ backtrace_line( frame );
+ out_printf( "warning: rulename %s expands to empty string\n", unexpanded );
+ backtrace( frame );
+ list_free( first );
+ for ( i = 0; i < n_args; ++i )
+ list_free( stack_pop( s ) );
+ return result;
+ }
+
+ rulename = object_copy( list_front( first ) );
+
+ frame_init( inner );
+ inner->prev = frame;
+ inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
+ inner->module = frame->module; /* This gets fixed up in evaluate_rule(). */
+
+ for ( i = 0; i < n_args; ++i )
+ lol_add( inner->args, stack_at( s, n_args - i - 1 ) );
+
+ for ( i = 0; i < n_args; ++i )
+ stack_pop( s );
+
+ trailing = list_pop_front( first );
+ if ( trailing )
+ {
+ if ( inner->args->count == 0 )
+ lol_add( inner->args, trailing );
+ else
+ {
+ LIST * * const l = &inner->args->list[ 0 ];
+ *l = list_append( trailing, *l );
+ }
+ }
+
+ result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner );
+ frame_free( inner );
+ object_free( rulename );
+ return result;
+}
+
+static LIST * function_call_member_rule( JAM_FUNCTION * function, FRAME * frame, STACK * s, int n_args, OBJECT * rulename, OBJECT * file, int line )
+{
+ FRAME inner[ 1 ];
+ int i;
+ LIST * first = stack_pop( s );
+ LIST * result = L0;
+ RULE * rule;
+ module_t * module;
+ OBJECT * real_rulename = 0;
+
+ frame->file = file;
+ frame->line = line;
+
+ if ( list_empty( first ) )
+ {
+ backtrace_line( frame );
+ out_printf( "warning: object is empty\n" );
+ backtrace( frame );
+
+ list_free( first );
+
+ for( i = 0; i < n_args; ++i )
+ {
+ list_free( stack_pop( s ) );
+ }
+
+ return result;
+ }
+
+ /* FIXME: handle generic case */
+ assert( list_length( first ) == 1 );
+
+ module = bindmodule( list_front( first ) );
+ if ( module->class_module )
+ {
+ rule = bindrule( rulename, module );
+ if ( rule->procedure )
+ {
+ real_rulename = object_copy( function_rulename( rule->procedure ) );
+ }
+ else
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append( buf, object_str( module->name ) );
+ string_push_back( buf, '.' );
+ string_append( buf, object_str( rulename ) );
+ real_rulename = object_new( buf->value );
+ string_free( buf );
+ }
+ }
+ else
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append( buf, object_str( list_front( first ) ) );
+ string_push_back( buf, '.' );
+ string_append( buf, object_str( rulename ) );
+ real_rulename = object_new( buf->value );
+ string_free( buf );
+ rule = bindrule( real_rulename, frame->module );
+ }
+
+ frame_init( inner );
+
+ inner->prev = frame;
+ inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
+ inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */
+
+ for( i = 0; i < n_args; ++i )
+ {
+ lol_add( inner->args, stack_at( s, n_args - i - 1 ) );
+ }
+
+ for( i = 0; i < n_args; ++i )
+ {
+ stack_pop( s );
+ }
+
+ if ( list_length( first ) > 1 )
+ {
+ string buf[ 1 ];
+ LIST * trailing = L0;
+ LISTITER iter = list_begin( first ), end = list_end( first );
+ iter = list_next( iter );
+ string_new( buf );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ string_append( buf, object_str( list_item( iter ) ) );
+ string_push_back( buf, '.' );
+ string_append( buf, object_str( rulename ) );
+ trailing = list_push_back( trailing, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ string_free( buf );
+ if ( inner->args->count == 0 )
+ lol_add( inner->args, trailing );
+ else
+ {
+ LIST * * const l = &inner->args->list[ 0 ];
+ *l = list_append( trailing, *l );
+ }
+ }
+
+ list_free( first );
+ result = evaluate_rule( rule, real_rulename, inner );
+ frame_free( inner );
+ object_free( real_rulename );
+ return result;
+}
+
+
+/* Variable expansion */
+
+typedef struct
+{
+ int sub1;
+ int sub2;
+} subscript_t;
+
+typedef struct
+{
+ PATHNAME f; /* :GDBSMR -- pieces */
+ char parent; /* :P -- go to parent directory */
+ char filemods; /* one of the above applied */
+ char downshift; /* :L -- downshift result */
+ char upshift; /* :U -- upshift result */
+ char to_slashes; /* :T -- convert "\" to "/" */
+ char to_windows; /* :W -- convert cygwin to native paths */
+ PATHPART empty; /* :E -- default for empties */
+ PATHPART join; /* :J -- join list with char */
+} VAR_EDITS;
+
+static LIST * apply_modifiers_impl( LIST * result, string * buf,
+ VAR_EDITS * edits, int n, LISTITER iter, LISTITER end );
+static void get_iters( subscript_t const subscript, LISTITER * const first,
+ LISTITER * const last, int const length );
+
+
+/*
+ * var_edit_parse() - parse : modifiers into PATHNAME structure
+ *
+ * The : modifiers in a $(varname:modifier) currently support replacing or
+ * omitting elements of a filename, and so they are parsed into a PATHNAME
+ * structure (which contains pointers into the original string).
+ *
+ * Modifiers of the form "X=value" replace the component X with the given value.
+ * Modifiers without the "=value" cause everything but the component X to be
+ * omitted. X is one of:
+ *
+ * G <grist>
+ * D directory name
+ * B base name
+ * S .suffix
+ * M (member)
+ * R root directory - prepended to whole path
+ *
+ * This routine sets:
+ *
+ * f->f_xxx.ptr = 0
+ * f->f_xxx.len = 0
+ * -> leave the original component xxx
+ *
+ * f->f_xxx.ptr = string
+ * f->f_xxx.len = strlen( string )
+ * -> replace component xxx with string
+ *
+ * f->f_xxx.ptr = ""
+ * f->f_xxx.len = 0
+ * -> omit component xxx
+ *
+ * var_edit_file() below and path_build() obligingly follow this convention.
+ */
+
+static int var_edit_parse( char const * mods, VAR_EDITS * edits, int havezeroed
+ )
+{
+ while ( *mods )
+ {
+ PATHPART * fp;
+
+ switch ( *mods++ )
+ {
+ case 'L': edits->downshift = 1; continue;
+ case 'U': edits->upshift = 1; continue;
+ case 'P': edits->parent = edits->filemods = 1; continue;
+ case 'E': fp = &edits->empty; goto strval;
+ case 'J': fp = &edits->join; goto strval;
+ case 'G': fp = &edits->f.f_grist; goto fileval;
+ case 'R': fp = &edits->f.f_root; goto fileval;
+ case 'D': fp = &edits->f.f_dir; goto fileval;
+ case 'B': fp = &edits->f.f_base; goto fileval;
+ case 'S': fp = &edits->f.f_suffix; goto fileval;
+ case 'M': fp = &edits->f.f_member; goto fileval;
+ case 'T': edits->to_slashes = 1; continue;
+ case 'W': edits->to_windows = 1; continue;
+ default:
+ continue; /* Should complain, but so what... */
+ }
+
+ fileval:
+ /* Handle :CHARS, where each char (without a following =) selects a
+ * particular file path element. On the first such char, we deselect all
+ * others (by setting ptr = "", len = 0) and for each char we select
+ * that element (by setting ptr = 0).
+ */
+ edits->filemods = 1;
+
+ if ( *mods != '=' )
+ {
+ if ( !havezeroed++ )
+ {
+ int i;
+ for ( i = 0; i < 6; ++i )
+ {
+ edits->f.part[ i ].len = 0;
+ edits->f.part[ i ].ptr = "";
+ }
+ }
+
+ fp->ptr = 0;
+ continue;
+ }
+
+ strval:
+ /* Handle :X=value, or :X */
+ if ( *mods != '=' )
+ {
+ fp->ptr = "";
+ fp->len = 0;
+ }
+ else
+ {
+ fp->ptr = ++mods;
+ fp->len = strlen( mods );
+ mods += fp->len;
+ }
+ }
+
+ return havezeroed;
+}
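+
+/* Editor's note -- a few worked examples of the modifier grammar handled
+ * above (illustration only, not part of the upstream sources):
+ *
+ *   "U"          -> upshift = 1
+ *   "S=.o"       -> f.f_suffix points at ".o" (replace the suffix)
+ *   "BS"         -> keep only the base name and suffix, omit the rest
+ *   "E=default"  -> empty.ptr points at "default" (value used for empties)
+ */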
+
+
+/*
+ * var_edit_file() - copy input target name to output, modifying filename.
+ */
+
+static void var_edit_file( char const * in, string * out, VAR_EDITS * edits )
+{
+ if ( edits->filemods )
+ {
+ PATHNAME pathname;
+
+ /* Parse apart original filename, putting parts into "pathname". */
+ path_parse( in, &pathname );
+
+ /* Replace any pathname with edits->f */
+ if ( edits->f.f_grist .ptr ) pathname.f_grist = edits->f.f_grist;
+ if ( edits->f.f_root .ptr ) pathname.f_root = edits->f.f_root;
+ if ( edits->f.f_dir .ptr ) pathname.f_dir = edits->f.f_dir;
+ if ( edits->f.f_base .ptr ) pathname.f_base = edits->f.f_base;
+ if ( edits->f.f_suffix.ptr ) pathname.f_suffix = edits->f.f_suffix;
+ if ( edits->f.f_member.ptr ) pathname.f_member = edits->f.f_member;
+
+ /* If requested, modify pathname to point to parent. */
+ if ( edits->parent )
+ path_parent( &pathname );
+
+ /* Put filename back together. */
+ path_build( &pathname, out );
+ }
+ else
+ string_append( out, in );
+}
+
+
+#if defined( OS_CYGWIN ) || defined( OS_VMS )
+
+/*
+ * var_edit_translate_path() - translate path to os native format.
+ */
+
+static void var_edit_translate_path( string * out, size_t pos, VAR_EDITS * edits )
+{
+ if ( edits->to_windows )
+ {
+ string result[ 1 ];
+ int translated;
+
+ /* Translate path to os native format. */
+ translated = path_translate_to_os( out->value + pos, result );
+ if ( translated )
+ {
+ string_truncate( out, pos );
+ string_append( out, result->value );
+ edits->to_slashes = 0;
+ }
+
+ string_free( result );
+ }
+}
+
+#endif
+
+
+/*
+ * var_edit_shift() - do upshift/downshift & other mods.
+ */
+
+static void var_edit_shift( string * out, size_t pos, VAR_EDITS * edits )
+{
+#if defined( OS_CYGWIN ) || defined( OS_VMS )
+ var_edit_translate_path( out, pos, edits );
+#endif
+
+ if ( edits->upshift || edits->downshift || edits->to_slashes )
+ {
+ /* Handle upshifting, downshifting and slash translation now. */
+ char * p;
+ for ( p = out->value + pos; *p; ++p )
+ {
+ if ( edits->upshift )
+ *p = toupper( *p );
+ else if ( edits->downshift )
+ *p = tolower( *p );
+ if ( edits->to_slashes && ( *p == '\\' ) )
+ *p = '/';
+ }
+ }
+}
+
+
+/*
+ * Reads n LISTs from the top of the STACK and combines them to form VAR_EDITS.
+ * Returns the number of VAR_EDITS pushed onto the STACK.
+ */
+
+static int expand_modifiers( STACK * s, int n )
+{
+ int i;
+ int total = 1;
+ LIST * * args = (LIST**)stack_get( s );
+ for ( i = 0; i < n; ++i )
+ total *= list_length( args[ i ] );
+
+ if ( total != 0 )
+ {
+ VAR_EDITS * out = (VAR_EDITS*)stack_allocate( s, total * sizeof( VAR_EDITS ) );
+ LISTITER * iter = (LISTITER*)stack_allocate( s, n * sizeof( LIST * ) );
+ for ( i = 0; i < n; ++i )
+ iter[ i ] = list_begin( args[ i ] );
+ i = 0;
+ {
+ int havezeroed;
+ loop:
+ memset( out, 0, sizeof( *out ) );
+ havezeroed = 0;
+ for ( i = 0; i < n; ++i )
+ havezeroed = var_edit_parse( object_str( list_item( iter[ i ] )
+ ), out, havezeroed );
+ ++out;
+ while ( --i >= 0 )
+ {
+ if ( list_next( iter[ i ] ) != list_end( args[ i ] ) )
+ {
+ iter[ i ] = list_next( iter[ i ] );
+ goto loop;
+ }
+ iter[ i ] = list_begin( args[ i ] );
+ }
+ }
+ stack_deallocate( s, n * sizeof( LIST * ) );
+ }
+ return total;
+}
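+
+/* Editor's note -- illustration only, not part of the upstream sources: the
+ * loop above walks the cartesian product of the n modifier lists, e.g. the
+ * two lists ("U" "L") and ("S=.o") produce two VAR_EDITS, one parsed from
+ * "U" followed by "S=.o" and one parsed from "L" followed by "S=.o", so
+ * expand_modifiers() returns 2.
+ */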
+
+static LIST * apply_modifiers( STACK * s, int n )
+{
+ LIST * value = stack_top( s );
+ LIST * result = L0;
+ VAR_EDITS * const edits = (VAR_EDITS *)( (LIST * *)stack_get( s ) + 1 );
+ string buf[ 1 ];
+ string_new( buf );
+ result = apply_modifiers_impl( result, buf, edits, n, list_begin( value ),
+ list_end( value ) );
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * Parse a string of the form "1-2", "-2--1", "2-" and return the two
+ * subscripts.
+ */
+
+subscript_t parse_subscript( char const * s )
+{
+ subscript_t result;
+ result.sub1 = 0;
+ result.sub2 = 0;
+ do /* so we can use "break" */
+ {
+ /* Allow negative subscripts. */
+ if ( !isdigit( *s ) && ( *s != '-' ) )
+ {
+ result.sub2 = 0;
+ break;
+ }
+ result.sub1 = atoi( s );
+
+ /* Skip over the first symbol, which is either a digit or dash. */
+ ++s;
+ while ( isdigit( *s ) ) ++s;
+
+ if ( *s == '\0' )
+ {
+ result.sub2 = result.sub1;
+ break;
+ }
+
+ if ( *s != '-' )
+ {
+ result.sub2 = 0;
+ break;
+ }
+
+ ++s;
+
+ if ( *s == '\0' )
+ {
+ result.sub2 = -1;
+ break;
+ }
+
+ if ( !isdigit( *s ) && ( *s != '-' ) )
+ {
+ result.sub2 = 0;
+ break;
+ }
+
+ /* First, compute the index of the last element. */
+ result.sub2 = atoi( s );
+ while ( isdigit( *++s ) );
+
+ if ( *s != '\0' )
+ result.sub2 = 0;
+
+ } while ( 0 );
+ return result;
+}
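+
+/* Editor's note -- examples of the accepted subscript forms (illustration
+ * only, not part of the upstream sources):
+ *
+ *   "3"     -> sub1 = 3,  sub2 = 3    (single element)
+ *   "2-4"   -> sub1 = 2,  sub2 = 4    (closed range)
+ *   "2-"    -> sub1 = 2,  sub2 = -1   (to the end of the list)
+ *   "-2--1" -> sub1 = -2, sub2 = -1   (last two elements)
+ */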
+
+static LIST * apply_subscript( STACK * s )
+{
+ LIST * value = stack_top( s );
+ LIST * indices = stack_at( s, 1 );
+ LIST * result = L0;
+ int length = list_length( value );
+ string buf[ 1 ];
+ LISTITER indices_iter = list_begin( indices );
+ LISTITER const indices_end = list_end( indices );
+ string_new( buf );
+ for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter
+ ) )
+ {
+ LISTITER iter = list_begin( value );
+ LISTITER end = list_end( value );
+ subscript_t const subscript = parse_subscript( object_str( list_item(
+ indices_iter ) ) );
+ get_iters( subscript, &iter, &end, length );
+ for ( ; iter != end; iter = list_next( iter ) )
+ result = list_push_back( result, object_copy( list_item( iter ) ) );
+ }
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * Computes the sub-range of a list selected by the given subscript. On entry
+ * *first and *last delimit the whole list; on return they are adjusted to
+ * delimit the selected elements.
+ */
+
+static void get_iters( subscript_t const subscript, LISTITER * const first,
+ LISTITER * const last, int const length )
+{
+ int start;
+ int size;
+ LISTITER iter;
+ LISTITER end;
+ {
+
+ if ( subscript.sub1 < 0 )
+ start = length + subscript.sub1;
+ else if ( subscript.sub1 > length )
+ start = length;
+ else
+ start = subscript.sub1 - 1;
+
+ size = subscript.sub2 < 0
+ ? length + 1 + subscript.sub2 - start
+ : subscript.sub2 - start;
+
+ /*
+ * HACK: When the first subscript is before the start of the list, it
+ * magically becomes the beginning of the list. This is inconsistent,
+ * but needed for backwards compatibility.
+ */
+ if ( start < 0 )
+ start = 0;
+
+ /* The "sub2 < 0" test handles the semantic error of sub2 < sub1. */
+ if ( size < 0 )
+ size = 0;
+
+ if ( start + size > length )
+ size = length - start;
+ }
+
+ iter = *first;
+ while ( start-- > 0 )
+ iter = list_next( iter );
+
+ end = iter;
+ while ( size-- > 0 )
+ end = list_next( end );
+
+ *first = iter;
+ *last = end;
+}
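+
+/* Editor's note -- illustration only, not part of the upstream sources: for
+ * a four element list (a b c d), subscript "2-3" yields the range (b c),
+ * "2-" yields (b c d), "-1" yields (d), and "5-6" yields an empty range
+ * because the start is clamped to the list length.
+ */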
+
+static LIST * apply_modifiers_empty( LIST * result, string * buf,
+ VAR_EDITS * edits, int n )
+{
+ int i;
+ for ( i = 0; i < n; ++i )
+ {
+ if ( edits[ i ].empty.ptr )
+ {
+ /** FIXME: is empty.ptr always null-terminated? */
+ var_edit_file( edits[ i ].empty.ptr, buf, edits + i );
+ var_edit_shift( buf, 0, edits + i );
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ return result;
+}
+
+static LIST * apply_modifiers_non_empty( LIST * result, string * buf,
+ VAR_EDITS * edits, int n, LISTITER begin, LISTITER end )
+{
+ int i;
+ LISTITER iter;
+ for ( i = 0; i < n; ++i )
+ {
+ if ( edits[ i ].join.ptr )
+ {
+ var_edit_file( object_str( list_item( begin ) ), buf, edits + i );
+ var_edit_shift( buf, 0, edits + i );
+ for ( iter = list_next( begin ); iter != end; iter = list_next( iter
+ ) )
+ {
+ size_t size;
+ string_append( buf, edits[ i ].join.ptr );
+ size = buf->size;
+ var_edit_file( object_str( list_item( iter ) ), buf, edits + i
+ );
+ var_edit_shift( buf, size, edits + i );
+ }
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ else
+ {
+ for ( iter = begin; iter != end; iter = list_next( iter ) )
+ {
+ var_edit_file( object_str( list_item( iter ) ), buf, edits + i );
+ var_edit_shift( buf, 0, edits + i );
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ return result;
+}
+
+static LIST * apply_modifiers_impl( LIST * result, string * buf,
+ VAR_EDITS * edits, int n, LISTITER iter, LISTITER end )
+{
+ return iter == end
+ ? apply_modifiers_empty( result, buf, edits, n )
+ : apply_modifiers_non_empty( result, buf, edits, n, iter, end );
+}
+
+static LIST * apply_subscript_and_modifiers( STACK * s, int n )
+{
+ LIST * const value = stack_top( s );
+ LIST * const indices = stack_at( s, 1 );
+ LIST * result = L0;
+ VAR_EDITS * const edits = (VAR_EDITS *)((LIST * *)stack_get( s ) + 2);
+ int const length = list_length( value );
+ string buf[ 1 ];
+ LISTITER indices_iter = list_begin( indices );
+ LISTITER const indices_end = list_end( indices );
+ string_new( buf );
+ for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter
+ ) )
+ {
+ LISTITER iter = list_begin( value );
+ LISTITER end = list_end( value );
+ subscript_t const sub = parse_subscript( object_str( list_item(
+ indices_iter ) ) );
+ get_iters( sub, &iter, &end, length );
+ result = apply_modifiers_impl( result, buf, edits, n, iter, end );
+ }
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * expand() - expands a list of concatenated strings and variable references
+ *
+ * Takes a list of expansion items - each representing one element to be
+ * concatenated and each containing a list of its values. Returns a list of all
+ * possible values constructed by selecting a single value from each of the
+ * elements and concatenating them together.
+ *
+ * For example, in the following code:
+ *
+ * local a = one two three four ;
+ * local b = foo bar ;
+ * ECHO /$(a)/$(b)/$(a)/ ;
+ *
+ * When constructing the result of /$(a)/$(b)/ this function would get called
+ * with the following 7 expansion items:
+ * 1. /
+ * 2. one two three four
+ * 3. /
+ * 4. foo bar
+ * 5. /
+ * 6. one two three four
+ * 7. /
+ *
+ * And would result in a list containing 32 values:
+ * 1. /one/foo/one/
+ * 2. /one/foo/two/
+ * 3. /one/foo/three/
+ * 4. /one/foo/four/
+ * 5. /one/bar/one/
+ * ...
+ *
+ */
+
+typedef struct expansion_item
+{
+ /* Item's value list initialized prior to calling expand(). */
+ LIST * values;
+
+ /* Internal data initialized and used inside expand(). */
+ LISTITER current; /* Currently used value. */
+ int size; /* Concatenated string length prior to concatenating the
+ * item's current value.
+ */
+} expansion_item;
+
+static LIST * expand( expansion_item * items, int const length )
+{
+ LIST * result = L0;
+ string buf[ 1 ];
+ int size = 0;
+ int i;
+
+ assert( length > 0 );
+ for ( i = 0; i < length; ++i )
+ {
+ LISTITER iter = list_begin( items[ i ].values );
+ LISTITER const end = list_end( items[ i ].values );
+
+ /* If any of the items has no values - the result is an empty list. */
+ if ( iter == end ) return L0;
+
+ /* Set each item's 'current' to its first listed value. This indicates
+ * each item's next value to be used when constructing the list of all
+ * possible concatenated values.
+ */
+ items[ i ].current = iter;
+
+ /* Calculate the longest concatenated string length - to know how much
+ * memory we need to allocate as a buffer for holding the concatenated
+ * strings.
+ */
+ {
+ int max = 0;
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ int const len = strlen( object_str( list_item( iter ) ) );
+ if ( len > max ) max = len;
+ }
+ size += max;
+ }
+ }
+
+ string_new( buf );
+ string_reserve( buf, size );
+
+ i = 0;
+ while ( i >= 0 )
+ {
+ for ( ; i < length; ++i )
+ {
+ items[ i ].size = buf->size;
+ string_append( buf, object_str( list_item( items[ i ].current ) ) );
+ }
+ result = list_push_back( result, object_new( buf->value ) );
+ while ( --i >= 0 )
+ {
+ if ( list_next( items[ i ].current ) != list_end( items[ i ].values
+ ) )
+ {
+ items[ i ].current = list_next( items[ i ].current );
+ string_truncate( buf, items[ i ].size );
+ break;
+ }
+ else
+ items[ i ].current = list_begin( items[ i ].values );
+ }
+ }
+
+ string_free( buf );
+ return result;
+}
+
+static void combine_strings( STACK * s, int n, string * out )
+{
+ int i;
+ for ( i = 0; i < n; ++i )
+ {
+ LIST * const values = stack_pop( s );
+ LISTITER iter = list_begin( values );
+ LISTITER const end = list_end( values );
+ if ( iter != end )
+ {
+ string_append( out, object_str( list_item( iter ) ) );
+ for ( iter = list_next( iter ); iter != end; iter = list_next( iter
+ ) )
+ {
+ string_push_back( out, ' ' );
+ string_append( out, object_str( list_item( iter ) ) );
+ }
+ list_free( values );
+ }
+ }
+}
+
+struct dynamic_array
+{
+ int size;
+ int capacity;
+ void * data;
+};
+
+static void dynamic_array_init( struct dynamic_array * array )
+{
+ array->size = 0;
+ array->capacity = 0;
+ array->data = 0;
+}
+
+static void dynamic_array_free( struct dynamic_array * array )
+{
+ BJAM_FREE( array->data );
+}
+
+static void dynamic_array_push_impl( struct dynamic_array * const array,
+ void const * const value, int const unit_size )
+{
+ if ( array->capacity == 0 )
+ {
+ array->capacity = 2;
+ array->data = BJAM_MALLOC( array->capacity * unit_size );
+ }
+ else if ( array->capacity == array->size )
+ {
+ void * new_data;
+ array->capacity *= 2;
+ new_data = BJAM_MALLOC( array->capacity * unit_size );
+ memcpy( new_data, array->data, array->size * unit_size );
+ BJAM_FREE( array->data );
+ array->data = new_data;
+ }
+ memcpy( (char *)array->data + array->size * unit_size, value, unit_size );
+ ++array->size;
+}
+
+#define dynamic_array_push( array, value ) (dynamic_array_push_impl(array, &value, sizeof(value)))
+#define dynamic_array_at( type, array, idx ) (((type *)(array)->data)[idx])
+#define dynamic_array_pop( array ) (--(array)->size)
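+
+/* Editor's note -- usage sketch only, not part of the upstream sources:
+ *
+ *   struct dynamic_array a[ 1 ];
+ *   int x = 42;
+ *   dynamic_array_init( a );
+ *   dynamic_array_push( a, x );             // copies sizeof( x ) bytes
+ *   int y = dynamic_array_at( int, a, 0 );  // y == 42
+ *   dynamic_array_free( a );
+ */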
+
+/*
+ * struct compiler
+ */
+
+struct label_info
+{
+ int absolute_position;
+ struct dynamic_array uses[ 1 ];
+};
+
+#define LOOP_INFO_BREAK 0
+#define LOOP_INFO_CONTINUE 1
+
+struct loop_info
+{
+ int type;
+ int label;
+ int cleanup_depth;
+};
+
+struct stored_rule
+{
+ OBJECT * name;
+ PARSE * parse;
+ int num_arguments;
+ struct arg_list * arguments;
+ int local;
+};
+
+typedef struct compiler
+{
+ struct dynamic_array code[ 1 ];
+ struct dynamic_array constants[ 1 ];
+ struct dynamic_array labels[ 1 ];
+ struct dynamic_array rules[ 1 ];
+ struct dynamic_array actions[ 1 ];
+ struct dynamic_array cleanups[ 1 ];
+ struct dynamic_array loop_scopes[ 1 ];
+} compiler;
+
+static void compiler_init( compiler * c )
+{
+ dynamic_array_init( c->code );
+ dynamic_array_init( c->constants );
+ dynamic_array_init( c->labels );
+ dynamic_array_init( c->rules );
+ dynamic_array_init( c->actions );
+ dynamic_array_init( c->cleanups );
+ dynamic_array_init( c->loop_scopes );
+}
+
+static void compiler_free( compiler * c )
+{
+ int i;
+ dynamic_array_free( c->actions );
+ dynamic_array_free( c->rules );
+ for ( i = 0; i < c->labels->size; ++i )
+ dynamic_array_free( dynamic_array_at( struct label_info, c->labels, i
+ ).uses );
+ dynamic_array_free( c->labels );
+ dynamic_array_free( c->constants );
+ dynamic_array_free( c->code );
+ dynamic_array_free( c->cleanups );
+ dynamic_array_free( c->loop_scopes );
+}
+
+static void compile_emit_instruction( compiler * c, instruction instr )
+{
+ dynamic_array_push( c->code, instr );
+}
+
+static int compile_new_label( compiler * c )
+{
+ int result = c->labels->size;
+ struct label_info info;
+ info.absolute_position = -1;
+ dynamic_array_init( info.uses );
+ dynamic_array_push( c->labels, info );
+ return result;
+}
+
+static void compile_set_label( compiler * c, int label )
+{
+ struct label_info * const l = &dynamic_array_at( struct label_info,
+ c->labels, label );
+ int const pos = c->code->size;
+ int i;
+ assert( l->absolute_position == -1 );
+ l->absolute_position = pos;
+ for ( i = 0; i < l->uses->size; ++i )
+ {
+ int id = dynamic_array_at( int, l->uses, i );
+ int offset = (int)( pos - id - 1 );
+ dynamic_array_at( instruction, c->code, id ).arg = offset;
+ }
+}
+
+static void compile_emit( compiler * c, unsigned int op_code, int arg )
+{
+ instruction instr;
+ instr.op_code = op_code;
+ instr.arg = arg;
+ compile_emit_instruction( c, instr );
+}
+
+static void compile_emit_branch( compiler * c, unsigned int op_code, int label )
+{
+ struct label_info * const l = &dynamic_array_at( struct label_info,
+ c->labels, label );
+ int const pos = c->code->size;
+ instruction instr;
+ instr.op_code = op_code;
+ if ( l->absolute_position == -1 )
+ {
+ instr.arg = 0;
+ dynamic_array_push( l->uses, pos );
+ }
+ else
+ instr.arg = (int)( l->absolute_position - pos - 1 );
+ compile_emit_instruction( c, instr );
+}
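+
+/* Editor's note -- illustration only, not part of the upstream sources: a
+ * forward branch is emitted before its target position is known, so its
+ * position is recorded in the label's uses array and patched later:
+ *
+ *   int label = compile_new_label( c );
+ *   compile_emit_branch( c, INSTR_JUMP_EMPTY, label );  // arg patched later
+ *   ...                                                 // emit body
+ *   compile_set_label( c, label );  // back-patches every recorded use with
+ *                                   // a relative offset from the branch
+ */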
+
+static int compile_emit_constant( compiler * c, OBJECT * value )
+{
+ OBJECT * copy = object_copy( value );
+ dynamic_array_push( c->constants, copy );
+ return c->constants->size - 1;
+}
+
+static void compile_push_cleanup( compiler * c, unsigned int op_code, int arg )
+{
+ instruction instr;
+ instr.op_code = op_code;
+ instr.arg = arg;
+ dynamic_array_push( c->cleanups, instr );
+}
+
+static void compile_pop_cleanup( compiler * c )
+{
+ dynamic_array_pop( c->cleanups );
+}
+
+static void compile_emit_cleanups( compiler * c, int end )
+{
+ int i;
+ for ( i = c->cleanups->size; --i >= end; )
+ {
+ compile_emit_instruction( c, dynamic_array_at( instruction, c->cleanups, i ) );
+ }
+}
+
+static void compile_emit_loop_jump( compiler * c, int type )
+{
+ struct loop_info * info = NULL;
+ int i;
+ for ( i = c->loop_scopes->size; --i >= 0; )
+ {
+ struct loop_info * elem = &dynamic_array_at( struct loop_info, c->loop_scopes, i );
+ if ( elem->type == type )
+ {
+ info = elem;
+ break;
+ }
+ }
+ if ( info == NULL )
+ {
+ printf( "warning: ignoring break statement used outside of loop\n" );
+ return;
+ }
+ compile_emit_cleanups( c, info->cleanup_depth );
+ compile_emit_branch( c, INSTR_JUMP, info->label );
+}
+
+static void compile_push_break_scope( compiler * c, int label )
+{
+ struct loop_info info;
+ info.type = LOOP_INFO_BREAK;
+ info.label = label;
+ info.cleanup_depth = c->cleanups->size;
+ dynamic_array_push( c->loop_scopes, info );
+}
+
+static void compile_push_continue_scope( compiler * c, int label )
+{
+ struct loop_info info;
+ info.type = LOOP_INFO_CONTINUE;
+ info.label = label;
+ info.cleanup_depth = c->cleanups->size;
+ dynamic_array_push( c->loop_scopes, info );
+}
+
+static void compile_pop_break_scope( compiler * c )
+{
+ assert( c->loop_scopes->size > 0 );
+ assert( dynamic_array_at( struct loop_info, c->loop_scopes, c->loop_scopes->size - 1 ).type == LOOP_INFO_BREAK );
+ dynamic_array_pop( c->loop_scopes );
+}
+
+static void compile_pop_continue_scope( compiler * c )
+{
+ assert( c->loop_scopes->size > 0 );
+ assert( dynamic_array_at( struct loop_info, c->loop_scopes, c->loop_scopes->size - 1 ).type == LOOP_INFO_CONTINUE );
+ dynamic_array_pop( c->loop_scopes );
+}
+
+static int compile_emit_rule( compiler * c, OBJECT * name, PARSE * parse,
+ int num_arguments, struct arg_list * arguments, int local )
+{
+ struct stored_rule rule;
+ rule.name = object_copy( name );
+ rule.parse = parse;
+ rule.num_arguments = num_arguments;
+ rule.arguments = arguments;
+ rule.local = local;
+ dynamic_array_push( c->rules, rule );
+ return (int)( c->rules->size - 1 );
+}
+
+static int compile_emit_actions( compiler * c, PARSE * parse )
+{
+ SUBACTION a;
+ a.name = object_copy( parse->string );
+ a.command = function_compile_actions( object_str( parse->string1 ),
+ parse->file, parse->line );
+ a.flags = parse->num;
+ dynamic_array_push( c->actions, a );
+ return (int)( c->actions->size - 1 );
+}
+
+static JAM_FUNCTION * compile_to_function( compiler * c )
+{
+ JAM_FUNCTION * const result = (JAM_FUNCTION*)BJAM_MALLOC( sizeof( JAM_FUNCTION ) );
+ int i;
+ result->base.type = FUNCTION_JAM;
+ result->base.reference_count = 1;
+ result->base.formal_arguments = 0;
+ result->base.num_formal_arguments = 0;
+
+ result->base.rulename = 0;
+
+ result->code_size = c->code->size;
+ result->code = (instruction*)BJAM_MALLOC( c->code->size * sizeof( instruction ) );
+ memcpy( result->code, c->code->data, c->code->size * sizeof( instruction ) );
+
+ result->constants = (OBJECT**)BJAM_MALLOC( c->constants->size * sizeof( OBJECT * ) );
+ if ( c->constants->size != 0 )
+ memcpy( result->constants, c->constants->data,
+ c->constants->size * sizeof( OBJECT * ) );
+ result->num_constants = c->constants->size;
+
+ result->num_subfunctions = c->rules->size;
+ result->functions = (SUBFUNCTION*)BJAM_MALLOC( c->rules->size * sizeof( SUBFUNCTION ) );
+ for ( i = 0; i < c->rules->size; ++i )
+ {
+ struct stored_rule * const rule = &dynamic_array_at( struct stored_rule,
+ c->rules, i );
+ result->functions[ i ].name = rule->name;
+ result->functions[ i ].code = function_compile( rule->parse );
+ result->functions[ i ].code->num_formal_arguments = rule->num_arguments;
+ result->functions[ i ].code->formal_arguments = rule->arguments;
+ result->functions[ i ].local = rule->local;
+ }
+
+ result->actions = (SUBACTION*)BJAM_MALLOC( c->actions->size * sizeof( SUBACTION ) );
+ if ( c->actions->size != 0 )
+ memcpy( result->actions, c->actions->data,
+ c->actions->size * sizeof( SUBACTION ) );
+ result->num_subactions = c->actions->size;
+
+ result->generic = 0;
+
+ result->file = 0;
+ result->line = -1;
+
+ return result;
+}
+
+
+/*
+ * Parsing of variable expansions
+ */
+
+typedef struct VAR_PARSE_GROUP
+{
+ struct dynamic_array elems[ 1 ];
+} VAR_PARSE_GROUP;
+
+typedef struct VAR_PARSE_ACTIONS
+{
+ struct dynamic_array elems[ 1 ];
+} VAR_PARSE_ACTIONS;
+
+#define VAR_PARSE_TYPE_VAR 0
+#define VAR_PARSE_TYPE_STRING 1
+#define VAR_PARSE_TYPE_FILE 2
+
+typedef struct _var_parse
+{
+ int type; /* string, variable or file */
+} VAR_PARSE;
+
+typedef struct
+{
+ VAR_PARSE base;
+ VAR_PARSE_GROUP * name;
+ VAR_PARSE_GROUP * subscript;
+ struct dynamic_array modifiers[ 1 ];
+} VAR_PARSE_VAR;
+
+typedef struct
+{
+ VAR_PARSE base;
+ OBJECT * s;
+} VAR_PARSE_STRING;
+
+typedef struct
+{
+ VAR_PARSE base;
+ struct dynamic_array filename[ 1 ];
+ struct dynamic_array contents[ 1 ];
+} VAR_PARSE_FILE;
+
+static void var_parse_free( VAR_PARSE * );
+
+
+/*
+ * VAR_PARSE_GROUP
+ */
+
+static VAR_PARSE_GROUP * var_parse_group_new()
+{
+ VAR_PARSE_GROUP * const result = (VAR_PARSE_GROUP*)BJAM_MALLOC( sizeof( VAR_PARSE_GROUP ) );
+ dynamic_array_init( result->elems );
+ return result;
+}
+
+static void var_parse_group_free( VAR_PARSE_GROUP * group )
+{
+ int i;
+ for ( i = 0; i < group->elems->size; ++i )
+ var_parse_free( dynamic_array_at( VAR_PARSE *, group->elems, i ) );
+ dynamic_array_free( group->elems );
+ BJAM_FREE( group );
+}
+
+static void var_parse_group_add( VAR_PARSE_GROUP * group, VAR_PARSE * elem )
+{
+ dynamic_array_push( group->elems, elem );
+}
+
+static void var_parse_group_maybe_add_constant( VAR_PARSE_GROUP * group,
+ char const * start, char const * end )
+{
+ if ( start != end )
+ {
+ string buf[ 1 ];
+ VAR_PARSE_STRING * const value = (VAR_PARSE_STRING *)BJAM_MALLOC(
+ sizeof(VAR_PARSE_STRING) );
+ value->base.type = VAR_PARSE_TYPE_STRING;
+ string_new( buf );
+ string_append_range( buf, start, end );
+ value->s = object_new( buf->value );
+ string_free( buf );
+ var_parse_group_add( group, (VAR_PARSE *)value );
+ }
+}
+
+VAR_PARSE_STRING * var_parse_group_as_literal( VAR_PARSE_GROUP * group )
+{
+ if ( group->elems->size == 1 )
+ {
+ VAR_PARSE * result = dynamic_array_at( VAR_PARSE *, group->elems, 0 );
+ if ( result->type == VAR_PARSE_TYPE_STRING )
+ return (VAR_PARSE_STRING *)result;
+ }
+ return 0;
+}
+
+
+/*
+ * VAR_PARSE_ACTIONS
+ */
+
+static VAR_PARSE_ACTIONS * var_parse_actions_new()
+{
+ VAR_PARSE_ACTIONS * const result = (VAR_PARSE_ACTIONS *)BJAM_MALLOC(
+ sizeof(VAR_PARSE_ACTIONS) );
+ dynamic_array_init( result->elems );
+ return result;
+}
+
+static void var_parse_actions_free( VAR_PARSE_ACTIONS * actions )
+{
+ int i;
+ for ( i = 0; i < actions->elems->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ actions->elems, i ) );
+ dynamic_array_free( actions->elems );
+ BJAM_FREE( actions );
+}
+
+
+/*
+ * VAR_PARSE_VAR
+ */
+
+static VAR_PARSE_VAR * var_parse_var_new()
+{
+ VAR_PARSE_VAR * result = (VAR_PARSE_VAR*)BJAM_MALLOC( sizeof( VAR_PARSE_VAR ) );
+ result->base.type = VAR_PARSE_TYPE_VAR;
+ result->name = var_parse_group_new();
+ result->subscript = 0;
+ dynamic_array_init( result->modifiers );
+ return result;
+}
+
+static void var_parse_var_free( VAR_PARSE_VAR * var )
+{
+ int i;
+ var_parse_group_free( var->name );
+ if ( var->subscript )
+ var_parse_group_free( var->subscript );
+ for ( i = 0; i < var->modifiers->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ var->modifiers, i ) );
+ dynamic_array_free( var->modifiers );
+ BJAM_FREE( var );
+}
+
+static VAR_PARSE_GROUP * var_parse_var_new_modifier( VAR_PARSE_VAR * var )
+{
+ VAR_PARSE_GROUP * result = var_parse_group_new();
+ dynamic_array_push( var->modifiers, result );
+ return result;
+}
+
+
+/*
+ * VAR_PARSE_STRING
+ */
+
+static void var_parse_string_free( VAR_PARSE_STRING * string )
+{
+ object_free( string->s );
+ BJAM_FREE( string );
+}
+
+
+/*
+ * VAR_PARSE_FILE
+ */
+
+static VAR_PARSE_FILE * var_parse_file_new( void )
+{
+ VAR_PARSE_FILE * const result = (VAR_PARSE_FILE *)BJAM_MALLOC( sizeof(
+ VAR_PARSE_FILE ) );
+ result->base.type = VAR_PARSE_TYPE_FILE;
+ dynamic_array_init( result->filename );
+ dynamic_array_init( result->contents );
+ return result;
+}
+
+static void var_parse_file_free( VAR_PARSE_FILE * file )
+{
+ int i;
+ for ( i = 0; i < file->filename->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ file->filename, i ) );
+ dynamic_array_free( file->filename );
+ for ( i = 0; i < file->contents->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ file->contents, i ) );
+ dynamic_array_free( file->contents );
+ BJAM_FREE( file );
+}
+
+
+/*
+ * VAR_PARSE
+ */
+
+static void var_parse_free( VAR_PARSE * parse )
+{
+ switch ( parse->type )
+ {
+ case VAR_PARSE_TYPE_VAR:
+ var_parse_var_free( (VAR_PARSE_VAR *)parse );
+ break;
+
+ case VAR_PARSE_TYPE_STRING:
+ var_parse_string_free( (VAR_PARSE_STRING *)parse );
+ break;
+
+ case VAR_PARSE_TYPE_FILE:
+ var_parse_file_free( (VAR_PARSE_FILE *)parse );
+ break;
+
+ default:
+ assert( !"Invalid type" );
+ }
+}
+
+
+/*
+ * Compile VAR_PARSE
+ */
+
+static void var_parse_group_compile( VAR_PARSE_GROUP const * parse,
+ compiler * c );
+
+static void var_parse_var_compile( VAR_PARSE_VAR const * parse, compiler * c )
+{
+ int expand_name = 0;
+ int is_get_grist = 0;
+ int has_modifiers = 0;
+ /* Special case common modifiers */
+ if ( parse->modifiers->size == 1 )
+ {
+ VAR_PARSE_GROUP * mod = dynamic_array_at( VAR_PARSE_GROUP *, parse->modifiers, 0 );
+ if ( mod->elems->size == 1 )
+ {
+ VAR_PARSE * mod1 = dynamic_array_at( VAR_PARSE *, mod->elems, 0 );
+ if ( mod1->type == VAR_PARSE_TYPE_STRING )
+ {
+ OBJECT * s = ( (VAR_PARSE_STRING *)mod1 )->s;
+ if ( ! strcmp ( object_str( s ), "G" ) )
+ {
+ is_get_grist = 1;
+ }
+ }
+ }
+ }
+ /* If there are modifiers, emit them in reverse order. */
+ if ( parse->modifiers->size > 0 && !is_get_grist )
+ {
+ int i;
+ has_modifiers = 1;
+ for ( i = 0; i < parse->modifiers->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ parse->modifiers, parse->modifiers->size - i - 1 ), c );
+ }
+
+ /* If there is a subscript, emit it. */
+ if ( parse->subscript )
+ var_parse_group_compile( parse->subscript, c );
+
+ /* If the variable name is empty, look it up. */
+ if ( parse->name->elems->size == 0 )
+ compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c,
+ constant_empty ) );
+ /* If the variable name does not need to be expanded, look it up. */
+ else if ( parse->name->elems->size == 1 && dynamic_array_at( VAR_PARSE *,
+ parse->name->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
+ {
+ OBJECT * const name = ( (VAR_PARSE_STRING *)dynamic_array_at(
+ VAR_PARSE *, parse->name->elems, 0 ) )->s;
+ int const idx = get_argument_index( object_str( name ) );
+ if ( idx != -1 )
+ compile_emit( c, INSTR_PUSH_ARG, idx );
+ else
+ compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c, name ) );
+ }
+ /* Otherwise, push the var names and use the group instruction. */
+ else
+ {
+ var_parse_group_compile( parse->name, c );
+ expand_name = 1;
+ }
+
+ /** Select the instruction for expanding the variable. */
+ if ( !has_modifiers && !parse->subscript && !expand_name )
+ ;
+ else if ( !has_modifiers && !parse->subscript && expand_name )
+ compile_emit( c, INSTR_PUSH_GROUP, 0 );
+ else if ( !has_modifiers && parse->subscript && !expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX, 0 );
+ else if ( !has_modifiers && parse->subscript && expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX_GROUP, 0 );
+ else if ( has_modifiers && !parse->subscript && !expand_name )
+ compile_emit( c, INSTR_APPLY_MODIFIERS, parse->modifiers->size );
+ else if ( has_modifiers && !parse->subscript && expand_name )
+ compile_emit( c, INSTR_APPLY_MODIFIERS_GROUP, parse->modifiers->size );
+ else if ( has_modifiers && parse->subscript && !expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS, parse->modifiers->size );
+ else if ( has_modifiers && parse->subscript && expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS_GROUP,
+ parse->modifiers->size );
+
+ /* Now apply any special modifiers */
+ if ( is_get_grist )
+ {
+ compile_emit( c, INSTR_GET_GRIST, 0 );
+ }
+}
+
+static void var_parse_string_compile( VAR_PARSE_STRING const * parse,
+ compiler * c )
+{
+ compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, parse->s )
+ );
+}
+
+static void var_parse_file_compile( VAR_PARSE_FILE const * parse, compiler * c )
+{
+ int i;
+ for ( i = 0; i < parse->filename->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ parse->filename, parse->filename->size - i - 1 ), c );
+ compile_emit( c, INSTR_APPEND_STRINGS, parse->filename->size );
+ for ( i = 0; i < parse->contents->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ parse->contents, parse->contents->size - i - 1 ), c );
+ compile_emit( c, INSTR_WRITE_FILE, parse->contents->size );
+}
+
+static void var_parse_compile( VAR_PARSE const * parse, compiler * c )
+{
+ switch ( parse->type )
+ {
+ case VAR_PARSE_TYPE_VAR:
+ var_parse_var_compile( (VAR_PARSE_VAR const *)parse, c );
+ break;
+
+ case VAR_PARSE_TYPE_STRING:
+ var_parse_string_compile( (VAR_PARSE_STRING const *)parse, c );
+ break;
+
+ case VAR_PARSE_TYPE_FILE:
+ var_parse_file_compile( (VAR_PARSE_FILE const *)parse, c );
+ break;
+
+ default:
+ assert( !"Unknown var parse type." );
+ }
+}
+
+static void var_parse_group_compile( VAR_PARSE_GROUP const * parse, compiler * c
+ )
+{
+ /* Emit the elements in reverse order. */
+ int i;
+ for ( i = 0; i < parse->elems->size; ++i )
+ var_parse_compile( dynamic_array_at( VAR_PARSE *, parse->elems,
+ parse->elems->size - i - 1 ), c );
+ /* If there are no elements, emit an empty string. */
+ if ( parse->elems->size == 0 )
+ compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c,
+ constant_empty ) );
+ /* If there is more than one element, combine them. */
+ if ( parse->elems->size > 1 )
+ compile_emit( c, INSTR_COMBINE_STRINGS, parse->elems->size );
+}
+
+static void var_parse_actions_compile( VAR_PARSE_ACTIONS const * actions,
+ compiler * c )
+{
+ int i;
+ for ( i = 0; i < actions->elems->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ actions->elems, actions->elems->size - i - 1 ), c );
+ compile_emit( c, INSTR_OUTPUT_STRINGS, actions->elems->size );
+}
+
+
+/*
+ * Parse VAR_PARSE_VAR
+ */
+
+static VAR_PARSE * parse_at_file( char const * start, char const * mid,
+ char const * end );
+static VAR_PARSE * parse_variable( char const * * string );
+static int try_parse_variable( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out );
+static void balance_parentheses( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out );
+static void parse_var_string( char const * first, char const * last,
+ struct dynamic_array * out );
+
+
+/*
+ * Parses a string that can contain variables to expand.
+ */
+
+static VAR_PARSE_GROUP * parse_expansion( char const * * string )
+{
+ VAR_PARSE_GROUP * result = var_parse_group_new();
+ char const * s = *string;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, result ) ) {}
+ else if ( s[ 0 ] == '\0' )
+ {
+ var_parse_group_maybe_add_constant( result, *string, s );
+ return result;
+ }
+ else
+ ++s;
+ }
+}
+
+static VAR_PARSE_ACTIONS * parse_actions( char const * string )
+{
+ VAR_PARSE_ACTIONS * const result = var_parse_actions_new();
+ parse_var_string( string, string + strlen( string ), result->elems );
+ return result;
+}
+
+/*
+ * Checks whether the string at *s_ starts with a variable expansion "$(".
+ * *string should point to the first unemitted character before *s_. If *s_
+ * starts with a variable expansion, appends elements to out up to the closing
+ * ")" and adjusts *s_ and *string to point to the next character. Returns 1 if
+ * *s_ starts with a variable expansion, 0 otherwise.
+ */
+
+static int try_parse_variable( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out )
+{
+ char const * s = *s_;
+ if ( s[ 0 ] == '$' && s[ 1 ] == '(' )
+ {
+ var_parse_group_maybe_add_constant( out, *string, s );
+ s += 2;
+ var_parse_group_add( out, parse_variable( &s ) );
+ *string = s;
+ *s_ = s;
+ return 1;
+ }
+ if ( s[ 0 ] == '@' && s[ 1 ] == '(' )
+ {
+ int depth = 1;
+ char const * ine;
+ char const * split = 0;
+ var_parse_group_maybe_add_constant( out, *string, s );
+ s += 2;
+ ine = s;
+
+ /* Scan the content of the response file @() section. */
+ while ( *ine && ( depth > 0 ) )
+ {
+ switch ( *ine )
+ {
+ case '(': ++depth; break;
+ case ')': --depth; break;
+ case ':':
+ if ( ( depth == 1 ) && ( ine[ 1 ] == 'E' ) && ( ine[ 2 ] == '='
+ ) )
+ split = ine;
+ break;
+ }
+ ++ine;
+ }
+
+ if ( !split || depth )
+ return 0;
+
+ var_parse_group_add( out, parse_at_file( s, split, ine - 1 ) );
+ *string = ine;
+ *s_ = ine;
+ return 1;
+ }
+ return 0;
+}
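+
+/* Editor's note -- illustration only, not part of the upstream sources: for
+ * "$(SOURCES:S=.o)" the function consumes the whole expansion via
+ * parse_variable(); for a response file reference such as
+ * "@(out.rsp:E=$(OBJECTS))" it locates the ":E=" split and hands the
+ * filename and contents parts to parse_at_file().
+ */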
+
+
+static char const * current_file = "";
+static int current_line;
+
+static void parse_error( char const * message )
+{
+ out_printf( "%s:%d: %s\n", current_file, current_line, message );
+}
+
+
+/*
+ * Parses a single variable up to the closing ")" and adjusts *string to point
+ * to the next character. *string should point to the character immediately
+ * after the initial "$(".
+ */
+
+static VAR_PARSE * parse_variable( char const * * string )
+{
+ VAR_PARSE_VAR * const result = var_parse_var_new();
+ VAR_PARSE_GROUP * const name = result->name;
+ char const * s = *string;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, name ) ) {}
+ else if ( s[ 0 ] == ':' )
+ {
+ VAR_PARSE_GROUP * mod;
+ var_parse_group_maybe_add_constant( name, *string, s );
+ ++s;
+ *string = s;
+ mod = var_parse_var_new_modifier( result );
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, mod ) ) {}
+ else if ( s[ 0 ] == ')' )
+ {
+ var_parse_group_maybe_add_constant( mod, *string, s );
+ *string = ++s;
+ return (VAR_PARSE *)result;
+ }
+ else if ( s[ 0 ] == '(' )
+ {
+ ++s;
+ balance_parentheses( &s, string, mod );
+ }
+ else if ( s[ 0 ] == ':' )
+ {
+ var_parse_group_maybe_add_constant( mod, *string, s );
+ *string = ++s;
+ mod = var_parse_var_new_modifier( result );
+ }
+ else if ( s[ 0 ] == '[' )
+ {
+ parse_error("unexpected subscript");
+ ++s;
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "unbalanced parentheses" );
+ var_parse_group_maybe_add_constant( mod, *string, s );
+ *string = s;
+ return (VAR_PARSE *)result;
+ }
+ else
+ ++s;
+ }
+ }
+ else if ( s[ 0 ] == '[' )
+ {
+ VAR_PARSE_GROUP * subscript = var_parse_group_new();
+ result->subscript = subscript;
+ var_parse_group_maybe_add_constant( name, *string, s );
+ *string = ++s;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, subscript ) ) {}
+ else if ( s[ 0 ] == ']' )
+ {
+ var_parse_group_maybe_add_constant( subscript, *string, s );
+ *string = ++s;
+ if ( s[ 0 ] != ')' && s[ 0 ] != ':' && s[ 0 ] != '\0' )
+ parse_error( "unexpected text following []" );
+ break;
+ }
+ else if ( isdigit( s[ 0 ] ) || s[ 0 ] == '-' )
+ {
+ ++s;
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "malformed subscript" );
+ break;
+ }
+ else
+ {
+ parse_error( "malformed subscript" );
+ ++s;
+ }
+ }
+ }
+ else if ( s[ 0 ] == ')' )
+ {
+ var_parse_group_maybe_add_constant( name, *string, s );
+ *string = ++s;
+ return (VAR_PARSE *)result;
+ }
+ else if ( s[ 0 ] == '(' )
+ {
+ ++s;
+ balance_parentheses( &s, string, name );
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "unbalanced parentheses" );
+ var_parse_group_maybe_add_constant( name, *string, s );
+ *string = s;
+ return (VAR_PARSE *)result;
+ }
+ else
+ ++s;
+ }
+}
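+
+/* Editor's note -- illustration only, not part of the upstream sources: for
+ * the input "OBJ[1-2]:S=.o)" (i.e. the text following "$(") the resulting
+ * VAR_PARSE_VAR has name = "OBJ", subscript = "1-2" and a single modifier
+ * group "S=.o"; nested expansions inside any of those parts are parsed
+ * recursively via try_parse_variable().
+ */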
+
+static void parse_var_string( char const * first, char const * last,
+ struct dynamic_array * out )
+{
+ char const * saved = first;
+ while ( first != last )
+ {
+ /* Handle whitespace. */
+ while ( first != last && isspace( *first ) ) ++first;
+ if ( saved != first )
+ {
+ VAR_PARSE_GROUP * const group = var_parse_group_new();
+ var_parse_group_maybe_add_constant( group, saved, first );
+ saved = first;
+ dynamic_array_push( out, group );
+ }
+ if ( first == last ) break;
+
+ /* Handle non-whitespace */
+ {
+ VAR_PARSE_GROUP * group = var_parse_group_new();
+ for ( ; ; )
+ {
+ if ( first == last || isspace( *first ) )
+ {
+ var_parse_group_maybe_add_constant( group, saved, first );
+ saved = first;
+ break;
+ }
+ if ( try_parse_variable( &first, &saved, group ) )
+ assert( first <= last );
+ else
+ ++first;
+ }
+ dynamic_array_push( out, group );
+ }
+ }
+}
+
+/*
+ * start should point to the character immediately following the opening "@(",
+ * mid should point to the ":E=", and end should point to the closing ")".
+ */
+
+static VAR_PARSE * parse_at_file( char const * start, char const * mid,
+ char const * end )
+{
+ VAR_PARSE_FILE * result = var_parse_file_new();
+ parse_var_string( start, mid, result->filename );
+ parse_var_string( mid + 3, end, result->contents );
+ return (VAR_PARSE *)result;
+}
+
+/*
+ * Given that *s_ points to the character after a "(", parses up to the matching
+ * ")". *string should point to the first unemitted character before *s_.
+ *
+ * When the function returns, *s_ will point to the character after the ")", and
+ * *string will point to the first unemitted character before *s_. The range
+ * from *string to *s_ does not contain any variables that need to be expanded.
+ */
+
+void balance_parentheses( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out)
+{
+ int depth = 1;
+ char const * s = *s_;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, out ) ) { }
+ else if ( s[ 0 ] == ':' || s[ 0 ] == '[' )
+ {
+ parse_error( "unbalanced parentheses" );
+ ++s;
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "unbalanced parentheses" );
+ break;
+ }
+ else if ( s[ 0 ] == ')' )
+ {
+ ++s;
+ if ( --depth == 0 ) break;
+ }
+ else if ( s[ 0 ] == '(' )
+ {
+ ++depth;
+ ++s;
+ }
+ else
+ ++s;
+ }
+ *s_ = s;
+}
+
+
+/*
+ * Main compile.
+ */
+
+#define RESULT_STACK 0
+#define RESULT_RETURN 1
+#define RESULT_NONE 2
+
+static void compile_parse( PARSE * parse, compiler * c, int result_location );
+static struct arg_list * arg_list_compile( PARSE * parse, int * num_arguments );
+
+static void compile_condition( PARSE * parse, compiler * c, int branch_true, int label )
+{
+ assert( parse->type == PARSE_EVAL );
+ switch ( parse->num )
+ {
+ case EXPR_EXISTS:
+ compile_parse( parse->left, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_NOT_EMPTY, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_EMPTY, label );
+ break;
+
+ case EXPR_EQUALS:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_EQ, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_NE, label );
+ break;
+
+ case EXPR_NOTEQ:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_NE, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_EQ, label );
+ break;
+
+ case EXPR_LESS:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_LT, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_GE, label );
+ break;
+
+ case EXPR_LESSEQ:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_LE, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_GT, label );
+ break;
+
+ case EXPR_MORE:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_GT, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_LE, label );
+ break;
+
+ case EXPR_MOREEQ:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_GE, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_LT, label );
+ break;
+
+ case EXPR_IN:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_IN, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_NOT_IN, label );
+ break;
+
+ case EXPR_AND:
+ if ( branch_true )
+ {
+ int f = compile_new_label( c );
+ compile_condition( parse->left, c, 0, f );
+ compile_condition( parse->right, c, 1, label );
+ compile_set_label( c, f );
+ }
+ else
+ {
+ compile_condition( parse->left, c, 0, label );
+ compile_condition( parse->right, c, 0, label );
+ }
+ break;
+
+ case EXPR_OR:
+ if ( branch_true )
+ {
+ compile_condition( parse->left, c, 1, label );
+ compile_condition( parse->right, c, 1, label );
+ }
+ else
+ {
+ int t = compile_new_label( c );
+ compile_condition( parse->left, c, 1, t );
+ compile_condition( parse->right, c, 0, label );
+ compile_set_label( c, t );
+ }
+ break;
+
+ case EXPR_NOT:
+ compile_condition( parse->left, c, !branch_true, label );
+ break;
+ }
+}
+
+static void adjust_result( compiler * c, int actual_location,
+ int desired_location )
+{
+ if ( actual_location == desired_location )
+ ;
+ else if ( actual_location == RESULT_STACK && desired_location == RESULT_RETURN )
+ compile_emit( c, INSTR_SET_RESULT, 0 );
+ else if ( actual_location == RESULT_STACK && desired_location == RESULT_NONE )
+ compile_emit( c, INSTR_POP, 0 );
+ else if ( actual_location == RESULT_RETURN && desired_location == RESULT_STACK )
+ compile_emit( c, INSTR_PUSH_RESULT, 0 );
+ else if ( actual_location == RESULT_RETURN && desired_location == RESULT_NONE )
+ ;
+ else if ( actual_location == RESULT_NONE && desired_location == RESULT_STACK )
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ else if ( actual_location == RESULT_NONE && desired_location == RESULT_RETURN )
+ {
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_emit( c, INSTR_SET_RESULT, 0 );
+ }
+ else
+ assert( !"invalid result location" );
+}
+
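+/* compile_append_chain() flattens a left-recursive PARSE_APPEND chain.  As a
+ * rough illustration, the list `a $(b) c` compiles to:
+ *
+ *     <push a> ; <push $(b)> ; PUSH_APPEND ; <push c> ; PUSH_APPEND
+ */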
+static void compile_append_chain( PARSE * parse, compiler * c )
+{
+ assert( parse->type == PARSE_APPEND );
+ if ( parse->left->type == PARSE_NULL )
+ compile_parse( parse->right, c, RESULT_STACK );
+ else
+ {
+ if ( parse->left->type == PARSE_APPEND )
+ compile_append_chain( parse->left, c );
+ else
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_APPEND, 0 );
+ }
+}
+
+static void compile_emit_debug(compiler * c, int line)
+{
+#ifdef JAM_DEBUGGER
+ if ( debug_is_debugging() )
+ compile_emit( c, INSTR_DEBUG_LINE, line );
+#endif
+}
+
+static void compile_parse( PARSE * parse, compiler * c, int result_location )
+{
+ compile_emit_debug(c, parse->line);
+ if ( parse->type == PARSE_APPEND )
+ {
+ compile_append_chain( parse, c );
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_EVAL )
+ {
+ /* FIXME: This is only needed because of the bizarre parsing of
+ * conditions.
+ */
+ if ( parse->num == EXPR_EXISTS )
+ compile_parse( parse->left, c, result_location );
+ else
+ {
+ int f = compile_new_label( c );
+ int end = compile_new_label( c );
+
+ out_printf( "%s:%d: Conditional used as list (check operator "
+ "precedence).\n", object_str( parse->file ), parse->line );
+
+ /* Emit the condition */
+ compile_condition( parse, c, 0, f );
+ compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c,
+ constant_true ) );
+ compile_emit_branch( c, INSTR_JUMP, end );
+ compile_set_label( c, f );
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_set_label( c, end );
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ }
+ else if ( parse->type == PARSE_FOREACH )
+ {
+ int var = compile_emit_constant( c, parse->string );
+ int top = compile_new_label( c );
+ int end = compile_new_label( c );
+ int continue_ = compile_new_label( c );
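+
+ /* Rough layout of the emitted code (the PUSH_LOCAL block only when the
+ * loop variable is declared local):
+ *
+ * <list> [PUSH_EMPTY ; PUSH_LOCAL var ; SWAP 1] FOR_INIT
+ * top: FOR_LOOP end ; SET var ; <body>
+ * continue: JUMP top
+ * end: [POP_LOCAL var]
+ */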
+
+ /*
+ * Evaluate the list.
+ */
+ compile_parse( parse->left, c, RESULT_STACK );
+
+ /* Localize the loop variable */
+ if ( parse->num )
+ {
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_emit( c, INSTR_PUSH_LOCAL, var );
+ compile_emit( c, INSTR_SWAP, 1 );
+ compile_push_cleanup( c, INSTR_POP_LOCAL, var );
+ }
+
+ compile_emit( c, INSTR_FOR_INIT, 0 );
+ compile_set_label( c, top );
+ compile_emit_branch( c, INSTR_FOR_LOOP, end );
+ compile_emit_debug( c, parse->line );
+ compile_emit( c, INSTR_SET, var );
+
+ compile_push_break_scope( c, end );
+ compile_push_cleanup( c, INSTR_FOR_POP, 0 );
+ compile_push_continue_scope( c, continue_ );
+
+ /* Run the loop body */
+ compile_parse( parse->right, c, RESULT_NONE );
+
+ compile_pop_continue_scope( c );
+ compile_pop_cleanup( c );
+ compile_pop_break_scope( c );
+
+ compile_set_label( c, continue_ );
+ compile_emit_branch( c, INSTR_JUMP, top );
+ compile_set_label( c, end );
+
+ if ( parse->num )
+ {
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_LOCAL, var );
+ }
+
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_IF )
+ {
+ int f = compile_new_label( c );
+ /* Emit the condition */
+ compile_condition( parse->left, c, 0, f );
+ /* Emit the if block */
+ compile_parse( parse->right, c, result_location );
+ if ( parse->third->type != PARSE_NULL || result_location != RESULT_NONE )
+ {
+ /* Emit the else block */
+ int end = compile_new_label( c );
+ compile_emit_branch( c, INSTR_JUMP, end );
+ compile_set_label( c, f );
+ compile_parse( parse->third, c, result_location );
+ compile_set_label( c, end );
+ }
+ else
+ compile_set_label( c, f );
+
+ }
+ else if ( parse->type == PARSE_WHILE )
+ {
+ int nested_result = result_location == RESULT_NONE
+ ? RESULT_NONE
+ : RESULT_RETURN;
+ int test = compile_new_label( c );
+ int top = compile_new_label( c );
+ int end = compile_new_label( c );
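+
+ /* Rough layout of the emitted code:
+ *
+ * [PUSH_EMPTY ; SET_RESULT] JUMP test
+ * top: <body>
+ * test: <condition, jump to top when true>
+ * end:
+ */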
+ /* Make sure that we return an empty list if the loop runs zero times.
+ */
+ adjust_result( c, RESULT_NONE, nested_result );
+ /* Jump to the loop test. */
+ compile_emit_branch( c, INSTR_JUMP, test );
+ compile_set_label( c, top );
+ /* Emit the loop body. */
+ compile_push_break_scope( c, end );
+ compile_push_continue_scope( c, test );
+ compile_parse( parse->right, c, nested_result );
+ compile_pop_continue_scope( c );
+ compile_pop_break_scope( c );
+ /* Emit the condition. */
+ compile_set_label( c, test );
+ compile_condition( parse->left, c, 1, top );
+ compile_set_label( c, end );
+
+ adjust_result( c, nested_result, result_location );
+ }
+ else if ( parse->type == PARSE_INCLUDE )
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_INCLUDE, 0 );
+ compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 );
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_MODULE )
+ {
+ int const nested_result = result_location == RESULT_NONE
+ ? RESULT_NONE
+ : RESULT_RETURN;
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_MODULE, 0 );
+ compile_push_cleanup( c, INSTR_POP_MODULE, 0 );
+ compile_parse( parse->right, c, nested_result );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_MODULE, 0 );
+ adjust_result( c, nested_result, result_location );
+ }
+ else if ( parse->type == PARSE_CLASS )
+ {
+ /* Evaluate the class name. */
+ compile_parse( parse->left->right, c, RESULT_STACK );
+ /* Evaluate the base classes. */
+ if ( parse->left->left )
+ compile_parse( parse->left->left->right, c, RESULT_STACK );
+ else
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_emit( c, INSTR_CLASS, 0 );
+ compile_push_cleanup( c, INSTR_POP_MODULE, 0 );
+ compile_parse( parse->right, c, RESULT_NONE );
+ compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_MODULE, 0 );
+
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_LIST )
+ {
+ OBJECT * const o = parse->string;
+ char const * s = object_str( o );
+ VAR_PARSE_GROUP * group;
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_LOCAL )
+ {
+ int nested_result = result_location == RESULT_NONE
+ ? RESULT_NONE
+ : RESULT_RETURN;
+ /* This should be a left-recursive group of compile_appends. */
+ PARSE * vars = parse->left;
+
+ /* Special case an empty list of vars */
+ if ( vars->type == PARSE_NULL )
+ {
+ compile_parse( parse->right, c, RESULT_NONE );
+ compile_parse( parse->third, c, result_location );
+ nested_result = result_location;
+ }
+ /* Check whether there is exactly one variable with a constant name. */
+ else if ( vars->left->type == PARSE_NULL &&
+ vars->right->type == PARSE_LIST )
+ {
+ char const * s = object_str( vars->right->string );
+ VAR_PARSE_GROUP * group;
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+ if ( group->elems->size == 1 && dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
+ {
+ int const name = compile_emit_constant( c, (
+ (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 ) )->s );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit_debug(c, parse->line);
+ compile_emit( c, INSTR_PUSH_LOCAL, name );
+ compile_push_cleanup( c, INSTR_POP_LOCAL, name );
+ compile_parse( parse->third, c, nested_result );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_LOCAL, name );
+ }
+ else
+ {
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit_debug(c, parse->line);
+ compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
+ compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 );
+ compile_parse( parse->third, c, nested_result );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 );
+ }
+ }
+ else
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit_debug(c, parse->line);
+ compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
+ compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 );
+ compile_parse( parse->third, c, nested_result );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 );
+ }
+ adjust_result( c, nested_result, result_location );
+ }
+ else if ( parse->type == PARSE_ON )
+ {
+ if ( parse->right->type == PARSE_APPEND &&
+ parse->right->left->type == PARSE_NULL &&
+ parse->right->right->type == PARSE_LIST )
+ {
+ /* [ on $(target) return $(variable) ] */
+ PARSE * value = parse->right->right;
+ OBJECT * const o = value->string;
+ char const * s = object_str( o );
+ VAR_PARSE_GROUP * group;
+ OBJECT * varname = 0;
+ current_file = object_str( value->file );
+ current_line = value->line;
+ group = parse_expansion( &s );
+ if ( group->elems->size == 1 )
+ {
+ VAR_PARSE * one = dynamic_array_at( VAR_PARSE *, group->elems, 0 );
+ if ( one->type == VAR_PARSE_TYPE_VAR )
+ {
+ VAR_PARSE_VAR * var = ( VAR_PARSE_VAR * )one;
+ if ( var->modifiers->size == 0 && !var->subscript && var->name->elems->size == 1 )
+ {
+ VAR_PARSE * name = dynamic_array_at( VAR_PARSE *, var->name->elems, 0 );
+ if ( name->type == VAR_PARSE_TYPE_STRING )
+ {
+ varname = ( ( VAR_PARSE_STRING * )name )->s;
+ }
+ }
+ }
+ }
+ if ( varname )
+ {
+ /* We have one variable with a fixed name and no modifiers. */
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_GET_ON, compile_emit_constant( c, varname ) );
+ }
+ else
+ {
+ /* Too complex. Fall back on push/pop. */
+ int end = compile_new_label( c );
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit_branch( c, INSTR_PUSH_ON, end );
+ compile_push_cleanup( c, INSTR_POP_ON, 0 );
+ var_parse_group_compile( group, c );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_ON, 0 );
+ compile_set_label( c, end );
+ }
+ var_parse_group_free( group );
+ }
+ else
+ {
+ int end = compile_new_label( c );
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit_branch( c, INSTR_PUSH_ON, end );
+ compile_push_cleanup( c, INSTR_POP_ON, 0 );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_pop_cleanup( c );
+ compile_emit( c, INSTR_POP_ON, 0 );
+ compile_set_label( c, end );
+ }
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_RULE )
+ {
+ PARSE * p;
+ int n = 0;
+ VAR_PARSE_GROUP * group;
+ char const * s = object_str( parse->string );
+
+ if ( parse->left->left || parse->left->right->type != PARSE_NULL )
+ for ( p = parse->left; p; p = p->left )
+ {
+ compile_parse( p->right, c, RESULT_STACK );
+ ++n;
+ }
+
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+
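+ /* A rule name of the form `$(object).method` is a member rule call: the
+ * object expression is pushed and CALL_MEMBER_RULE is emitted with the
+ * method name (without the leading dot) as a constant.
+ */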
+ if ( group->elems->size == 2 &&
+ dynamic_array_at( VAR_PARSE *, group->elems, 0 )->type == VAR_PARSE_TYPE_VAR &&
+ dynamic_array_at( VAR_PARSE *, group->elems, 1 )->type == VAR_PARSE_TYPE_STRING &&
+ ( object_str( ( (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 1 ) )->s )[ 0 ] == '.' ) )
+ {
+ VAR_PARSE_STRING * access = (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 1 );
+ OBJECT * member = object_new( object_str( access->s ) + 1 );
+ /* Emit the object */
+ var_parse_var_compile( (VAR_PARSE_VAR *)dynamic_array_at( VAR_PARSE *, group->elems, 0 ), c );
+ var_parse_group_free( group );
+ compile_emit( c, INSTR_CALL_MEMBER_RULE, n );
+ compile_emit( c, compile_emit_constant( c, member ), parse->line );
+ object_free( member );
+ }
+ else
+ {
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ compile_emit( c, INSTR_CALL_RULE, n );
+ compile_emit( c, compile_emit_constant( c, parse->string ), parse->line );
+ }
+
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_RULES )
+ {
+ do compile_parse( parse->left, c, RESULT_NONE );
+ while ( ( parse = parse->right )->type == PARSE_RULES );
+ compile_parse( parse, c, result_location );
+ }
+ else if ( parse->type == PARSE_SET )
+ {
+ PARSE * vars = parse->left;
+ unsigned int op_code;
+ unsigned int op_code_group;
+
+ switch ( parse->num )
+ {
+ case ASSIGN_APPEND: op_code = INSTR_APPEND; op_code_group = INSTR_APPEND_GROUP; break;
+ case ASSIGN_DEFAULT: op_code = INSTR_DEFAULT; op_code_group = INSTR_DEFAULT_GROUP; break;
+ default: op_code = INSTR_SET; op_code_group = INSTR_SET_GROUP; break;
+ }
+
+ /* Check whether there is exactly one variable with a constant name. */
+ if ( vars->type == PARSE_LIST )
+ {
+ char const * s = object_str( vars->string );
+ VAR_PARSE_GROUP * group;
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+ if ( group->elems->size == 1 && dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
+ {
+ int const name = compile_emit_constant( c, (
+ (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 ) )->s );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit_debug(c, parse->line);
+ if ( result_location != RESULT_NONE )
+ {
+ compile_emit( c, INSTR_SET_RESULT, 1 );
+ }
+ compile_emit( c, op_code, name );
+ }
+ else
+ {
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit_debug(c, parse->line);
+ if ( result_location != RESULT_NONE )
+ {
+ compile_emit( c, INSTR_SET_RESULT, 1 );
+ }
+ compile_emit( c, op_code_group, 0 );
+ }
+ }
+ else
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit_debug(c, parse->line);
+ if ( result_location != RESULT_NONE )
+ {
+ compile_emit( c, INSTR_SET_RESULT, 1 );
+ }
+ compile_emit( c, op_code_group, 0 );
+ }
+ if ( result_location != RESULT_NONE )
+ {
+ adjust_result( c, RESULT_RETURN, result_location );
+ }
+ }
+ else if ( parse->type == PARSE_SETCOMP )
+ {
+ int n_args;
+ struct arg_list * args = arg_list_compile( parse->right, &n_args );
+ int const rule_id = compile_emit_rule( c, parse->string, parse->left,
+ n_args, args, parse->num );
+ compile_emit( c, INSTR_RULE, rule_id );
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_SETEXEC )
+ {
+ int const actions_id = compile_emit_actions( c, parse );
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_ACTIONS, actions_id );
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_SETTINGS )
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->third, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+
+ compile_emit_debug(c, parse->line);
+ switch ( parse->num )
+ {
+ case ASSIGN_APPEND: compile_emit( c, INSTR_APPEND_ON, 0 ); break;
+ case ASSIGN_DEFAULT: compile_emit( c, INSTR_DEFAULT_ON, 0 ); break;
+ default: compile_emit( c, INSTR_SET_ON, 0 ); break;
+ }
+
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_SWITCH )
+ {
+ int const switch_end = compile_new_label( c );
+ compile_parse( parse->left, c, RESULT_STACK );
+
+ for ( parse = parse->right; parse; parse = parse->right )
+ {
+ int const id = compile_emit_constant( c, parse->left->string );
+ int const next_case = compile_new_label( c );
+ compile_emit( c, INSTR_PUSH_CONSTANT, id );
+ compile_emit_branch( c, INSTR_JUMP_NOT_GLOB, next_case );
+ compile_parse( parse->left->left, c, result_location );
+ compile_emit_branch( c, INSTR_JUMP, switch_end );
+ compile_set_label( c, next_case );
+ }
+ compile_emit( c, INSTR_POP, 0 );
+ adjust_result( c, RESULT_NONE, result_location );
+ compile_set_label( c, switch_end );
+ }
+ else if ( parse->type == PARSE_RETURN )
+ {
+ compile_parse( parse->left, c, RESULT_RETURN );
+ compile_emit_cleanups( c, 0 );
+ compile_emit( c, INSTR_RETURN, 0 ); /* 0 for return in the middle of a function. */
+ }
+ else if ( parse->type == PARSE_BREAK )
+ {
+ compile_emit_loop_jump( c, LOOP_INFO_BREAK );
+ }
+ else if ( parse->type == PARSE_CONTINUE )
+ {
+ compile_emit_loop_jump( c, LOOP_INFO_CONTINUE );
+ }
+ else if ( parse->type == PARSE_NULL )
+ adjust_result( c, RESULT_NONE, result_location );
+ else
+ assert( !"unknown PARSE type." );
+}
+
+OBJECT * function_rulename( FUNCTION * function )
+{
+ return function->rulename;
+}
+
+void function_set_rulename( FUNCTION * function, OBJECT * rulename )
+{
+ function->rulename = rulename;
+}
+
+void function_location( FUNCTION * function_, OBJECT * * file, int * line )
+{
+ if ( function_->type == FUNCTION_BUILTIN )
+ {
+ *file = constant_builtin;
+ *line = -1;
+ }
+#ifdef HAVE_PYTHON
+ else if ( function_->type == FUNCTION_PYTHON )
+ {
+ *file = constant_builtin;
+ *line = -1;
+ }
+#endif
+ else
+ {
+ JAM_FUNCTION * function = (JAM_FUNCTION *)function_;
+ assert( function_->type == FUNCTION_JAM );
+ *file = function->file;
+ *line = function->line;
+ }
+}
+
+static struct arg_list * arg_list_compile_builtin( char const * * args,
+ int * num_arguments );
+
+FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int flags ),
+ int flags, char const * * args )
+{
+ BUILTIN_FUNCTION * result = (BUILTIN_FUNCTION*)BJAM_MALLOC( sizeof( BUILTIN_FUNCTION ) );
+ result->base.type = FUNCTION_BUILTIN;
+ result->base.reference_count = 1;
+ result->base.rulename = 0;
+ result->base.formal_arguments = arg_list_compile_builtin( args,
+ &result->base.num_formal_arguments );
+ result->func = func;
+ result->flags = flags;
+ return (FUNCTION *)result;
+}
+
+FUNCTION * function_compile( PARSE * parse )
+{
+ compiler c[ 1 ];
+ JAM_FUNCTION * result;
+ compiler_init( c );
+ compile_parse( parse, c, RESULT_RETURN );
+ compile_emit( c, INSTR_RETURN, 1 );
+ result = compile_to_function( c );
+ compiler_free( c );
+ result->file = object_copy( parse->file );
+ result->line = parse->line;
+ return (FUNCTION *)result;
+}
+
+FUNCTION * function_compile_actions( char const * actions, OBJECT * file,
+ int line )
+{
+ compiler c[ 1 ];
+ JAM_FUNCTION * result;
+ VAR_PARSE_ACTIONS * parse;
+ current_file = object_str( file );
+ current_line = line;
+ parse = parse_actions( actions );
+ compiler_init( c );
+ var_parse_actions_compile( parse, c );
+ var_parse_actions_free( parse );
+ compile_emit( c, INSTR_RETURN, 1 );
+ result = compile_to_function( c );
+ compiler_free( c );
+ result->file = object_copy( file );
+ result->line = line;
+ return (FUNCTION *)result;
+}
+
+static void argument_list_print( struct arg_list * args, int num_args );
+
+
+/* Define delimiters for type check elements in argument lists (and return type
+ * specifications, eventually).
+ */
+# define TYPE_OPEN_DELIM '['
+# define TYPE_CLOSE_DELIM ']'
+
+/*
+ * is_type_name() - true iff the given string represents a type check
+ * specification.
+ */
+
+int is_type_name( char const * s )
+{
+ return s[ 0 ] == TYPE_OPEN_DELIM && s[ strlen( s ) - 1 ] ==
+ TYPE_CLOSE_DELIM;
+}
+
+static void argument_error( char const * message, FUNCTION * procedure,
+ FRAME * frame, OBJECT * arg )
+{
+ extern void print_source_line( FRAME * );
+ LOL * actual = frame->args;
+ backtrace_line( frame->prev );
+ out_printf( "*** argument error\n* rule %s ( ", frame->rulename );
+ argument_list_print( procedure->formal_arguments,
+ procedure->num_formal_arguments );
+ out_printf( " )\n* called with: ( " );
+ lol_print( actual );
+ out_printf( " )\n* %s %s\n", message, arg ? object_str ( arg ) : "" );
+ function_location( procedure, &frame->file, &frame->line );
+ print_source_line( frame );
+ out_printf( "see definition of rule '%s' being called\n", frame->rulename );
+ backtrace( frame->prev );
+ exit( EXITBAD );
+}
+
+static void type_check_range( OBJECT * type_name, LISTITER iter, LISTITER end,
+ FRAME * caller, FUNCTION * called, OBJECT * arg_name )
+{
+ static module_t * typecheck = 0;
+
+ /* If nothing to check, bail now. */
+ if ( iter == end || !type_name )
+ return;
+
+ if ( !typecheck )
+ typecheck = bindmodule( constant_typecheck );
+
+ /* If the checking rule cannot be found, also bail. */
+ if ( !typecheck->rules || !hash_find( typecheck->rules, type_name ) )
+ return;
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ LIST * error;
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ frame->module = typecheck;
+ frame->prev = caller;
+ frame->prev_user = caller->module->user_module
+ ? caller
+ : caller->prev_user;
+
+ /* Prepare the argument list */
+ lol_add( frame->args, list_new( object_copy( list_item( iter ) ) ) );
+ error = evaluate_rule( bindrule( type_name, frame->module ), type_name, frame );
+
+ if ( !list_empty( error ) )
+ argument_error( object_str( list_front( error ) ), called, caller,
+ arg_name );
+
+ frame_free( frame );
+ }
+}
+
+static void type_check( OBJECT * type_name, LIST * values, FRAME * caller,
+ FUNCTION * called, OBJECT * arg_name )
+{
+ type_check_range( type_name, list_begin( values ), list_end( values ),
+ caller, called, arg_name );
+}
+
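+/* Semantics of the formal argument flags, as used by argument_list_check()
+ * and argument_list_push() below (see argument_compiler_add() for how the
+ * `?', `+' and `*' modifiers map onto them):
+ *
+ * ARG_ONE      - exactly one actual element
+ * ARG_OPTIONAL - zero or one actual element (`?')
+ * ARG_PLUS     - one or more elements, consumes the rest of the list (`+')
+ * ARG_STAR     - zero or more elements, consumes the rest of the list (`*')
+ * ARG_VARIADIC - the argument name itself is `*': no further checking
+ */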
+void argument_list_check( struct arg_list * formal, int formal_count,
+ FUNCTION * function, FRAME * frame )
+{
+ LOL * all_actual = frame->args;
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ LISTITER actual_iter = list_begin( actual );
+ LISTITER const actual_end = list_end( actual );
+ int j;
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+
+ switch ( formal_arg->flags )
+ {
+ case ARG_ONE:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ type_check_range( formal_arg->type_name, actual_iter,
+ list_next( actual_iter ), frame, function,
+ formal_arg->arg_name );
+ actual_iter = list_next( actual_iter );
+ break;
+ case ARG_OPTIONAL:
+ if ( actual_iter != actual_end )
+ {
+ type_check_range( formal_arg->type_name, actual_iter,
+ list_next( actual_iter ), frame, function,
+ formal_arg->arg_name );
+ actual_iter = list_next( actual_iter );
+ }
+ break;
+ case ARG_PLUS:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ /* fallthrough */
+ case ARG_STAR:
+ type_check_range( formal_arg->type_name, actual_iter,
+ actual_end, frame, function, formal_arg->arg_name );
+ actual_iter = actual_end;
+ break;
+ case ARG_VARIADIC:
+ return;
+ }
+ }
+
+ if ( actual_iter != actual_end )
+ argument_error( "extra argument", function, frame, list_item(
+ actual_iter ) );
+ }
+
+ for ( ; i < all_actual->count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ if ( !list_empty( actual ) )
+ argument_error( "extra argument", function, frame, list_front(
+ actual ) );
+ }
+}
+
+void argument_list_push( struct arg_list * formal, int formal_count,
+ FUNCTION * function, FRAME * frame, STACK * s )
+{
+ LOL * all_actual = frame->args;
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ LISTITER actual_iter = list_begin( actual );
+ LISTITER const actual_end = list_end( actual );
+ int j;
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+ LIST * value;
+
+ switch ( formal_arg->flags )
+ {
+ case ARG_ONE:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ value = list_new( object_copy( list_item( actual_iter ) ) );
+ actual_iter = list_next( actual_iter );
+ break;
+ case ARG_OPTIONAL:
+ if ( actual_iter == actual_end )
+ value = L0;
+ else
+ {
+ value = list_new( object_copy( list_item( actual_iter ) ) );
+ actual_iter = list_next( actual_iter );
+ }
+ break;
+ case ARG_PLUS:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ /* fallthrough */
+ case ARG_STAR:
+ value = list_copy_range( actual, actual_iter, actual_end );
+ actual_iter = actual_end;
+ break;
+ case ARG_VARIADIC:
+ return;
+ }
+
+ type_check( formal_arg->type_name, value, frame, function,
+ formal_arg->arg_name );
+
+ if ( formal_arg->index != -1 )
+ {
+ LIST * * const old = &frame->module->fixed_variables[
+ formal_arg->index ];
+ stack_push( s, *old );
+ *old = value;
+ }
+ else
+ stack_push( s, var_swap( frame->module, formal_arg->arg_name,
+ value ) );
+ }
+
+ if ( actual_iter != actual_end )
+ argument_error( "extra argument", function, frame, list_item(
+ actual_iter ) );
+ }
+
+ for ( ; i < all_actual->count; ++i )
+ {
+ LIST * const actual = lol_get( all_actual, i );
+ if ( !list_empty( actual ) )
+ argument_error( "extra argument", function, frame, list_front(
+ actual ) );
+ }
+}
+
+void argument_list_pop( struct arg_list * formal, int formal_count,
+ FRAME * frame, STACK * s )
+{
+ int i;
+ for ( i = formal_count - 1; i >= 0; --i )
+ {
+ int j;
+ for ( j = formal[ i ].size - 1; j >= 0 ; --j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+
+ if ( formal_arg->flags == ARG_VARIADIC )
+ continue;
+ if ( formal_arg->index != -1 )
+ {
+ LIST * const old = stack_pop( s );
+ LIST * * const pos = &frame->module->fixed_variables[
+ formal_arg->index ];
+ list_free( *pos );
+ *pos = old;
+ }
+ else
+ var_set( frame->module, formal_arg->arg_name, stack_pop( s ),
+ VAR_SET );
+ }
+ }
+}
+
+
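+/* Small state machine used to turn a formal argument list parse tree into an
+ * array of struct argument.  Each argument is an optional type element (a
+ * single token of the form `[typename]'), followed by the argument name,
+ * optionally followed by a `?', `+' or `*' modifier.
+ */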
+struct argument_compiler
+{
+ struct dynamic_array args[ 1 ];
+ struct argument arg;
+ int state;
+#define ARGUMENT_COMPILER_START 0
+#define ARGUMENT_COMPILER_FOUND_TYPE 1
+#define ARGUMENT_COMPILER_FOUND_OBJECT 2
+#define ARGUMENT_COMPILER_DONE 3
+};
+
+
+static void argument_compiler_init( struct argument_compiler * c )
+{
+ dynamic_array_init( c->args );
+ c->state = ARGUMENT_COMPILER_START;
+}
+
+static void argument_compiler_free( struct argument_compiler * c )
+{
+ dynamic_array_free( c->args );
+}
+
+static void argument_compiler_add( struct argument_compiler * c, OBJECT * arg,
+ OBJECT * file, int line )
+{
+ switch ( c->state )
+ {
+ case ARGUMENT_COMPILER_FOUND_OBJECT:
+
+ if ( object_equal( arg, constant_question_mark ) )
+ {
+ c->arg.flags = ARG_OPTIONAL;
+ }
+ else if ( object_equal( arg, constant_plus ) )
+ {
+ c->arg.flags = ARG_PLUS;
+ }
+ else if ( object_equal( arg, constant_star ) )
+ {
+ c->arg.flags = ARG_STAR;
+ }
+
+ dynamic_array_push( c->args, c->arg );
+ c->state = ARGUMENT_COMPILER_START;
+
+ if ( c->arg.flags != ARG_ONE )
+ break;
+ /* fall-through */
+
+ case ARGUMENT_COMPILER_START:
+
+ c->arg.type_name = 0;
+ c->arg.index = -1;
+ c->arg.flags = ARG_ONE;
+
+ if ( is_type_name( object_str( arg ) ) )
+ {
+ c->arg.type_name = object_copy( arg );
+ c->state = ARGUMENT_COMPILER_FOUND_TYPE;
+ break;
+ }
+ /* fall-through */
+
+ case ARGUMENT_COMPILER_FOUND_TYPE:
+
+ if ( is_type_name( object_str( arg ) ) )
+ {
+ err_printf( "%s:%d: missing argument name before type name: %s\n",
+ object_str( file ), line, object_str( arg ) );
+ exit( EXITBAD );
+ }
+
+ c->arg.arg_name = object_copy( arg );
+ if ( object_equal( arg, constant_star ) )
+ {
+ c->arg.flags = ARG_VARIADIC;
+ dynamic_array_push( c->args, c->arg );
+ c->state = ARGUMENT_COMPILER_DONE;
+ }
+ else
+ {
+ c->state = ARGUMENT_COMPILER_FOUND_OBJECT;
+ }
+ break;
+
+ case ARGUMENT_COMPILER_DONE:
+ break;
+ }
+}
+
+static void argument_compiler_recurse( struct argument_compiler * c,
+ PARSE * parse )
+{
+ if ( parse->type == PARSE_APPEND )
+ {
+ argument_compiler_recurse( c, parse->left );
+ argument_compiler_recurse( c, parse->right );
+ }
+ else if ( parse->type != PARSE_NULL )
+ {
+ assert( parse->type == PARSE_LIST );
+ argument_compiler_add( c, parse->string, parse->file, parse->line );
+ }
+}
+
+static struct arg_list arg_compile_impl( struct argument_compiler * c,
+ OBJECT * file, int line )
+{
+ struct arg_list result;
+ switch ( c->state )
+ {
+ case ARGUMENT_COMPILER_START:
+ case ARGUMENT_COMPILER_DONE:
+ break;
+ case ARGUMENT_COMPILER_FOUND_TYPE:
+ err_printf( "%s:%d: missing argument name after type name: %s\n",
+ object_str( file ), line, object_str( c->arg.type_name ) );
+ exit( EXITBAD );
+ case ARGUMENT_COMPILER_FOUND_OBJECT:
+ dynamic_array_push( c->args, c->arg );
+ break;
+ }
+ result.size = c->args->size;
+ result.args = (struct argument*)BJAM_MALLOC( c->args->size * sizeof( struct argument ) );
+ if ( c->args->size != 0 )
+ memcpy( result.args, c->args->data,
+ c->args->size * sizeof( struct argument ) );
+ return result;
+}
+
+static struct arg_list arg_compile( PARSE * parse )
+{
+ struct argument_compiler c[ 1 ];
+ struct arg_list result;
+ argument_compiler_init( c );
+ argument_compiler_recurse( c, parse );
+ result = arg_compile_impl( c, parse->file, parse->line );
+ argument_compiler_free( c );
+ return result;
+}
+
+struct argument_list_compiler
+{
+ struct dynamic_array args[ 1 ];
+};
+
+static void argument_list_compiler_init( struct argument_list_compiler * c )
+{
+ dynamic_array_init( c->args );
+}
+
+static void argument_list_compiler_free( struct argument_list_compiler * c )
+{
+ dynamic_array_free( c->args );
+}
+
+static void argument_list_compiler_add( struct argument_list_compiler * c,
+ PARSE * parse )
+{
+ struct arg_list args = arg_compile( parse );
+ dynamic_array_push( c->args, args );
+}
+
+static void argument_list_compiler_recurse( struct argument_list_compiler * c,
+ PARSE * parse )
+{
+ if ( parse )
+ {
+ argument_list_compiler_add( c, parse->right );
+ argument_list_compiler_recurse( c, parse->left );
+ }
+}
+
+static struct arg_list * arg_list_compile( PARSE * parse, int * num_arguments )
+{
+ if ( parse )
+ {
+ struct argument_list_compiler c[ 1 ];
+ struct arg_list * result;
+ argument_list_compiler_init( c );
+ argument_list_compiler_recurse( c, parse );
+ *num_arguments = c->args->size;
+ result = (struct arg_list*)BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
+ memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list )
+ );
+ argument_list_compiler_free( c );
+ return result;
+ }
+ *num_arguments = 0;
+ return 0;
+}
+
+static struct arg_list * arg_list_compile_builtin( char const * * args,
+ int * num_arguments )
+{
+ if ( args )
+ {
+ struct argument_list_compiler c[ 1 ];
+ struct arg_list * result;
+ argument_list_compiler_init( c );
+ while ( *args )
+ {
+ struct argument_compiler arg_comp[ 1 ];
+ struct arg_list arg;
+ argument_compiler_init( arg_comp );
+ for ( ; *args; ++args )
+ {
+ OBJECT * token;
+ if ( strcmp( *args, ":" ) == 0 )
+ {
+ ++args;
+ break;
+ }
+ token = object_new( *args );
+ argument_compiler_add( arg_comp, token, constant_builtin, -1 );
+ object_free( token );
+ }
+ arg = arg_compile_impl( arg_comp, constant_builtin, -1 );
+ dynamic_array_push( c->args, arg );
+ argument_compiler_free( arg_comp );
+ }
+ *num_arguments = c->args->size;
+ result = (struct arg_list *)BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
+ if ( c->args->size != 0 )
+ memcpy( result, c->args->data,
+ c->args->size * sizeof( struct arg_list ) );
+ argument_list_compiler_free( c );
+ return result;
+ }
+ *num_arguments = 0;
+ return 0;
+}
+
+static void argument_list_print( struct arg_list * args, int num_args )
+{
+ if ( args )
+ {
+ int i;
+ for ( i = 0; i < num_args; ++i )
+ {
+ int j;
+ if ( i ) out_printf( " : " );
+ for ( j = 0; j < args[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &args[ i ].args[ j ];
+ if ( j ) out_printf( " " );
+ if ( formal_arg->type_name )
+ out_printf( "%s ", object_str( formal_arg->type_name ) );
+ out_printf( "%s", object_str( formal_arg->arg_name ) );
+ switch ( formal_arg->flags )
+ {
+ case ARG_OPTIONAL: out_printf( " ?" ); break;
+ case ARG_PLUS: out_printf( " +" ); break;
+ case ARG_STAR: out_printf( " *" ); break;
+ }
+ }
+ }
+ }
+}
+
+
+struct arg_list * argument_list_bind_variables( struct arg_list * formal,
+ int formal_count, module_t * module, int * counter )
+{
+ if ( formal )
+ {
+ struct arg_list * result = (struct arg_list *)BJAM_MALLOC( sizeof(
+ struct arg_list ) * formal_count );
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ int j;
+ struct argument * args = (struct argument *)BJAM_MALLOC( sizeof(
+ struct argument ) * formal[ i ].size );
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ args[ j ] = formal[ i ].args[ j ];
+ if ( args[ j ].type_name )
+ args[ j ].type_name = object_copy( args[ j ].type_name );
+ args[ j ].arg_name = object_copy( args[ j ].arg_name );
+ if ( args[ j ].flags != ARG_VARIADIC )
+ args[ j ].index = module_add_fixed_var( module,
+ args[ j ].arg_name, counter );
+ }
+ result[ i ].args = args;
+ result[ i ].size = formal[ i ].size;
+ }
+
+ return result;
+ }
+ return 0;
+}
+
+
+void argument_list_free( struct arg_list * args, int args_count )
+{
+ int i;
+ for ( i = 0; i < args_count; ++i )
+ {
+ int j;
+ for ( j = 0; j < args[ i ].size; ++j )
+ {
+ if ( args[ i ].args[ j ].type_name )
+ object_free( args[ i ].args[ j ].type_name );
+ object_free( args[ i ].args[ j ].arg_name );
+ }
+ BJAM_FREE( args[ i ].args );
+ }
+ BJAM_FREE( args );
+}
+
+
+FUNCTION * function_unbind_variables( FUNCTION * f )
+{
+ if ( f->type == FUNCTION_JAM )
+ {
+ JAM_FUNCTION * const func = (JAM_FUNCTION *)f;
+ return func->generic ? func->generic : f;
+ }
+#ifdef HAVE_PYTHON
+ if ( f->type == FUNCTION_PYTHON )
+ return f;
+#endif
+ assert( f->type == FUNCTION_BUILTIN );
+ return f;
+}
+
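+/* For a Jam function, function_bind_variables() returns a copy whose bytecode
+ * accesses known variables through fixed module slots: PUSH_VAR, PUSH_LOCAL,
+ * POP_LOCAL, SET, APPEND and DEFAULT are rewritten to their *_FIXED
+ * counterparts.  Code inside module/class bodies is skipped, as are the
+ * special TMPDIR/TMPNAME/TMPFILE/STDOUT/STDERR variables, and the extra
+ * instruction slot used by rule calls is stepped over.
+ */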
+FUNCTION * function_bind_variables( FUNCTION * f, module_t * module,
+ int * counter )
+{
+ if ( f->type == FUNCTION_BUILTIN )
+ return f;
+#ifdef HAVE_PYTHON
+ if ( f->type == FUNCTION_PYTHON )
+ return f;
+#endif
+ {
+ JAM_FUNCTION * func = (JAM_FUNCTION *)f;
+ JAM_FUNCTION * new_func = (JAM_FUNCTION *)BJAM_MALLOC( sizeof( JAM_FUNCTION ) );
+ instruction * code;
+ int i;
+ assert( f->type == FUNCTION_JAM );
+ memcpy( new_func, func, sizeof( JAM_FUNCTION ) );
+ new_func->base.reference_count = 1;
+ new_func->base.formal_arguments = argument_list_bind_variables(
+ f->formal_arguments, f->num_formal_arguments, module, counter );
+ new_func->code = (instruction *)BJAM_MALLOC( func->code_size * sizeof( instruction ) );
+ memcpy( new_func->code, func->code, func->code_size * sizeof(
+ instruction ) );
+ new_func->generic = (FUNCTION *)func;
+ func = new_func;
+ for ( i = 0; ; ++i )
+ {
+ OBJECT * key;
+ int op_code;
+ code = func->code + i;
+ switch ( code->op_code )
+ {
+ case INSTR_PUSH_VAR: op_code = INSTR_PUSH_VAR_FIXED; break;
+ case INSTR_PUSH_LOCAL: op_code = INSTR_PUSH_LOCAL_FIXED; break;
+ case INSTR_POP_LOCAL: op_code = INSTR_POP_LOCAL_FIXED; break;
+ case INSTR_SET: op_code = INSTR_SET_FIXED; break;
+ case INSTR_APPEND: op_code = INSTR_APPEND_FIXED; break;
+ case INSTR_DEFAULT: op_code = INSTR_DEFAULT_FIXED; break;
+ case INSTR_RETURN:
+ if( code->arg == 1 ) return (FUNCTION *)new_func;
+ else continue;
+ case INSTR_CALL_MEMBER_RULE:
+ case INSTR_CALL_RULE: ++i; continue;
+ case INSTR_PUSH_MODULE:
+ {
+ int depth = 1;
+ ++i;
+ while ( depth > 0 )
+ {
+ code = func->code + i;
+ switch ( code->op_code )
+ {
+ case INSTR_PUSH_MODULE:
+ case INSTR_CLASS:
+ ++depth;
+ break;
+ case INSTR_POP_MODULE:
+ --depth;
+ break;
+ case INSTR_CALL_RULE:
+ ++i;
+ break;
+ }
+ ++i;
+ }
+ --i;
+ }
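+ /* Falls through: nothing inside the module body is rewritten. */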
+ default: continue;
+ }
+ key = func->constants[ code->arg ];
+ if ( !( object_equal( key, constant_TMPDIR ) ||
+ object_equal( key, constant_TMPNAME ) ||
+ object_equal( key, constant_TMPFILE ) ||
+ object_equal( key, constant_STDOUT ) ||
+ object_equal( key, constant_STDERR ) ) )
+ {
+ code->op_code = op_code;
+ code->arg = module_add_fixed_var( module, key, counter );
+ }
+ }
+ }
+}
+
+LIST * function_get_variables( FUNCTION * f )
+{
+ if ( f->type == FUNCTION_BUILTIN )
+ return L0;
+#ifdef HAVE_PYTHON
+ if ( f->type == FUNCTION_PYTHON )
+ return L0;
+#endif
+ {
+ JAM_FUNCTION * func = (JAM_FUNCTION *)f;
+ LIST * result = L0;
+ instruction * code;
+ int i;
+ assert( f->type == FUNCTION_JAM );
+ if ( func->generic ) func = ( JAM_FUNCTION * )func->generic;
+
+ for ( i = 0; ; ++i )
+ {
+ OBJECT * var;
+ code = func->code + i;
+ switch ( code->op_code )
+ {
+ case INSTR_PUSH_LOCAL: break;
+ case INSTR_RETURN: return result;
+ case INSTR_CALL_MEMBER_RULE:
+ case INSTR_CALL_RULE: ++i; continue;
+ case INSTR_PUSH_MODULE:
+ {
+ int depth = 1;
+ ++i;
+ while ( depth > 0 )
+ {
+ code = func->code + i;
+ switch ( code->op_code )
+ {
+ case INSTR_PUSH_MODULE:
+ case INSTR_CLASS:
+ ++depth;
+ break;
+ case INSTR_POP_MODULE:
+ --depth;
+ break;
+ case INSTR_CALL_RULE:
+ ++i;
+ break;
+ }
+ ++i;
+ }
+ --i;
+ }
+ default: continue;
+ }
+ var = func->constants[ code->arg ];
+ if ( !( object_equal( var, constant_TMPDIR ) ||
+ object_equal( var, constant_TMPNAME ) ||
+ object_equal( var, constant_TMPFILE ) ||
+ object_equal( var, constant_STDOUT ) ||
+ object_equal( var, constant_STDERR ) ) )
+ {
+ result = list_push_back( result, var );
+ }
+ }
+ }
+}
+
+void function_refer( FUNCTION * func )
+{
+ ++func->reference_count;
+}
+
+void function_free( FUNCTION * function_ )
+{
+ int i;
+
+ if ( --function_->reference_count != 0 )
+ return;
+
+ if ( function_->formal_arguments )
+ argument_list_free( function_->formal_arguments,
+ function_->num_formal_arguments );
+
+ if ( function_->type == FUNCTION_JAM )
+ {
+ JAM_FUNCTION * func = (JAM_FUNCTION *)function_;
+
+ BJAM_FREE( func->code );
+
+ if ( func->generic )
+ function_free( func->generic );
+ else
+ {
+ if ( function_->rulename ) object_free( function_->rulename );
+
+ for ( i = 0; i < func->num_constants; ++i )
+ object_free( func->constants[ i ] );
+ BJAM_FREE( func->constants );
+
+ for ( i = 0; i < func->num_subfunctions; ++i )
+ {
+ object_free( func->functions[ i ].name );
+ function_free( func->functions[ i ].code );
+ }
+ BJAM_FREE( func->functions );
+
+ for ( i = 0; i < func->num_subactions; ++i )
+ {
+ object_free( func->actions[ i ].name );
+ function_free( func->actions[ i ].command );
+ }
+ BJAM_FREE( func->actions );
+
+ object_free( func->file );
+ }
+ }
+#ifdef HAVE_PYTHON
+ else if ( function_->type == FUNCTION_PYTHON )
+ {
+ PYTHON_FUNCTION * func = (PYTHON_FUNCTION *)function_;
+ Py_DECREF( func->python_function );
+ if ( function_->rulename ) object_free( function_->rulename );
+ }
+#endif
+ else
+ {
+ assert( function_->type == FUNCTION_BUILTIN );
+ if ( function_->rulename ) object_free( function_->rulename );
+ }
+
+ BJAM_FREE( function_ );
+}
+
+
+/* Alignment check for stack */
+
+struct align_var_edits
+{
+ char ch;
+ VAR_EDITS e;
+};
+
+struct align_expansion_item
+{
+ char ch;
+ expansion_item e;
+};
+
+static_assert(
+ sizeof(struct align_var_edits) <= sizeof(VAR_EDITS) + sizeof(void *),
+ "sizeof(struct align_var_edits) <= sizeof(VAR_EDITS) + sizeof(void *)" );
+static_assert(
+ sizeof(struct align_expansion_item) <= sizeof(expansion_item) + sizeof(void *),
+ "sizeof(struct align_expansion_item) <= sizeof(expansion_item) + sizeof(void *)" );
+
+static_assert( sizeof(LIST *) <= sizeof(void *), "sizeof(LIST *) <= sizeof(void *)" );
+static_assert( sizeof(char *) <= sizeof(void *), "sizeof(char *) <= sizeof(void *)" );
+
+void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s,
+ string * out )
+{
+ *(string * *)stack_allocate( s, sizeof( string * ) ) = out;
+ list_free( function_run( function, frame, s ) );
+ stack_deallocate( s, sizeof( string * ) );
+}
+
+/*
+ * WARNING: The instruction set is tuned for Jam and is not really generic. Be
+ * especially careful about stack push/pop.
+ */
+
+LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s )
+{
+ JAM_FUNCTION * function;
+ instruction * code;
+ LIST * l;
+ LIST * r;
+ LIST * result = L0;
+ void * saved_stack = s->data;
+
+ PROFILE_ENTER_LOCAL(function_run);
+
+#ifdef JAM_DEBUGGER
+ frame->function = function_;
+#endif
+
+ if ( function_->type == FUNCTION_BUILTIN )
+ {
+ PROFILE_ENTER_LOCAL(function_run_FUNCTION_BUILTIN);
+ BUILTIN_FUNCTION const * const f = (BUILTIN_FUNCTION *)function_;
+ if ( function_->formal_arguments )
+ argument_list_check( function_->formal_arguments,
+ function_->num_formal_arguments, function_, frame );
+
+ debug_on_enter_function( frame, f->base.rulename, NULL, -1 );
+ result = f->func( frame, f->flags );
+ debug_on_exit_function( f->base.rulename );
+ PROFILE_EXIT_LOCAL(function_run_FUNCTION_BUILTIN);
+ PROFILE_EXIT_LOCAL(function_run);
+ return result;
+ }
+
+#ifdef HAVE_PYTHON
+ else if ( function_->type == FUNCTION_PYTHON )
+ {
+ PROFILE_ENTER_LOCAL(function_run_FUNCTION_PYTHON);
+ PYTHON_FUNCTION * f = (PYTHON_FUNCTION *)function_;
+ debug_on_enter_function( frame, f->base.rulename, NULL, -1 );
+ result = call_python_function( f, frame );
+ debug_on_exit_function( f->base.rulename );
+ PROFILE_EXIT_LOCAL(function_run_FUNCTION_PYTHON);
+ PROFILE_EXIT_LOCAL(function_run);
+ return result;
+ }
+#endif
+
+ assert( function_->type == FUNCTION_JAM );
+
+ if ( function_->formal_arguments )
+ argument_list_push( function_->formal_arguments,
+ function_->num_formal_arguments, function_, frame, s );
+
+ function = (JAM_FUNCTION *)function_;
+ debug_on_enter_function( frame, function->base.rulename, function->file, function->line );
+ code = function->code;
+ for ( ; ; )
+ {
+ switch ( code->op_code )
+ {
+
+ /*
+ * Basic stack manipulation
+ */
+
+ case INSTR_PUSH_EMPTY:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_EMPTY);
+ stack_push( s, L0 );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_EMPTY);
+ break;
+ }
+
+ case INSTR_PUSH_CONSTANT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_CONSTANT);
+ OBJECT * value = function_get_constant( function, code->arg );
+ stack_push( s, list_new( object_copy( value ) ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_CONSTANT);
+ break;
+ }
+
+ case INSTR_PUSH_ARG:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_ARG);
+ stack_push( s, frame_get_local( frame, code->arg ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_ARG);
+ break;
+ }
+
+ case INSTR_PUSH_VAR:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_VAR);
+ stack_push( s, function_get_variable( function, frame, code->arg ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_VAR);
+ break;
+ }
+
+ case INSTR_PUSH_VAR_FIXED:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_VAR_FIXED);
+ stack_push( s, list_copy( frame->module->fixed_variables[ code->arg
+ ] ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_VAR_FIXED);
+ break;
+ }
+
+ case INSTR_PUSH_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_GROUP);
+ LIST * value = L0;
+ LISTITER iter;
+ LISTITER end;
+ l = stack_pop( s );
+ for ( iter = list_begin( l ), end = list_end( l ); iter != end;
+ iter = list_next( iter ) )
+ value = list_append( value, function_get_named_variable(
+ function, frame, list_item( iter ) ) );
+ list_free( l );
+ stack_push( s, value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_GROUP);
+ break;
+ }
+
+ case INSTR_PUSH_APPEND:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_APPEND);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ stack_push( s, list_append( l, r ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_APPEND);
+ break;
+ }
+
+ case INSTR_SWAP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_SWAP);
+ l = stack_top( s );
+ stack_set( s, 0, stack_at( s, code->arg ) );
+ stack_set( s, code->arg, l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_SWAP);
+ break;
+ }
+
+ case INSTR_POP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_POP);
+ list_free( stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_POP);
+ break;
+ }
+
+ /*
+ * Branch instructions
+ */
+
+ case INSTR_JUMP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP);
+ code += code->arg;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP);
+ break;
+ }
+
+ case INSTR_JUMP_EMPTY:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_EMPTY);
+ l = stack_pop( s );
+ if ( !list_cmp( l, L0 ) ) code += code->arg;
+ list_free( l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_EMPTY);
+ break;
+ }
+
+ case INSTR_JUMP_NOT_EMPTY:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NOT_EMPTY);
+ l = stack_pop( s );
+ if ( list_cmp( l, L0 ) ) code += code->arg;
+ list_free( l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NOT_EMPTY);
+ break;
+ }
+
+ case INSTR_JUMP_LT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_LT);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) < 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_LT);
+ break;
+ }
+
+ case INSTR_JUMP_LE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_LE);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) <= 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_LE);
+ break;
+ }
+
+ case INSTR_JUMP_GT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_GT);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) > 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_GT);
+ break;
+ }
+
+ case INSTR_JUMP_GE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_GE);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) >= 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_GE);
+ break;
+ }
+
+ case INSTR_JUMP_EQ:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_EQ);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) == 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_EQ);
+ break;
+ }
+
+ case INSTR_JUMP_NE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NE);
+ r = stack_pop(s);
+ l = stack_pop(s);
+ if ( list_cmp(l, r) != 0 ) code += code->arg;
+ list_free(l);
+ list_free(r);
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NE);
+ break;
+ }
+
+ case INSTR_JUMP_IN:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_IN);
+ r = stack_pop(s);
+ l = stack_pop(s);
+ if ( list_is_sublist( l, r ) ) code += code->arg;
+ list_free(l);
+ list_free(r);
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_IN);
+ break;
+ }
+
+ case INSTR_JUMP_NOT_IN:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NOT_IN);
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( !list_is_sublist( l, r ) ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NOT_IN);
+ break;
+ }
+
+ /*
+ * For
+ */
+
+ case INSTR_FOR_INIT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_FOR_INIT);
+ l = stack_top( s );
+ *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) =
+ list_begin( l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_FOR_INIT);
+ break;
+ }
+
+ case INSTR_FOR_LOOP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_FOR_LOOP);
+ LISTITER iter = *(LISTITER *)stack_get( s );
+ stack_deallocate( s, sizeof( LISTITER ) );
+ l = stack_top( s );
+ if ( iter == list_end( l ) )
+ {
+ list_free( stack_pop( s ) );
+ code += code->arg;
+ }
+ else
+ {
+ r = list_new( object_copy( list_item( iter ) ) );
+ iter = list_next( iter );
+ *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) = iter;
+ stack_push( s, r );
+ }
+ PROFILE_EXIT_LOCAL(function_run_INSTR_FOR_LOOP);
+ break;
+ }
+
+ case INSTR_FOR_POP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_FOR_POP);
+ stack_deallocate( s, sizeof( LISTITER ) );
+ list_free( stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_FOR_POP);
+ break;
+ }
+
+ /*
+ * Switch
+ */
+
+ case INSTR_JUMP_NOT_GLOB:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NOT_GLOB);
+ char const * pattern;
+ char const * match;
+ l = stack_pop( s );
+ r = stack_top( s );
+ pattern = list_empty( l ) ? "" : object_str( list_front( l ) );
+ match = list_empty( r ) ? "" : object_str( list_front( r ) );
+ if ( glob( pattern, match ) )
+ code += code->arg;
+ else
+ list_free( stack_pop( s ) );
+ list_free( l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NOT_GLOB);
+ break;
+ }
+
+ /*
+ * Return
+ */
+
+ case INSTR_SET_RESULT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_SET_RESULT);
+ list_free( result );
+ if ( !code->arg )
+ result = stack_pop( s );
+ else
+ result = list_copy( stack_top( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_SET_RESULT);
+ break;
+ }
+
+ case INSTR_PUSH_RESULT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_RESULT);
+ stack_push( s, result );
+ result = L0;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_RESULT);
+ break;
+ }
+
+ case INSTR_RETURN:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_RETURN);
+ if ( function_->formal_arguments )
+ argument_list_pop( function_->formal_arguments,
+ function_->num_formal_arguments, frame, s );
+#ifndef NDEBUG
+ if ( !( saved_stack == s->data ) )
+ {
+ frame->file = function->file;
+ frame->line = function->line;
+ backtrace_line( frame );
+ out_printf( "error: stack check failed.\n" );
+ backtrace( frame );
+ assert( saved_stack == s->data );
+ }
+#endif
+ assert( saved_stack == s->data );
+ debug_on_exit_function( function->base.rulename );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_RETURN);
+ PROFILE_EXIT_LOCAL(function_run);
+ return result;
+ }
+
+ /*
+ * Local variables
+ */
+
+ case INSTR_PUSH_LOCAL:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_LOCAL);
+ LIST * value = stack_pop( s );
+ stack_push( s, function_swap_variable( function, frame, code->arg,
+ value ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_LOCAL);
+ break;
+ }
+
+ case INSTR_POP_LOCAL:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_POP_LOCAL);
+ function_set_variable( function, frame, code->arg, stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_POP_LOCAL);
+ break;
+ }
+
+ case INSTR_PUSH_LOCAL_FIXED:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_LOCAL_FIXED);
+ LIST * value = stack_pop( s );
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ stack_push( s, *ptr );
+ *ptr = value;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_LOCAL_FIXED);
+ break;
+ }
+
+ case INSTR_POP_LOCAL_FIXED:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_POP_LOCAL_FIXED);
+ LIST * value = stack_pop( s );
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ list_free( *ptr );
+ *ptr = value;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_POP_LOCAL_FIXED);
+ break;
+ }
+
+ case INSTR_PUSH_LOCAL_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_LOCAL_GROUP);
+ LIST * const value = stack_pop( s );
+ LISTITER iter;
+ LISTITER end;
+ l = stack_pop( s );
+ for ( iter = list_begin( l ), end = list_end( l ); iter != end;
+ iter = list_next( iter ) )
+ stack_push( s, function_swap_named_variable( function, frame,
+ list_item( iter ), list_copy( value ) ) );
+ list_free( value );
+ stack_push( s, l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_LOCAL_GROUP);
+ break;
+ }
+
+ case INSTR_POP_LOCAL_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_POP_LOCAL_GROUP);
+ LISTITER iter;
+ LISTITER end;
+ r = stack_pop( s );
+ l = list_reverse( r );
+ list_free( r );
+ for ( iter = list_begin( l ), end = list_end( l ); iter != end;
+ iter = list_next( iter ) )
+ function_set_named_variable( function, frame, list_item( iter ),
+ stack_pop( s ) );
+ list_free( l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_POP_LOCAL_GROUP);
+ break;
+ }
+
+ /*
+ * on $(TARGET) variables
+ */
+
+ case INSTR_PUSH_ON:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_ON);
+ LIST * targets = stack_top( s );
+ if ( !list_empty( targets ) )
+ {
+ /* FIXME: push the state onto the stack instead of using
+ * pushsettings.
+ */
+ TARGET * t = bindtarget( list_front( targets ) );
+ pushsettings( frame->module, t->settings );
+ }
+ else
+ {
+ /* [ on $(TARGET) ... ] is ignored if $(TARGET) is empty. */
+ list_free( stack_pop( s ) );
+ stack_push( s, L0 );
+ code += code->arg;
+ }
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_ON);
+ break;
+ }
+
+ case INSTR_POP_ON:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_POP_ON);
+ LIST * result = stack_pop( s );
+ LIST * targets = stack_pop( s );
+ if ( !list_empty( targets ) )
+ {
+ TARGET * t = bindtarget( list_front( targets ) );
+ popsettings( frame->module, t->settings );
+ }
+ list_free( targets );
+ stack_push( s, result );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_POP_ON);
+ break;
+ }
+
+ case INSTR_SET_ON:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_SET_ON);
+ LIST * targets = stack_pop( s );
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * t = bindtarget( list_item( iter ) );
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter
+ ) )
+ t->settings = addsettings( t->settings, VAR_SET, list_item(
+ vars_iter ), list_copy( value ) );
+ }
+ list_free( vars );
+ list_free( targets );
+ stack_push( s, value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_SET_ON);
+ break;
+ }
+
+ case INSTR_APPEND_ON:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_ON);
+ LIST * targets = stack_pop( s );
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter
+ ) )
+ t->settings = addsettings( t->settings, VAR_APPEND,
+ list_item( vars_iter ), list_copy( value ) );
+ }
+ list_free( vars );
+ list_free( targets );
+ stack_push( s, value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_ON);
+ break;
+ }
+
+ case INSTR_DEFAULT_ON:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT_ON);
+ LIST * targets = stack_pop( s );
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * t = bindtarget( list_item( iter ) );
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter
+ ) )
+ t->settings = addsettings( t->settings, VAR_DEFAULT,
+ list_item( vars_iter ), list_copy( value ) );
+ }
+ list_free( vars );
+ list_free( targets );
+ stack_push( s, value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT_ON);
+ break;
+ }
+
+ /* [ on $(target) return $(variable) ] */
+ case INSTR_GET_ON:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_GET_ON);
+ LIST * targets = stack_pop( s );
+ LIST * result = L0;
+ if ( !list_empty( targets ) )
+ {
+ OBJECT * varname = function->constants[ code->arg ];
+ TARGET * t = bindtarget( list_front( targets ) );
+ SETTINGS * s = t->settings;
+ int found = 0;
+ for ( ; s != 0; s = s->next )
+ {
+ if ( object_equal( s->symbol, varname ) )
+ {
+ result = s->value;
+ found = 1;
+ break;
+ }
+ }
+ if ( !found )
+ {
+ result = var_get( frame->module, varname ) ;
+ }
+ }
+ list_free( targets );
+ stack_push( s, list_copy( result ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_GET_ON);
+ break;
+ }
+
+ /*
+ * Variable setting
+ */
+
+ case INSTR_SET:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_SET);
+ function_set_variable( function, frame, code->arg,
+ stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_SET);
+ break;
+ }
+
+ case INSTR_APPEND:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND);
+ function_append_variable( function, frame, code->arg,
+ stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND);
+ break;
+ }
+
+ case INSTR_DEFAULT:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT);
+ function_default_variable( function, frame, code->arg,
+ stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT);
+ break;
+ }
+
+ case INSTR_SET_FIXED:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_SET_FIXED);
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ list_free( *ptr );
+ *ptr = stack_pop( s );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_SET_FIXED);
+ break;
+ }
+
+ case INSTR_APPEND_FIXED:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_FIXED);
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ *ptr = list_append( *ptr, stack_pop( s ) );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_FIXED);
+ break;
+ }
+
+ case INSTR_DEFAULT_FIXED:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT_FIXED);
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ LIST * value = stack_pop( s );
+ assert( code->arg < frame->module->num_fixed_variables );
+ if ( list_empty( *ptr ) )
+ *ptr = value;
+ else
+ list_free( value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT_FIXED);
+ break;
+ }
+
+ case INSTR_SET_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_SET_GROUP);
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ function_set_named_variable( function, frame, list_item( iter ),
+ list_copy( value ) );
+ list_free( vars );
+ list_free( value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_SET_GROUP);
+ break;
+ }
+
+ case INSTR_APPEND_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_GROUP);
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ function_append_named_variable( function, frame, list_item( iter
+ ), list_copy( value ) );
+ list_free( vars );
+ list_free( value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_GROUP);
+ break;
+ }
+
+ case INSTR_DEFAULT_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT_GROUP);
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ function_default_named_variable( function, frame, list_item(
+ iter ), list_copy( value ) );
+ list_free( vars );
+ list_free( value );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT_GROUP);
+ break;
+ }
+
+ /*
+ * Rules
+ */
+
+ case INSTR_CALL_RULE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_CALL_RULE);
+ char const * unexpanded = object_str( function_get_constant(
+ function, code[ 1 ].op_code ) );
+ LIST * result = function_call_rule( function, frame, s, code->arg,
+ unexpanded, function->file, code[ 1 ].arg );
+ stack_push( s, result );
+ ++code;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_CALL_RULE);
+ break;
+ }
+
+ case INSTR_CALL_MEMBER_RULE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_CALL_MEMBER_RULE);
+ OBJECT * rule_name = function_get_constant( function, code[1].op_code );
+ LIST * result = function_call_member_rule( function, frame, s, code->arg, rule_name, function->file, code[1].arg );
+ stack_push( s, result );
+ ++code;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_CALL_MEMBER_RULE);
+ break;
+ }
+
+ case INSTR_RULE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_RULE);
+ function_set_rule( function, frame, s, code->arg );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_RULE);
+ break;
+ }
+
+ case INSTR_ACTIONS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_ACTIONS);
+ function_set_actions( function, frame, s, code->arg );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_ACTIONS);
+ break;
+ }
+
+ /*
+ * Variable expansion
+ */
+
+ case INSTR_APPLY_MODIFIERS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_MODIFIERS);
+ int n;
+ int i;
+ l = stack_pop( s );
+ n = expand_modifiers( s, code->arg );
+ stack_push( s, l );
+ l = apply_modifiers( s, n );
+ list_free( stack_pop( s ) );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_MODIFIERS);
+ break;
+ }
+
+ case INSTR_APPLY_INDEX:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX);
+ l = apply_subscript( s );
+ list_free( stack_pop( s ) );
+ list_free( stack_pop( s ) );
+ stack_push( s, l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX);
+ break;
+ }
+
+ case INSTR_APPLY_INDEX_MODIFIERS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS);
+ int i;
+ int n;
+ l = stack_pop( s );
+ r = stack_pop( s );
+ n = expand_modifiers( s, code->arg );
+ stack_push( s, r );
+ stack_push( s, l );
+ l = apply_subscript_and_modifiers( s, n );
+ list_free( stack_pop( s ) );
+ list_free( stack_pop( s ) );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, l );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS);
+ break;
+ }
+
+ case INSTR_APPLY_MODIFIERS_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_MODIFIERS_GROUP);
+ int i;
+ LIST * const vars = stack_pop( s );
+ int const n = expand_modifiers( s, code->arg );
+ LIST * result = L0;
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ stack_push( s, function_get_named_variable( function, frame,
+ list_item( iter ) ) );
+ result = list_append( result, apply_modifiers( s, n ) );
+ list_free( stack_pop( s ) );
+ }
+ list_free( vars );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, result );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_MODIFIERS_GROUP);
+ break;
+ }
+
+ case INSTR_APPLY_INDEX_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX_GROUP);
+ LIST * vars = stack_pop( s );
+ LIST * result = L0;
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ stack_push( s, function_get_named_variable( function, frame,
+ list_item( iter ) ) );
+ result = list_append( result, apply_subscript( s ) );
+ list_free( stack_pop( s ) );
+ }
+ list_free( vars );
+ list_free( stack_pop( s ) );
+ stack_push( s, result );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX_GROUP);
+ break;
+ }
+
+ case INSTR_APPLY_INDEX_MODIFIERS_GROUP:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS_GROUP);
+ int i;
+ LIST * const vars = stack_pop( s );
+ LIST * const r = stack_pop( s );
+ int const n = expand_modifiers( s, code->arg );
+ LIST * result = L0;
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ stack_push( s, r );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ stack_push( s, function_get_named_variable( function, frame,
+ list_item( iter ) ) );
+ result = list_append( result, apply_subscript_and_modifiers( s,
+ n ) );
+ list_free( stack_pop( s ) );
+ }
+ list_free( stack_pop( s ) );
+ list_free( vars );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, result );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS_GROUP);
+ break;
+ }
+
+ case INSTR_COMBINE_STRINGS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_COMBINE_STRINGS);
+ size_t const buffer_size = code->arg * sizeof( expansion_item );
+ LIST * * const stack_pos = (LIST * * const)stack_get( s );
+ expansion_item * items = (expansion_item *)stack_allocate( s, buffer_size );
+ LIST * result;
+ int i;
+ for ( i = 0; i < code->arg; ++i )
+ items[ i ].values = stack_pos[ i ];
+ result = expand( items, code->arg );
+ stack_deallocate( s, buffer_size );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) );
+ stack_push( s, result );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_COMBINE_STRINGS);
+ break;
+ }
+
+ case INSTR_GET_GRIST:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_GET_GRIST);
+ LIST * vals = stack_pop( s );
+ LIST * result = L0;
+ LISTITER iter, end;
+
+ for ( iter = list_begin( vals ), end = list_end( vals ); iter != end; ++iter )
+ {
+ OBJECT * new_object;
+ const char * value = object_str( list_item( iter ) );
+ const char * p;
+ if ( value[ 0 ] == '<' && ( p = strchr( value, '>' ) ) )
+ {
+ if( p[ 1 ] )
+ new_object = object_new_range( value, p - value + 1 );
+ else
+ new_object = object_copy( list_item( iter ) );
+ }
+ else
+ {
+ new_object = object_copy( constant_empty );
+ }
+ result = list_push_back( result, new_object );
+ }
+
+ list_free( vals );
+ stack_push( s, result );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_GET_GRIST);
+ break;
+ }
+
+ case INSTR_INCLUDE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_INCLUDE);
+ LIST * nt = stack_pop( s );
+ if ( !list_empty( nt ) )
+ {
+ TARGET * const t = bindtarget( list_front( nt ) );
+ list_free( nt );
+
+ /* DWA 2001/10/22 - Perforce Jam cleared the arguments here,
+ * which prevented an included file from being treated as part
+ * of the body of a rule. I did not see any reason to do that,
+ * so I lifted the restriction.
+ */
+
+ /* Bind the include file under the influence of "on-target"
+ * variables. Though they are targets, include files are not
+ * built with make().
+ */
+
+ pushsettings( root_module(), t->settings );
+            /* We do not expect a file to be included to have been generated
+             * by some action, so pass 0 as the third argument. If the name
+             * resolves to a directory, let it error out.
+             */
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( root_module(), t->settings );
+
+ parse_file( t->boundname, frame );
+#ifdef JAM_DEBUGGER
+ frame->function = function_;
+#endif
+ }
+ PROFILE_EXIT_LOCAL(function_run_INSTR_INCLUDE);
+ break;
+ }
+
+ /*
+ * Classes and modules
+ */
+
+ case INSTR_PUSH_MODULE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_MODULE);
+ LIST * const module_name = stack_pop( s );
+ module_t * const outer_module = frame->module;
+ frame->module = !list_empty( module_name )
+ ? bindmodule( list_front( module_name ) )
+ : root_module();
+ list_free( module_name );
+ *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) =
+ outer_module;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_MODULE);
+ break;
+ }
+
+ case INSTR_POP_MODULE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_POP_MODULE);
+ module_t * const outer_module = *(module_t * *)stack_get( s );
+ stack_deallocate( s, sizeof( module_t * ) );
+ frame->module = outer_module;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_POP_MODULE);
+ break;
+ }
+
+ case INSTR_CLASS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_CLASS);
+ LIST * bases = stack_pop( s );
+ LIST * name = stack_pop( s );
+ OBJECT * class_module = make_class_module( name, bases, frame );
+
+ module_t * const outer_module = frame->module;
+ frame->module = bindmodule( class_module );
+ object_free( class_module );
+
+ *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) =
+ outer_module;
+ PROFILE_EXIT_LOCAL(function_run_INSTR_CLASS);
+ break;
+ }
+
+ case INSTR_BIND_MODULE_VARIABLES:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_BIND_MODULE_VARIABLES);
+ module_bind_variables( frame->module );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_BIND_MODULE_VARIABLES);
+ break;
+ }
+
+ case INSTR_APPEND_STRINGS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_STRINGS);
+ string buf[ 1 ];
+ string_new( buf );
+ combine_strings( s, code->arg, buf );
+ stack_push( s, list_new( object_new( buf->value ) ) );
+ string_free( buf );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_STRINGS);
+ break;
+ }
+
+ case INSTR_WRITE_FILE:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_WRITE_FILE);
+ string buf[ 1 ];
+ char const * out;
+ OBJECT * tmp_filename = 0;
+ int out_debug = DEBUG_EXEC ? 1 : 0;
+ FILE * out_file = 0;
+ string_new( buf );
+ combine_strings( s, code->arg, buf );
+ out = object_str( list_front( stack_top( s ) ) );
+
+ /* For stdout/stderr we will create a temp file and generate a
+ * command that outputs the content as needed.
+ */
+ if ( ( strcmp( "STDOUT", out ) == 0 ) ||
+ ( strcmp( "STDERR", out ) == 0 ) )
+ {
+ int err_redir = strcmp( "STDERR", out ) == 0;
+ string result[ 1 ];
+
+ tmp_filename = path_tmpfile();
+
+ /* Construct os-specific cat command. */
+ {
+ const char * command = "cat";
+ const char * quote = "\"";
+ const char * redirect = "1>&2";
+
+ #ifdef OS_NT
+ command = "type";
+ quote = "\"";
+ #elif defined( OS_VMS )
+ command = "pipe type";
+ quote = "";
+
+                    /* Get tmp file name in OS format. */
+ {
+ string os_filename[ 1 ];
+
+ string_new( os_filename );
+ path_translate_to_os( object_str( tmp_filename ), os_filename );
+ object_free( tmp_filename );
+ tmp_filename = object_new( os_filename->value );
+ string_free( os_filename );
+ }
+ #endif
+
+ string_new( result );
+ string_append( result, command );
+ string_append( result, " " );
+ string_append( result, quote );
+ string_append( result, object_str( tmp_filename ) );
+ string_append( result, quote );
+ if ( err_redir )
+ {
+ string_append( result, " " );
+ string_append( result, redirect );
+ }
+ }
+
+ /* Replace STDXXX with the temporary file. */
+ list_free( stack_pop( s ) );
+ stack_push( s, list_new( object_new( result->value ) ) );
+ out = object_str( tmp_filename );
+
+ string_free( result );
+
+ /* Make sure temp files created by this get nuked eventually. */
+ file_remove_atexit( tmp_filename );
+ }
+
+ if ( !globs.noexec )
+ {
+ string out_name[ 1 ];
+ /* Handle "path to file" filenames. */
+ if ( ( out[ 0 ] == '"' ) && ( out[ strlen( out ) - 1 ] == '"' )
+ )
+ {
+ string_copy( out_name, out + 1 );
+ string_truncate( out_name, out_name->size - 1 );
+ }
+ else
+ string_copy( out_name, out );
+ out_file = fopen( out_name->value, "w" );
+
+ if ( !out_file )
+ {
+ err_printf( "failed to write output file '%s'!\n",
+ out_name->value );
+ exit( EXITBAD );
+ }
+ string_free( out_name );
+ }
+
+ if ( out_debug ) out_printf( "\nfile %s\n", out );
+ if ( out_file ) fputs( buf->value, out_file );
+ if ( out_debug ) out_puts( buf->value );
+ if ( out_file )
+ {
+ fflush( out_file );
+ fclose( out_file );
+ }
+ string_free( buf );
+ if ( tmp_filename )
+ object_free( tmp_filename );
+
+ if ( out_debug ) out_putc( '\n' );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_WRITE_FILE);
+ break;
+ }
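As a hedged illustration of the STDOUT/STDERR rewrite above (the temporary path shown is hypothetical): when the requested output name is STDERR on Windows, the list pushed back onto the stack becomes a display command such as

    type "C:\Temp\jam01234.tmp" 1>&2

while the combined string itself is written to that temporary file, which file_remove_atexit() schedules for deletion. For STDOUT the same command is generated without the 1>&2 redirection.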
+
+ case INSTR_OUTPUT_STRINGS:
+ {
+ PROFILE_ENTER_LOCAL(function_run_INSTR_OUTPUT_STRINGS);
+ string * const buf = *(string * *)( (char *)stack_get( s ) + (
+ code->arg * sizeof( LIST * ) ) );
+ combine_strings( s, code->arg, buf );
+ PROFILE_EXIT_LOCAL(function_run_INSTR_OUTPUT_STRINGS);
+ break;
+ }
+
+ case INSTR_DEBUG_LINE:
+ {
+ debug_on_instruction( frame, function->file, code->arg );
+ break;
+ }
+
+ }
+ ++code;
+ }
+
+ PROFILE_EXIT_LOCAL(function_run);
+}
+
+
+#ifdef HAVE_PYTHON
+
+static struct arg_list * arg_list_compile_python( PyObject * bjam_signature,
+ int * num_arguments )
+{
+ if ( bjam_signature )
+ {
+ struct argument_list_compiler c[ 1 ];
+ struct arg_list * result;
+ Py_ssize_t s;
+ Py_ssize_t i;
+ argument_list_compiler_init( c );
+
+ s = PySequence_Size( bjam_signature );
+ for ( i = 0; i < s; ++i )
+ {
+ struct argument_compiler arg_comp[ 1 ];
+ struct arg_list arg;
+ PyObject * v = PySequence_GetItem( bjam_signature, i );
+ Py_ssize_t j;
+ Py_ssize_t inner;
+ argument_compiler_init( arg_comp );
+
+ inner = PySequence_Size( v );
+ for ( j = 0; j < inner; ++j )
+ argument_compiler_add( arg_comp, object_new( PyString_AsString(
+ PySequence_GetItem( v, j ) ) ), constant_builtin, -1 );
+
+ arg = arg_compile_impl( arg_comp, constant_builtin, -1 );
+ dynamic_array_push( c->args, arg );
+ argument_compiler_free( arg_comp );
+ Py_DECREF( v );
+ }
+
+ *num_arguments = c->args->size;
+ result = (struct arg_list *)BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
+ memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list )
+ );
+ argument_list_compiler_free( c );
+ return result;
+ }
+ *num_arguments = 0;
+ return 0;
+}
+
+FUNCTION * function_python( PyObject * function, PyObject * bjam_signature )
+{
+ PYTHON_FUNCTION * result = (PYTHON_FUNCTION *)BJAM_MALLOC( sizeof( PYTHON_FUNCTION ) );
+
+ result->base.type = FUNCTION_PYTHON;
+ result->base.reference_count = 1;
+ result->base.rulename = 0;
+ result->base.formal_arguments = arg_list_compile_python( bjam_signature,
+ &result->base.num_formal_arguments );
+ Py_INCREF( function );
+ result->python_function = function;
+
+ return (FUNCTION *)result;
+}
+
+
+static void argument_list_to_python( struct arg_list * formal, int formal_count,
+ FUNCTION * function, FRAME * frame, PyObject * kw )
+{
+ LOL * all_actual = frame->args;
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ LISTITER actual_iter = list_begin( actual );
+ LISTITER const actual_end = list_end( actual );
+ int j;
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+ PyObject * value;
+ LIST * l;
+
+ switch ( formal_arg->flags )
+ {
+ case ARG_ONE:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ type_check_range( formal_arg->type_name, actual_iter, list_next(
+ actual_iter ), frame, function, formal_arg->arg_name );
+ value = PyString_FromString( object_str( list_item( actual_iter
+ ) ) );
+ actual_iter = list_next( actual_iter );
+ break;
+ case ARG_OPTIONAL:
+ if ( actual_iter == actual_end )
+ value = 0;
+ else
+ {
+ type_check_range( formal_arg->type_name, actual_iter,
+ list_next( actual_iter ), frame, function,
+ formal_arg->arg_name );
+ value = PyString_FromString( object_str( list_item(
+ actual_iter ) ) );
+ actual_iter = list_next( actual_iter );
+ }
+ break;
+ case ARG_PLUS:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ /* fallthrough */
+ case ARG_STAR:
+ type_check_range( formal_arg->type_name, actual_iter,
+ actual_end, frame, function, formal_arg->arg_name );
+ l = list_copy_range( actual, actual_iter, actual_end );
+ value = list_to_python( l );
+ list_free( l );
+ actual_iter = actual_end;
+ break;
+ case ARG_VARIADIC:
+ return;
+ }
+
+ if ( value )
+ {
+ PyObject * key = PyString_FromString( object_str(
+ formal_arg->arg_name ) );
+ PyDict_SetItem( kw, key, value );
+ Py_DECREF( key );
+ Py_DECREF( value );
+ }
+ }
+
+ if ( actual_iter != actual_end )
+ argument_error( "extra argument", function, frame, list_item(
+ actual_iter ) );
+ }
+
+ for ( ; i < all_actual->count; ++i )
+ {
+ LIST * const actual = lol_get( all_actual, i );
+ if ( !list_empty( actual ) )
+ argument_error( "extra argument", function, frame, list_front(
+ actual ) );
+ }
+}
+
+
+/* Given a Python object, return a string to use in Jam code in place of that
+ * object.
+ *
+ * If the object is a string, use the string value.
+ * If the object implements the __jam_repr__ method, use that.
+ * Otherwise return 0.
+ */
+
+OBJECT * python_to_string( PyObject * value )
+{
+ if ( PyString_Check( value ) )
+ return object_new( PyString_AS_STRING( value ) );
+
+ /* See if this instance defines the special __jam_repr__ method. */
+ if ( PyInstance_Check( value )
+ && PyObject_HasAttrString( value, "__jam_repr__" ) )
+ {
+ PyObject * repr = PyObject_GetAttrString( value, "__jam_repr__" );
+ if ( repr )
+ {
+ PyObject * arguments2 = PyTuple_New( 0 );
+ PyObject * value2 = PyObject_Call( repr, arguments2, 0 );
+ Py_DECREF( repr );
+ Py_DECREF( arguments2 );
+ if ( PyString_Check( value2 ) )
+ return object_new( PyString_AS_STRING( value2 ) );
+ Py_DECREF( value2 );
+ }
+ }
+ return 0;
+}
+
+
+static module_t * python_module()
+{
+ static module_t * python = 0;
+ if ( !python )
+ python = bindmodule( constant_python );
+ return python;
+}
+
+
+static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame )
+{
+ LIST * result = 0;
+ PyObject * arguments = 0;
+ PyObject * kw = NULL;
+ int i;
+ PyObject * py_result;
+ FRAME * prev_frame_before_python_call;
+
+ if ( function->base.formal_arguments )
+ {
+ arguments = PyTuple_New( 0 );
+ kw = PyDict_New();
+ argument_list_to_python( function->base.formal_arguments,
+ function->base.num_formal_arguments, &function->base, frame, kw );
+ }
+ else
+ {
+ arguments = PyTuple_New( frame->args->count );
+ for ( i = 0; i < frame->args->count; ++i )
+ PyTuple_SetItem( arguments, i, list_to_python( lol_get( frame->args,
+ i ) ) );
+ }
+
+ frame->module = python_module();
+
+ prev_frame_before_python_call = frame_before_python_call;
+ frame_before_python_call = frame;
+ py_result = PyObject_Call( function->python_function, arguments, kw );
+ frame_before_python_call = prev_frame_before_python_call;
+ Py_DECREF( arguments );
+ Py_XDECREF( kw );
+ if ( py_result != NULL )
+ {
+ if ( PyList_Check( py_result ) )
+ {
+ int size = PyList_Size( py_result );
+ int i;
+ for ( i = 0; i < size; ++i )
+ {
+ OBJECT * s = python_to_string( PyList_GetItem( py_result, i ) );
+ if ( !s )
+ err_printf(
+ "Non-string object returned by Python call.\n" );
+ else
+ result = list_push_back( result, s );
+ }
+ }
+ else if ( py_result == Py_None )
+ {
+ result = L0;
+ }
+ else
+ {
+ OBJECT * const s = python_to_string( py_result );
+ if ( s )
+ result = list_new( s );
+ else
+ /* We have tried all we could. Return empty list. There are
+ * cases, e.g. feature.feature function that should return a
+ * value for the benefit of Python code and which also can be
+ * called by Jam code, where no sensible value can be returned.
+ * We cannot even emit a warning, since there would be a pile of
+ * them.
+ */
+ result = L0;
+ }
+
+ Py_DECREF( py_result );
+ }
+ else
+ {
+ PyErr_Print();
+ err_printf( "Call failed\n" );
+ }
+
+ return result;
+}
+
+#endif
+
+
+void function_done( void )
+{
+ BJAM_FREE( stack );
+}
diff --git a/src/boost/tools/build/src/engine/function.h b/src/boost/tools/build/src/engine/function.h
new file mode 100644
index 000000000..73c837f52
--- /dev/null
+++ b/src/boost/tools/build/src/engine/function.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef FUNCTION_SW20111123_H
+#define FUNCTION_SW20111123_H
+
+#include "config.h"
+#include "object.h"
+#include "frames.h"
+#include "lists.h"
+#include "parse.h"
+#include "jam_strings.h"
+
+typedef struct _function FUNCTION;
+typedef struct _stack STACK;
+
+STACK * stack_global( void );
+void stack_push( STACK * s, LIST * l );
+LIST * stack_pop( STACK * s );
+
+FUNCTION * function_compile( PARSE * parse );
+FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int flags ), int flags, const char * * args );
+void function_refer( FUNCTION * );
+void function_free( FUNCTION * );
+OBJECT * function_rulename( FUNCTION * );
+void function_set_rulename( FUNCTION *, OBJECT * );
+void function_location( FUNCTION *, OBJECT * *, int * );
+LIST * function_run( FUNCTION * function, FRAME * frame, STACK * s );
+
+FUNCTION * function_compile_actions( const char * actions, OBJECT * file, int line );
+void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s, string * out );
+
+FUNCTION * function_bind_variables( FUNCTION * f, module_t * module, int * counter );
+FUNCTION * function_unbind_variables( FUNCTION * f );
+
+LIST * function_get_variables( FUNCTION * f );
+
+void function_done( void );
+
+#ifdef HAVE_PYTHON
+
+FUNCTION * function_python( PyObject * function, PyObject * bjam_signature );
+
+#endif
+
+#endif
diff --git a/src/boost/tools/build/src/engine/glob.cpp b/src/boost/tools/build/src/engine/glob.cpp
new file mode 100644
index 000000000..bec00ee56
--- /dev/null
+++ b/src/boost/tools/build/src/engine/glob.cpp
@@ -0,0 +1,152 @@
+/*
+ * Copyright 1994 Christopher Seiwald. All rights reserved.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * glob.c - match a string against a simple pattern
+ *
+ * Understands the following patterns:
+ *
+ * * any number of characters
+ * ? any single character
+ * [a-z] any single character in the range a-z
+ * [^a-z] any single character not in the range a-z
+ * \x match x
+ *
+ * External functions:
+ *
+ * glob() - match a string against a simple pattern
+ *
+ * Internal functions:
+ *
+ * globchars() - build a bitlist to check for character group match
+ */
+
+# include "jam.h"
+
+# define CHECK_BIT( tab, bit ) ( tab[ (bit)/8 ] & (1<<( (bit)%8 )) )
+# define BITLISTSIZE 16 /* bytes used for [chars] in compiled expr */
+
+static void globchars( const char * s, const char * e, char * b );
+
+
+/*
+ * glob() - match a string against a simple pattern.
+ */
+
+int glob( const char * c, const char * s )
+{
+ char bitlist[ BITLISTSIZE ];
+ const char * here;
+
+ for ( ; ; )
+ switch ( *c++ )
+ {
+ case '\0':
+ return *s ? -1 : 0;
+
+ case '?':
+ if ( !*s++ )
+ return 1;
+ break;
+
+ case '[':
+ /* Scan for matching ]. */
+
+ here = c;
+ do if ( !*c++ ) return 1;
+ while ( ( here == c ) || ( *c != ']' ) );
+ ++c;
+
+ /* Build character class bitlist. */
+
+ globchars( here, c, bitlist );
+
+ if ( !CHECK_BIT( bitlist, *(const unsigned char *)s ) )
+ return 1;
+ ++s;
+ break;
+
+ case '*':
+ here = s;
+
+ while ( *s )
+ ++s;
+
+ /* Try to match the rest of the pattern in a recursive */
+ /* call. If the match fails we'll back up chars, retrying. */
+
+ while ( s != here )
+ {
+ int r;
+
+ /* A fast path for the last token in a pattern. */
+ r = *c ? glob( c, s ) : *s ? -1 : 0;
+
+ if ( !r )
+ return 0;
+ if ( r < 0 )
+ return 1;
+ --s;
+ }
+ break;
+
+ case '\\':
+ /* Force literal match of next char. */
+ if ( !*c || ( *s++ != *c++ ) )
+ return 1;
+ break;
+
+ default:
+ if ( *s++ != c[ -1 ] )
+ return 1;
+ break;
+ }
+}
+
+
+/*
+ * globchars() - build a bitlist to check for character group match.
+ */
+
+static void globchars( const char * s, const char * e, char * b )
+{
+ int neg = 0;
+
+ memset( b, '\0', BITLISTSIZE );
+
+ if ( *s == '^' )
+ {
+ ++neg;
+ ++s;
+ }
+
+ while ( s < e )
+ {
+ int c;
+
+ if ( ( s + 2 < e ) && ( s[1] == '-' ) )
+ {
+ for ( c = s[0]; c <= s[2]; ++c )
+ b[ c/8 ] |= ( 1 << ( c % 8 ) );
+ s += 3;
+ }
+ else
+ {
+ c = *s++;
+ b[ c/8 ] |= ( 1 << ( c % 8 ) );
+ }
+ }
+
+ if ( neg )
+ {
+ int i;
+ for ( i = 0; i < BITLISTSIZE; ++i )
+ b[ i ] ^= 0377;
+ }
+
+ /* Do not include \0 in either $[chars] or $[^chars]. */
+ b[0] &= 0376;
+}
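A minimal caller sketch for the pattern grammar documented at the top of this file (an illustration, not part of the patch; per the code above, glob() returns 0 on a match and non-zero otherwise):

    #include <stdio.h>

    int glob( const char * c, const char * s );  /* defined in glob.cpp above */

    int main()
    {
        const char * names[] = { "hash.cpp", "hash.h", "Jamfile" };
        int i;
        for ( i = 0; i < 3; ++i )
            printf( "%-10s %s\n", names[ i ],
                glob( "*.[ch]*", names[ i ] ) == 0 ? "matches *.[ch]*" : "no match" );
        return 0;
    }

With this pattern, "hash.cpp" and "hash.h" match while "Jamfile" does not.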
diff --git a/src/boost/tools/build/src/engine/guess_toolset.bat b/src/boost/tools/build/src/engine/guess_toolset.bat
new file mode 100644
index 000000000..51d2d26c1
--- /dev/null
+++ b/src/boost/tools/build/src/engine/guess_toolset.bat
@@ -0,0 +1,115 @@
+@ECHO OFF
+
+REM ~ Copyright 2002-2018 Rene Rivera.
+REM ~ Distributed under the Boost Software License, Version 1.0.
+REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+if "_%1_" == "_yacc_" goto Guess_Yacc
+if "_%1_" == "_test_path_" (
+ shift
+ goto Test_Path)
+goto Guess
+
+
+:Clear_Error
+ver >nul
+goto :eof
+
+
+:Test_Path
+REM Tests for the presence of the given executable file in the directories
+REM listed in the PATH environment variable. Additionally sets FOUND_PATH to
+REM the directory containing the found file.
+call :Clear_Error
+setlocal
+set test=%~$PATH:1
+endlocal
+if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1
+goto :eof
+
+
+:Guess
+REM Let vswhere tell us where msvc is at, if available.
+call :Clear_Error
+call vswhere_usability_wrapper.cmd
+call :Clear_Error
+REM VSUNKCOMNTOOLS represents an unknown but detected version reported by vswhere
+if NOT "_%VSUNKCOMNTOOLS%_" == "__" (
+ set "B2_TOOLSET=vcunk"
+ set "B2_TOOLSET_ROOT=%VSUNKCOMNTOOLS%..\..\VC\"
+ goto :eof)
+if NOT "_%VS160COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET=vc142"
+ set "B2_TOOLSET_ROOT=%VS160COMNTOOLS%..\..\VC\"
+ goto :eof)
+if NOT "_%VS150COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET=vc141"
+ set "B2_TOOLSET_ROOT=%VS150COMNTOOLS%..\..\VC\"
+ goto :eof)
+if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" (
+ set "B2_TOOLSET=vc141"
+ set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Enterprise\VC\"
+ exit /b 0)
+if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvarsall.bat" (
+ set "B2_TOOLSET=vc141"
+ set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Professional\VC\"
+ exit /b 0)
+if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" (
+ set "B2_TOOLSET=vc141"
+ set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Community\VC\"
+ exit /b 0)
+if NOT "_%VS140COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET=vc14"
+ set "B2_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\"
+ exit /b 0)
+if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 14.0\VC\VCVARSALL.BAT" (
+ set "B2_TOOLSET=vc14"
+ set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 14.0\VC\"
+ exit /b 0)
+if NOT "_%VS120COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET=vc12"
+ set "B2_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\"
+ exit /b 0)
+if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 12.0\VC\VCVARSALL.BAT" (
+ set "B2_TOOLSET=vc12"
+ set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 12.0\VC\"
+ exit /b 0)
+if NOT "_%VS110COMNTOOLS%_" == "__" (
+ set "B2_TOOLSET=vc11"
+ set "B2_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
+ exit /b 0)
+if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 11.0\VC\VCVARSALL.BAT" (
+ set "B2_TOOLSET=vc11"
+ set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 11.0\VC\"
+ exit /b 0)
+call :Test_Path cl.exe
+if not errorlevel 1 (
+ set "B2_TOOLSET=msvc"
+ set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
+ exit /b 0)
+call :Test_Path vcvars32.bat
+if not errorlevel 1 (
+ set "B2_TOOLSET=msvc"
+ call "%FOUND_PATH%VCVARS32.BAT"
+ set "B2_TOOLSET_ROOT=%MSVCDir%\"
+ exit /b 0)
+if EXIST "C:\Borland\BCC55\Bin\bcc32.exe" (
+ set "B2_TOOLSET=borland"
+ set "B2_TOOLSET_ROOT=C:\Borland\BCC55\"
+ exit /b 0)
+call :Test_Path bcc32.exe
+if not errorlevel 1 (
+ set "B2_TOOLSET=borland"
+ set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
+ exit /b 0)
+call :Test_Path icl.exe
+if not errorlevel 1 (
+ set "B2_TOOLSET=intel-win32"
+ set "B2_TOOLSET_ROOT=%FOUND_PATH%..\"
+ exit /b 0)
+if EXIST "C:\MinGW\bin\gcc.exe" (
+ set "B2_TOOLSET=mingw"
+ set "B2_TOOLSET_ROOT=C:\MinGW\"
+ exit /b 0)
+REM Could not find a suitable toolset
+exit /b 1
diff --git a/src/boost/tools/build/src/engine/hash.cpp b/src/boost/tools/build/src/engine/hash.cpp
new file mode 100644
index 000000000..f3dcef88a
--- /dev/null
+++ b/src/boost/tools/build/src/engine/hash.cpp
@@ -0,0 +1,388 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hash.c - simple in-memory hashing routines
+ *
+ * External routines:
+ * hashinit() - initialize a hash table, returning a handle
+ * hashitem() - find a record in the table, and optionally enter a new one
+ * hashdone() - free a hash table, given its handle
+ *
+ * Internal routines:
+ * hashrehash() - resize and rebuild hp->tab, the hash table
+ */
+
+#include "jam.h"
+#include "hash.h"
+
+#include "compile.h"
+#include "output.h"
+
+#include <assert.h>
+
+/*
+#define HASH_DEBUG_PROFILE 1
+*/
+
+/* Header attached to all hash table data items. */
+
+typedef struct item ITEM;
+struct item
+{
+ ITEM * next;
+};
+
+#define MAX_LISTS 32
+
+struct hash
+{
+ /*
+ * the hash table, just an array of item pointers
+ */
+ struct
+ {
+ int nel;
+ ITEM * * base;
+ } tab;
+
+ int bloat; /* tab.nel / items.nel */
+ int inel; /* initial number of elements */
+
+ /*
+ * the array of records, maintained by these routines - essentially a
+ * microallocator
+ */
+ struct
+ {
+ int more; /* how many more ITEMs fit in lists[ list ] */
+ ITEM * free; /* free list of items */
+ char * next; /* where to put more ITEMs in lists[ list ] */
+ int size; /* sizeof( ITEM ) + aligned datalen */
+ int nel; /* total ITEMs held by all lists[] */
+ int list; /* index into lists[] */
+
+ struct
+ {
+ int nel; /* total ITEMs held by this list */
+ char * base; /* base of ITEMs array */
+ } lists[ MAX_LISTS ];
+ } items;
+
+ char const * name; /* just for hashstats() */
+};
+
+static void hashrehash( struct hash * );
+static void hashstat( struct hash * );
+
+static unsigned int hash_keyval( OBJECT * key )
+{
+ return object_hash( key );
+}
+
+#define hash_bucket(hp, keyval) ((hp)->tab.base + ((keyval) % (hp)->tab.nel))
+
+#define hash_data_key(data) (*(OBJECT * *)(data))
+#define hash_item_data(item) ((HASHDATA *)((char *)item + sizeof(ITEM)))
+#define hash_item_key(item) (hash_data_key(hash_item_data(item)))
+
+
+#define ALIGNED(x) ((x + sizeof(ITEM) - 1) & ~(sizeof(ITEM) - 1))
+
+/*
+ * hashinit() - initialize a hash table, returning a handle
+ */
+
+struct hash * hashinit( int datalen, char const * name )
+{
+ struct hash * hp = (struct hash *)BJAM_MALLOC( sizeof( *hp ) );
+
+ hp->bloat = 3;
+ hp->tab.nel = 0;
+ hp->tab.base = 0;
+ hp->items.more = 0;
+ hp->items.free = 0;
+ hp->items.size = sizeof( ITEM ) + ALIGNED( datalen );
+ hp->items.list = -1;
+ hp->items.nel = 0;
+ hp->inel = 11; /* 47 */
+ hp->name = name;
+
+ return hp;
+}
+
+
+/*
+ * hash_search() - Find the hash item for the given data.
+ *
+ * Returns a pointer to a hashed item with the given key. If given a 'previous'
+ * pointer, makes it point to the item prior to the found item in the same
+ * bucket or to 0 if our item is the first item in its bucket.
+ */
+
+static ITEM * hash_search( struct hash * hp, unsigned int keyval,
+ OBJECT * keydata, ITEM * * previous )
+{
+ ITEM * i = *hash_bucket( hp, keyval );
+ ITEM * p = 0;
+ for ( ; i; i = i->next )
+ {
+ if ( object_equal( hash_item_key( i ), keydata ) )
+ {
+ if ( previous )
+ *previous = p;
+ return i;
+ }
+ p = i;
+ }
+ return 0;
+}
+
+
+/*
+ * hash_insert() - insert a record in the table or return the existing one
+ */
+
+HASHDATA * hash_insert( struct hash * hp, OBJECT * key, int * found )
+{
+ ITEM * i;
+ unsigned int keyval = hash_keyval( key );
+
+ #ifdef HASH_DEBUG_PROFILE
+ profile_frame prof[ 1 ];
+ if ( DEBUG_PROFILE )
+ profile_enter( 0, prof );
+ #endif
+
+ if ( !hp->items.more )
+ hashrehash( hp );
+
+ i = hash_search( hp, keyval, key, 0 );
+ if ( i )
+ *found = 1;
+ else
+ {
+ ITEM * * base = hash_bucket( hp, keyval );
+
+ /* Try to grab one from the free list. */
+ if ( hp->items.free )
+ {
+ i = hp->items.free;
+ hp->items.free = i->next;
+ assert( !hash_item_key( i ) );
+ }
+ else
+ {
+ i = (ITEM *)hp->items.next;
+ hp->items.next += hp->items.size;
+ }
+ --hp->items.more;
+ i->next = *base;
+ *base = i;
+ *found = 0;
+ }
+
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+
+ return hash_item_data( i );
+}
+
+
+/*
+ * hash_find() - find a record in the table or NULL if none exists
+ */
+
+HASHDATA * hash_find( struct hash * hp, OBJECT * key )
+{
+ ITEM * i;
+ unsigned int keyval = hash_keyval( key );
+
+ #ifdef HASH_DEBUG_PROFILE
+ profile_frame prof[ 1 ];
+ if ( DEBUG_PROFILE )
+ profile_enter( 0, prof );
+ #endif
+
+ if ( !hp->items.nel )
+ {
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+ return 0;
+ }
+
+ i = hash_search( hp, keyval, key, 0 );
+
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+
+ return i ? hash_item_data( i ) : 0;
+}
+
+
+/*
+ * hashrehash() - resize and rebuild hp->tab, the hash table
+ */
+
+static void hashrehash( struct hash * hp )
+{
+ int i = ++hp->items.list;
+ hp->items.more = i ? 2 * hp->items.nel : hp->inel;
+ hp->items.next = (char *)BJAM_MALLOC( hp->items.more * hp->items.size );
+ hp->items.free = 0;
+
+ hp->items.lists[ i ].nel = hp->items.more;
+ hp->items.lists[ i ].base = hp->items.next;
+ hp->items.nel += hp->items.more;
+
+ if ( hp->tab.base )
+ BJAM_FREE( (char *)hp->tab.base );
+
+ hp->tab.nel = hp->items.nel * hp->bloat;
+ hp->tab.base = (ITEM * *)BJAM_MALLOC( hp->tab.nel * sizeof( ITEM * ) );
+
+ memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) );
+
+ for ( i = 0; i < hp->items.list; ++i )
+ {
+ int nel = hp->items.lists[ i ].nel;
+ char * next = hp->items.lists[ i ].base;
+
+ for ( ; nel--; next += hp->items.size )
+ {
+ ITEM * i = (ITEM *)next;
+ ITEM * * ip = hp->tab.base + object_hash( hash_item_key( i ) ) %
+ hp->tab.nel;
+ /* code currently assumes rehashing only when there are no free
+ * items
+ */
+ assert( hash_item_key( i ) );
+
+ i->next = *ip;
+ *ip = i;
+ }
+ }
+}
+
+
+void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data
+ )
+{
+ int i;
+ for ( i = 0; i <= hp->items.list; ++i )
+ {
+ char * next = hp->items.lists[ i ].base;
+ int nel = hp->items.lists[ i ].nel;
+ if ( i == hp->items.list )
+ nel -= hp->items.more;
+
+ for ( ; nel--; next += hp->items.size )
+ {
+ ITEM * const i = (ITEM *)next;
+ if ( hash_item_key( i ) != 0 ) /* Do not enumerate freed items. */
+ f( hash_item_data( i ), data );
+ }
+ }
+}
+
+
+/*
+ * hash_free() - free a hash table, given its handle
+ */
+
+void hash_free( struct hash * hp )
+{
+ int i;
+ if ( !hp )
+ return;
+ if ( hp->tab.base )
+ BJAM_FREE( (char *)hp->tab.base );
+ for ( i = 0; i <= hp->items.list; ++i )
+ BJAM_FREE( hp->items.lists[ i ].base );
+ BJAM_FREE( (char *)hp );
+}
+
+
+static void hashstat( struct hash * hp )
+{
+ struct hashstats stats[ 1 ];
+ hashstats_init( stats );
+ hashstats_add( stats, hp );
+ hashstats_print( stats, hp->name );
+}
+
+
+void hashstats_init( struct hashstats * stats )
+{
+ stats->count = 0;
+ stats->num_items = 0;
+ stats->tab_size = 0;
+ stats->item_size = 0;
+ stats->sets = 0;
+ stats->num_hashes = 0;
+}
+
+
+void hashstats_add( struct hashstats * stats, struct hash * hp )
+{
+ if ( hp )
+ {
+ ITEM * * tab = hp->tab.base;
+ int nel = hp->tab.nel;
+ int count = 0;
+ int sets = 0;
+ int i;
+
+ for ( i = 0; i < nel; ++i )
+ {
+ ITEM * item;
+ int here = 0;
+ for ( item = tab[ i ]; item; item = item->next )
+ ++here;
+
+ count += here;
+ if ( here > 0 )
+ ++sets;
+ }
+
+ stats->count += count;
+ stats->sets += sets;
+ stats->num_items += hp->items.nel;
+ stats->tab_size += hp->tab.nel;
+ stats->item_size = hp->items.size;
+ ++stats->num_hashes;
+ }
+}
+
+
+void hashstats_print( struct hashstats * stats, char const * name )
+{
+ out_printf( "%s table: %d+%d+%d (%dK+%luK+%luK) items+table+hash, %f density\n",
+ name,
+ stats->count,
+ stats->num_items,
+ stats->tab_size,
+ stats->num_items * stats->item_size / 1024,
+ (long unsigned)stats->tab_size * sizeof( ITEM * * ) / 1024,
+ (long unsigned)stats->num_hashes * sizeof( struct hash ) / 1024,
+ (float)stats->count / (float)stats->sets );
+}
+
+
+void hashdone( struct hash * hp )
+{
+ if ( !hp )
+ return;
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ hashstat( hp );
+ hash_free( hp );
+}
diff --git a/src/boost/tools/build/src/engine/hash.h b/src/boost/tools/build/src/engine/hash.h
new file mode 100644
index 000000000..7ed633d73
--- /dev/null
+++ b/src/boost/tools/build/src/engine/hash.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hash.h - simple in-memory hashing routines
+ */
+
+#ifndef BOOST_JAM_HASH_H
+#define BOOST_JAM_HASH_H
+
+#include "config.h"
+#include "object.h"
+
+/*
+ * An opaque struct representing an item in the hash table. The first element of
+ * every struct stored in the table must be an OBJECT * which is treated as the
+ * key.
+ */
+typedef struct hashdata HASHDATA;
+
+/*
+ * hashinit() - initialize a hash table, returning a handle.
+ *
+ * Parameters:
+ * datalen - item size
+ * name - used for debugging
+ */
+struct hash * hashinit( int datalen, char const * name );
+
+/*
+ * hash_free() - free a hash table, given its handle
+ */
+void hash_free( struct hash * );
+void hashdone( struct hash * );
+
+/*
+ * hashenumerate() - call f(i, data) on each item, i in the hash table. The
+ * enumeration order is unspecified.
+ */
+void hashenumerate( struct hash *, void (* f)( void *, void * ), void * data );
+
+/*
+ * hash_insert() - insert a new item in a hash table, or return an existing one.
+ *
+ * Preconditions:
+ * - hp must be a hash table created by hashinit()
+ * - key must be an object created by object_new()
+ *
+ * Postconditions:
+ * - if the key does not already exist in the hash table, *found == 0 and the
+ * result will be a pointer to an uninitialized item. The key of the new
+ * item must be set to a value equal to key before any further operations on
+ * the hash table except hashdone().
+ * - if the key is present then *found == 1 and the result is a pointer to the
+ * existing record.
+ */
+HASHDATA * hash_insert( struct hash *, OBJECT * key, int * found );
+
+/*
+ * hash_find() - find a record in the table or NULL if none exists
+ */
+HASHDATA * hash_find( struct hash *, OBJECT * key );
+
+struct hashstats {
+ int count;
+ int num_items;
+ int tab_size;
+ int item_size;
+ int sets;
+ int num_hashes;
+};
+
+void hashstats_init( struct hashstats * stats );
+void hashstats_add( struct hashstats * stats, struct hash * );
+void hashstats_print( struct hashstats * stats, char const * name );
+
+#endif
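A usage sketch following the hash_insert() contract documented above (an illustration under the stated preconditions; the COUNTER record and the "counters" table name are hypothetical):

    #include "hash.h"
    #include "object.h"

    typedef struct counter
    {
        OBJECT * name;   /* key - must be the first member of the record */
        int hits;
    } COUNTER;

    static void count_word( struct hash * table, char const * word )
    {
        int found;
        OBJECT * key = object_new( word );
        COUNTER * c = (COUNTER *)hash_insert( table, key, &found );
        if ( !found )
        {
            c->name = key;        /* new record: it takes ownership of the key */
            c->hits = 0;
        }
        else
            object_free( key );   /* existing record already holds an equal key */
        ++c->hits;
    }

    /* ... struct hash * table = hashinit( sizeof( COUNTER ), "counters" ); ... */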
diff --git a/src/boost/tools/build/src/engine/hcache.cpp b/src/boost/tools/build/src/engine/hcache.cpp
new file mode 100644
index 000000000..15ab12a35
--- /dev/null
+++ b/src/boost/tools/build/src/engine/hcache.cpp
@@ -0,0 +1,522 @@
+/*
+ * This file has been donated to Jam.
+ */
+
+/*
+ * Craig W. McPheeters, Alias|Wavefront.
+ *
+ * hcache.c hcache.h - handle caching of #includes in source files.
+ *
+ * Create a cache of files scanned for headers. When starting jam, look for the
+ * cache file and load it if present. When the binding phase is finished, create
+ * a new header cache. The cache contains files, their timestamps and the header
+ * files found in their scan. During the binding phase of jam, look in the
+ * header cache first for the headers contained in a file. If the cache is
+ * present and valid, use its contents. This results in dramatic speedups with
+ * large projects (e.g. 3min -> 1min startup for one project.)
+ *
+ * External routines:
+ * hcache_init() - read and parse the local .jamdeps file.
+ * hcache_done() - write a new .jamdeps file.
+ * hcache() - return list of headers on target. Use cache or do a scan.
+ *
+ * The dependency file format is an ASCII file with 1 line per target. Each line
+ * has the following fields:
+ * @boundname@ timestamp_sec timestamp_nsec @file@ @file@ @file@ ...
+ */
+
+#include "config.h"
+
+#ifdef OPT_HEADER_CACHE_EXT
+
+#include "jam.h"
+#include "hcache.h"
+
+#include "hash.h"
+#include "headers.h"
+#include "lists.h"
+#include "modules.h"
+#include "object.h"
+#include "parse.h"
+#include "regexp.h"
+#include "rules.h"
+#include "search.h"
+#include "timestamp.h"
+#include "variable.h"
+#include "output.h"
+
+typedef struct hcachedata HCACHEDATA ;
+
+struct hcachedata
+{
+ OBJECT * boundname;
+ timestamp time;
+ LIST * includes;
+ LIST * hdrscan; /* the HDRSCAN value for this target */
+ int age; /* if too old, we will remove it from cache */
+ HCACHEDATA * next;
+};
+
+
+static struct hash * hcachehash = 0;
+static HCACHEDATA * hcachelist = 0;
+
+static int queries = 0;
+static int hits = 0;
+
+#define CACHE_FILE_VERSION "version 5"
+#define CACHE_RECORD_HEADER "header"
+#define CACHE_RECORD_END "end"
+
+
+/*
+ * Return the name of the header cache file. May return NULL.
+ *
+ * The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache
+ * the result so the user cannot change the cache file during header scanning.
+ */
+
+static const char * cache_name( void )
+{
+ static OBJECT * name = 0;
+ if ( !name )
+ {
+ LIST * const hcachevar = var_get( root_module(), constant_HCACHEFILE );
+
+ if ( !list_empty( hcachevar ) )
+ {
+ TARGET * const t = bindtarget( list_front( hcachevar ) );
+
+ pushsettings( root_module(), t->settings );
+ /* Do not expect the cache file to be generated, so pass 0 as the
+ * third argument to search. Expect the location to be specified via
+ * LOCATE, so pass 0 as the fourth argument.
+ */
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( root_module(), t->settings );
+
+ name = object_copy( t->boundname );
+ }
+ }
+ return name ? object_str( name ) : 0;
+}
+
+
+/*
+ * Return the maximum age a cache entry can have before it is purged from the
+ * cache.
+ */
+
+static int cache_maxage( void )
+{
+ int age = 100;
+ LIST * const var = var_get( root_module(), constant_HCACHEMAXAGE );
+ if ( !list_empty( var ) )
+ {
+ age = atoi( object_str( list_front( var ) ) );
+ if ( age < 0 )
+ age = 0;
+ }
+ return age;
+}
+
+
+/*
+ * Read a netstring. The caveat is that the string cannot contain ASCII 0. The
+ * returned value is as returned by object_new().
+ */
+
+OBJECT * read_netstring( FILE * f )
+{
+ unsigned long len;
+ static char * buf = NULL;
+ static unsigned long buf_len = 0;
+
+ if ( fscanf( f, " %9lu", &len ) != 1 )
+ return NULL;
+ if ( fgetc( f ) != (int)'\t' )
+ return NULL;
+
+ if ( len > 1024 * 64 )
+ return NULL; /* sanity check */
+
+ if ( len > buf_len )
+ {
+ unsigned long new_len = buf_len * 2;
+ if ( new_len < len )
+ new_len = len;
+ buf = (char *)BJAM_REALLOC( buf, new_len + 1 );
+ if ( buf )
+ buf_len = new_len;
+ }
+
+ if ( !buf )
+ return NULL;
+
+ if ( fread( buf, 1, len, f ) != len )
+ return NULL;
+ if ( fgetc( f ) != (int)'\n' )
+ return NULL;
+
+ buf[ len ] = 0;
+ return object_new( buf );
+}
+
+
+/*
+ * Write a netstring.
+ */
+
+void write_netstring( FILE * f, char const * s )
+{
+ if ( !s )
+ s = "";
+ fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
+}
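A minimal round-trip sketch for the two helpers above (an illustration, not part of the patch; it relies only on the includes already present in this file plus C's standard tmpfile()). With CACHE_FILE_VERSION defined as "version 5", write_netstring() emits the bytes "9\tversion 5\n" and read_netstring() recovers the same string:

    static void netstring_roundtrip_demo( void )
    {
        FILE * f = tmpfile();
        if ( !f ) return;
        write_netstring( f, CACHE_FILE_VERSION );   /* writes "9\tversion 5\n" */
        rewind( f );
        {
            OBJECT * v = read_netstring( f );       /* recovers "version 5" */
            if ( v )
            {
                out_printf( "round-trip: %s\n", object_str( v ) );
                object_free( v );
            }
        }
        fclose( f );
    }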
+
+
+void hcache_init()
+{
+ FILE * f;
+ OBJECT * version = 0;
+ int header_count = 0;
+ const char * hcachename;
+
+ if ( hcachehash )
+ return;
+
+ hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" );
+
+ if ( !( hcachename = cache_name() ) )
+ return;
+
+ if ( !( f = fopen( hcachename, "rb" ) ) )
+ return;
+
+ version = read_netstring( f );
+
+ if ( !version || strcmp( object_str( version ), CACHE_FILE_VERSION ) )
+ goto bail;
+
+ while ( 1 )
+ {
+ HCACHEDATA cachedata;
+ HCACHEDATA * c;
+ OBJECT * record_type = 0;
+ OBJECT * time_secs_str = 0;
+ OBJECT * time_nsecs_str = 0;
+ OBJECT * age_str = 0;
+ OBJECT * includes_count_str = 0;
+ OBJECT * hdrscan_count_str = 0;
+ int i;
+ int count;
+ LIST * l;
+ int found;
+
+ cachedata.boundname = 0;
+ cachedata.includes = 0;
+ cachedata.hdrscan = 0;
+
+ record_type = read_netstring( f );
+ if ( !record_type )
+ {
+ err_printf( "invalid %s\n", hcachename );
+ goto cleanup;
+ }
+ if ( !strcmp( object_str( record_type ), CACHE_RECORD_END ) )
+ {
+ object_free( record_type );
+ break;
+ }
+ if ( strcmp( object_str( record_type ), CACHE_RECORD_HEADER ) )
+ {
+ err_printf( "invalid %s with record separator <%s>\n",
+ hcachename, record_type ? object_str( record_type ) : "<null>" );
+ goto cleanup;
+ }
+
+ cachedata.boundname = read_netstring( f );
+ time_secs_str = read_netstring( f );
+ time_nsecs_str = read_netstring( f );
+ age_str = read_netstring( f );
+ includes_count_str = read_netstring( f );
+
+ if ( !cachedata.boundname || !time_secs_str || !time_nsecs_str ||
+ !age_str || !includes_count_str )
+ {
+ err_printf( "invalid %s\n", hcachename );
+ goto cleanup;
+ }
+
+ timestamp_init( &cachedata.time, atoi( object_str( time_secs_str ) ),
+ atoi( object_str( time_nsecs_str ) ) );
+ cachedata.age = atoi( object_str( age_str ) ) + 1;
+
+ count = atoi( object_str( includes_count_str ) );
+ for ( l = L0, i = 0; i < count; ++i )
+ {
+ OBJECT * const s = read_netstring( f );
+ if ( !s )
+ {
+ err_printf( "invalid %s\n", hcachename );
+ list_free( l );
+ goto cleanup;
+ }
+ l = list_push_back( l, s );
+ }
+ cachedata.includes = l;
+
+ hdrscan_count_str = read_netstring( f );
+ if ( !hdrscan_count_str )
+ {
+ err_printf( "invalid %s\n", hcachename );
+ goto cleanup;
+ }
+
+ count = atoi( object_str( hdrscan_count_str ) );
+ for ( l = L0, i = 0; i < count; ++i )
+ {
+ OBJECT * const s = read_netstring( f );
+ if ( !s )
+ {
+ err_printf( "invalid %s\n", hcachename );
+ list_free( l );
+ goto cleanup;
+ }
+ l = list_push_back( l, s );
+ }
+ cachedata.hdrscan = l;
+
+ c = (HCACHEDATA *)hash_insert( hcachehash, cachedata.boundname, &found )
+ ;
+ if ( !found )
+ {
+ c->boundname = cachedata.boundname;
+ c->includes = cachedata.includes;
+ c->hdrscan = cachedata.hdrscan;
+ c->age = cachedata.age;
+ timestamp_copy( &c->time, &cachedata.time );
+ }
+ else
+ {
+ err_printf( "can not insert header cache item, bailing on %s"
+ "\n", hcachename );
+ goto cleanup;
+ }
+
+ c->next = hcachelist;
+ hcachelist = c;
+
+ ++header_count;
+
+ object_free( record_type );
+ object_free( time_secs_str );
+ object_free( time_nsecs_str );
+ object_free( age_str );
+ object_free( includes_count_str );
+ object_free( hdrscan_count_str );
+ continue;
+
+cleanup:
+
+ if ( record_type ) object_free( record_type );
+ if ( time_secs_str ) object_free( time_secs_str );
+ if ( time_nsecs_str ) object_free( time_nsecs_str );
+ if ( age_str ) object_free( age_str );
+ if ( includes_count_str ) object_free( includes_count_str );
+ if ( hdrscan_count_str ) object_free( hdrscan_count_str );
+
+ if ( cachedata.boundname ) object_free( cachedata.boundname );
+ if ( cachedata.includes ) list_free( cachedata.includes );
+ if ( cachedata.hdrscan ) list_free( cachedata.hdrscan );
+
+ goto bail;
+ }
+
+ if ( DEBUG_HEADER )
+ out_printf( "hcache read from file %s\n", hcachename );
+
+bail:
+ if ( version )
+ object_free( version );
+ fclose( f );
+}
+
+
+void hcache_done()
+{
+ FILE * f;
+ HCACHEDATA * c;
+ int header_count = 0;
+ const char * hcachename;
+ int maxage;
+
+ if ( !hcachehash )
+ return;
+
+ if ( !( hcachename = cache_name() ) )
+ goto cleanup;
+
+ if ( !( f = fopen( hcachename, "wb" ) ) )
+ goto cleanup;
+
+ maxage = cache_maxage();
+
+ /* Print out the version. */
+ write_netstring( f, CACHE_FILE_VERSION );
+
+ c = hcachelist;
+ for ( c = hcachelist; c; c = c->next )
+ {
+ LISTITER iter;
+ LISTITER end;
+ char time_secs_str[ 30 ];
+ char time_nsecs_str[ 30 ];
+ char age_str[ 30 ];
+ char includes_count_str[ 30 ];
+ char hdrscan_count_str[ 30 ];
+
+ if ( maxage == 0 )
+ c->age = 0;
+ else if ( c->age > maxage )
+ continue;
+
+ sprintf( includes_count_str, "%lu", (long unsigned)list_length(
+ c->includes ) );
+ sprintf( hdrscan_count_str, "%lu", (long unsigned)list_length(
+ c->hdrscan ) );
+ sprintf( time_secs_str, "%lu", (long unsigned)c->time.secs );
+ sprintf( time_nsecs_str, "%lu", (long unsigned)c->time.nsecs );
+ sprintf( age_str, "%lu", (long unsigned)c->age );
+
+ write_netstring( f, CACHE_RECORD_HEADER );
+ write_netstring( f, object_str( c->boundname ) );
+ write_netstring( f, time_secs_str );
+ write_netstring( f, time_nsecs_str );
+ write_netstring( f, age_str );
+ write_netstring( f, includes_count_str );
+ for ( iter = list_begin( c->includes ), end = list_end( c->includes );
+ iter != end; iter = list_next( iter ) )
+ write_netstring( f, object_str( list_item( iter ) ) );
+ write_netstring( f, hdrscan_count_str );
+ for ( iter = list_begin( c->hdrscan ), end = list_end( c->hdrscan );
+ iter != end; iter = list_next( iter ) )
+ write_netstring( f, object_str( list_item( iter ) ) );
+ fputs( "\n", f );
+ ++header_count;
+ }
+ write_netstring( f, CACHE_RECORD_END );
+
+ if ( DEBUG_HEADER )
+ out_printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n",
+ hcachename, header_count, queries ? 100.0 * hits / queries : 0 );
+
+ fclose ( f );
+
+cleanup:
+ for ( c = hcachelist; c; c = c->next )
+ {
+ list_free( c->includes );
+ list_free( c->hdrscan );
+ object_free( c->boundname );
+ }
+
+ hcachelist = 0;
+ if ( hcachehash )
+ hashdone( hcachehash );
+ hcachehash = 0;
+}
+
+
+LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan )
+{
+ HCACHEDATA * c;
+
+ ++queries;
+
+ if ( ( c = (HCACHEDATA *)hash_find( hcachehash, t->boundname ) ) )
+ {
+ if ( !timestamp_cmp( &c->time, &t->time ) )
+ {
+ LIST * const l1 = hdrscan;
+ LIST * const l2 = c->hdrscan;
+ LISTITER iter1 = list_begin( l1 );
+ LISTITER const end1 = list_end( l1 );
+ LISTITER iter2 = list_begin( l2 );
+ LISTITER const end2 = list_end( l2 );
+ while ( iter1 != end1 && iter2 != end2 )
+ {
+ if ( !object_equal( list_item( iter1 ), list_item( iter2 ) ) )
+ iter1 = end1;
+ else
+ {
+ iter1 = list_next( iter1 );
+ iter2 = list_next( iter2 );
+ }
+ }
+ if ( iter1 != end1 || iter2 != end2 )
+ {
+ if ( DEBUG_HEADER )
+ {
+ out_printf( "HDRSCAN out of date in cache for %s\n",
+ object_str( t->boundname ) );
+ out_printf(" real : ");
+ list_print( hdrscan );
+ out_printf( "\n cached: " );
+ list_print( c->hdrscan );
+ out_printf( "\n" );
+ }
+
+ list_free( c->includes );
+ list_free( c->hdrscan );
+ c->includes = L0;
+ c->hdrscan = L0;
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ out_printf( "using header cache for %s\n", object_str(
+ t->boundname ) );
+ c->age = 0;
+ ++hits;
+ return list_copy( c->includes );
+ }
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ out_printf ("header cache out of date for %s\n", object_str(
+ t->boundname ) );
+ list_free( c->includes );
+ list_free( c->hdrscan );
+ c->includes = L0;
+ c->hdrscan = L0;
+ }
+ }
+ else
+ {
+ int found;
+ c = (HCACHEDATA *)hash_insert( hcachehash, t->boundname, &found );
+ if ( !found )
+ {
+ c->boundname = object_copy( t->boundname );
+ c->next = hcachelist;
+ hcachelist = c;
+ }
+ }
+
+    /* 'c' points at the cache entry. It is out of date. */
+ {
+ LIST * const l = headers1( L0, t->boundname, rec, re );
+
+ timestamp_copy( &c->time, &t->time );
+ c->age = 0;
+ c->includes = list_copy( l );
+ c->hdrscan = list_copy( hdrscan );
+
+ return l;
+ }
+}
+
+#endif /* OPT_HEADER_CACHE_EXT */
diff --git a/src/boost/tools/build/src/engine/hcache.h b/src/boost/tools/build/src/engine/hcache.h
new file mode 100644
index 000000000..95267c267
--- /dev/null
+++ b/src/boost/tools/build/src/engine/hcache.h
@@ -0,0 +1,20 @@
+/*
+ * This file is not part of Jam
+ */
+
+/*
+ * hcache.h - handle #includes in source files
+ */
+#ifndef HCACHE_H
+#define HCACHE_H
+
+#include "config.h"
+#include "lists.h"
+#include "regexp.h"
+#include "rules.h"
+
+void hcache_init( void );
+void hcache_done( void );
+LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/hdrmacro.cpp b/src/boost/tools/build/src/engine/hdrmacro.cpp
new file mode 100644
index 000000000..9b6507ec6
--- /dev/null
+++ b/src/boost/tools/build/src/engine/hdrmacro.cpp
@@ -0,0 +1,140 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * hdrmacro.c - handle header files that define macros used in #include
+ * statements.
+ *
+ * we look for lines like "#define MACRO <....>" or '#define MACRO " "' in
+ * the target file. When found, we then phony up a rule invocation like:
+ *
+ * $(HDRRULE) <target> : <resolved included files> ;
+ *
+ * External routines:
+ *   macro_headers() - scan a target for "#define MACRO <filename>" lines and
+ *                     record the resulting macro-to-filename mapping
+ *   macro_header_get() - return the filename recorded for a given macro, or 0
+ *                        if the macro is unknown
+ */
+
+#include "jam.h"
+#include "hdrmacro.h"
+
+#include "compile.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "jam_strings.h"
+#include "subst.h"
+#include "variable.h"
+#include "output.h"
+
+
+/* this type is used to store a dictionary of file header macros */
+typedef struct header_macro
+{
+ OBJECT * symbol;
+ OBJECT * filename; /* we could maybe use a LIST here ?? */
+} HEADER_MACRO;
+
+static struct hash * header_macros_hash = 0;
+
+
+/*
+ * headers() - scan a target for include files and call HDRRULE
+ */
+
+#define MAXINC 10
+
+void macro_headers( TARGET * t )
+{
+ static regexp * re = 0;
+ FILE * f;
+ char buf[ 1024 ];
+
+ if ( DEBUG_HEADER )
+ out_printf( "macro header scan for %s\n", object_str( t->name ) );
+
+ /* This regexp is used to detect lines of the form
+     * "#define MACRO <....>" or '#define MACRO "....."'
+ * in the header macro files.
+ */
+ if ( !re )
+ {
+ OBJECT * const re_str = object_new(
+ "^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*"
+ "[<\"]([^\">]*)[\">].*$" );
+ re = regex_compile( re_str );
+ object_free( re_str );
+ }
+
+ if ( !( f = fopen( object_str( t->boundname ), "r" ) ) )
+ return;
+
+ while ( fgets( buf, sizeof( buf ), f ) )
+ {
+ HEADER_MACRO var;
+ HEADER_MACRO * v = &var;
+
+ if ( regexec( re, buf ) && re->startp[ 1 ] )
+ {
+ OBJECT * symbol;
+ int found;
+            /* We detected a line that looks like "#define MACRO filename". */
+ ( (char *)re->endp[ 1 ] )[ 0 ] = '\0';
+ ( (char *)re->endp[ 2 ] )[ 0 ] = '\0';
+
+ if ( DEBUG_HEADER )
+ out_printf( "macro '%s' used to define filename '%s' in '%s'\n",
+ re->startp[ 1 ], re->startp[ 2 ], object_str( t->boundname )
+ );
+
+ /* add macro definition to hash table */
+ if ( !header_macros_hash )
+ header_macros_hash = hashinit( sizeof( HEADER_MACRO ),
+ "hdrmacros" );
+
+ symbol = object_new( re->startp[ 1 ] );
+ v = (HEADER_MACRO *)hash_insert( header_macros_hash, symbol, &found
+ );
+ if ( !found )
+ {
+ v->symbol = symbol;
+ v->filename = object_new( re->startp[ 2 ] ); /* never freed */
+ }
+ else
+ object_free( symbol );
+ /* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */
+ /* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */
+ }
+ }
+
+ fclose( f );
+}
+
+
+OBJECT * macro_header_get( OBJECT * macro_name )
+{
+ HEADER_MACRO * v;
+ if ( header_macros_hash && ( v = (HEADER_MACRO *)hash_find(
+ header_macros_hash, macro_name ) ) )
+ {
+ if ( DEBUG_HEADER )
+ out_printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name
+ ), object_str( v->filename ) );
+ return v->filename;
+ }
+ return 0;
+}
diff --git a/src/boost/tools/build/src/engine/hdrmacro.h b/src/boost/tools/build/src/engine/hdrmacro.h
new file mode 100644
index 000000000..1489aef9c
--- /dev/null
+++ b/src/boost/tools/build/src/engine/hdrmacro.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hdrmacro.h - parses header files for #define MACRO <filename> or
+ * #define MACRO "filename" definitions
+ */
+
+#ifndef HDRMACRO_SW20111118_H
+#define HDRMACRO_SW20111118_H
+
+#include "config.h"
+#include "object.h"
+#include "rules.h"
+
+void macro_headers( TARGET * );
+OBJECT * macro_header_get( OBJECT * macro_name );
+
+#endif
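
The two routines declared above implement a two-step scheme: macro_headers() records "#define MACRO <file.h>" style definitions, and macro_header_get() later resolves "#include MACRO" lines against that dictionary. A self-contained sketch of the same idea, using std::regex and std::map in place of the engine's regexp module and hash table (both substitutions are assumptions made purely for illustration):

#include <fstream>
#include <map>
#include <regex>
#include <string>

/* macro name -> header file name, filled from "#define MACRO <file>" lines */
static std::map<std::string, std::string> header_macros;

void record_macros( std::string const & path )
{
    static std::regex const def(
        "^[ \t]*#[ \t]*define[ \t]+([A-Za-z_][A-Za-z0-9_]*)[ \t]*[<\"]([^\">]*)[\">]" );
    std::ifstream in( path );
    std::string line;
    std::smatch m;
    while ( std::getline( in, line ) )
        if ( std::regex_search( line, m, def ) )
            header_macros.emplace( m[ 1 ].str(), m[ 2 ].str() );  /* first definition wins */
}

/* Resolve "#include MACRO"; returns an empty string for unknown macros. */
std::string resolve_macro( std::string const & macro )
{
    auto const it = header_macros.find( macro );
    return it == header_macros.end() ? std::string() : it->second;
}
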
diff --git a/src/boost/tools/build/src/engine/headers.cpp b/src/boost/tools/build/src/engine/headers.cpp
new file mode 100644
index 000000000..e653abcfa
--- /dev/null
+++ b/src/boost/tools/build/src/engine/headers.cpp
@@ -0,0 +1,198 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * headers.c - handle #includes in source files
+ *
+ * Using regular expressions provided as the variable $(HDRSCAN), headers()
+ * searches a file for #include files and phonies up a rule invocation:
+ * $(HDRRULE) <target> : <include files> ;
+ *
+ * External routines:
+ * headers() - scan a target for include files and call HDRRULE
+ *
+ * Internal routines:
+ * headers1() - using regexp, scan a file and build include LIST
+ */
+
+#include "jam.h"
+#include "headers.h"
+
+#include "compile.h"
+#include "hdrmacro.h"
+#include "lists.h"
+#include "modules.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "subst.h"
+#include "variable.h"
+#include "output.h"
+
+#ifdef OPT_HEADER_CACHE_EXT
+# include "hcache.h"
+#endif
+
+#ifndef OPT_HEADER_CACHE_EXT
+static LIST * headers1( LIST *, OBJECT * file, int rec, regexp * re[] );
+#endif
+
+
+/*
+ * headers() - scan a target for include files and call HDRRULE
+ */
+
+#define MAXINC 10
+
+void headers( TARGET * t )
+{
+ LIST * hdrscan;
+ LIST * hdrrule;
+ #ifndef OPT_HEADER_CACHE_EXT
+ LIST * headlist = L0;
+ #endif
+ regexp * re[ MAXINC ];
+ int rec = 0;
+ LISTITER iter;
+ LISTITER end;
+
+ hdrscan = var_get( root_module(), constant_HDRSCAN );
+ if ( list_empty( hdrscan ) )
+ return;
+
+ hdrrule = var_get( root_module(), constant_HDRRULE );
+ if ( list_empty( hdrrule ) )
+ return;
+
+ if ( DEBUG_HEADER )
+ out_printf( "header scan %s\n", object_str( t->name ) );
+
+ /* Compile all regular expressions in HDRSCAN */
+ iter = list_begin( hdrscan );
+ end = list_end( hdrscan );
+ for ( ; ( rec < MAXINC ) && iter != end; iter = list_next( iter ) )
+ {
+ re[ rec++ ] = regex_compile( list_item( iter ) );
+ }
+
+ /* Doctor up call to HDRRULE rule */
+ /* Call headers1() to get LIST of included files. */
+ {
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ lol_add( frame->args, list_new( object_copy( t->name ) ) );
+#ifdef OPT_HEADER_CACHE_EXT
+ lol_add( frame->args, hcache( t, rec, re, hdrscan ) );
+#else
+ lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) );
+#endif
+
+ if ( lol_get( frame->args, 1 ) )
+ {
+ OBJECT * rulename = list_front( hdrrule );
+ /* The third argument to HDRRULE is the bound name of $(<). */
+ lol_add( frame->args, list_new( object_copy( t->boundname ) ) );
+ list_free( evaluate_rule( bindrule( rulename, frame->module ), rulename, frame ) );
+ }
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * headers1() - using regexp, scan a file and build include LIST.
+ */
+
+#ifndef OPT_HEADER_CACHE_EXT
+static
+#endif
+LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] )
+{
+ FILE * f;
+ char buf[ 1024 ];
+ int i;
+ static regexp * re_macros = 0;
+
+#ifdef OPT_IMPROVED_PATIENCE_EXT
+ static int count = 0;
+ ++count;
+ if ( ( ( count == 100 ) || !( count % 1000 ) ) && DEBUG_MAKE )
+ {
+ out_printf( "...patience...\n" );
+ out_flush();
+ }
+#endif
+
+ /* The following regexp is used to detect cases where a file is included
+ * through a line like "#include MACRO".
+ */
+ if ( re_macros == 0 )
+ {
+ OBJECT * const re_str = object_new(
+ "#[ \t]*include[ \t]*([A-Za-z][A-Za-z0-9_]*).*$" );
+ re_macros = regex_compile( re_str );
+ object_free( re_str );
+ }
+
+ if ( !( f = fopen( object_str( file ), "r" ) ) )
+ return l;
+
+ while ( fgets( buf, sizeof( buf ), f ) )
+ {
+ for ( i = 0; i < rec; ++i )
+ if ( regexec( re[ i ], buf ) && re[ i ]->startp[ 1 ] )
+ {
+ ( (char *)re[ i ]->endp[ 1 ] )[ 0 ] = '\0';
+ if ( DEBUG_HEADER )
+ out_printf( "header found: %s\n", re[ i ]->startp[ 1 ] );
+ l = list_push_back( l, object_new( re[ i ]->startp[ 1 ] ) );
+ }
+
+ /* Special treatment for #include MACRO. */
+ if ( regexec( re_macros, buf ) && re_macros->startp[ 1 ] )
+ {
+ OBJECT * header_filename;
+ OBJECT * macro_name;
+
+ ( (char *)re_macros->endp[ 1 ] )[ 0 ] = '\0';
+
+ if ( DEBUG_HEADER )
+ out_printf( "macro header found: %s", re_macros->startp[ 1 ] );
+
+ macro_name = object_new( re_macros->startp[ 1 ] );
+ header_filename = macro_header_get( macro_name );
+ object_free( macro_name );
+ if ( header_filename )
+ {
+ if ( DEBUG_HEADER )
+ out_printf( " resolved to '%s'\n", object_str( header_filename )
+ );
+ l = list_push_back( l, object_copy( header_filename ) );
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ out_printf( " ignored !!\n" );
+ }
+ }
+ }
+
+ fclose( f );
+ return l;
+}
+
+
+void regerror( char const * s )
+{
+ out_printf( "re error %s\n", s );
+}
diff --git a/src/boost/tools/build/src/engine/headers.h b/src/boost/tools/build/src/engine/headers.h
new file mode 100644
index 000000000..a875c2d87
--- /dev/null
+++ b/src/boost/tools/build/src/engine/headers.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * headers.h - handle #includes in source files
+ */
+
+#ifndef HEADERS_SW20111118_H
+#define HEADERS_SW20111118_H
+
+#include "config.h"
+#include "object.h"
+#include "rules.h"
+#include "regexp.h"
+
+void headers( TARGET * t );
+
+#ifdef OPT_HEADER_CACHE_EXT
+struct regexp;
+LIST * headers1( LIST *l, OBJECT * file, int rec, struct regexp *re[] );
+#endif
+
+#endif
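
headers.cpp above drives the scan from the HDRSCAN and HDRRULE variables; the core of headers1() is simply "apply each compiled pattern to each line and collect capture group 1". A self-contained C++ sketch of that loop, with std::regex standing in for the engine's regexp module (an assumption; the pattern mentioned in the trailing comment mirrors a typical HDRSCAN value rather than anything mandated by the engine):

#include <fstream>
#include <regex>
#include <string>
#include <vector>

/* Scan 'path' line by line against a set of patterns (the $(HDRSCAN) idea)
 * and collect the first capture group of every match, i.e. the include names
 * that the engine would then hand to the $(HDRRULE) rule. */
std::vector<std::string> scan_headers( std::string const & path,
    std::vector<std::regex> const & patterns )
{
    std::vector<std::string> found;
    std::ifstream in( path );
    std::string line;
    std::smatch m;
    while ( std::getline( in, line ) )
        for ( std::regex const & re : patterns )
            if ( std::regex_search( line, m, re ) && m.size() > 1 )
                found.push_back( m[ 1 ].str() );
    return found;
}

/* A typical pattern for C sources captures the file name from lines of the
 * form #include "x.h" or #include <x.h>, e.g.
 *   std::regex( "^[ \t]*#[ \t]*include[ \t]*[<\"]([^\">]*)[\">]" )
 */
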
diff --git a/src/boost/tools/build/src/engine/jam.cpp b/src/boost/tools/build/src/engine/jam.cpp
new file mode 100644
index 000000000..eab9bae2d
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jam.cpp
@@ -0,0 +1,794 @@
+/*
+ * /+\
+ * +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ * \+/
+ *
+ * This file is part of jam.
+ *
+ * License is hereby granted to use this software and distribute it freely, as
+ * long as this copyright notice is retained and modifications are clearly
+ * marked.
+ *
+ * ALL WARRANTIES ARE HEREBY DISCLAIMED.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2018 Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jam.c - make redux
+ *
+ * See Jam.html for usage information.
+ *
+ * These comments document the code.
+ *
+ * The top half of the code is structured such:
+ *
+ * jam
+ * / | \
+ * +---+ | \
+ * / | \
+ * jamgram option \
+ * / | \ \
+ * / | \ \
+ * / | \ |
+ * scan | compile make
+ * | | / | \ / | \
+ * | | / | \ / | \
+ * | | / | \ / | \
+ * jambase parse | rules search make1
+ * | | | \
+ * | | | \
+ * | | | \
+ * builtins timestamp command execute
+ * |
+ * |
+ * |
+ * filesys
+ *
+ *
+ * The support routines are called by all of the above, but themselves are
+ * layered thus:
+ *
+ * variable|expand
+ * / | |
+ * / | |
+ * / | |
+ * lists | pathsys
+ * \ |
+ * \ hash
+ * \ |
+ * \ |
+ * \ |
+ * \ |
+ * \ |
+ * object
+ *
+ * Roughly, the modules are:
+ *
+ * builtins.c - jam's built-in rules
+ * command.c - maintain lists of commands
+ * compile.c - compile parsed jam statements
+ * exec*.c - execute a shell script on a specific OS
+ * file*.c - scan directories and archives on a specific OS
+ * hash.c - simple in-memory hashing routines
+ * hdrmacro.c - handle header file parsing for filename macro definitions
+ * headers.c - handle #includes in source files
+ * jambase.c - compilable copy of Jambase
+ * jamgram.y - jam grammar
+ * lists.c - maintain lists of strings
+ * make.c - bring a target up to date, once rules are in place
+ * make1.c - execute command to bring targets up to date
+ * object.c - string manipulation routines
+ * option.c - command line option processing
+ * parse.c - make and destroy parse trees as driven by the parser
+ * path*.c - manipulate file names on a specific OS
+ * regexp.c - Henry Spencer's regexp
+ * rules.c - access to RULEs, TARGETs, and ACTIONs
+ * scan.c - the jam yacc scanner
+ * search.c - find a target along $(SEARCH) or $(LOCATE)
+ * timestamp.c - get the timestamp of a file or archive member
+ * variable.c - handle jam multi-element variables
+ */
+
+
+#include "jam.h"
+
+#include "patchlevel.h"
+
+/* Keep JAMVERSYM in sync with VERSION. */
+/* It can be accessed as $(JAMVERSION) in the Jamfile. */
+#define JAM_STRINGIZE(X) JAM_DO_STRINGIZE(X)
+#define JAM_DO_STRINGIZE(X) #X
+#define VERSION_MAJOR_SYM JAM_STRINGIZE(VERSION_MAJOR)
+#define VERSION_MINOR_SYM JAM_STRINGIZE(VERSION_MINOR)
+#define VERSION_PATCH_SYM JAM_STRINGIZE(VERSION_PATCH)
+#define VERSION VERSION_MAJOR_SYM "." VERSION_MINOR_SYM
+#define JAMVERSYM "JAMVERSION=" VERSION
+
+#include "builtins.h"
+#include "class.h"
+#include "compile.h"
+#include "constants.h"
+#include "debugger.h"
+#include "filesys.h"
+#include "function.h"
+#include "hcache.h"
+#include "lists.h"
+#include "make.h"
+#include "object.h"
+#include "option.h"
+#include "output.h"
+#include "parse.h"
+#include "cwd.h"
+#include "rules.h"
+#include "scan.h"
+#include "search.h"
+#include "jam_strings.h"
+#include "timestamp.h"
+#include "variable.h"
+#include "execcmd.h"
+#include "sysinfo.h"
+
+/* Macintosh is "special" */
+#ifdef OS_MAC
+# include <QuickDraw.h>
+#endif
+
+/* And UNIX for this. */
+#ifdef unix
+# include <sys/utsname.h>
+# include <signal.h>
+#endif
+
+struct globs globs =
+{
+ 0, /* noexec */
+ 1, /* jobs */
+ 0, /* quitquick */
+ 0, /* newestfirst */
+ 0, /* pipes action stdout and stderr merged to action output */
+#ifdef OS_MAC
+ { 0, 0 }, /* debug - suppress tracing output */
+#else
+ { 0, 1 }, /* debug ... */
+#endif
+ 0, /* output commands, not run them */
+ 0, /* action timeout */
+ 0 /* maximum buffer size zero is all output */
+};
+
+/* Symbols to be defined as true for use in Jambase. */
+static const char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 };
+
+
+/* Known for sure:
+ * mac needs arg_environ
+ * OS2 needs extern environ
+ */
+
+#ifdef OS_MAC
+# define use_environ arg_environ
+# ifdef MPW
+ QDGlobals qd;
+# endif
+#endif
+
+
+#ifdef OS_VMS
+# define use_environ arg_environ
+#endif
+
+
+/* on Win32-LCC */
+#if defined( OS_NT ) && defined( __LCC__ )
+# define use_environ _environ
+#endif
+
+#if defined( __MWERKS__)
+# define use_environ _environ
+ extern char * * _environ;
+#endif
+
+#ifndef use_environ
+# define use_environ environ
+# if !defined( __WATCOM__ ) && !defined( OS_OS2 ) && !defined( OS_NT )
+ extern char **environ;
+# endif
+#endif
+
+#if YYDEBUG != 0
+ extern int yydebug;
+#endif
+
+#ifndef NDEBUG
+static void run_unit_tests()
+{
+# if defined( USE_EXECNT )
+ extern void execnt_unit_test();
+ execnt_unit_test();
+# endif
+ string_unit_test();
+}
+#endif
+
+int anyhow = 0;
+
+#ifdef HAVE_PYTHON
+ extern PyObject * bjam_call ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_import_rule ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_define_action( PyObject * self, PyObject * args );
+ extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
+    int python_optimize = 1; /* Set Python optimization on by default */
+#endif
+
+void regex_done();
+
+char const * saved_argv0;
+
+static void usage( const char * progname )
+{
+ err_printf("\nusage: %s [ options ] targets...\n\n", progname);
+
+ err_printf("-a Build all targets, even if they are current.\n");
+ err_printf("-dx Set the debug level to x (0-13,console,mi).\n");
+ err_printf("-fx Read x instead of Jambase.\n");
+ /* err_printf( "-g Build from newest sources first.\n" ); */
+ err_printf("-jx Run up to x shell commands concurrently.\n");
+ err_printf("-lx Limit actions to x number of seconds after which they are stopped.\n");
+ err_printf("-mx Maximum target output saved (kb), default is to save all output.\n");
+ err_printf("-n Don't actually execute the updating actions.\n");
+ err_printf("-ox Mirror all output to file x.\n");
+ err_printf("-px x=0, pipes action stdout and stderr merged into action output.\n");
+ err_printf("-q Quit quickly as soon as a target fails.\n");
+ err_printf("-sx=y Set variable x=y, overriding environment.\n");
+ err_printf("-tx Rebuild x, even if it is up-to-date.\n");
+ err_printf("-v Print the version of jam and exit.\n");
+#ifdef HAVE_PYTHON
+ err_printf("-z Disable Python Optimization and enable asserts\n");
+#endif
+ err_printf("--x Option is ignored.\n\n");
+
+ exit( EXITBAD );
+}
+
+int main( int argc, char * * argv, char * * arg_environ )
+{
+ int n;
+ char * s;
+ struct bjam_option optv[ N_OPTS ];
+ int status;
+ int arg_c = argc;
+ char * * arg_v = argv;
+ char const * progname = argv[ 0 ];
+ module_t * environ_module;
+ int is_debugger;
+ b2::system_info sys_info;
+
+ saved_argv0 = argv[ 0 ];
+ last_update_now_status = 0;
+
+ BJAM_MEM_INIT();
+
+#ifdef OS_MAC
+ InitGraf( &qd.thePort );
+#endif
+
+ cwd_init();
+ constants_init();
+
+#ifdef JAM_DEBUGGER
+
+ is_debugger = 0;
+
+ if ( getoptions( argc - 1, argv + 1, "-:l:m:d:j:p:f:gs:t:ano:qv", optv ) < 0 )
+ usage( progname );
+
+ if ( ( s = getoptval( optv, 'd', 0 ) ) )
+ {
+ if ( strcmp( s, "mi" ) == 0 )
+ {
+ debug_interface = DEBUG_INTERFACE_MI;
+ is_debugger = 1;
+ }
+ else if ( strcmp( s, "console" ) == 0 )
+ {
+ debug_interface = DEBUG_INTERFACE_CONSOLE;
+ is_debugger = 1;
+ }
+ }
+
+#if NT
+
+ if ( argc >= 3 )
+ {
+ /* Check whether this instance is being run by the debugger. */
+ size_t opt_len = strlen( debugger_opt );
+ if ( strncmp( argv[ 1 ], debugger_opt, opt_len ) == 0 &&
+ strncmp( argv[ 2 ], debugger_opt, opt_len ) == 0 )
+ {
+ debug_init_handles( argv[ 1 ] + opt_len, argv[ 2 ] + opt_len );
+ /* Fix up argc/argv to hide the internal options */
+ arg_c = argc = (argc - 2);
+ argv[ 2 ] = argv[ 0 ];
+ arg_v = argv = (argv + 2);
+ debug_interface = DEBUG_INTERFACE_CHILD;
+ }
+ }
+
+ if ( is_debugger )
+ {
+ return debugger();
+ }
+
+#else
+
+ if ( is_debugger )
+ {
+ if ( setjmp( debug_child_data.jmp ) != 0 )
+ {
+ arg_c = argc = debug_child_data.argc;
+ arg_v = argv = (char * *)debug_child_data.argv;
+ debug_interface = DEBUG_INTERFACE_CHILD;
+ }
+ else
+ {
+ return debugger();
+ }
+ }
+
+#endif
+
+#endif
+
+ --argc;
+ ++argv;
+
+ #ifdef HAVE_PYTHON
+ #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qvz"
+ #else
+ #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qv"
+ #endif
+
+ if ( getoptions( argc, argv, OPTSTRING, optv ) < 0 )
+ {
+ usage( progname );
+ }
+
+    /* Set default parallel jobs to match cpu threads. This can be overridden
+       in the usual way with -jX or the PARALLELISM env var. */
+ globs.jobs = sys_info.cpu_thread_count();
+
+ /* Version info. */
+ if ( ( s = getoptval( optv, 'v', 0 ) ) )
+ {
+ out_printf( "B2 Version %s. %s.\n", VERSION, OSMINOR );
+ out_printf( " Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.\n" );
+ out_printf( " Copyright 2001 David Turner.\n" );
+ out_printf( " Copyright 2001-2004 David Abrahams.\n" );
+ out_printf( " Copyright 2002-2019 Rene Rivera.\n" );
+ out_printf( " Copyright 2003-2015 Vladimir Prus.\n" );
+ out_printf( "\n DEFAULTS: jobs = %i\n", globs.jobs);
+ return EXITOK;
+ }
+
+ /* Pick up interesting options. */
+ if ( ( s = getoptval( optv, 'n', 0 ) ) )
+ {
+ ++globs.noexec;
+ globs.debug[ 2 ] = 1;
+ }
+
+ if ( ( s = getoptval( optv, 'p', 0 ) ) )
+ {
+ /* Undocumented -p3 (acts like both -p1 -p2) means separate pipe action
+ * stdout and stderr.
+ */
+ globs.pipe_action = atoi( s );
+ if ( globs.pipe_action < 0 || 3 < globs.pipe_action )
+ {
+ err_printf( "Invalid pipe descriptor '%d', valid values are -p[0..3]."
+ "\n", globs.pipe_action );
+ exit( EXITBAD );
+ }
+ }
+
+ if ( ( s = getoptval( optv, 'q', 0 ) ) )
+ globs.quitquick = 1;
+
+ if ( ( s = getoptval( optv, 'a', 0 ) ) )
+ anyhow++;
+
+ if ( ( s = getoptval( optv, 'j', 0 ) ) )
+ {
+ globs.jobs = atoi( s );
+ if ( globs.jobs < 1 )
+ {
+ err_printf( "Invalid value for the '-j' option.\n" );
+ exit( EXITBAD );
+ }
+ }
+
+ if ( ( s = getoptval( optv, 'g', 0 ) ) )
+ globs.newestfirst = 1;
+
+ if ( ( s = getoptval( optv, 'l', 0 ) ) )
+ globs.timeout = atoi( s );
+
+ if ( ( s = getoptval( optv, 'm', 0 ) ) )
+ globs.max_buf = atoi( s ) * 1024; /* convert to kb */
+
+ #ifdef HAVE_PYTHON
+ if ( ( s = getoptval( optv, 'z', 0 ) ) )
+ python_optimize = 0; /* disable python optimization */
+ #endif
+
+ /* Turn on/off debugging */
+ for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n )
+ {
+ int i;
+
+ /* First -d, turn off defaults. */
+ if ( !n )
+ for ( i = 0; i < DEBUG_MAX; ++i )
+ globs.debug[i] = 0;
+
+ i = atoi( s );
+
+ if ( ( i < 0 ) || ( i >= DEBUG_MAX ) )
+ {
+ out_printf( "Invalid debug level '%s'.\n", s );
+ continue;
+ }
+
+ /* n turns on levels 1-n. */
+ /* +n turns on level n. */
+ if ( *s == '+' )
+ globs.debug[ i ] = 1;
+ else while ( i )
+ globs.debug[ i-- ] = 1;
+ }
+
+ /* If an output file is specified, set globs.out to that. */
+ if ( ( s = getoptval( optv, 'o', 0 ) ) )
+ {
+ if ( !( globs.out = fopen( s, "w" ) ) )
+ {
+ err_printf( "Failed to write to '%s'\n", s );
+ exit( EXITBAD );
+ }
+ /* ++globs.noexec; */
+ }
+
+ {
+ PROFILE_ENTER( MAIN );
+
+#ifdef HAVE_PYTHON
+ {
+ PROFILE_ENTER( MAIN_PYTHON );
+ Py_OptimizeFlag = python_optimize;
+ Py_Initialize();
+ {
+ static PyMethodDef BjamMethods[] = {
+ {"call", bjam_call, METH_VARARGS,
+ "Call the specified bjam rule."},
+ {"import_rule", bjam_import_rule, METH_VARARGS,
+ "Imports Python callable to bjam."},
+ {"define_action", bjam_define_action, METH_VARARGS,
+ "Defines a command line action."},
+ {"variable", bjam_variable, METH_VARARGS,
+ "Obtains a variable from bjam's global module."},
+ {"backtrace", bjam_backtrace, METH_VARARGS,
+ "Returns bjam backtrace from the last call into Python."},
+ {"caller", bjam_caller, METH_VARARGS,
+ "Returns the module from which the last call into Python is made."},
+ {NULL, NULL, 0, NULL}
+ };
+
+ Py_InitModule( "bjam", BjamMethods );
+ }
+ PROFILE_EXIT( MAIN_PYTHON );
+ }
+#endif
+
+#ifndef NDEBUG
+ run_unit_tests();
+#endif
+#if YYDEBUG != 0
+ if ( DEBUG_PARSE )
+ yydebug = 1;
+#endif
+
+ /* Set JAMDATE. */
+ {
+ timestamp current;
+ timestamp_current( &current );
+ var_set( root_module(), constant_JAMDATE, list_new( outf_time(
+ &current ) ), VAR_SET );
+ }
+
+ /* Set JAM_VERSION. */
+ var_set( root_module(), constant_JAM_VERSION,
+ list_push_back( list_push_back( list_new(
+ object_new( VERSION_MAJOR_SYM ) ),
+ object_new( VERSION_MINOR_SYM ) ),
+ object_new( VERSION_PATCH_SYM ) ),
+ VAR_SET );
+
+ /* Set JAMUNAME. */
+#ifdef unix
+ {
+ struct utsname u;
+
+ if ( uname( &u ) >= 0 )
+ {
+ var_set( root_module(), constant_JAMUNAME,
+ list_push_back(
+ list_push_back(
+ list_push_back(
+ list_push_back(
+ list_new(
+ object_new( u.sysname ) ),
+ object_new( u.nodename ) ),
+ object_new( u.release ) ),
+ object_new( u.version ) ),
+ object_new( u.machine ) ), VAR_SET );
+ }
+ }
+#endif /* unix */
+
+ /* Set JAM_TIMESTAMP_RESOLUTION. */
+ {
+ timestamp fmt_resolution[ 1 ];
+ file_supported_fmt_resolution( fmt_resolution );
+ var_set( root_module(), constant_JAM_TIMESTAMP_RESOLUTION, list_new(
+ object_new( timestamp_timestr( fmt_resolution ) ) ), VAR_SET );
+ }
+
+ /* Load up environment variables. */
+
+ /* First into the global module, with splitting, for backward
+ * compatibility.
+ */
+ var_defines( root_module(), use_environ, 1 );
+
+ environ_module = bindmodule( constant_ENVIRON );
+ /* Then into .ENVIRON, without splitting. */
+ var_defines( environ_module, use_environ, 0 );
+
+ /*
+     * Jam-defined variables OS & OSPLAT. We load them after the environment,
+     * so that setting OS in the environment does not change Jam's notion of
+     * the current platform.
+ */
+ var_defines( root_module(), othersyms, 1 );
+
+ /* Load up variables set on command line. */
+ for ( n = 0; ( s = getoptval( optv, 's', n ) ); ++n )
+ {
+ char * symv[ 2 ];
+ symv[ 0 ] = s;
+ symv[ 1 ] = 0;
+ var_defines( root_module(), symv, 1 );
+ var_defines( environ_module, symv, 0 );
+ }
+
+ /* Set the ARGV to reflect the complete list of arguments of invocation.
+ */
+ for ( n = 0; n < arg_c; ++n )
+ var_set( root_module(), constant_ARGV, list_new( object_new(
+ arg_v[ n ] ) ), VAR_APPEND );
+
+ /* Initialize built-in rules. */
+ load_builtins();
+
+ /* Add the targets in the command line to the update list. */
+ for ( n = 1; n < arg_c; ++n )
+ {
+ if ( arg_v[ n ][ 0 ] == '-' )
+ {
+ const char * f = "-:l:d:j:f:gs:t:ano:qv";
+ for ( ; *f; ++f ) if ( *f == arg_v[ n ][ 1 ] ) break;
+ if ( f[0] && f[1] && ( f[ 1 ] == ':' ) && ( arg_v[ n ][ 2 ] == '\0' ) ) ++n;
+ }
+ else
+ {
+ OBJECT * const target = object_new( arg_v[ n ] );
+ mark_target_for_updating( target );
+ object_free( target );
+ }
+ }
+
+ /* The build system may set the PARALLELISM variable to override -j
+ * options.
+ */
+ {
+ LIST * const p = var_get( root_module(), constant_PARALLELISM );
+ if ( !list_empty( p ) )
+ {
+ int const j = atoi( object_str( list_front( p ) ) );
+ if ( j < 1 )
+ out_printf( "Invalid value of PARALLELISM: %s.\n",
+ object_str( list_front( p ) ) );
+ else
+ globs.jobs = j;
+ }
+ }
+
+ /* KEEP_GOING overrides -q option. */
+ {
+ LIST * const p = var_get( root_module(), constant_KEEP_GOING );
+ if ( !list_empty( p ) )
+ globs.quitquick = atoi( object_str( list_front( p ) ) ) ? 0 : 1;
+ }
+
+
+ if ( list_empty( targets_to_update() ) )
+ mark_target_for_updating( constant_all );
+
+ /* Parse ruleset. */
+ {
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ for ( n = 0; ( s = getoptval( optv, 'f', n ) ); ++n )
+ {
+ OBJECT * const filename = object_new( s );
+ parse_file( filename, frame );
+ object_free( filename );
+ }
+
+ if ( !n )
+ parse_file( constant_plus, frame );
+ }
+
+ /* FIXME: What shall we do if builtin_update_now,
+ * the sole place setting last_update_now_status,
+ * failed earlier?
+ */
+
+ status = yyanyerrors();
+ if ( !status )
+ {
+ /* Manually touch -t targets. */
+ for ( n = 0; ( s = getoptval( optv, 't', n ) ); ++n )
+ {
+ OBJECT * const target = object_new( s );
+ touch_target( target );
+ object_free( target );
+ }
+
+ /* Now make target. */
+ {
+ PROFILE_ENTER( MAIN_MAKE );
+ LIST * const targets = targets_to_update();
+ if ( !list_empty( targets ) )
+ status |= make( targets, anyhow );
+ else
+ status = last_update_now_status;
+ PROFILE_EXIT( MAIN_MAKE );
+ }
+ }
+
+ PROFILE_EXIT( MAIN );
+ }
+
+ if ( DEBUG_PROFILE )
+ profile_dump();
+
+
+#ifdef OPT_HEADER_CACHE_EXT
+ hcache_done();
+#endif
+
+ clear_targets_to_update();
+
+ /* Widely scattered cleanup. */
+ property_set_done();
+ exec_done();
+ file_done();
+ rules_done();
+ timestamp_done();
+ search_done();
+ class_done();
+ modules_done();
+ regex_done();
+ cwd_done();
+ path_done();
+ function_done();
+ list_done();
+ constants_done();
+ object_done();
+
+ /* Close log out. */
+ if ( globs.out )
+ fclose( globs.out );
+
+#ifdef HAVE_PYTHON
+ Py_Finalize();
+#endif
+
+ BJAM_MEM_CLOSE();
+
+ return status ? EXITBAD : EXITOK;
+}
+
+
+/*
+ * executable_path()
+ */
+
+#if defined(_WIN32)
+# define WIN32_LEAN_AND_MEAN
+# include <windows.h>
+char * executable_path( char const * argv0 )
+{
+ char buf[ 1024 ];
+ DWORD const ret = GetModuleFileNameA( NULL, buf, sizeof( buf ) );
+ return ( !ret || ret == sizeof( buf ) ) ? NULL : strdup( buf );
+}
+#elif defined(__APPLE__) /* Not tested */
+# include <mach-o/dyld.h>
+char *executable_path( char const * argv0 )
+{
+ char buf[ 1024 ];
+ uint32_t size = sizeof( buf );
+ return _NSGetExecutablePath( buf, &size ) ? NULL : strdup( buf );
+}
+#elif defined(sun) || defined(__sun) /* Not tested */
+# include <stdlib.h>
+char * executable_path( char const * argv0 )
+{
+ const char * execname = getexecname();
+ return execname ? strdup( execname ) : NULL;
+}
+#elif defined(__FreeBSD__)
+# include <sys/sysctl.h>
+char * executable_path( char const * argv0 )
+{
+ int mib[ 4 ] = { CTL_KERN, KERN_PROC, KERN_PROC_PATHNAME, -1 };
+ char buf[ 1024 ];
+ size_t size = sizeof( buf );
+ sysctl( mib, 4, buf, &size, NULL, 0 );
+ return ( !size || size == sizeof( buf ) ) ? NULL : strndup( buf, size );
+}
+#elif defined(__linux__)
+# include <unistd.h>
+char * executable_path( char const * argv0 )
+{
+ char buf[ 1024 ];
+ ssize_t const ret = readlink( "/proc/self/exe", buf, sizeof( buf ) );
+ return ( !ret || ret == sizeof( buf ) ) ? NULL : strndup( buf, ret );
+}
+#elif defined(OS_VMS)
+# include <unixlib.h>
+char * executable_path( char const * argv0 )
+{
+ char * vms_path = NULL;
+ char * posix_path = NULL;
+ char * p;
+
+ /* On VMS argv[0] shows absolute path to the image file.
+ * So, just remove VMS file version and translate path to POSIX-style.
+ */
+ vms_path = strdup( argv0 );
+ if ( vms_path && ( p = strchr( vms_path, ';') ) ) *p = '\0';
+ posix_path = decc$translate_vms( vms_path );
+ if ( vms_path ) free( vms_path );
+
+ return posix_path > 0 ? strdup( posix_path ) : NULL;
+}
+#else
+char * executable_path( char const * argv0 )
+{
+ /* If argv0 is an absolute path, assume it is the right absolute path. */
+ return argv0[ 0 ] == '/' ? strdup( argv0 ) : NULL;
+}
+#endif
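
Each executable_path() variant above returns a freshly strdup()'d or strndup()'d buffer, or NULL when the path cannot be determined, so the caller owns the result. A small usage sketch (the prototype is repeated here only so the sketch is self-contained):

#include <cstdio>
#include <cstdlib>

char * executable_path( char const * argv0 );  /* defined above */

int main( int argc, char * * argv )
{
    (void)argc;
    char * path = executable_path( argv[ 0 ] );
    std::printf( "running from: %s\n", path ? path : "(unknown)" );
    std::free( path );  /* free( NULL ) is a harmless no-op */
    return 0;
}
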
diff --git a/src/boost/tools/build/src/engine/jam.h b/src/boost/tools/build/src/engine/jam.h
new file mode 100644
index 000000000..984a4a41b
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jam.h
@@ -0,0 +1,524 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jam.h - includes and globals for jam
+ */
+
+#ifndef JAM_H_VP_2003_08_01
+#define JAM_H_VP_2003_08_01
+
+#include "config.h"
+
+#ifdef HAVE_PYTHON
+#include <Python.h>
+#endif
+
+/* Assume popen support is available unless known otherwise. */
+#define HAVE_POPEN 1
+
+/*
+ * VMS, OPENVMS
+ */
+
+#ifdef VMS
+
+#include <types.h>
+#include <file.h>
+#include <stat.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <stdlib.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+#include <unixlib.h>
+
+#define OSMINOR "OS=VMS"
+#define OSMAJOR "VMS=true"
+#define OS_VMS
+#define MAXLINE 1024 /* longest 'together' actions */
+#define PATH_DELIM '/' /* use CRTL POSIX-style handling */
+#define SPLITPATH ','
+#define EXITOK EXIT_SUCCESS
+#define EXITBAD EXIT_FAILURE
+#define DOWNSHIFT_PATHS
+
+/* This may be inaccurate. */
+#ifndef __DECC
+#define OSPLAT "OSPLAT=VAX"
+#endif
+
+#define glob jam_glob /* use jam's glob, not CRTL's */
+
+#endif
+
+/*
+ * Windows NT
+ */
+
+#ifdef NT
+
+#include <ctype.h>
+#include <fcntl.h>
+#include <malloc.h>
+#ifndef __MWERKS__
+ #include <memory.h>
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "NT=true"
+#define OSMINOR "OS=NT"
+#define OS_NT
+#define SPLITPATH ';'
+#define MAXLINE (undefined__see_execnt_c) /* max chars per command line */
+#define USE_EXECNT
+#define PATH_DELIM '\\'
+
+/* AS400 cross-compile from NT. */
+
+#ifdef AS400
+ #undef OSMINOR
+ #undef OSMAJOR
+ #define OSMAJOR "AS400=true"
+ #define OSMINOR "OS=AS400"
+ #define OS_AS400
+#endif
+
+/* Metrowerks Standard Library on Windows. */
+
+#ifdef __MSL__
+ #undef HAVE_POPEN
+#endif
+
+#endif /* #ifdef NT */
+
+
+/*
+ * Windows MingW32
+ */
+
+#ifdef MINGW
+
+#include <fcntl.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <malloc.h>
+#include <memory.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "MINGW=true"
+#define OSMINOR "OS=MINGW"
+#define OS_NT
+#define SPLITPATH ';'
+#define MAXLINE 996 /* max chars per command line */
+#define USE_EXECUNIX
+#define PATH_DELIM '\\'
+
+#endif /* #ifdef MINGW */
+
+
+/*
+ * God fearing UNIX.
+ */
+
+#ifndef OSMINOR
+
+#define OSMAJOR "UNIX=true"
+#define USE_EXECUNIX
+#define USE_FILEUNIX
+#define PATH_DELIM '/'
+
+#ifdef _AIX
+ #define unix
+ #define MAXLINE 23552 /* 24k - 1k, max chars per command line */
+ #define OSMINOR "OS=AIX"
+ #define OS_AIX
+ #define NO_VFORK
+#endif
+#ifdef AMIGA
+ #define OSMINOR "OS=AMIGA"
+ #define OS_AMIGA
+#endif
+#ifdef __BEOS__
+ #define unix
+ #define OSMINOR "OS=BEOS"
+ #define OS_BEOS
+ #define NO_VFORK
+#endif
+#ifdef __bsdi__
+ #define OSMINOR "OS=BSDI"
+ #define OS_BSDI
+#endif
+#if defined (COHERENT) && defined (_I386)
+ #define OSMINOR "OS=COHERENT"
+ #define OS_COHERENT
+ #define NO_VFORK
+#endif
+#if defined(__cygwin__) || defined(__CYGWIN__)
+ #define OSMINOR "OS=CYGWIN"
+ #define OS_CYGWIN
+#endif
+#if defined(__FreeBSD__) && !defined(__DragonFly__)
+ #define OSMINOR "OS=FREEBSD"
+ #define OS_FREEBSD
+#endif
+#ifdef __DragonFly__
+ #define OSMINOR "OS=DRAGONFLYBSD"
+ #define OS_DRAGONFLYBSD
+#endif
+#ifdef __DGUX__
+ #define OSMINOR "OS=DGUX"
+ #define OS_DGUX
+#endif
+#ifdef __hpux
+ #define OSMINOR "OS=HPUX"
+ #define OS_HPUX
+#endif
+#ifdef __HAIKU__
+ #define unix
+ #define OSMINOR "OS=HAIKU"
+ #define OS_HAIKU
+#endif
+#ifdef __OPENNT
+ #define unix
+ #define OSMINOR "OS=INTERIX"
+ #define OS_INTERIX
+ #define NO_VFORK
+#endif
+#ifdef __sgi
+ #define OSMINOR "OS=IRIX"
+ #define OS_IRIX
+ #define NO_VFORK
+#endif
+#ifdef __ISC
+ #define OSMINOR "OS=ISC"
+ #define OS_ISC
+ #define NO_VFORK
+#endif
+#if defined(linux) || defined(__linux) || \
+ defined(__linux__) || defined(__gnu_linux__)
+ #define OSMINOR "OS=LINUX"
+ #define OS_LINUX
+#endif
+#ifdef __Lynx__
+ #define OSMINOR "OS=LYNX"
+ #define OS_LYNX
+ #define NO_VFORK
+ #define unix
+#endif
+#ifdef __MACHTEN__
+ #define OSMINOR "OS=MACHTEN"
+ #define OS_MACHTEN
+#endif
+#ifdef mpeix
+ #define unix
+ #define OSMINOR "OS=MPEIX"
+ #define OS_MPEIX
+ #define NO_VFORK
+#endif
+#ifdef __MVS__
+ #define unix
+ #define OSMINOR "OS=MVS"
+ #define OS_MVS
+#endif
+#ifdef _ATT4
+ #define OSMINOR "OS=NCR"
+ #define OS_NCR
+#endif
+#ifdef __NetBSD__
+ #define unix
+ #define OSMINOR "OS=NETBSD"
+ #define OS_NETBSD
+ #define NO_VFORK
+#endif
+#ifdef __QNX__
+ #define unix
+ #ifdef __QNXNTO__
+ #define OSMINOR "OS=QNXNTO"
+ #define OS_QNXNTO
+ #else
+ #define OSMINOR "OS=QNX"
+ #define OS_QNX
+ #define NO_VFORK
+ #define MAXLINE 996 /* max chars per command line */
+ #endif
+#endif
+#ifdef NeXT
+ #ifdef __APPLE__
+ #define OSMINOR "OS=RHAPSODY"
+ #define OS_RHAPSODY
+ #else
+ #define OSMINOR "OS=NEXT"
+ #define OS_NEXT
+ #endif
+#endif
+#ifdef __APPLE__
+ #define unix
+ #define OSMINOR "OS=MACOSX"
+ #define OS_MACOSX
+#endif
+#ifdef __osf__
+ #ifndef unix
+ #define unix
+ #endif
+ #define OSMINOR "OS=OSF"
+ #define OS_OSF
+#endif
+#ifdef _SEQUENT_
+ #define OSMINOR "OS=PTX"
+ #define OS_PTX
+#endif
+#ifdef M_XENIX
+ #define OSMINOR "OS=SCO"
+ #define OS_SCO
+ #define NO_VFORK
+#endif
+#ifdef sinix
+ #define unix
+ #define OSMINOR "OS=SINIX"
+ #define OS_SINIX
+#endif
+#ifdef sun
+ #if defined(__svr4__) || defined(__SVR4)
+ #define OSMINOR "OS=SOLARIS"
+ #define OS_SOLARIS
+ #else
+ #define OSMINOR "OS=SUNOS"
+ #define OS_SUNOS
+ #endif
+#endif
+#ifdef ultrix
+ #define OSMINOR "OS=ULTRIX"
+ #define OS_ULTRIX
+#endif
+#ifdef _UNICOS
+ #define OSMINOR "OS=UNICOS"
+ #define OS_UNICOS
+#endif
+#if defined(__USLC__) && !defined(M_XENIX)
+ #define OSMINOR "OS=UNIXWARE"
+ #define OS_UNIXWARE
+#endif
+#ifdef __OpenBSD__
+ #define OSMINOR "OS=OPENBSD"
+ #define OS_OPENBSD
+ #define unix
+#endif
+#if defined (__FreeBSD_kernel__) && !defined(__FreeBSD__)
+ #define OSMINOR "OS=KFREEBSD"
+ #define OS_KFREEBSD
+#endif
+#ifndef OSMINOR
+ #define OSMINOR "OS=UNKNOWN"
+#endif
+
+/* All the UNIX includes */
+
+#include <sys/types.h>
+
+#ifndef OS_MPEIX
+ #include <sys/file.h>
+#endif
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+
+#ifndef OS_QNX
+ #include <memory.h>
+#endif
+
+#ifndef OS_ULTRIX
+ #include <stdlib.h>
+#endif
+
+#if !defined( OS_BSDI ) && \
+ !defined( OS_FREEBSD ) && \
+ !defined( OS_DRAGONFLYBSD ) && \
+ !defined( OS_NEXT ) && \
+ !defined( OS_MACHTEN ) && \
+ !defined( OS_MACOSX ) && \
+ !defined( OS_RHAPSODY ) && \
+ !defined( OS_MVS ) && \
+ !defined( OS_OPENBSD )
+ #include <malloc.h>
+#endif
+
+#endif /* #ifndef OSMINOR */
+
+
+/*
+ * OSPLAT definitions - suppressed when it is a one-of-a-kind.
+ */
+
+#if defined( _M_PPC ) || \
+ defined( PPC ) || \
+ defined( ppc ) || \
+ defined( __powerpc__ ) || \
+ defined( __ppc__ )
+ #define OSPLAT "OSPLAT=PPC"
+#endif
+
+#if defined( _ALPHA_ ) || \
+ defined( __alpha__ )
+ #define OSPLAT "OSPLAT=AXP"
+#endif
+
+#if defined( _i386_ ) || \
+ defined( __i386__ ) || \
+ defined( __i386 ) || \
+ defined( _M_IX86 )
+ #define OSPLAT "OSPLAT=X86"
+#endif
+
+#if defined( __ia64__ ) || \
+ defined( __IA64__ ) || \
+ defined( __ia64 )
+ #define OSPLAT "OSPLAT=IA64"
+#endif
+
+#if defined( __x86_64__ ) || \
+ defined( __amd64__ ) || \
+ defined( _M_AMD64 )
+ #define OSPLAT "OSPLAT=X86_64"
+#endif
+
+#if defined( __sparc__ ) || \
+ defined( __sparc )
+ #define OSPLAT "OSPLAT=SPARC"
+#endif
+
+#ifdef __mips__
+ #if defined(_ABI64)
+ #define OSPLAT "OSPLAT=MIPS64"
+ #elif defined(_ABIO32)
+ #define OSPLAT "OSPLAT=MIPS32"
+ #endif
+#endif
+
+#if defined( __arm__ ) || \
+ defined( __aarch64__ )
+ #define OSPLAT "OSPLAT=ARM"
+#endif
+
+#ifdef __s390__
+ #define OSPLAT "OSPLAT=390"
+#endif
+
+#ifdef __hppa
+ #define OSPLAT "OSPLAT=PARISC"
+#endif
+
+#ifndef OSPLAT
+ #define OSPLAT ""
+#endif
+
+
+/*
+ * Jam implementation misc.
+ */
+
+#ifndef MAXLINE
+ #define MAXLINE 102400 /* max chars per command line */
+#endif
+
+#ifndef EXITOK
+ #define EXITOK 0
+ #define EXITBAD 1
+#endif
+
+#ifndef SPLITPATH
+ #define SPLITPATH ':'
+#endif
+
+/* You probably do not need to muck with these. */
+
+#define MAXSYM 1024 /* longest symbol in the environment */
+#define MAXJPATH 1024 /* longest filename */
+
+#define MAXARGC 32 /* words in $(JAMSHELL) */
+
+/* Jam private definitions below. */
+
+#define DEBUG_MAX 14
+
+
+struct globs
+{
+ int noexec;
+ int jobs;
+ int quitquick;
+ int newestfirst; /* build newest sources first */
+ int pipe_action;
+ char debug[ DEBUG_MAX ];
+ FILE * out; /* mirror output here */
+ long timeout; /* number of seconds to limit actions to,
+ * default 0 for no limit.
+ */
+ int dart; /* output build and test results formatted for
+ * Dart
+ */
+ int max_buf; /* maximum amount of output saved from target
+ * (kb)
+ */
+};
+
+extern struct globs globs;
+
+#define DEBUG_MAKE ( globs.debug[ 1 ] ) /* show actions when executed */
+#define DEBUG_MAKEQ ( globs.debug[ 2 ] ) /* show even quiet actions */
+#define DEBUG_EXEC ( globs.debug[ 2 ] ) /* show text of actions */
+#define DEBUG_MAKEPROG ( globs.debug[ 3 ] ) /* show make0 progress */
+#define DEBUG_BIND ( globs.debug[ 3 ] ) /* show when files bound */
+
+#define DEBUG_EXECCMD ( globs.debug[ 4 ] ) /* show execcmds()'s work */
+
+#define DEBUG_COMPILE ( globs.debug[ 5 ] ) /* show rule invocations */
+
+#define DEBUG_HEADER ( globs.debug[ 6 ] ) /* show result of header scan */
+#define DEBUG_BINDSCAN ( globs.debug[ 6 ] ) /* show result of dir scan */
+#define DEBUG_SEARCH ( globs.debug[ 6 ] ) /* show binding attempts */
+
+#define DEBUG_VARSET ( globs.debug[ 7 ] ) /* show variable settings */
+#define DEBUG_VARGET ( globs.debug[ 8 ] ) /* show variable fetches */
+#define DEBUG_VAREXP ( globs.debug[ 8 ] ) /* show variable expansions */
+#define DEBUG_IF ( globs.debug[ 8 ] ) /* show 'if' calculations */
+#define DEBUG_LISTS ( globs.debug[ 9 ] ) /* show list manipulation */
+#define DEBUG_SCAN ( globs.debug[ 9 ] ) /* show scanner tokens */
+#define DEBUG_MEM ( globs.debug[ 9 ] ) /* show memory use */
+
+#define DEBUG_PROFILE ( globs.debug[ 10 ] ) /* dump rule execution times */
+#define DEBUG_PARSE ( globs.debug[ 11 ] ) /* debug parsing */
+#define DEBUG_GRAPH ( globs.debug[ 12 ] ) /* debug dependencies */
+#define DEBUG_FATE ( globs.debug[ 13 ] ) /* show fate changes in make0() */
+
+/* Everyone gets the memory definitions. */
+#include "mem.h"
+
+/* They also get the profile functions. */
+#include "debug.h"
+
+#endif
diff --git a/src/boost/tools/build/src/engine/jam_strings.cpp b/src/boost/tools/build/src/engine/jam_strings.cpp
new file mode 100644
index 000000000..c9ed8a17f
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jam_strings.cpp
@@ -0,0 +1,240 @@
+/* Copyright David Abrahams 2004. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "jam.h"
+#include "jam_strings.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+#ifndef NDEBUG
+# define JAM_STRING_MAGIC ((char)0xcf)
+# define JAM_STRING_MAGIC_SIZE 4
+static void assert_invariants( string * self )
+{
+ int i;
+
+ if ( self->value == 0 )
+ {
+ assert( self->size == 0 );
+ assert( self->capacity == 0 );
+ assert( self->opt[ 0 ] == 0 );
+ return;
+ }
+
+ assert( self->size < self->capacity );
+ assert( ( self->capacity <= sizeof( self->opt ) ) == ( self->value == self->opt ) );
+ assert( self->value[ self->size ] == 0 );
+ /* String objects modified manually after construction to contain embedded
+ * '\0' characters are considered structurally valid.
+ */
+ assert( strlen( self->value ) <= self->size );
+
+ for ( i = 0; i < 4; ++i )
+ {
+ assert( self->magic[ i ] == JAM_STRING_MAGIC );
+ assert( self->value[ self->capacity + i ] == JAM_STRING_MAGIC );
+ }
+}
+#else
+# define JAM_STRING_MAGIC_SIZE 0
+# define assert_invariants(x) do {} while (0)
+#endif
+
+
+void string_new( string * s )
+{
+ s->value = s->opt;
+ s->size = 0;
+ s->capacity = sizeof( s->opt );
+ s->opt[ 0 ] = 0;
+#ifndef NDEBUG
+ memset( s->magic, JAM_STRING_MAGIC, sizeof( s->magic ) );
+#endif
+ assert_invariants( s );
+}
+
+
+void string_free( string * s )
+{
+ assert_invariants( s );
+ if ( s->value != s->opt )
+ BJAM_FREE( s->value );
+ string_new( s );
+}
+
+
+static void string_reserve_internal( string * self, size_t capacity )
+{
+ if ( self->value == self->opt )
+ {
+ self->value = (char *)BJAM_MALLOC_ATOMIC( capacity +
+ JAM_STRING_MAGIC_SIZE );
+ self->value[ 0 ] = 0;
+ size_t opt_size = sizeof(self->opt); // Workaround sizeof in strncat warning.
+ strncat( self->value, self->opt, opt_size );
+ assert( strlen( self->value ) <= self->capacity && "Regression test" );
+ }
+ else
+ {
+ self->value = (char *)BJAM_REALLOC( self->value, capacity +
+ JAM_STRING_MAGIC_SIZE );
+ }
+#ifndef NDEBUG
+ memcpy( self->value + capacity, self->magic, JAM_STRING_MAGIC_SIZE );
+#endif
+ self->capacity = capacity;
+}
+
+
+void string_reserve( string * self, size_t capacity )
+{
+ assert_invariants( self );
+ if ( capacity <= self->capacity )
+ return;
+ string_reserve_internal( self, capacity );
+ assert_invariants( self );
+}
+
+
+static void maybe_reserve( string * self, size_t new_size )
+{
+ size_t capacity = self->capacity;
+ if ( capacity <= new_size )
+ {
+ size_t new_capacity = capacity;
+ while ( new_capacity <= new_size )
+ new_capacity <<= 1;
+ string_reserve_internal( self, new_capacity );
+ }
+}
+
+
+void string_append( string * self, char const * rhs )
+{
+ size_t rhs_size = strlen( rhs );
+ size_t new_size = self->size + rhs_size;
+ assert_invariants( self );
+
+ maybe_reserve( self, new_size );
+
+ memcpy( self->value + self->size, rhs, rhs_size + 1 );
+ self->size = new_size;
+
+ assert_invariants( self );
+}
+
+
+void string_append_range( string * self, char const * start, char const * finish )
+{
+ size_t rhs_size = finish - start;
+ size_t new_size = self->size + rhs_size;
+ assert_invariants( self );
+
+ maybe_reserve( self, new_size );
+
+ if ( start != finish )
+ memcpy( self->value + self->size, start, rhs_size );
+ self->size = new_size;
+ self->value[ new_size ] = 0;
+
+ assert_invariants( self );
+}
+
+
+void string_copy( string * s, char const * rhs )
+{
+ string_new( s );
+ string_append( s, rhs );
+}
+
+void string_truncate( string * self, size_t n )
+{
+ assert_invariants( self );
+ assert( n <= self->capacity );
+ self->value[ self->size = n ] = 0;
+ assert_invariants( self );
+}
+
+
+void string_pop_back( string * self )
+{
+ string_truncate( self, self->size - 1 );
+}
+
+
+void string_push_back( string * self, char x )
+{
+ string_append_range( self, &x, &x + 1 );
+}
+
+
+char string_back( string * self )
+{
+ assert_invariants( self );
+ return self->value[ self->size - 1 ];
+}
+
+void string_rtrim( string * self )
+{
+ char *p;
+ assert_invariants( self );
+ p = self->value + self->size - 1;
+ for ( ; p >= self->value && ( *p == '\0' || isspace( *p ) ); *p-- = 0 );
+}
+
+#ifndef NDEBUG
+void string_unit_test()
+{
+ {
+ string s[ 1 ];
+ unsigned long i;
+ unsigned long const limit = sizeof( s->opt ) * 2 + 2;
+ string_new( s );
+ assert( s->value == s->opt );
+ for ( i = 0; i < limit; ++i )
+ {
+ string_push_back( s, (char)( i + 1 ) );
+ assert( s->size == i + 1 );
+ }
+ assert( s->size == limit );
+ assert( s->value != s->opt );
+ for ( i = 0; i < limit; ++i )
+ assert( s->value[ i ] == (char)( i + 1 ) );
+ string_free( s );
+ }
+
+ {
+ const char * const original = " \n\t\v Foo \r\n\v \tBar\n\n\r\r\t\n\v\t \t";
+ string copy[ 1 ];
+ string_copy( copy, original );
+ assert( !strcmp( copy->value, original ) );
+ assert( copy->size == strlen( original ) );
+ string_free( copy );
+ }
+
+ {
+ const char * const foo = "Foo ";
+ string foo_copy[ 1 ];
+ string_copy( foo_copy, foo );
+ string_rtrim( foo_copy );
+ assert( !strcmp( foo_copy->value, "Foo" ) );
+
+ string_rtrim( foo_copy );
+ assert( !strcmp( foo_copy->value, "Foo" ) );
+ }
+ {
+ const char * const bar = "Bar\0\0\0";
+ string bar_copy[ 1 ];
+ string_copy( bar_copy, bar );
+ string_rtrim( bar_copy );
+ assert( !strcmp( bar_copy->value, "Bar" ) );
+
+ string_rtrim( bar_copy );
+ assert( !strcmp( bar_copy->value, "Bar" ) );
+ }
+}
+#endif
diff --git a/src/boost/tools/build/src/engine/jam_strings.h b/src/boost/tools/build/src/engine/jam_strings.h
new file mode 100644
index 000000000..f47db10af
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jam_strings.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2004. David Abrahams
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef JAM_STRINGS_DWA20011024_H
+#define JAM_STRINGS_DWA20011024_H
+
+#include "config.h"
+#include <stddef.h>
+
+typedef struct string
+{
+ char * value;
+ unsigned long size;
+ unsigned long capacity;
+ char opt[ 32 ];
+#ifndef NDEBUG
+ char magic[ 4 ];
+#endif
+} string;
+
+void string_new( string * );
+void string_copy( string *, char const * );
+void string_free( string * );
+void string_append( string *, char const * );
+void string_append_range( string *, char const *, char const * );
+void string_push_back( string * s, char x );
+void string_reserve( string *, size_t );
+void string_truncate( string *, size_t );
+void string_pop_back( string * );
+char string_back( string * );
+void string_rtrim( string * );
+void string_unit_test();
+
+#endif
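
A brief usage sketch of the string API declared above, assuming the engine's headers are on the include path: a value starts in the embedded opt[] buffer and silently moves to the heap once it outgrows it, so string_free() is always required after string_new().

#include "jam_strings.h"
#include <stdio.h>

int main()
{
    string s[ 1 ];             /* the engine's own idiom for a local string */
    string_new( s );
    string_append( s, "Hello, " );
    string_append( s, "world   " );
    string_rtrim( s );         /* drops the trailing blanks */
    printf( "%s (%lu chars)\n", s->value, s->size );
    string_free( s );          /* releases any heap storage */
    return 0;
}
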
diff --git a/src/boost/tools/build/src/engine/jambase.cpp b/src/boost/tools/build/src/engine/jambase.cpp
new file mode 100644
index 000000000..055c8f1d8
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jambase.cpp
@@ -0,0 +1,112 @@
+/* Generated by mkjambase from Jambase */
+const char *jambase[] = {
+/* src/engine/Jambase */
+"if $(NT)\n",
+"{\n",
+"SLASH ?= \\\\ ;\n",
+"}\n",
+"SLASH ?= / ;\n",
+"rule find-to-root ( dir : patterns + )\n",
+"{\n",
+"local globs = [ GLOB $(dir) : $(patterns) ] ;\n",
+"while ! $(globs) && $(dir:P) != $(dir)\n",
+"{\n",
+"dir = $(dir:P) ;\n",
+"globs = [ GLOB $(dir) : $(patterns) ] ;\n",
+"}\n",
+"return $(globs) ;\n",
+"}\n",
+".boost-build-file = ;\n",
+".bootstrap-file = ;\n",
+"BOOST_BUILD_PATH.user-value = $(BOOST_BUILD_PATH) ;\n",
+"if ! $(BOOST_BUILD_PATH) && $(UNIX)\n",
+"{\n",
+"BOOST_BUILD_PATH = /usr/share/boost-build ;\n",
+"}\n",
+"rule _poke ( module-name ? : variables + : value * )\n",
+"{\n",
+"module $(<)\n",
+"{\n",
+"$(>) = $(3) ;\n",
+"}\n",
+"}\n",
+"rule boost-build ( dir ? )\n",
+"{\n",
+"if $(.bootstrap-file)\n",
+"{\n",
+"ECHO \"Error: Illegal attempt to re-bootstrap the build system by invoking\" ;\n",
+"ECHO ;\n",
+"ECHO \" 'boost-build\" $(dir) \";'\" ;\n",
+"ECHO ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"BOOST_BUILD_PATH = $(dir:R=$(.boost-build-file:D)) $(BOOST_BUILD_PATH) ;\n",
+"_poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;\n",
+"local bootstrap-file = [ GLOB $(BOOST_BUILD_PATH) : bootstrap.jam ] ;\n",
+".bootstrap-file = $(bootstrap-file[1]) ;\n",
+"if ! $(.bootstrap-file)\n",
+"{\n",
+"ECHO \"Unable to load B2: could not find build system.\" ;\n",
+"ECHO --------------------------------------------------------- ;\n",
+"ECHO \"$(.boost-build-file) attempted to load the build system by invoking\" ;\n",
+"ECHO ;\n",
+"ECHO \" 'boost-build\" $(dir) \";'\" ;\n",
+"ECHO ;\n",
+"ECHO \"but we were unable to find \\\"bootstrap.jam\\\" in the specified directory\" ;\n",
+"ECHO \"or in BOOST_BUILD_PATH (searching \"$(BOOST_BUILD_PATH:J=\", \")\").\" ;\n",
+"ECHO ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"if [ MATCH .*(--debug-configuration).* : $(ARGV) ]\n",
+"{\n",
+"ECHO \"notice: loading B2 from\"\n",
+"[ NORMALIZE_PATH $(.bootstrap-file:D) ] ;\n",
+"}\n",
+"include $(.bootstrap-file) ;\n",
+"}\n",
+"{\n",
+"local search-path = $(BOOST_BUILD_PATH) $(BOOST_ROOT) ;\n",
+"local self = [ SELF_PATH ] ;\n",
+"local boost-build-relative = ../../share/boost-build ;\n",
+"local self-based-path = [ NORMALIZE_PATH $(boost-build-relative:R=$(self)) ] ;\n",
+"local boost-build-files =\n",
+"[ find-to-root [ PWD ] : boost-build.jam ]\n",
+"[ GLOB $(self-based-path) : boost-build.jam ]\n",
+"[ GLOB $(search-path) : boost-build.jam ] ;\n",
+".boost-build-file = $(boost-build-files[1]) ;\n",
+"if ! $(.boost-build-file)\n",
+"{\n",
+"ECHO \"Unable to load B2: could not find \\\"boost-build.jam\\\"\" ;\n",
+"ECHO --------------------------------------------------------------- ;\n",
+"if ! [ MATCH .*(bjam).* : $(ARGV[1]:BL) ]\n",
+"{\n",
+"ECHO \"BOOST_ROOT must be set, either in the environment, or \" ;\n",
+"ECHO \"on the command-line with -sBOOST_ROOT=..., to the root\" ;\n",
+"ECHO \"of the boost installation.\" ;\n",
+"ECHO ;\n",
+"}\n",
+"ECHO \"Attempted search from\" [ PWD ] \"up to the root\" ;\n",
+"ECHO \"at\" $(self-based-path) ;\n",
+"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"if [ MATCH .*(--debug-configuration).* : $(ARGV) ]\n",
+"{\n",
+"ECHO \"notice: found boost-build.jam at\"\n",
+"[ NORMALIZE_PATH $(.boost-build-file) ] ;\n",
+"}\n",
+"include $(.boost-build-file) ;\n",
+"if ! $(.bootstrap-file)\n",
+"{\n",
+"ECHO \"Unable to load B2\" ;\n",
+"ECHO -------------------------- ;\n",
+"ECHO \"\\\"$(.boost-build-file)\\\" was found by searching from\" [ PWD ] \"up to the root\" ;\n",
+"ECHO \"and in these directories from BOOST_BUILD_PATH and BOOST_ROOT: \"$(search-path:J=\", \")\".\" ;\n",
+"ECHO ;\n",
+"ECHO \"However, it failed to call the \\\"boost-build\\\" rule to indicate\" ;\n",
+"ECHO \"the location of the build system.\" ;\n",
+"ECHO ;\n",
+"EXIT \"Please consult the documentation at 'http://www.boost.org'.\" ;\n",
+"}\n",
+"}\n",
+0 };
diff --git a/src/boost/tools/build/src/engine/jambase.h b/src/boost/tools/build/src/engine/jambase.h
new file mode 100644
index 000000000..c63d08311
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jambase.h
@@ -0,0 +1,15 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * jambase.h - declaration for the internal jambase
+ *
+ * The file Jambase is turned into a C array of strings in jambase.c
+ * so that it can be built in to the executable. This is the
+ * declaration for that array.
+ */
+
+extern const char *jambase[];
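
jambase.cpp above is generated output: the mkjambase step converts the Jambase rules file into the jambase[] array of C string literals so the rules can be compiled into the executable, exactly as this header describes. A rough standalone sketch of that conversion (file names are placeholders, and unlike the real generator, whose output above is visibly stripped of comments and indentation, this sketch copies lines verbatim):

#include <fstream>
#include <iostream>
#include <string>

int main()
{
    std::ifstream in( "Jambase" );            /* input rules file (placeholder) */
    std::cout << "const char *jambase[] = {\n";
    std::string line;
    while ( std::getline( in, line ) )
    {
        std::string escaped;
        for ( char const c : line )           /* escape backslashes and quotes */
        {
            if ( c == '\\' || c == '"' )
                escaped += '\\';
            escaped += c;
        }
        std::cout << "\"" << escaped << "\\n\",\n";
    }
    std::cout << "0 };\n";
    return 0;
}
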
diff --git a/src/boost/tools/build/src/engine/jamgram.cpp b/src/boost/tools/build/src/engine/jamgram.cpp
new file mode 100644
index 000000000..d4ccc8e4f
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jamgram.cpp
@@ -0,0 +1,2548 @@
+/* A Bison parser, made by GNU Bison 3.5.2. */
+
+/* Bison implementation for Yacc-like parsers in C
+
+ Copyright (C) 1984, 1989-1990, 2000-2015, 2018-2020 Free Software Foundation,
+ Inc.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>. */
+
+/* As a special exception, you may create a larger work that contains
+ part or all of the Bison parser skeleton and distribute that work
+ under terms of your choice, so long as that work isn't itself a
+ parser generator using the skeleton or a modified version thereof
+ as a parser skeleton. Alternatively, if you modify or redistribute
+ the parser skeleton itself, you may (at your option) remove this
+ special exception, which will cause the skeleton and the resulting
+ Bison output files to be licensed under the GNU General Public
+ License without this special exception.
+
+ This special exception was added by the Free Software Foundation in
+ version 2.2 of Bison. */
+
+/* C LALR(1) parser skeleton written by Richard Stallman, by
+ simplifying the original so-called "semantic" parser. */
+
+/* All symbols defined below should begin with yy or YY, to avoid
+ infringing on user name space. This should be done even for local
+ variables, as they might otherwise be expanded by user macros.
+ There are some unavoidable exceptions within include files to
+ define necessary library symbols; they are noted "INFRINGES ON
+ USER NAME SPACE" below. */
+
+/* Undocumented macros, especially those whose name start with YY_,
+ are private implementation details. Do not rely on them. */
+
+/* Identify Bison output. */
+#define YYBISON 1
+
+/* Bison version. */
+#define YYBISON_VERSION "3.5.2"
+
+/* Skeleton name. */
+#define YYSKELETON_NAME "yacc.c"
+
+/* Pure parsers. */
+#define YYPURE 0
+
+/* Push parsers. */
+#define YYPUSH 0
+
+/* Pull parsers. */
+#define YYPULL 1
+
+
+
+
+/* First part of user prologue. */
+#line 98 "src/engine/jamgram.y"
+
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "compile.h"
+#include "object.h"
+#include "rules.h"
+
+# define YYINITDEPTH 5000 /* for C++ parsing */
+# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
+
+# define F0 -1
+# define P0 (PARSE *)0
+# define S0 (OBJECT *)0
+
+# define pappend( l,r ) parse_make( PARSE_APPEND,l,r,P0,S0,S0,0 )
+# define peval( c,l,r ) parse_make( PARSE_EVAL,l,r,P0,S0,S0,c )
+# define pfor( s,l,r,x ) parse_make( PARSE_FOREACH,l,r,P0,s,S0,x )
+# define pif( l,r,t ) parse_make( PARSE_IF,l,r,t,S0,S0,0 )
+# define pincl( l ) parse_make( PARSE_INCLUDE,l,P0,P0,S0,S0,0 )
+# define plist( s ) parse_make( PARSE_LIST,P0,P0,P0,s,S0,0 )
+# define plocal( l,r,t ) parse_make( PARSE_LOCAL,l,r,t,S0,S0,0 )
+# define pmodule( l,r ) parse_make( PARSE_MODULE,l,r,P0,S0,S0,0 )
+# define pclass( l,r ) parse_make( PARSE_CLASS,l,r,P0,S0,S0,0 )
+# define pnull() parse_make( PARSE_NULL,P0,P0,P0,S0,S0,0 )
+# define pon( l,r ) parse_make( PARSE_ON,l,r,P0,S0,S0,0 )
+# define prule( s,p ) parse_make( PARSE_RULE,p,P0,P0,s,S0,0 )
+# define prules( l,r ) parse_make( PARSE_RULES,l,r,P0,S0,S0,0 )
+# define pset( l,r,a ) parse_make( PARSE_SET,l,r,P0,S0,S0,a )
+# define pset1( l,r,t,a ) parse_make( PARSE_SETTINGS,l,r,t,S0,S0,a )
+# define psetc( s,p,a,l ) parse_make( PARSE_SETCOMP,p,a,P0,s,S0,l )
+# define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f )
+# define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 )
+# define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 )
+# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 )
+# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 )
+# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 )
+
+# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
+# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
+
+
+#line 115 "src/engine/jamgram.cpp"
+
+# ifndef YY_CAST
+# ifdef __cplusplus
+# define YY_CAST(Type, Val) static_cast<Type> (Val)
+# define YY_REINTERPRET_CAST(Type, Val) reinterpret_cast<Type> (Val)
+# else
+# define YY_CAST(Type, Val) ((Type) (Val))
+# define YY_REINTERPRET_CAST(Type, Val) ((Type) (Val))
+# endif
+# endif
+# ifndef YY_NULLPTR
+# if defined __cplusplus
+# if 201103L <= __cplusplus
+# define YY_NULLPTR nullptr
+# else
+# define YY_NULLPTR 0
+# endif
+# else
+# define YY_NULLPTR ((void*)0)
+# endif
+# endif
+
+/* Enabling verbose error messages. */
+#ifdef YYERROR_VERBOSE
+# undef YYERROR_VERBOSE
+# define YYERROR_VERBOSE 1
+#else
+# define YYERROR_VERBOSE 0
+#endif
+
+/* Use api.header.include to #include this header
+ instead of duplicating it here. */
+#ifndef YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED
+# define YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED
+/* Debug traces. */
+#ifndef YYDEBUG
+# define YYDEBUG 0
+#endif
+#if YYDEBUG
+extern int yydebug;
+#endif
+
+/* Token type. */
+#ifndef YYTOKENTYPE
+# define YYTOKENTYPE
+ enum yytokentype
+ {
+ _BANG_t = 258,
+ _BANG_EQUALS_t = 259,
+ _AMPER_t = 260,
+ _AMPERAMPER_t = 261,
+ _LPAREN_t = 262,
+ _RPAREN_t = 263,
+ _PLUS_EQUALS_t = 264,
+ _COLON_t = 265,
+ _SEMIC_t = 266,
+ _LANGLE_t = 267,
+ _LANGLE_EQUALS_t = 268,
+ _EQUALS_t = 269,
+ _RANGLE_t = 270,
+ _RANGLE_EQUALS_t = 271,
+ _QUESTION_EQUALS_t = 272,
+ _LBRACKET_t = 273,
+ _RBRACKET_t = 274,
+ ACTIONS_t = 275,
+ BIND_t = 276,
+ BREAK_t = 277,
+ CASE_t = 278,
+ CLASS_t = 279,
+ CONTINUE_t = 280,
+ DEFAULT_t = 281,
+ ELSE_t = 282,
+ EXISTING_t = 283,
+ FOR_t = 284,
+ IF_t = 285,
+ IGNORE_t = 286,
+ IN_t = 287,
+ INCLUDE_t = 288,
+ LOCAL_t = 289,
+ MODULE_t = 290,
+ ON_t = 291,
+ PIECEMEAL_t = 292,
+ QUIETLY_t = 293,
+ RETURN_t = 294,
+ RULE_t = 295,
+ SWITCH_t = 296,
+ TOGETHER_t = 297,
+ UPDATED_t = 298,
+ WHILE_t = 299,
+ _LBRACE_t = 300,
+ _BAR_t = 301,
+ _BARBAR_t = 302,
+ _RBRACE_t = 303,
+ ARG = 304,
+ STRING = 305
+ };
+#endif
+/* Tokens. */
+#define _BANG_t 258
+#define _BANG_EQUALS_t 259
+#define _AMPER_t 260
+#define _AMPERAMPER_t 261
+#define _LPAREN_t 262
+#define _RPAREN_t 263
+#define _PLUS_EQUALS_t 264
+#define _COLON_t 265
+#define _SEMIC_t 266
+#define _LANGLE_t 267
+#define _LANGLE_EQUALS_t 268
+#define _EQUALS_t 269
+#define _RANGLE_t 270
+#define _RANGLE_EQUALS_t 271
+#define _QUESTION_EQUALS_t 272
+#define _LBRACKET_t 273
+#define _RBRACKET_t 274
+#define ACTIONS_t 275
+#define BIND_t 276
+#define BREAK_t 277
+#define CASE_t 278
+#define CLASS_t 279
+#define CONTINUE_t 280
+#define DEFAULT_t 281
+#define ELSE_t 282
+#define EXISTING_t 283
+#define FOR_t 284
+#define IF_t 285
+#define IGNORE_t 286
+#define IN_t 287
+#define INCLUDE_t 288
+#define LOCAL_t 289
+#define MODULE_t 290
+#define ON_t 291
+#define PIECEMEAL_t 292
+#define QUIETLY_t 293
+#define RETURN_t 294
+#define RULE_t 295
+#define SWITCH_t 296
+#define TOGETHER_t 297
+#define UPDATED_t 298
+#define WHILE_t 299
+#define _LBRACE_t 300
+#define _BAR_t 301
+#define _BARBAR_t 302
+#define _RBRACE_t 303
+#define ARG 304
+#define STRING 305
+
+/* Value type. */
+#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED
+typedef int YYSTYPE;
+# define YYSTYPE_IS_TRIVIAL 1
+# define YYSTYPE_IS_DECLARED 1
+#endif
+
+
+extern YYSTYPE yylval;
+
+int yyparse (void);
+
+#endif /* !YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED */
+
+
+
+#ifdef short
+# undef short
+#endif
+
+/* On compilers that do not define __PTRDIFF_MAX__ etc., make sure
+ <limits.h> and (if available) <stdint.h> are included
+ so that the code can choose integer types of a good width. */
+
+#ifndef __PTRDIFF_MAX__
+# include <limits.h> /* INFRINGES ON USER NAME SPACE */
+# if defined __STDC_VERSION__ && 199901 <= __STDC_VERSION__
+# include <stdint.h> /* INFRINGES ON USER NAME SPACE */
+# define YY_STDINT_H
+# endif
+#endif
+
+/* Narrow types that promote to a signed type and that can represent a
+ signed or unsigned integer of at least N bits. In tables they can
+ save space and decrease cache pressure. Promoting to a signed type
+ helps avoid bugs in integer arithmetic. */
+
+#ifdef __INT_LEAST8_MAX__
+typedef __INT_LEAST8_TYPE__ yytype_int8;
+#elif defined YY_STDINT_H
+typedef int_least8_t yytype_int8;
+#else
+typedef signed char yytype_int8;
+#endif
+
+#ifdef __INT_LEAST16_MAX__
+typedef __INT_LEAST16_TYPE__ yytype_int16;
+#elif defined YY_STDINT_H
+typedef int_least16_t yytype_int16;
+#else
+typedef short yytype_int16;
+#endif
+
+#if defined __UINT_LEAST8_MAX__ && __UINT_LEAST8_MAX__ <= __INT_MAX__
+typedef __UINT_LEAST8_TYPE__ yytype_uint8;
+#elif (!defined __UINT_LEAST8_MAX__ && defined YY_STDINT_H \
+ && UINT_LEAST8_MAX <= INT_MAX)
+typedef uint_least8_t yytype_uint8;
+#elif !defined __UINT_LEAST8_MAX__ && UCHAR_MAX <= INT_MAX
+typedef unsigned char yytype_uint8;
+#else
+typedef short yytype_uint8;
+#endif
+
+#if defined __UINT_LEAST16_MAX__ && __UINT_LEAST16_MAX__ <= __INT_MAX__
+typedef __UINT_LEAST16_TYPE__ yytype_uint16;
+#elif (!defined __UINT_LEAST16_MAX__ && defined YY_STDINT_H \
+ && UINT_LEAST16_MAX <= INT_MAX)
+typedef uint_least16_t yytype_uint16;
+#elif !defined __UINT_LEAST16_MAX__ && USHRT_MAX <= INT_MAX
+typedef unsigned short yytype_uint16;
+#else
+typedef int yytype_uint16;
+#endif
+
+#ifndef YYPTRDIFF_T
+# if defined __PTRDIFF_TYPE__ && defined __PTRDIFF_MAX__
+# define YYPTRDIFF_T __PTRDIFF_TYPE__
+# define YYPTRDIFF_MAXIMUM __PTRDIFF_MAX__
+# elif defined PTRDIFF_MAX
+# ifndef ptrdiff_t
+# include <stddef.h> /* INFRINGES ON USER NAME SPACE */
+# endif
+# define YYPTRDIFF_T ptrdiff_t
+# define YYPTRDIFF_MAXIMUM PTRDIFF_MAX
+# else
+# define YYPTRDIFF_T long
+# define YYPTRDIFF_MAXIMUM LONG_MAX
+# endif
+#endif
+
+#ifndef YYSIZE_T
+# ifdef __SIZE_TYPE__
+# define YYSIZE_T __SIZE_TYPE__
+# elif defined size_t
+# define YYSIZE_T size_t
+# elif defined __STDC_VERSION__ && 199901 <= __STDC_VERSION__
+# include <stddef.h> /* INFRINGES ON USER NAME SPACE */
+# define YYSIZE_T size_t
+# else
+# define YYSIZE_T unsigned
+# endif
+#endif
+
+#define YYSIZE_MAXIMUM \
+ YY_CAST (YYPTRDIFF_T, \
+ (YYPTRDIFF_MAXIMUM < YY_CAST (YYSIZE_T, -1) \
+ ? YYPTRDIFF_MAXIMUM \
+ : YY_CAST (YYSIZE_T, -1)))
+
+#define YYSIZEOF(X) YY_CAST (YYPTRDIFF_T, sizeof (X))
+
+/* Stored state numbers (used for stacks). */
+typedef yytype_uint8 yy_state_t;
+
+/* State numbers in computations. */
+typedef int yy_state_fast_t;
+
+#ifndef YY_
+# if defined YYENABLE_NLS && YYENABLE_NLS
+# if ENABLE_NLS
+# include <libintl.h> /* INFRINGES ON USER NAME SPACE */
+# define YY_(Msgid) dgettext ("bison-runtime", Msgid)
+# endif
+# endif
+# ifndef YY_
+# define YY_(Msgid) Msgid
+# endif
+#endif
+
+#ifndef YY_ATTRIBUTE_PURE
+# if defined __GNUC__ && 2 < __GNUC__ + (96 <= __GNUC_MINOR__)
+# define YY_ATTRIBUTE_PURE __attribute__ ((__pure__))
+# else
+# define YY_ATTRIBUTE_PURE
+# endif
+#endif
+
+#ifndef YY_ATTRIBUTE_UNUSED
+# if defined __GNUC__ && 2 < __GNUC__ + (7 <= __GNUC_MINOR__)
+# define YY_ATTRIBUTE_UNUSED __attribute__ ((__unused__))
+# else
+# define YY_ATTRIBUTE_UNUSED
+# endif
+#endif
+
+/* Suppress unused-variable warnings by "using" E. */
+#if ! defined lint || defined __GNUC__
+# define YYUSE(E) ((void) (E))
+#else
+# define YYUSE(E) /* empty */
+#endif
+
+#if defined __GNUC__ && ! defined __ICC && 407 <= __GNUC__ * 100 + __GNUC_MINOR__
+/* Suppress an incorrect diagnostic about yylval being uninitialized. */
+# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN \
+ _Pragma ("GCC diagnostic push") \
+ _Pragma ("GCC diagnostic ignored \"-Wuninitialized\"") \
+ _Pragma ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")
+# define YY_IGNORE_MAYBE_UNINITIALIZED_END \
+ _Pragma ("GCC diagnostic pop")
+#else
+# define YY_INITIAL_VALUE(Value) Value
+#endif
+#ifndef YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
+# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
+# define YY_IGNORE_MAYBE_UNINITIALIZED_END
+#endif
+#ifndef YY_INITIAL_VALUE
+# define YY_INITIAL_VALUE(Value) /* Nothing. */
+#endif
+
+#if defined __cplusplus && defined __GNUC__ && ! defined __ICC && 6 <= __GNUC__
+# define YY_IGNORE_USELESS_CAST_BEGIN \
+ _Pragma ("GCC diagnostic push") \
+ _Pragma ("GCC diagnostic ignored \"-Wuseless-cast\"")
+# define YY_IGNORE_USELESS_CAST_END \
+ _Pragma ("GCC diagnostic pop")
+#endif
+#ifndef YY_IGNORE_USELESS_CAST_BEGIN
+# define YY_IGNORE_USELESS_CAST_BEGIN
+# define YY_IGNORE_USELESS_CAST_END
+#endif
+
+
+#define YY_ASSERT(E) ((void) (0 && (E)))
+
+#if ! defined yyoverflow || YYERROR_VERBOSE
+
+/* The parser invokes alloca or malloc; define the necessary symbols. */
+
+# ifdef YYSTACK_USE_ALLOCA
+# if YYSTACK_USE_ALLOCA
+# ifdef __GNUC__
+# define YYSTACK_ALLOC __builtin_alloca
+# elif defined __BUILTIN_VA_ARG_INCR
+# include <alloca.h> /* INFRINGES ON USER NAME SPACE */
+# elif defined _AIX
+# define YYSTACK_ALLOC __alloca
+# elif defined _MSC_VER
+# include <malloc.h> /* INFRINGES ON USER NAME SPACE */
+# define alloca _alloca
+# else
+# define YYSTACK_ALLOC alloca
+# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS
+# include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
+ /* Use EXIT_SUCCESS as a witness for stdlib.h. */
+# ifndef EXIT_SUCCESS
+# define EXIT_SUCCESS 0
+# endif
+# endif
+# endif
+# endif
+# endif
+
+# ifdef YYSTACK_ALLOC
+ /* Pacify GCC's 'empty if-body' warning. */
+# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0)
+# ifndef YYSTACK_ALLOC_MAXIMUM
+ /* The OS might guarantee only one guard page at the bottom of the stack,
+ and a page size can be as small as 4096 bytes. So we cannot safely
+ invoke alloca (N) if N exceeds 4096. Use a slightly smaller number
+ to allow for a few compiler-allocated temporary stack slots. */
+# define YYSTACK_ALLOC_MAXIMUM 4032 /* reasonable circa 2006 */
+# endif
+# else
+# define YYSTACK_ALLOC YYMALLOC
+# define YYSTACK_FREE YYFREE
+# ifndef YYSTACK_ALLOC_MAXIMUM
+# define YYSTACK_ALLOC_MAXIMUM YYSIZE_MAXIMUM
+# endif
+# if (defined __cplusplus && ! defined EXIT_SUCCESS \
+ && ! ((defined YYMALLOC || defined malloc) \
+ && (defined YYFREE || defined free)))
+# include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
+# ifndef EXIT_SUCCESS
+# define EXIT_SUCCESS 0
+# endif
+# endif
+# ifndef YYMALLOC
+# define YYMALLOC malloc
+# if ! defined malloc && ! defined EXIT_SUCCESS
+void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */
+# endif
+# endif
+# ifndef YYFREE
+# define YYFREE free
+# if ! defined free && ! defined EXIT_SUCCESS
+void free (void *); /* INFRINGES ON USER NAME SPACE */
+# endif
+# endif
+# endif
+#endif /* ! defined yyoverflow || YYERROR_VERBOSE */
+
+
+#if (! defined yyoverflow \
+ && (! defined __cplusplus \
+ || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL)))
+
+/* A type that is properly aligned for any stack member. */
+union yyalloc
+{
+ yy_state_t yyss_alloc;
+ YYSTYPE yyvs_alloc;
+};
+
+/* The size of the maximum gap between one aligned stack and the next. */
+# define YYSTACK_GAP_MAXIMUM (YYSIZEOF (union yyalloc) - 1)
+
+/* The size of an array large enough to hold all stacks, each with
+ N elements. */
+# define YYSTACK_BYTES(N) \
+ ((N) * (YYSIZEOF (yy_state_t) + YYSIZEOF (YYSTYPE)) \
+ + YYSTACK_GAP_MAXIMUM)
+
+# define YYCOPY_NEEDED 1
+
+/* Relocate STACK from its old location to the new one. The
+ local variables YYSIZE and YYSTACKSIZE give the old and new number of
+ elements in the stack, and YYPTR gives the new location of the
+ stack. Advance YYPTR to a properly aligned location for the next
+ stack. */
+# define YYSTACK_RELOCATE(Stack_alloc, Stack) \
+ do \
+ { \
+ YYPTRDIFF_T yynewbytes; \
+ YYCOPY (&yyptr->Stack_alloc, Stack, yysize); \
+ Stack = &yyptr->Stack_alloc; \
+ yynewbytes = yystacksize * YYSIZEOF (*Stack) + YYSTACK_GAP_MAXIMUM; \
+ yyptr += yynewbytes / YYSIZEOF (*yyptr); \
+ } \
+ while (0)
+
+#endif
+
+#if defined YYCOPY_NEEDED && YYCOPY_NEEDED
+/* Copy COUNT objects from SRC to DST. The source and destination do
+ not overlap. */
+# ifndef YYCOPY
+# if defined __GNUC__ && 1 < __GNUC__
+# define YYCOPY(Dst, Src, Count) \
+ __builtin_memcpy (Dst, Src, YY_CAST (YYSIZE_T, (Count)) * sizeof (*(Src)))
+# else
+# define YYCOPY(Dst, Src, Count) \
+ do \
+ { \
+ YYPTRDIFF_T yyi; \
+ for (yyi = 0; yyi < (Count); yyi++) \
+ (Dst)[yyi] = (Src)[yyi]; \
+ } \
+ while (0)
+# endif
+# endif
+#endif /* !YYCOPY_NEEDED */
+
+/* YYFINAL -- State number of the termination state. */
+#define YYFINAL 42
+/* YYLAST -- Last index in YYTABLE. */
+#define YYLAST 242
+
+/* YYNTOKENS -- Number of terminals. */
+#define YYNTOKENS 51
+/* YYNNTS -- Number of nonterminals. */
+#define YYNNTS 68
+/* YYNRULES -- Number of rules. */
+#define YYNRULES 121
+/* YYNSTATES -- Number of states. */
+#define YYNSTATES 207
+
+#define YYUNDEFTOK 2
+#define YYMAXUTOK 305
+
+
+/* YYTRANSLATE(TOKEN-NUM) -- Symbol number corresponding to TOKEN-NUM
+ as returned by yylex, with out-of-bounds checking. */
+#define YYTRANSLATE(YYX) \
+ (0 <= (YYX) && (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK)
+
+/* YYTRANSLATE[TOKEN-NUM] -- Symbol number corresponding to TOKEN-NUM
+ as returned by yylex. */
+static const yytype_int8 yytranslate[] =
+{
+ 0, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 1, 2, 3, 4,
+ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+ 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+ 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50
+};
+
+#if YYDEBUG
+ /* YYRLINE[YYN] -- Source line where rule number YYN was defined. */
+static const yytype_int16 yyrline[] =
+{
+ 0, 145, 145, 147, 158, 160, 164, 166, 168, 168,
+ 168, 173, 176, 176, 178, 182, 185, 188, 191, 194,
+ 197, 199, 201, 201, 203, 203, 205, 205, 207, 207,
+ 207, 209, 209, 211, 213, 215, 215, 215, 217, 217,
+ 217, 219, 219, 219, 221, 221, 221, 223, 223, 223,
+ 225, 225, 225, 227, 227, 227, 227, 229, 232, 234,
+ 231, 243, 245, 247, 249, 256, 258, 258, 260, 260,
+ 262, 262, 264, 264, 266, 266, 268, 268, 270, 270,
+ 272, 272, 274, 274, 276, 276, 278, 278, 280, 280,
+ 282, 282, 294, 295, 299, 299, 299, 308, 310, 320,
+ 325, 326, 330, 332, 332, 341, 341, 343, 343, 345,
+ 345, 356, 357, 361, 363, 365, 367, 369, 371, 381,
+ 382, 382
+};
+#endif
+
+#if YYDEBUG || YYERROR_VERBOSE || 0
+/* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM.
+ First, the terminals, then, starting at YYNTOKENS, nonterminals. */
+static const char *const yytname[] =
+{
+ "$end", "error", "$undefined", "_BANG_t", "_BANG_EQUALS_t", "_AMPER_t",
+ "_AMPERAMPER_t", "_LPAREN_t", "_RPAREN_t", "_PLUS_EQUALS_t", "_COLON_t",
+ "_SEMIC_t", "_LANGLE_t", "_LANGLE_EQUALS_t", "_EQUALS_t", "_RANGLE_t",
+ "_RANGLE_EQUALS_t", "_QUESTION_EQUALS_t", "_LBRACKET_t", "_RBRACKET_t",
+ "ACTIONS_t", "BIND_t", "BREAK_t", "CASE_t", "CLASS_t", "CONTINUE_t",
+ "DEFAULT_t", "ELSE_t", "EXISTING_t", "FOR_t", "IF_t", "IGNORE_t", "IN_t",
+ "INCLUDE_t", "LOCAL_t", "MODULE_t", "ON_t", "PIECEMEAL_t", "QUIETLY_t",
+ "RETURN_t", "RULE_t", "SWITCH_t", "TOGETHER_t", "UPDATED_t", "WHILE_t",
+ "_LBRACE_t", "_BAR_t", "_BARBAR_t", "_RBRACE_t", "ARG", "STRING",
+ "$accept", "run", "block", "rules", "$@1", "$@2", "null",
+ "assign_list_opt", "$@3", "arglist_opt", "local_opt", "else_opt", "rule",
+ "$@4", "$@5", "$@6", "$@7", "$@8", "$@9", "$@10", "$@11", "$@12", "$@13",
+ "$@14", "$@15", "$@16", "$@17", "$@18", "$@19", "$@20", "$@21", "$@22",
+ "$@23", "$@24", "$@25", "$@26", "assign", "expr", "$@27", "$@28", "$@29",
+ "$@30", "$@31", "$@32", "$@33", "$@34", "$@35", "$@36", "$@37", "$@38",
+ "$@39", "cases", "case", "$@40", "$@41", "lol", "list", "listp", "arg",
+ "@42", "func", "$@43", "$@44", "$@45", "eflags", "eflag", "bindlist",
+ "$@46", YY_NULLPTR
+};
+#endif
+
+# ifdef YYPRINT
+/* YYTOKNUM[NUM] -- (External) token number corresponding to the
+ (internal) symbol number NUM (which must be that of a token). */
+static const yytype_int16 yytoknum[] =
+{
+ 0, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274,
+ 275, 276, 277, 278, 279, 280, 281, 282, 283, 284,
+ 285, 286, 287, 288, 289, 290, 291, 292, 293, 294,
+ 295, 296, 297, 298, 299, 300, 301, 302, 303, 304,
+ 305
+};
+# endif
+
+#define YYPACT_NINF (-119)
+
+#define yypact_value_is_default(Yyn) \
+ ((Yyn) == YYPACT_NINF)
+
+#define YYTABLE_NINF (-25)
+
+#define yytable_value_is_error(Yyn) \
+ 0
+
+ /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing
+ STATE-NUM. */
+static const yytype_int16 yypact[] =
+{
+ 140, -119, -119, 1, -119, 2, -18, -119, -119, -23,
+ -119, -9, -119, -119, -119, 140, 12, 31, -119, 4,
+ 140, 77, -17, 186, -119, -119, -119, -119, -7, 3,
+ -119, -119, -119, -119, 177, -119, -119, 3, -5, -119,
+ -119, -119, -119, -119, -119, -119, -119, -119, 33, -119,
+ -119, -9, -119, 29, -119, -119, -119, -119, -119, -119,
+ 35, -119, 14, 50, -9, 34, -119, -119, 23, 39,
+ 52, 53, 40, -119, 66, 45, 94, -119, 67, 30,
+ -119, -119, -119, 16, -119, -119, -119, 47, -119, -119,
+ -119, -119, 3, 3, -119, -119, -119, -119, -119, -119,
+ -119, -119, -119, -119, -119, -119, -119, -119, -119, 84,
+ -119, -119, -119, 51, -119, -119, 32, 105, -119, -119,
+ -119, -119, -119, 140, -119, -119, -119, 68, 3, 3,
+ 3, 3, 3, 3, 3, 3, 140, 3, 3, -119,
+ -119, -119, 140, 95, 140, 110, -119, -119, -119, -119,
+ -119, 69, 73, 87, -119, 89, 139, 139, -119, -119,
+ 89, -119, -119, 90, 226, 226, -119, -119, 140, 91,
+ -119, 97, 95, 98, -119, -119, -119, -119, -119, -119,
+ -119, -119, 108, -119, -119, 88, -119, -119, -119, 141,
+ 177, 145, 102, 140, 177, -119, 149, -119, -119, -119,
+ -119, 115, -119, -119, -119, 140, -119
+};
+
+ /* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM.
+ Performed when YYTABLE does not specify something else to do. Zero
+ means the default is an error. */
+static const yytype_int8 yydefact[] =
+{
+ 2, 103, 111, 0, 47, 0, 18, 41, 22, 8,
+ 44, 0, 31, 38, 50, 11, 102, 0, 3, 0,
+ 6, 0, 0, 0, 33, 100, 34, 17, 0, 0,
+ 100, 100, 100, 102, 18, 100, 100, 0, 0, 5,
+ 4, 100, 1, 53, 7, 62, 61, 63, 0, 28,
+ 26, 0, 105, 0, 118, 115, 117, 116, 114, 113,
+ 119, 112, 0, 97, 99, 0, 88, 90, 0, 65,
+ 0, 11, 0, 57, 0, 0, 51, 21, 0, 0,
+ 64, 100, 100, 0, 100, 104, 120, 0, 48, 100,
+ 101, 35, 0, 0, 68, 78, 80, 70, 72, 66,
+ 74, 76, 42, 82, 84, 86, 23, 12, 14, 0,
+ 45, 32, 39, 0, 25, 54, 0, 0, 109, 107,
+ 106, 100, 58, 11, 98, 100, 89, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 11, 0, 0, 100,
+ 100, 9, 11, 92, 11, 16, 29, 27, 100, 100,
+ 121, 0, 0, 0, 91, 69, 79, 81, 71, 73,
+ 67, 75, 77, 0, 83, 85, 87, 13, 11, 0,
+ 94, 0, 92, 0, 100, 55, 100, 110, 108, 59,
+ 49, 36, 20, 10, 46, 0, 40, 93, 52, 0,
+ 18, 0, 0, 11, 18, 43, 0, 15, 56, 30,
+ 60, 0, 19, 95, 37, 11, 96
+};
+
+ /* YYPGOTO[NTERM-NUM]. */
+static const yytype_int16 yypgoto[] =
+{
+ -119, -119, -118, 25, -119, -119, 96, -119, -119, -119,
+ 160, -119, -33, -119, -119, -119, -119, -119, -119, -119,
+ -119, -119, -119, -119, -119, -119, -119, -119, -119, -119,
+ -119, -119, -119, -119, -119, -119, 55, -4, -119, -119,
+ -119, -119, -119, -119, -119, -119, -119, -119, -119, -119,
+ -119, 5, -119, -119, -119, -27, -28, -119, 0, -119,
+ -119, -119, -119, -119, -119, -119, -119, -119
+};
+
+ /* YYDEFGOTO[NTERM-NUM]. */
+static const yytype_int16 yydefgoto[] =
+{
+ -1, 17, 38, 39, 31, 168, 40, 109, 140, 175,
+ 19, 195, 20, 30, 41, 82, 81, 176, 35, 125,
+ 193, 36, 143, 29, 136, 32, 142, 25, 123, 37,
+ 113, 79, 145, 190, 151, 192, 50, 68, 133, 128,
+ 131, 132, 134, 135, 129, 130, 137, 138, 139, 92,
+ 93, 171, 172, 185, 205, 62, 63, 64, 69, 22,
+ 53, 84, 149, 148, 23, 61, 87, 121
+};
+
+ /* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If
+ positive, shift that token. If negative, reduce the rule whose
+ number is the opposite. If YYTABLE_NINF, syntax error. */
+static const yytype_int16 yytable[] =
+{
+ 21, 73, 70, 71, 72, 152, 66, 74, 75, 1,
+ 67, 34, 24, 26, 78, 21, 27, -17, 163, 51,
+ 21, 1, -24, -24, 169, 18, 173, 94, 95, 96,
+ -24, 42, 52, 76, 21, 97, 98, 99, 100, 101,
+ 33, 45, 65, 77, 43, 44, 46, 80, 85, 47,
+ 183, 83, 33, 116, 117, 118, 86, 120, 48, 88,
+ 89, -24, 124, 106, 90, 119, 91, 107, 102, 103,
+ 104, 105, 94, 95, 96, 201, 154, 111, 114, 115,
+ 97, 98, 99, 100, 101, 110, 45, 206, 126, 127,
+ 112, 46, 122, 150, 47, 141, 144, 153, 94, 95,
+ 96, 97, 98, 48, 100, 101, 97, 98, 99, 100,
+ 101, 166, 167, 49, 103, 104, 147, 174, 170, 179,
+ 177, 180, 178, 21, 155, 156, 157, 158, 159, 160,
+ 161, 162, 181, 164, 165, 194, 21, 196, 182, 184,
+ 103, 104, 21, 94, 21, 186, 188, 189, 191, 197,
+ 200, 97, 98, 99, 100, 101, 199, 198, 1, 203,
+ 2, 202, 3, 204, 4, 5, 28, 108, 21, 6,
+ 7, 146, 0, 8, 9, 10, 11, 187, 0, 12,
+ -18, 13, 0, 0, 14, 15, 0, 0, 0, 16,
+ 21, 0, 0, 21, 21, 1, 0, 2, 0, 3,
+ 0, 4, 5, 0, 0, 21, 6, 7, 0, 0,
+ 8, 27, 10, 11, 54, 0, 12, 55, 13, 0,
+ 0, 14, 15, 56, 57, 0, 16, 0, 58, 59,
+ 94, 95, 96, 0, 0, 60, 0, 0, 97, 98,
+ 99, 100, 101
+};
+
+static const yytype_int16 yycheck[] =
+{
+ 0, 34, 30, 31, 32, 123, 3, 35, 36, 18,
+ 7, 11, 11, 11, 41, 15, 34, 40, 136, 36,
+ 20, 18, 10, 11, 142, 0, 144, 4, 5, 6,
+ 18, 0, 49, 37, 34, 12, 13, 14, 15, 16,
+ 49, 9, 49, 48, 40, 20, 14, 14, 19, 17,
+ 168, 51, 49, 81, 82, 39, 21, 84, 26, 45,
+ 10, 49, 89, 11, 64, 49, 32, 14, 45, 46,
+ 47, 32, 4, 5, 6, 193, 8, 11, 11, 49,
+ 12, 13, 14, 15, 16, 45, 9, 205, 92, 93,
+ 45, 14, 45, 121, 17, 11, 45, 125, 4, 5,
+ 6, 12, 13, 26, 15, 16, 12, 13, 14, 15,
+ 16, 139, 140, 36, 46, 47, 11, 7, 23, 50,
+ 148, 48, 149, 123, 128, 129, 130, 131, 132, 133,
+ 134, 135, 45, 137, 138, 27, 136, 49, 48, 48,
+ 46, 47, 142, 4, 144, 48, 48, 174, 176, 8,
+ 48, 12, 13, 14, 15, 16, 11, 190, 18, 10,
+ 20, 194, 22, 48, 24, 25, 6, 71, 168, 29,
+ 30, 116, -1, 33, 34, 35, 36, 172, -1, 39,
+ 40, 41, -1, -1, 44, 45, -1, -1, -1, 49,
+ 190, -1, -1, 193, 194, 18, -1, 20, -1, 22,
+ -1, 24, 25, -1, -1, 205, 29, 30, -1, -1,
+ 33, 34, 35, 36, 28, -1, 39, 31, 41, -1,
+ -1, 44, 45, 37, 38, -1, 49, -1, 42, 43,
+ 4, 5, 6, -1, -1, 49, -1, -1, 12, 13,
+ 14, 15, 16
+};
+
+ /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
+ symbol of state STATE-NUM. */
+static const yytype_int8 yystos[] =
+{
+ 0, 18, 20, 22, 24, 25, 29, 30, 33, 34,
+ 35, 36, 39, 41, 44, 45, 49, 52, 54, 61,
+ 63, 109, 110, 115, 11, 78, 11, 34, 61, 74,
+ 64, 55, 76, 49, 109, 69, 72, 80, 53, 54,
+ 57, 65, 0, 40, 54, 9, 14, 17, 26, 36,
+ 87, 36, 49, 111, 28, 31, 37, 38, 42, 43,
+ 49, 116, 106, 107, 108, 49, 3, 7, 88, 109,
+ 107, 107, 107, 63, 107, 107, 88, 48, 106, 82,
+ 14, 67, 66, 109, 112, 19, 21, 117, 45, 10,
+ 109, 32, 100, 101, 4, 5, 6, 12, 13, 14,
+ 15, 16, 45, 46, 47, 32, 11, 14, 57, 58,
+ 45, 11, 45, 81, 11, 49, 107, 107, 39, 49,
+ 106, 118, 45, 79, 106, 70, 88, 88, 90, 95,
+ 96, 91, 92, 89, 93, 94, 75, 97, 98, 99,
+ 59, 11, 77, 73, 45, 83, 87, 11, 114, 113,
+ 107, 85, 53, 107, 8, 88, 88, 88, 88, 88,
+ 88, 88, 88, 53, 88, 88, 107, 107, 56, 53,
+ 23, 102, 103, 53, 7, 60, 68, 107, 106, 50,
+ 48, 45, 48, 53, 48, 104, 48, 102, 48, 106,
+ 84, 107, 86, 71, 27, 62, 49, 8, 63, 11,
+ 48, 53, 63, 10, 48, 105, 53
+};
+
+ /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */
+static const yytype_int8 yyr1[] =
+{
+ 0, 51, 52, 52, 53, 53, 54, 54, 55, 56,
+ 54, 57, 59, 58, 58, 60, 60, 61, 61, 62,
+ 62, 63, 64, 63, 65, 63, 66, 63, 67, 68,
+ 63, 69, 63, 63, 63, 70, 71, 63, 72, 73,
+ 63, 74, 75, 63, 76, 77, 63, 78, 79, 63,
+ 80, 81, 63, 82, 83, 84, 63, 63, 85, 86,
+ 63, 87, 87, 87, 87, 88, 89, 88, 90, 88,
+ 91, 88, 92, 88, 93, 88, 94, 88, 95, 88,
+ 96, 88, 97, 88, 98, 88, 99, 88, 100, 88,
+ 101, 88, 102, 102, 104, 105, 103, 106, 106, 107,
+ 108, 108, 109, 110, 109, 112, 111, 113, 111, 114,
+ 111, 115, 115, 116, 116, 116, 116, 116, 116, 117,
+ 118, 117
+};
+
+ /* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. */
+static const yytype_int8 yyr2[] =
+{
+ 0, 2, 0, 1, 1, 1, 1, 2, 0, 0,
+ 7, 0, 0, 3, 1, 3, 0, 1, 0, 2,
+ 0, 3, 0, 4, 0, 4, 0, 5, 0, 0,
+ 8, 0, 4, 2, 2, 0, 0, 10, 0, 0,
+ 7, 0, 0, 8, 0, 0, 7, 0, 0, 7,
+ 0, 0, 7, 0, 0, 0, 8, 3, 0, 0,
+ 9, 1, 1, 1, 2, 1, 0, 4, 0, 4,
+ 0, 4, 0, 4, 0, 4, 0, 4, 0, 4,
+ 0, 4, 0, 4, 0, 4, 0, 4, 0, 3,
+ 0, 4, 0, 2, 0, 0, 6, 1, 3, 1,
+ 0, 2, 1, 0, 4, 0, 3, 0, 5, 0,
+ 5, 0, 2, 1, 1, 1, 1, 1, 1, 0,
+ 0, 3
+};
+
+
+#define yyerrok (yyerrstatus = 0)
+#define yyclearin (yychar = YYEMPTY)
+#define YYEMPTY (-2)
+#define YYEOF 0
+
+#define YYACCEPT goto yyacceptlab
+#define YYABORT goto yyabortlab
+#define YYERROR goto yyerrorlab
+
+
+#define YYRECOVERING() (!!yyerrstatus)
+
+#define YYBACKUP(Token, Value) \
+ do \
+ if (yychar == YYEMPTY) \
+ { \
+ yychar = (Token); \
+ yylval = (Value); \
+ YYPOPSTACK (yylen); \
+ yystate = *yyssp; \
+ goto yybackup; \
+ } \
+ else \
+ { \
+ yyerror (YY_("syntax error: cannot back up")); \
+ YYERROR; \
+ } \
+ while (0)
+
+/* Error token number */
+#define YYTERROR 1
+#define YYERRCODE 256
+
+
+
+/* Enable debugging if requested. */
+#if YYDEBUG
+
+# ifndef YYFPRINTF
+# include <stdio.h> /* INFRINGES ON USER NAME SPACE */
+# define YYFPRINTF fprintf
+# endif
+
+# define YYDPRINTF(Args) \
+do { \
+ if (yydebug) \
+ YYFPRINTF Args; \
+} while (0)
+
+/* This macro is provided for backward compatibility. */
+#ifndef YY_LOCATION_PRINT
+# define YY_LOCATION_PRINT(File, Loc) ((void) 0)
+#endif
+
+
+# define YY_SYMBOL_PRINT(Title, Type, Value, Location) \
+do { \
+ if (yydebug) \
+ { \
+ YYFPRINTF (stderr, "%s ", Title); \
+ yy_symbol_print (stderr, \
+ Type, Value); \
+ YYFPRINTF (stderr, "\n"); \
+ } \
+} while (0)
+
+
+/*-----------------------------------.
+| Print this symbol's value on YYO. |
+`-----------------------------------*/
+
+static void
+yy_symbol_value_print (FILE *yyo, int yytype, YYSTYPE const * const yyvaluep)
+{
+ FILE *yyoutput = yyo;
+ YYUSE (yyoutput);
+ if (!yyvaluep)
+ return;
+# ifdef YYPRINT
+ if (yytype < YYNTOKENS)
+ YYPRINT (yyo, yytoknum[yytype], *yyvaluep);
+# endif
+ YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
+ YYUSE (yytype);
+ YY_IGNORE_MAYBE_UNINITIALIZED_END
+}
+
+
+/*---------------------------.
+| Print this symbol on YYO. |
+`---------------------------*/
+
+static void
+yy_symbol_print (FILE *yyo, int yytype, YYSTYPE const * const yyvaluep)
+{
+ YYFPRINTF (yyo, "%s %s (",
+ yytype < YYNTOKENS ? "token" : "nterm", yytname[yytype]);
+
+ yy_symbol_value_print (yyo, yytype, yyvaluep);
+ YYFPRINTF (yyo, ")");
+}
+
+/*------------------------------------------------------------------.
+| yy_stack_print -- Print the state stack from its BOTTOM up to its |
+| TOP (included). |
+`------------------------------------------------------------------*/
+
+static void
+yy_stack_print (yy_state_t *yybottom, yy_state_t *yytop)
+{
+ YYFPRINTF (stderr, "Stack now");
+ for (; yybottom <= yytop; yybottom++)
+ {
+ int yybot = *yybottom;
+ YYFPRINTF (stderr, " %d", yybot);
+ }
+ YYFPRINTF (stderr, "\n");
+}
+
+# define YY_STACK_PRINT(Bottom, Top) \
+do { \
+ if (yydebug) \
+ yy_stack_print ((Bottom), (Top)); \
+} while (0)
+
+
+/*------------------------------------------------.
+| Report that the YYRULE is going to be reduced. |
+`------------------------------------------------*/
+
+static void
+yy_reduce_print (yy_state_t *yyssp, YYSTYPE *yyvsp, int yyrule)
+{
+ int yylno = yyrline[yyrule];
+ int yynrhs = yyr2[yyrule];
+ int yyi;
+ YYFPRINTF (stderr, "Reducing stack by rule %d (line %d):\n",
+ yyrule - 1, yylno);
+ /* The symbols being reduced. */
+ for (yyi = 0; yyi < yynrhs; yyi++)
+ {
+ YYFPRINTF (stderr, " $%d = ", yyi + 1);
+ yy_symbol_print (stderr,
+ yystos[+yyssp[yyi + 1 - yynrhs]],
+ &yyvsp[(yyi + 1) - (yynrhs)]
+ );
+ YYFPRINTF (stderr, "\n");
+ }
+}
+
+# define YY_REDUCE_PRINT(Rule) \
+do { \
+ if (yydebug) \
+ yy_reduce_print (yyssp, yyvsp, Rule); \
+} while (0)
+
+/* Nonzero means print parse trace. It is left uninitialized so that
+ multiple parsers can coexist. */
+int yydebug;
+#else /* !YYDEBUG */
+# define YYDPRINTF(Args)
+# define YY_SYMBOL_PRINT(Title, Type, Value, Location)
+# define YY_STACK_PRINT(Bottom, Top)
+# define YY_REDUCE_PRINT(Rule)
+#endif /* !YYDEBUG */
+
+
+/* YYINITDEPTH -- initial size of the parser's stacks. */
+#ifndef YYINITDEPTH
+# define YYINITDEPTH 200
+#endif
+
+/* YYMAXDEPTH -- maximum size the stacks can grow to (effective only
+ if the built-in stack extension method is used).
+
+ Do not make this value too large; the results are undefined if
+ YYSTACK_ALLOC_MAXIMUM < YYSTACK_BYTES (YYMAXDEPTH)
+ evaluated with infinite-precision integer arithmetic. */
+
+#ifndef YYMAXDEPTH
+# define YYMAXDEPTH 10000
+#endif
+
+
+#if YYERROR_VERBOSE
+
+# ifndef yystrlen
+# if defined __GLIBC__ && defined _STRING_H
+# define yystrlen(S) (YY_CAST (YYPTRDIFF_T, strlen (S)))
+# else
+/* Return the length of YYSTR. */
+static YYPTRDIFF_T
+yystrlen (const char *yystr)
+{
+ YYPTRDIFF_T yylen;
+ for (yylen = 0; yystr[yylen]; yylen++)
+ continue;
+ return yylen;
+}
+# endif
+# endif
+
+# ifndef yystpcpy
+# if defined __GLIBC__ && defined _STRING_H && defined _GNU_SOURCE
+# define yystpcpy stpcpy
+# else
+/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in
+ YYDEST. */
+static char *
+yystpcpy (char *yydest, const char *yysrc)
+{
+ char *yyd = yydest;
+ const char *yys = yysrc;
+
+ while ((*yyd++ = *yys++) != '\0')
+ continue;
+
+ return yyd - 1;
+}
+# endif
+# endif
+
+# ifndef yytnamerr
+/* Copy to YYRES the contents of YYSTR after stripping away unnecessary
+ quotes and backslashes, so that it's suitable for yyerror. The
+ heuristic is that double-quoting is unnecessary unless the string
+ contains an apostrophe, a comma, or backslash (other than
+ backslash-backslash). YYSTR is taken from yytname. If YYRES is
+ null, do not copy; instead, return the length of what the result
+ would have been. */
+static YYPTRDIFF_T
+yytnamerr (char *yyres, const char *yystr)
+{
+ if (*yystr == '"')
+ {
+ YYPTRDIFF_T yyn = 0;
+ char const *yyp = yystr;
+
+ for (;;)
+ switch (*++yyp)
+ {
+ case '\'':
+ case ',':
+ goto do_not_strip_quotes;
+
+ case '\\':
+ if (*++yyp != '\\')
+ goto do_not_strip_quotes;
+ else
+ goto append;
+
+ append:
+ default:
+ if (yyres)
+ yyres[yyn] = *yyp;
+ yyn++;
+ break;
+
+ case '"':
+ if (yyres)
+ yyres[yyn] = '\0';
+ return yyn;
+ }
+ do_not_strip_quotes: ;
+ }
+
+ if (yyres)
+ return yystpcpy (yyres, yystr) - yyres;
+ else
+ return yystrlen (yystr);
+}
+# endif
+
+/* Copy into *YYMSG, which is of size *YYMSG_ALLOC, an error message
+ about the unexpected token YYTOKEN for the state stack whose top is
+ YYSSP.
+
+ Return 0 if *YYMSG was successfully written. Return 1 if *YYMSG is
+ not large enough to hold the message. In that case, also set
+ *YYMSG_ALLOC to the required number of bytes. Return 2 if the
+ required number of bytes is too large to store. */
+static int
+yysyntax_error (YYPTRDIFF_T *yymsg_alloc, char **yymsg,
+ yy_state_t *yyssp, int yytoken)
+{
+ enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 };
+ /* Internationalized format string. */
+ const char *yyformat = YY_NULLPTR;
+ /* Arguments of yyformat: reported tokens (one for the "unexpected",
+ one per "expected"). */
+ char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM];
+ /* Actual size of YYARG. */
+ int yycount = 0;
+ /* Cumulated lengths of YYARG. */
+ YYPTRDIFF_T yysize = 0;
+
+ /* There are many possibilities here to consider:
+ - If this state is a consistent state with a default action, then
+ the only way this function was invoked is if the default action
+ is an error action. In that case, don't check for expected
+ tokens because there are none.
+ - The only way there can be no lookahead present (in yychar) is if
+ this state is a consistent state with a default action. Thus,
+ detecting the absence of a lookahead is sufficient to determine
+ that there is no unexpected or expected token to report. In that
+ case, just report a simple "syntax error".
+ - Don't assume there isn't a lookahead just because this state is a
+ consistent state with a default action. There might have been a
+ previous inconsistent state, consistent state with a non-default
+ action, or user semantic action that manipulated yychar.
+ - Of course, the expected token list depends on states to have
+ correct lookahead information, and it depends on the parser not
+ to perform extra reductions after fetching a lookahead from the
+ scanner and before detecting a syntax error. Thus, state merging
+ (from LALR or IELR) and default reductions corrupt the expected
+ token list. However, the list is correct for canonical LR with
+ one exception: it will still contain any token that will not be
+ accepted due to an error action in a later state.
+ */
+ if (yytoken != YYEMPTY)
+ {
+ int yyn = yypact[+*yyssp];
+ YYPTRDIFF_T yysize0 = yytnamerr (YY_NULLPTR, yytname[yytoken]);
+ yysize = yysize0;
+ yyarg[yycount++] = yytname[yytoken];
+ if (!yypact_value_is_default (yyn))
+ {
+ /* Start YYX at -YYN if negative to avoid negative indexes in
+ YYCHECK. In other words, skip the first -YYN actions for
+ this state because they are default actions. */
+ int yyxbegin = yyn < 0 ? -yyn : 0;
+ /* Stay within bounds of both yycheck and yytname. */
+ int yychecklim = YYLAST - yyn + 1;
+ int yyxend = yychecklim < YYNTOKENS ? yychecklim : YYNTOKENS;
+ int yyx;
+
+ for (yyx = yyxbegin; yyx < yyxend; ++yyx)
+ if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR
+ && !yytable_value_is_error (yytable[yyx + yyn]))
+ {
+ if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM)
+ {
+ yycount = 1;
+ yysize = yysize0;
+ break;
+ }
+ yyarg[yycount++] = yytname[yyx];
+ {
+ YYPTRDIFF_T yysize1
+ = yysize + yytnamerr (YY_NULLPTR, yytname[yyx]);
+ if (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM)
+ yysize = yysize1;
+ else
+ return 2;
+ }
+ }
+ }
+ }
+
+ switch (yycount)
+ {
+# define YYCASE_(N, S) \
+ case N: \
+ yyformat = S; \
+ break
+ default: /* Avoid compiler warnings. */
+ YYCASE_(0, YY_("syntax error"));
+ YYCASE_(1, YY_("syntax error, unexpected %s"));
+ YYCASE_(2, YY_("syntax error, unexpected %s, expecting %s"));
+ YYCASE_(3, YY_("syntax error, unexpected %s, expecting %s or %s"));
+ YYCASE_(4, YY_("syntax error, unexpected %s, expecting %s or %s or %s"));
+ YYCASE_(5, YY_("syntax error, unexpected %s, expecting %s or %s or %s or %s"));
+# undef YYCASE_
+ }
+
+ {
+ /* Don't count the "%s"s in the final size, but reserve room for
+ the terminator. */
+ YYPTRDIFF_T yysize1 = yysize + (yystrlen (yyformat) - 2 * yycount) + 1;
+ if (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM)
+ yysize = yysize1;
+ else
+ return 2;
+ }
+
+ if (*yymsg_alloc < yysize)
+ {
+ *yymsg_alloc = 2 * yysize;
+ if (! (yysize <= *yymsg_alloc
+ && *yymsg_alloc <= YYSTACK_ALLOC_MAXIMUM))
+ *yymsg_alloc = YYSTACK_ALLOC_MAXIMUM;
+ return 1;
+ }
+
+ /* Avoid sprintf, as that infringes on the user's name space.
+ Don't have undefined behavior even if the translation
+ produced a string with the wrong number of "%s"s. */
+ {
+ char *yyp = *yymsg;
+ int yyi = 0;
+ while ((*yyp = *yyformat) != '\0')
+ if (*yyp == '%' && yyformat[1] == 's' && yyi < yycount)
+ {
+ yyp += yytnamerr (yyp, yyarg[yyi++]);
+ yyformat += 2;
+ }
+ else
+ {
+ ++yyp;
+ ++yyformat;
+ }
+ }
+ return 0;
+}
+#endif /* YYERROR_VERBOSE */
+
+/*-----------------------------------------------.
+| Release the memory associated to this symbol. |
+`-----------------------------------------------*/
+
+static void
+yydestruct (const char *yymsg, int yytype, YYSTYPE *yyvaluep)
+{
+ YYUSE (yyvaluep);
+ if (!yymsg)
+ yymsg = "Deleting";
+ YY_SYMBOL_PRINT (yymsg, yytype, yyvaluep, yylocationp);
+
+ YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
+ YYUSE (yytype);
+ YY_IGNORE_MAYBE_UNINITIALIZED_END
+}
+
+
+
+
+/* The lookahead symbol. */
+int yychar;
+
+/* The semantic value of the lookahead symbol. */
+YYSTYPE yylval;
+/* Number of syntax errors so far. */
+int yynerrs;
+
+
+/*----------.
+| yyparse. |
+`----------*/
+
+int
+yyparse (void)
+{
+ yy_state_fast_t yystate;
+ /* Number of tokens to shift before error messages enabled. */
+ int yyerrstatus;
+
+ /* The stacks and their tools:
+ 'yyss': related to states.
+ 'yyvs': related to semantic values.
+
+ Refer to the stacks through separate pointers, to allow yyoverflow
+ to reallocate them elsewhere. */
+
+ /* The state stack. */
+ yy_state_t yyssa[YYINITDEPTH];
+ yy_state_t *yyss;
+ yy_state_t *yyssp;
+
+ /* The semantic value stack. */
+ YYSTYPE yyvsa[YYINITDEPTH];
+ YYSTYPE *yyvs;
+ YYSTYPE *yyvsp;
+
+ YYPTRDIFF_T yystacksize;
+
+ int yyn;
+ int yyresult;
+ /* Lookahead token as an internal (translated) token number. */
+ int yytoken = 0;
+ /* The variables used to return semantic value and location from the
+ action routines. */
+ YYSTYPE yyval;
+
+#if YYERROR_VERBOSE
+ /* Buffer for error messages, and its allocated size. */
+ char yymsgbuf[128];
+ char *yymsg = yymsgbuf;
+ YYPTRDIFF_T yymsg_alloc = sizeof yymsgbuf;
+#endif
+
+#define YYPOPSTACK(N) (yyvsp -= (N), yyssp -= (N))
+
+ /* The number of symbols on the RHS of the reduced rule.
+ Keep to zero when no symbol should be popped. */
+ int yylen = 0;
+
+ yyssp = yyss = yyssa;
+ yyvsp = yyvs = yyvsa;
+ yystacksize = YYINITDEPTH;
+
+ YYDPRINTF ((stderr, "Starting parse\n"));
+
+ yystate = 0;
+ yyerrstatus = 0;
+ yynerrs = 0;
+ yychar = YYEMPTY; /* Cause a token to be read. */
+ goto yysetstate;
+
+
+/*------------------------------------------------------------.
+| yynewstate -- push a new state, which is found in yystate. |
+`------------------------------------------------------------*/
+yynewstate:
+ /* In all cases, when you get here, the value and location stacks
+ have just been pushed. So pushing a state here evens the stacks. */
+ yyssp++;
+
+
+/*--------------------------------------------------------------------.
+| yysetstate -- set current state (the top of the stack) to yystate. |
+`--------------------------------------------------------------------*/
+yysetstate:
+ YYDPRINTF ((stderr, "Entering state %d\n", yystate));
+ YY_ASSERT (0 <= yystate && yystate < YYNSTATES);
+ YY_IGNORE_USELESS_CAST_BEGIN
+ *yyssp = YY_CAST (yy_state_t, yystate);
+ YY_IGNORE_USELESS_CAST_END
+
+ if (yyss + yystacksize - 1 <= yyssp)
+#if !defined yyoverflow && !defined YYSTACK_RELOCATE
+ goto yyexhaustedlab;
+#else
+ {
+ /* Get the current used size of the three stacks, in elements. */
+ YYPTRDIFF_T yysize = yyssp - yyss + 1;
+
+# if defined yyoverflow
+ {
+ /* Give user a chance to reallocate the stack. Use copies of
+ these so that the &'s don't force the real ones into
+ memory. */
+ yy_state_t *yyss1 = yyss;
+ YYSTYPE *yyvs1 = yyvs;
+
+ /* Each stack pointer address is followed by the size of the
+ data in use in that stack, in bytes. This used to be a
+ conditional around just the two extra args, but that might
+ be undefined if yyoverflow is a macro. */
+ yyoverflow (YY_("memory exhausted"),
+ &yyss1, yysize * YYSIZEOF (*yyssp),
+ &yyvs1, yysize * YYSIZEOF (*yyvsp),
+ &yystacksize);
+ yyss = yyss1;
+ yyvs = yyvs1;
+ }
+# else /* defined YYSTACK_RELOCATE */
+ /* Extend the stack our own way. */
+ if (YYMAXDEPTH <= yystacksize)
+ goto yyexhaustedlab;
+ yystacksize *= 2;
+ if (YYMAXDEPTH < yystacksize)
+ yystacksize = YYMAXDEPTH;
+
+ {
+ yy_state_t *yyss1 = yyss;
+ union yyalloc *yyptr =
+ YY_CAST (union yyalloc *,
+ YYSTACK_ALLOC (YY_CAST (YYSIZE_T, YYSTACK_BYTES (yystacksize))));
+ if (! yyptr)
+ goto yyexhaustedlab;
+ YYSTACK_RELOCATE (yyss_alloc, yyss);
+ YYSTACK_RELOCATE (yyvs_alloc, yyvs);
+# undef YYSTACK_RELOCATE
+ if (yyss1 != yyssa)
+ YYSTACK_FREE (yyss1);
+ }
+# endif
+
+ yyssp = yyss + yysize - 1;
+ yyvsp = yyvs + yysize - 1;
+
+ YY_IGNORE_USELESS_CAST_BEGIN
+ YYDPRINTF ((stderr, "Stack size increased to %ld\n",
+ YY_CAST (long, yystacksize)));
+ YY_IGNORE_USELESS_CAST_END
+
+ if (yyss + yystacksize - 1 <= yyssp)
+ YYABORT;
+ }
+#endif /* !defined yyoverflow && !defined YYSTACK_RELOCATE */
+
+ if (yystate == YYFINAL)
+ YYACCEPT;
+
+ goto yybackup;
+
+
+/*-----------.
+| yybackup. |
+`-----------*/
+yybackup:
+ /* Do appropriate processing given the current state. Read a
+ lookahead token if we need one and don't already have one. */
+
+ /* First try to decide what to do without reference to lookahead token. */
+ yyn = yypact[yystate];
+ if (yypact_value_is_default (yyn))
+ goto yydefault;
+
+  /* Not known => get a lookahead token if we don't already have one.  */
+
+ /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */
+ if (yychar == YYEMPTY)
+ {
+ YYDPRINTF ((stderr, "Reading a token: "));
+ yychar = yylex ();
+ }
+
+ if (yychar <= YYEOF)
+ {
+ yychar = yytoken = YYEOF;
+ YYDPRINTF ((stderr, "Now at end of input.\n"));
+ }
+ else
+ {
+ yytoken = YYTRANSLATE (yychar);
+ YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc);
+ }
+
+ /* If the proper action on seeing token YYTOKEN is to reduce or to
+ detect an error, take that action. */
+ yyn += yytoken;
+ if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken)
+ goto yydefault;
+ yyn = yytable[yyn];
+ if (yyn <= 0)
+ {
+ if (yytable_value_is_error (yyn))
+ goto yyerrlab;
+ yyn = -yyn;
+ goto yyreduce;
+ }
+
+ /* Count tokens shifted since error; after three, turn off error
+ status. */
+ if (yyerrstatus)
+ yyerrstatus--;
+
+ /* Shift the lookahead token. */
+ YY_SYMBOL_PRINT ("Shifting", yytoken, &yylval, &yylloc);
+ yystate = yyn;
+ YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
+ *++yyvsp = yylval;
+ YY_IGNORE_MAYBE_UNINITIALIZED_END
+
+ /* Discard the shifted token. */
+ yychar = YYEMPTY;
+ goto yynewstate;
+
+
+/*-----------------------------------------------------------.
+| yydefault -- do the default action for the current state. |
+`-----------------------------------------------------------*/
+yydefault:
+ yyn = yydefact[yystate];
+ if (yyn == 0)
+ goto yyerrlab;
+ goto yyreduce;
+
+
+/*-----------------------------.
+| yyreduce -- do a reduction. |
+`-----------------------------*/
+yyreduce:
+ /* yyn is the number of a rule to reduce with. */
+ yylen = yyr2[yyn];
+
+ /* If YYLEN is nonzero, implement the default value of the action:
+ '$$ = $1'.
+
+ Otherwise, the following line sets YYVAL to garbage.
+ This behavior is undocumented and Bison
+ users should not rely upon it. Assigning to YYVAL
+ unconditionally makes the parser a bit smaller, and it avoids a
+ GCC warning that YYVAL may be used uninitialized. */
+ yyval = yyvsp[1-yylen];
+
+
+ YY_REDUCE_PRINT (yyn);
+ switch (yyn)
+ {
+ case 3:
+#line 148 "src/engine/jamgram.y"
+ { parse_save( yyvsp[0].parse ); }
+#line 1606 "src/engine/jamgram.cpp"
+ break;
+
+ case 4:
+#line 159 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+#line 1612 "src/engine/jamgram.cpp"
+ break;
+
+ case 5:
+#line 161 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+#line 1618 "src/engine/jamgram.cpp"
+ break;
+
+ case 6:
+#line 165 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+#line 1624 "src/engine/jamgram.cpp"
+ break;
+
+ case 7:
+#line 167 "src/engine/jamgram.y"
+ { yyval.parse = prules( yyvsp[-1].parse, yyvsp[0].parse ); }
+#line 1630 "src/engine/jamgram.cpp"
+ break;
+
+ case 8:
+#line 168 "src/engine/jamgram.y"
+ { yymode( SCAN_ASSIGN ); }
+#line 1636 "src/engine/jamgram.cpp"
+ break;
+
+ case 9:
+#line 168 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1642 "src/engine/jamgram.cpp"
+ break;
+
+ case 10:
+#line 169 "src/engine/jamgram.y"
+ { yyval.parse = plocal( yyvsp[-4].parse, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 1648 "src/engine/jamgram.cpp"
+ break;
+
+ case 11:
+#line 173 "src/engine/jamgram.y"
+ { yyval.parse = pnull(); }
+#line 1654 "src/engine/jamgram.cpp"
+ break;
+
+ case 12:
+#line 176 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1660 "src/engine/jamgram.cpp"
+ break;
+
+ case 13:
+#line 177 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_SET; }
+#line 1666 "src/engine/jamgram.cpp"
+ break;
+
+ case 14:
+#line 179 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_APPEND; }
+#line 1672 "src/engine/jamgram.cpp"
+ break;
+
+ case 15:
+#line 183 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+#line 1678 "src/engine/jamgram.cpp"
+ break;
+
+ case 16:
+#line 185 "src/engine/jamgram.y"
+ { yyval.parse = P0; }
+#line 1684 "src/engine/jamgram.cpp"
+ break;
+
+ case 17:
+#line 189 "src/engine/jamgram.y"
+ { yyval.number = 1; }
+#line 1690 "src/engine/jamgram.cpp"
+ break;
+
+ case 18:
+#line 191 "src/engine/jamgram.y"
+ { yyval.number = 0; }
+#line 1696 "src/engine/jamgram.cpp"
+ break;
+
+ case 19:
+#line 195 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+#line 1702 "src/engine/jamgram.cpp"
+ break;
+
+ case 20:
+#line 197 "src/engine/jamgram.y"
+ { yyval.parse = pnull(); }
+#line 1708 "src/engine/jamgram.cpp"
+ break;
+
+ case 21:
+#line 200 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+#line 1714 "src/engine/jamgram.cpp"
+ break;
+
+ case 22:
+#line 201 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1720 "src/engine/jamgram.cpp"
+ break;
+
+ case 23:
+#line 202 "src/engine/jamgram.y"
+ { yyval.parse = pincl( yyvsp[-1].parse ); yymode( SCAN_NORMAL ); }
+#line 1726 "src/engine/jamgram.cpp"
+ break;
+
+ case 24:
+#line 203 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1732 "src/engine/jamgram.cpp"
+ break;
+
+ case 25:
+#line 204 "src/engine/jamgram.y"
+ { yyval.parse = prule( yyvsp[-3].string, yyvsp[-1].parse ); yymode( SCAN_NORMAL ); }
+#line 1738 "src/engine/jamgram.cpp"
+ break;
+
+ case 26:
+#line 205 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1744 "src/engine/jamgram.cpp"
+ break;
+
+ case 27:
+#line 206 "src/engine/jamgram.y"
+ { yyval.parse = pset( yyvsp[-4].parse, yyvsp[-1].parse, yyvsp[-3].number ); yymode( SCAN_NORMAL ); }
+#line 1750 "src/engine/jamgram.cpp"
+ break;
+
+ case 28:
+#line 207 "src/engine/jamgram.y"
+ { yymode( SCAN_ASSIGN ); }
+#line 1756 "src/engine/jamgram.cpp"
+ break;
+
+ case 29:
+#line 207 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1762 "src/engine/jamgram.cpp"
+ break;
+
+ case 30:
+#line 208 "src/engine/jamgram.y"
+ { yyval.parse = pset1( yyvsp[-7].parse, yyvsp[-4].parse, yyvsp[-1].parse, yyvsp[-3].number ); yymode( SCAN_NORMAL ); }
+#line 1768 "src/engine/jamgram.cpp"
+ break;
+
+ case 31:
+#line 209 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1774 "src/engine/jamgram.cpp"
+ break;
+
+ case 32:
+#line 210 "src/engine/jamgram.y"
+ { yyval.parse = preturn( yyvsp[-1].parse ); yymode( SCAN_NORMAL ); }
+#line 1780 "src/engine/jamgram.cpp"
+ break;
+
+ case 33:
+#line 212 "src/engine/jamgram.y"
+ { yyval.parse = pbreak(); }
+#line 1786 "src/engine/jamgram.cpp"
+ break;
+
+ case 34:
+#line 214 "src/engine/jamgram.y"
+ { yyval.parse = pcontinue(); }
+#line 1792 "src/engine/jamgram.cpp"
+ break;
+
+ case 35:
+#line 215 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1798 "src/engine/jamgram.cpp"
+ break;
+
+ case 36:
+#line 215 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1804 "src/engine/jamgram.cpp"
+ break;
+
+ case 37:
+#line 216 "src/engine/jamgram.y"
+ { yyval.parse = pfor( yyvsp[-7].string, yyvsp[-4].parse, yyvsp[-1].parse, yyvsp[-8].number ); }
+#line 1810 "src/engine/jamgram.cpp"
+ break;
+
+ case 38:
+#line 217 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1816 "src/engine/jamgram.cpp"
+ break;
+
+ case 39:
+#line 217 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1822 "src/engine/jamgram.cpp"
+ break;
+
+ case 40:
+#line 218 "src/engine/jamgram.y"
+ { yyval.parse = pswitch( yyvsp[-4].parse, yyvsp[-1].parse ); }
+#line 1828 "src/engine/jamgram.cpp"
+ break;
+
+ case 41:
+#line 219 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 1834 "src/engine/jamgram.cpp"
+ break;
+
+ case 42:
+#line 219 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1840 "src/engine/jamgram.cpp"
+ break;
+
+ case 43:
+#line 220 "src/engine/jamgram.y"
+ { yyval.parse = pif( yyvsp[-5].parse, yyvsp[-2].parse, yyvsp[0].parse ); }
+#line 1846 "src/engine/jamgram.cpp"
+ break;
+
+ case 44:
+#line 221 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1852 "src/engine/jamgram.cpp"
+ break;
+
+ case 45:
+#line 221 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1858 "src/engine/jamgram.cpp"
+ break;
+
+ case 46:
+#line 222 "src/engine/jamgram.y"
+ { yyval.parse = pmodule( yyvsp[-4].parse, yyvsp[-1].parse ); }
+#line 1864 "src/engine/jamgram.cpp"
+ break;
+
+ case 47:
+#line 223 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1870 "src/engine/jamgram.cpp"
+ break;
+
+ case 48:
+#line 223 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1876 "src/engine/jamgram.cpp"
+ break;
+
+ case 49:
+#line 224 "src/engine/jamgram.y"
+ { yyval.parse = pclass( yyvsp[-4].parse, yyvsp[-1].parse ); }
+#line 1882 "src/engine/jamgram.cpp"
+ break;
+
+ case 50:
+#line 225 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 1888 "src/engine/jamgram.cpp"
+ break;
+
+ case 51:
+#line 225 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1894 "src/engine/jamgram.cpp"
+ break;
+
+ case 52:
+#line 226 "src/engine/jamgram.y"
+ { yyval.parse = pwhile( yyvsp[-4].parse, yyvsp[-1].parse ); }
+#line 1900 "src/engine/jamgram.cpp"
+ break;
+
+ case 53:
+#line 227 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 1906 "src/engine/jamgram.cpp"
+ break;
+
+ case 54:
+#line 227 "src/engine/jamgram.y"
+ { yymode( SCAN_PARAMS ); }
+#line 1912 "src/engine/jamgram.cpp"
+ break;
+
+ case 55:
+#line 227 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1918 "src/engine/jamgram.cpp"
+ break;
+
+ case 56:
+#line 228 "src/engine/jamgram.y"
+ { yyval.parse = psetc( yyvsp[-4].string, yyvsp[0].parse, yyvsp[-2].parse, yyvsp[-7].number ); }
+#line 1924 "src/engine/jamgram.cpp"
+ break;
+
+ case 57:
+#line 230 "src/engine/jamgram.y"
+ { yyval.parse = pon( yyvsp[-1].parse, yyvsp[0].parse ); }
+#line 1930 "src/engine/jamgram.cpp"
+ break;
+
+ case 58:
+#line 232 "src/engine/jamgram.y"
+ { yymode( SCAN_STRING ); }
+#line 1936 "src/engine/jamgram.cpp"
+ break;
+
+ case 59:
+#line 234 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 1942 "src/engine/jamgram.cpp"
+ break;
+
+ case 60:
+#line 236 "src/engine/jamgram.y"
+ { yyval.parse = psete( yyvsp[-6].string,yyvsp[-5].parse,yyvsp[-2].string,yyvsp[-7].number ); }
+#line 1948 "src/engine/jamgram.cpp"
+ break;
+
+ case 61:
+#line 244 "src/engine/jamgram.y"
+ { yyval.number = ASSIGN_SET; }
+#line 1954 "src/engine/jamgram.cpp"
+ break;
+
+ case 62:
+#line 246 "src/engine/jamgram.y"
+ { yyval.number = ASSIGN_APPEND; }
+#line 1960 "src/engine/jamgram.cpp"
+ break;
+
+ case 63:
+#line 248 "src/engine/jamgram.y"
+ { yyval.number = ASSIGN_DEFAULT; }
+#line 1966 "src/engine/jamgram.cpp"
+ break;
+
+ case 64:
+#line 250 "src/engine/jamgram.y"
+ { yyval.number = ASSIGN_DEFAULT; }
+#line 1972 "src/engine/jamgram.cpp"
+ break;
+
+ case 65:
+#line 257 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_EXISTS, yyvsp[0].parse, pnull() ); yymode( SCAN_COND ); }
+#line 1978 "src/engine/jamgram.cpp"
+ break;
+
+ case 66:
+#line 258 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 1984 "src/engine/jamgram.cpp"
+ break;
+
+ case 67:
+#line 259 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_EQUALS, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 1990 "src/engine/jamgram.cpp"
+ break;
+
+ case 68:
+#line 260 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 1996 "src/engine/jamgram.cpp"
+ break;
+
+ case 69:
+#line 261 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_NOTEQ, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2002 "src/engine/jamgram.cpp"
+ break;
+
+ case 70:
+#line 262 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2008 "src/engine/jamgram.cpp"
+ break;
+
+ case 71:
+#line 263 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_LESS, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2014 "src/engine/jamgram.cpp"
+ break;
+
+ case 72:
+#line 264 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2020 "src/engine/jamgram.cpp"
+ break;
+
+ case 73:
+#line 265 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_LESSEQ, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2026 "src/engine/jamgram.cpp"
+ break;
+
+ case 74:
+#line 266 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2032 "src/engine/jamgram.cpp"
+ break;
+
+ case 75:
+#line 267 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_MORE, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2038 "src/engine/jamgram.cpp"
+ break;
+
+ case 76:
+#line 268 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2044 "src/engine/jamgram.cpp"
+ break;
+
+ case 77:
+#line 269 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_MOREEQ, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2050 "src/engine/jamgram.cpp"
+ break;
+
+ case 78:
+#line 270 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2056 "src/engine/jamgram.cpp"
+ break;
+
+ case 79:
+#line 271 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_AND, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2062 "src/engine/jamgram.cpp"
+ break;
+
+ case 80:
+#line 272 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2068 "src/engine/jamgram.cpp"
+ break;
+
+ case 81:
+#line 273 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_AND, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2074 "src/engine/jamgram.cpp"
+ break;
+
+ case 82:
+#line 274 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2080 "src/engine/jamgram.cpp"
+ break;
+
+ case 83:
+#line 275 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_OR, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2086 "src/engine/jamgram.cpp"
+ break;
+
+ case 84:
+#line 276 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2092 "src/engine/jamgram.cpp"
+ break;
+
+ case 85:
+#line 277 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_OR, yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2098 "src/engine/jamgram.cpp"
+ break;
+
+ case 86:
+#line 278 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 2104 "src/engine/jamgram.cpp"
+ break;
+
+ case 87:
+#line 279 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_IN, yyvsp[-3].parse, yyvsp[0].parse ); yymode( SCAN_COND ); }
+#line 2110 "src/engine/jamgram.cpp"
+ break;
+
+ case 88:
+#line 280 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2116 "src/engine/jamgram.cpp"
+ break;
+
+ case 89:
+#line 281 "src/engine/jamgram.y"
+ { yyval.parse = peval( EXPR_NOT, yyvsp[0].parse, pnull() ); }
+#line 2122 "src/engine/jamgram.cpp"
+ break;
+
+ case 90:
+#line 282 "src/engine/jamgram.y"
+ { yymode( SCAN_CONDB ); }
+#line 2128 "src/engine/jamgram.cpp"
+ break;
+
+ case 91:
+#line 283 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; }
+#line 2134 "src/engine/jamgram.cpp"
+ break;
+
+ case 92:
+#line 294 "src/engine/jamgram.y"
+ { yyval.parse = P0; }
+#line 2140 "src/engine/jamgram.cpp"
+ break;
+
+ case 93:
+#line 296 "src/engine/jamgram.y"
+ { yyval.parse = pnode( yyvsp[-1].parse, yyvsp[0].parse ); }
+#line 2146 "src/engine/jamgram.cpp"
+ break;
+
+ case 94:
+#line 299 "src/engine/jamgram.y"
+ { yymode( SCAN_CASE ); }
+#line 2152 "src/engine/jamgram.cpp"
+ break;
+
+ case 95:
+#line 299 "src/engine/jamgram.y"
+ { yymode( SCAN_NORMAL ); }
+#line 2158 "src/engine/jamgram.cpp"
+ break;
+
+ case 96:
+#line 300 "src/engine/jamgram.y"
+ { yyval.parse = psnode( yyvsp[-3].string, yyvsp[0].parse ); }
+#line 2164 "src/engine/jamgram.cpp"
+ break;
+
+ case 97:
+#line 309 "src/engine/jamgram.y"
+ { yyval.parse = pnode( P0, yyvsp[0].parse ); }
+#line 2170 "src/engine/jamgram.cpp"
+ break;
+
+ case 98:
+#line 311 "src/engine/jamgram.y"
+ { yyval.parse = pnode( yyvsp[0].parse, yyvsp[-2].parse ); }
+#line 2176 "src/engine/jamgram.cpp"
+ break;
+
+ case 99:
+#line 321 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+#line 2182 "src/engine/jamgram.cpp"
+ break;
+
+ case 100:
+#line 325 "src/engine/jamgram.y"
+ { yyval.parse = pnull(); }
+#line 2188 "src/engine/jamgram.cpp"
+ break;
+
+ case 101:
+#line 327 "src/engine/jamgram.y"
+ { yyval.parse = pappend( yyvsp[-1].parse, yyvsp[0].parse ); }
+#line 2194 "src/engine/jamgram.cpp"
+ break;
+
+ case 102:
+#line 331 "src/engine/jamgram.y"
+ { yyval.parse = plist( yyvsp[0].string ); }
+#line 2200 "src/engine/jamgram.cpp"
+ break;
+
+ case 103:
+#line 332 "src/engine/jamgram.y"
+ { yyval.number = yymode( SCAN_CALL ); }
+#line 2206 "src/engine/jamgram.cpp"
+ break;
+
+ case 104:
+#line 333 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[-1].parse; yymode( yyvsp[-2].number ); }
+#line 2212 "src/engine/jamgram.cpp"
+ break;
+
+ case 105:
+#line 341 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 2218 "src/engine/jamgram.cpp"
+ break;
+
+ case 106:
+#line 342 "src/engine/jamgram.y"
+ { yyval.parse = prule( yyvsp[-2].string, yyvsp[0].parse ); }
+#line 2224 "src/engine/jamgram.cpp"
+ break;
+
+ case 107:
+#line 343 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 2230 "src/engine/jamgram.cpp"
+ break;
+
+ case 108:
+#line 344 "src/engine/jamgram.y"
+ { yyval.parse = pon( yyvsp[-3].parse, prule( yyvsp[-2].string, yyvsp[0].parse ) ); }
+#line 2236 "src/engine/jamgram.cpp"
+ break;
+
+ case 109:
+#line 345 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 2242 "src/engine/jamgram.cpp"
+ break;
+
+ case 110:
+#line 346 "src/engine/jamgram.y"
+ { yyval.parse = pon( yyvsp[-3].parse, yyvsp[0].parse ); }
+#line 2248 "src/engine/jamgram.cpp"
+ break;
+
+ case 111:
+#line 356 "src/engine/jamgram.y"
+ { yyval.number = 0; }
+#line 2254 "src/engine/jamgram.cpp"
+ break;
+
+ case 112:
+#line 358 "src/engine/jamgram.y"
+ { yyval.number = yyvsp[-1].number | yyvsp[0].number; }
+#line 2260 "src/engine/jamgram.cpp"
+ break;
+
+ case 113:
+#line 362 "src/engine/jamgram.y"
+ { yyval.number = EXEC_UPDATED; }
+#line 2266 "src/engine/jamgram.cpp"
+ break;
+
+ case 114:
+#line 364 "src/engine/jamgram.y"
+ { yyval.number = EXEC_TOGETHER; }
+#line 2272 "src/engine/jamgram.cpp"
+ break;
+
+ case 115:
+#line 366 "src/engine/jamgram.y"
+ { yyval.number = EXEC_IGNORE; }
+#line 2278 "src/engine/jamgram.cpp"
+ break;
+
+ case 116:
+#line 368 "src/engine/jamgram.y"
+ { yyval.number = EXEC_QUIETLY; }
+#line 2284 "src/engine/jamgram.cpp"
+ break;
+
+ case 117:
+#line 370 "src/engine/jamgram.y"
+ { yyval.number = EXEC_PIECEMEAL; }
+#line 2290 "src/engine/jamgram.cpp"
+ break;
+
+ case 118:
+#line 372 "src/engine/jamgram.y"
+ { yyval.number = EXEC_EXISTING; }
+#line 2296 "src/engine/jamgram.cpp"
+ break;
+
+ case 119:
+#line 381 "src/engine/jamgram.y"
+ { yyval.parse = pnull(); }
+#line 2302 "src/engine/jamgram.cpp"
+ break;
+
+ case 120:
+#line 382 "src/engine/jamgram.y"
+ { yymode( SCAN_PUNCT ); }
+#line 2308 "src/engine/jamgram.cpp"
+ break;
+
+ case 121:
+#line 383 "src/engine/jamgram.y"
+ { yyval.parse = yyvsp[0].parse; }
+#line 2314 "src/engine/jamgram.cpp"
+ break;
+
+
+#line 2318 "src/engine/jamgram.cpp"
+
+ default: break;
+ }
+ /* User semantic actions sometimes alter yychar, and that requires
+ that yytoken be updated with the new translation. We take the
+ approach of translating immediately before every use of yytoken.
+ One alternative is translating here after every semantic action,
+ but that translation would be missed if the semantic action invokes
+ YYABORT, YYACCEPT, or YYERROR immediately after altering yychar or
+ if it invokes YYBACKUP. In the case of YYABORT or YYACCEPT, an
+ incorrect destructor might then be invoked immediately. In the
+ case of YYERROR or YYBACKUP, subsequent parser actions might lead
+ to an incorrect destructor call or verbose syntax error message
+ before the lookahead is translated. */
+ YY_SYMBOL_PRINT ("-> $$ =", yyr1[yyn], &yyval, &yyloc);
+
+ YYPOPSTACK (yylen);
+ yylen = 0;
+ YY_STACK_PRINT (yyss, yyssp);
+
+ *++yyvsp = yyval;
+
+ /* Now 'shift' the result of the reduction. Determine what state
+ that goes to, based on the state we popped back to and the rule
+ number reduced by. */
+ {
+ const int yylhs = yyr1[yyn] - YYNTOKENS;
+ const int yyi = yypgoto[yylhs] + *yyssp;
+ yystate = (0 <= yyi && yyi <= YYLAST && yycheck[yyi] == *yyssp
+ ? yytable[yyi]
+ : yydefgoto[yylhs]);
+ }
+
+ goto yynewstate;
+
+
+/*--------------------------------------.
+| yyerrlab -- here on detecting error. |
+`--------------------------------------*/
+yyerrlab:
+ /* Make sure we have latest lookahead translation. See comments at
+ user semantic actions for why this is necessary. */
+ yytoken = yychar == YYEMPTY ? YYEMPTY : YYTRANSLATE (yychar);
+
+ /* If not already recovering from an error, report this error. */
+ if (!yyerrstatus)
+ {
+ ++yynerrs;
+#if ! YYERROR_VERBOSE
+ yyerror (YY_("syntax error"));
+#else
+# define YYSYNTAX_ERROR yysyntax_error (&yymsg_alloc, &yymsg, \
+ yyssp, yytoken)
+ {
+ char const *yymsgp = YY_("syntax error");
+ int yysyntax_error_status;
+ yysyntax_error_status = YYSYNTAX_ERROR;
+ if (yysyntax_error_status == 0)
+ yymsgp = yymsg;
+ else if (yysyntax_error_status == 1)
+ {
+ if (yymsg != yymsgbuf)
+ YYSTACK_FREE (yymsg);
+ yymsg = YY_CAST (char *, YYSTACK_ALLOC (YY_CAST (YYSIZE_T, yymsg_alloc)));
+ if (!yymsg)
+ {
+ yymsg = yymsgbuf;
+ yymsg_alloc = sizeof yymsgbuf;
+ yysyntax_error_status = 2;
+ }
+ else
+ {
+ yysyntax_error_status = YYSYNTAX_ERROR;
+ yymsgp = yymsg;
+ }
+ }
+ yyerror (yymsgp);
+ if (yysyntax_error_status == 2)
+ goto yyexhaustedlab;
+ }
+# undef YYSYNTAX_ERROR
+#endif
+ }
+
+
+
+ if (yyerrstatus == 3)
+ {
+ /* If just tried and failed to reuse lookahead token after an
+ error, discard it. */
+
+ if (yychar <= YYEOF)
+ {
+ /* Return failure if at end of input. */
+ if (yychar == YYEOF)
+ YYABORT;
+ }
+ else
+ {
+ yydestruct ("Error: discarding",
+ yytoken, &yylval);
+ yychar = YYEMPTY;
+ }
+ }
+
+ /* Else will try to reuse lookahead token after shifting the error
+ token. */
+ goto yyerrlab1;
+
+
+/*---------------------------------------------------.
+| yyerrorlab -- error raised explicitly by YYERROR. |
+`---------------------------------------------------*/
+yyerrorlab:
+ /* Pacify compilers when the user code never invokes YYERROR and the
+ label yyerrorlab therefore never appears in user code. */
+ if (0)
+ YYERROR;
+
+ /* Do not reclaim the symbols of the rule whose action triggered
+ this YYERROR. */
+ YYPOPSTACK (yylen);
+ yylen = 0;
+ YY_STACK_PRINT (yyss, yyssp);
+ yystate = *yyssp;
+ goto yyerrlab1;
+
+
+/*-------------------------------------------------------------.
+| yyerrlab1 -- common code for both syntax error and YYERROR. |
+`-------------------------------------------------------------*/
+yyerrlab1:
+ yyerrstatus = 3; /* Each real token shifted decrements this. */
+
+ for (;;)
+ {
+ yyn = yypact[yystate];
+ if (!yypact_value_is_default (yyn))
+ {
+ yyn += YYTERROR;
+ if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR)
+ {
+ yyn = yytable[yyn];
+ if (0 < yyn)
+ break;
+ }
+ }
+
+ /* Pop the current state because it cannot handle the error token. */
+ if (yyssp == yyss)
+ YYABORT;
+
+
+ yydestruct ("Error: popping",
+ yystos[yystate], yyvsp);
+ YYPOPSTACK (1);
+ yystate = *yyssp;
+ YY_STACK_PRINT (yyss, yyssp);
+ }
+
+ YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
+ *++yyvsp = yylval;
+ YY_IGNORE_MAYBE_UNINITIALIZED_END
+
+
+ /* Shift the error token. */
+ YY_SYMBOL_PRINT ("Shifting", yystos[yyn], yyvsp, yylsp);
+
+ yystate = yyn;
+ goto yynewstate;
+
+
+/*-------------------------------------.
+| yyacceptlab -- YYACCEPT comes here. |
+`-------------------------------------*/
+yyacceptlab:
+ yyresult = 0;
+ goto yyreturn;
+
+
+/*-----------------------------------.
+| yyabortlab -- YYABORT comes here. |
+`-----------------------------------*/
+yyabortlab:
+ yyresult = 1;
+ goto yyreturn;
+
+
+#if !defined yyoverflow || YYERROR_VERBOSE
+/*-------------------------------------------------.
+| yyexhaustedlab -- memory exhaustion comes here. |
+`-------------------------------------------------*/
+yyexhaustedlab:
+ yyerror (YY_("memory exhausted"));
+ yyresult = 2;
+ /* Fall through. */
+#endif
+
+
+/*-----------------------------------------------------.
+| yyreturn -- parsing is finished, return the result. |
+`-----------------------------------------------------*/
+yyreturn:
+ if (yychar != YYEMPTY)
+ {
+ /* Make sure we have latest lookahead translation. See comments at
+ user semantic actions for why this is necessary. */
+ yytoken = YYTRANSLATE (yychar);
+ yydestruct ("Cleanup: discarding lookahead",
+ yytoken, &yylval);
+ }
+ /* Do not reclaim the symbols of the rule whose action triggered
+ this YYABORT or YYACCEPT. */
+ YYPOPSTACK (yylen);
+ YY_STACK_PRINT (yyss, yyssp);
+ while (yyssp != yyss)
+ {
+ yydestruct ("Cleanup: popping",
+ yystos[+*yyssp], yyvsp);
+ YYPOPSTACK (1);
+ }
+#ifndef yyoverflow
+ if (yyss != yyssa)
+ YYSTACK_FREE (yyss);
+#endif
+#if YYERROR_VERBOSE
+ if (yymsg != yymsgbuf)
+ YYSTACK_FREE (yymsg);
+#endif
+ return yyresult;
+}
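Each `case N:` block above is the Bison translation of one semantic action from jamgram.y (added later in this patch): `$$` becomes `yyval`, and `$1..$n` become offsets from the top of the value stack via `yyvsp`. A short illustration of that correspondence, pairing one grammar rule with its generated action (the rule and case numbering is Bison's own):

    // grammar rule (jamgram.y, later in this diff):
    //   arg : _LBRACKET_t { $$.number = yymode( SCAN_CALL ); } func _RBRACKET_t
    //         { $$.parse = $3.parse; yymode( $2.number ); }
    //
    // generated action (case 104 above):
    //   yyval.parse = yyvsp[-1].parse;   /* $$ = $3, the 'func' value        */
    //   yymode( yyvsp[-2].number );      /* restore the mode saved as $2     */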
diff --git a/src/boost/tools/build/src/engine/jamgram.hpp b/src/boost/tools/build/src/engine/jamgram.hpp
new file mode 100644
index 000000000..984761939
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jamgram.hpp
@@ -0,0 +1,164 @@
+/* A Bison parser, made by GNU Bison 3.5.2. */
+
+/* Bison interface for Yacc-like parsers in C
+
+ Copyright (C) 1984, 1989-1990, 2000-2015, 2018-2020 Free Software Foundation,
+ Inc.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>. */
+
+/* As a special exception, you may create a larger work that contains
+ part or all of the Bison parser skeleton and distribute that work
+ under terms of your choice, so long as that work isn't itself a
+ parser generator using the skeleton or a modified version thereof
+ as a parser skeleton. Alternatively, if you modify or redistribute
+ the parser skeleton itself, you may (at your option) remove this
+ special exception, which will cause the skeleton and the resulting
+ Bison output files to be licensed under the GNU General Public
+ License without this special exception.
+
+ This special exception was added by the Free Software Foundation in
+ version 2.2 of Bison. */
+
+/* Undocumented macros, especially those whose name start with YY_,
+ are private implementation details. Do not rely on them. */
+
+#ifndef YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED
+# define YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED
+/* Debug traces. */
+#ifndef YYDEBUG
+# define YYDEBUG 0
+#endif
+#if YYDEBUG
+extern int yydebug;
+#endif
+
+/* Token type. */
+#ifndef YYTOKENTYPE
+# define YYTOKENTYPE
+ enum yytokentype
+ {
+ _BANG_t = 258,
+ _BANG_EQUALS_t = 259,
+ _AMPER_t = 260,
+ _AMPERAMPER_t = 261,
+ _LPAREN_t = 262,
+ _RPAREN_t = 263,
+ _PLUS_EQUALS_t = 264,
+ _COLON_t = 265,
+ _SEMIC_t = 266,
+ _LANGLE_t = 267,
+ _LANGLE_EQUALS_t = 268,
+ _EQUALS_t = 269,
+ _RANGLE_t = 270,
+ _RANGLE_EQUALS_t = 271,
+ _QUESTION_EQUALS_t = 272,
+ _LBRACKET_t = 273,
+ _RBRACKET_t = 274,
+ ACTIONS_t = 275,
+ BIND_t = 276,
+ BREAK_t = 277,
+ CASE_t = 278,
+ CLASS_t = 279,
+ CONTINUE_t = 280,
+ DEFAULT_t = 281,
+ ELSE_t = 282,
+ EXISTING_t = 283,
+ FOR_t = 284,
+ IF_t = 285,
+ IGNORE_t = 286,
+ IN_t = 287,
+ INCLUDE_t = 288,
+ LOCAL_t = 289,
+ MODULE_t = 290,
+ ON_t = 291,
+ PIECEMEAL_t = 292,
+ QUIETLY_t = 293,
+ RETURN_t = 294,
+ RULE_t = 295,
+ SWITCH_t = 296,
+ TOGETHER_t = 297,
+ UPDATED_t = 298,
+ WHILE_t = 299,
+ _LBRACE_t = 300,
+ _BAR_t = 301,
+ _BARBAR_t = 302,
+ _RBRACE_t = 303,
+ ARG = 304,
+ STRING = 305
+ };
+#endif
+/* Tokens. */
+#define _BANG_t 258
+#define _BANG_EQUALS_t 259
+#define _AMPER_t 260
+#define _AMPERAMPER_t 261
+#define _LPAREN_t 262
+#define _RPAREN_t 263
+#define _PLUS_EQUALS_t 264
+#define _COLON_t 265
+#define _SEMIC_t 266
+#define _LANGLE_t 267
+#define _LANGLE_EQUALS_t 268
+#define _EQUALS_t 269
+#define _RANGLE_t 270
+#define _RANGLE_EQUALS_t 271
+#define _QUESTION_EQUALS_t 272
+#define _LBRACKET_t 273
+#define _RBRACKET_t 274
+#define ACTIONS_t 275
+#define BIND_t 276
+#define BREAK_t 277
+#define CASE_t 278
+#define CLASS_t 279
+#define CONTINUE_t 280
+#define DEFAULT_t 281
+#define ELSE_t 282
+#define EXISTING_t 283
+#define FOR_t 284
+#define IF_t 285
+#define IGNORE_t 286
+#define IN_t 287
+#define INCLUDE_t 288
+#define LOCAL_t 289
+#define MODULE_t 290
+#define ON_t 291
+#define PIECEMEAL_t 292
+#define QUIETLY_t 293
+#define RETURN_t 294
+#define RULE_t 295
+#define SWITCH_t 296
+#define TOGETHER_t 297
+#define UPDATED_t 298
+#define WHILE_t 299
+#define _LBRACE_t 300
+#define _BAR_t 301
+#define _BARBAR_t 302
+#define _RBRACE_t 303
+#define ARG 304
+#define STRING 305
+
+/* Value type. */
+#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED
+typedef int YYSTYPE;
+# define YYSTYPE_IS_TRIVIAL 1
+# define YYSTYPE_IS_DECLARED 1
+#endif
+
+
+extern YYSTYPE yylval;
+
+int yyparse (void);
+
+#endif /* !YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED */
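A minimal sketch of driving this interface, assuming the scanner has already been pointed at an input file by the surrounding engine code (that setup is outside this header, and the helper name below is hypothetical). The return codes match the `yyacceptlab` (0), `yyabortlab` (1) and `yyexhaustedlab` (2) exits of `yyparse()` in jamgram.cpp above:

    #include <stdio.h>
    #include "jamgram.hpp"

    static int parse_current_input()   /* hypothetical helper name */
    {
        int const rc = yyparse();
        if ( rc == 1 ) printf( "jam parse: syntax error\n" );
        if ( rc == 2 ) printf( "jam parse: memory exhausted\n" );
        return rc;   /* 0 means the input was accepted */
    }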
diff --git a/src/boost/tools/build/src/engine/jamgram.y b/src/boost/tools/build/src/engine/jamgram.y
new file mode 100644
index 000000000..4e5839381
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jamgram.y
@@ -0,0 +1,386 @@
+%token _BANG_t
+%token _BANG_EQUALS_t
+%token _AMPER_t
+%token _AMPERAMPER_t
+%token _LPAREN_t
+%token _RPAREN_t
+%token _PLUS_EQUALS_t
+%token _COLON_t
+%token _SEMIC_t
+%token _LANGLE_t
+%token _LANGLE_EQUALS_t
+%token _EQUALS_t
+%token _RANGLE_t
+%token _RANGLE_EQUALS_t
+%token _QUESTION_EQUALS_t
+%token _LBRACKET_t
+%token _RBRACKET_t
+%token ACTIONS_t
+%token BIND_t
+%token BREAK_t
+%token CASE_t
+%token CLASS_t
+%token CONTINUE_t
+%token DEFAULT_t
+%token ELSE_t
+%token EXISTING_t
+%token FOR_t
+%token IF_t
+%token IGNORE_t
+%token IN_t
+%token INCLUDE_t
+%token LOCAL_t
+%token MODULE_t
+%token ON_t
+%token PIECEMEAL_t
+%token QUIETLY_t
+%token RETURN_t
+%token RULE_t
+%token SWITCH_t
+%token TOGETHER_t
+%token UPDATED_t
+%token WHILE_t
+%token _LBRACE_t
+%token _BAR_t
+%token _BARBAR_t
+%token _RBRACE_t
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jamgram.yy - jam grammar
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ * 08/31/94 (seiwald) - Allow ?= as alias for "default =".
+ * 09/15/94 (seiwald) - if conditionals take only single arguments, so
+ * that 'if foo == bar' gives syntax error (use =).
+ * 02/11/95 (seiwald) - when scanning arguments to rules, only treat
+ * punctuation keywords as keywords. All arg lists
+ * are terminated with punctuation keywords.
+ *
+ * 09/11/00 (seiwald) - Support for function calls:
+ *
+ * Rules now return lists (LIST *), rather than void.
+ *
+ * New "[ rule ]" syntax evals rule into a LIST.
+ *
+ * Lists are now generated by compile_list() and
+ * compile_append(), and any other rule that indirectly
+ * makes a list, rather than being built directly here,
+ * so that list values can contain rule evaluations.
+ *
+ * New 'return' rule sets the return value, though
+ * other statements also may have return values.
+ *
+ * 'run' production split from 'block' production so
+ * that empty blocks can be handled separately.
+ */
+
+%token ARG STRING
+
+%left _BARBAR_t _BAR_t
+%left _AMPERAMPER_t _AMPER_t
+%left _EQUALS_t _BANG_EQUALS_t IN_t
+%left _LANGLE_t _LANGLE_EQUALS_t _RANGLE_t _RANGLE_EQUALS_t
+%left _BANG_t
+
+%{
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "compile.h"
+#include "object.h"
+#include "rules.h"
+
+# define YYINITDEPTH 5000 /* for C++ parsing */
+# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
+
+# define F0 -1
+# define P0 (PARSE *)0
+# define S0 (OBJECT *)0
+
+# define pappend( l,r ) parse_make( PARSE_APPEND,l,r,P0,S0,S0,0 )
+# define peval( c,l,r ) parse_make( PARSE_EVAL,l,r,P0,S0,S0,c )
+# define pfor( s,l,r,x ) parse_make( PARSE_FOREACH,l,r,P0,s,S0,x )
+# define pif( l,r,t ) parse_make( PARSE_IF,l,r,t,S0,S0,0 )
+# define pincl( l ) parse_make( PARSE_INCLUDE,l,P0,P0,S0,S0,0 )
+# define plist( s ) parse_make( PARSE_LIST,P0,P0,P0,s,S0,0 )
+# define plocal( l,r,t ) parse_make( PARSE_LOCAL,l,r,t,S0,S0,0 )
+# define pmodule( l,r ) parse_make( PARSE_MODULE,l,r,P0,S0,S0,0 )
+# define pclass( l,r ) parse_make( PARSE_CLASS,l,r,P0,S0,S0,0 )
+# define pnull() parse_make( PARSE_NULL,P0,P0,P0,S0,S0,0 )
+# define pon( l,r ) parse_make( PARSE_ON,l,r,P0,S0,S0,0 )
+# define prule( s,p ) parse_make( PARSE_RULE,p,P0,P0,s,S0,0 )
+# define prules( l,r ) parse_make( PARSE_RULES,l,r,P0,S0,S0,0 )
+# define pset( l,r,a ) parse_make( PARSE_SET,l,r,P0,S0,S0,a )
+# define pset1( l,r,t,a ) parse_make( PARSE_SETTINGS,l,r,t,S0,S0,a )
+# define psetc( s,p,a,l ) parse_make( PARSE_SETCOMP,p,a,P0,s,S0,l )
+# define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f )
+# define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 )
+# define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 )
+# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 )
+# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 )
+# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 )
+
+# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
+# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
+
+%}
+
+%%
+
+run : /* empty */
+ /* do nothing */
+ | rules
+ { parse_save( $1.parse ); }
+ ;
+
+/*
+ * block - zero or more rules
+ * rules - one or more rules
+ * rule - any one of jam's rules
+ * right-recursive so rules execute in order.
+ */
+
+block : null
+ { $$.parse = $1.parse; }
+ | rules
+ { $$.parse = $1.parse; }
+ ;
+
+rules : rule
+ { $$.parse = $1.parse; }
+ | rule rules
+ { $$.parse = prules( $1.parse, $2.parse ); }
+ | LOCAL_t { yymode( SCAN_ASSIGN ); } list assign_list_opt _SEMIC_t { yymode( SCAN_NORMAL ); } block
+ { $$.parse = plocal( $3.parse, $4.parse, $7.parse ); }
+ ;
+
+null : /* empty */
+ { $$.parse = pnull(); }
+ ;
+
+assign_list_opt : _EQUALS_t { yymode( SCAN_PUNCT ); } list
+ { $$.parse = $3.parse; $$.number = ASSIGN_SET; }
+ | null
+ { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; }
+ ;
+
+arglist_opt : _LPAREN_t lol _RPAREN_t
+ { $$.parse = $2.parse; }
+ |
+ { $$.parse = P0; }
+ ;
+
+local_opt : LOCAL_t
+ { $$.number = 1; }
+ | /* empty */
+ { $$.number = 0; }
+ ;
+
+else_opt : ELSE_t rule
+ { $$.parse = $2.parse; }
+ | /* empty */
+ { $$.parse = pnull(); }
+
+rule : _LBRACE_t block _RBRACE_t
+ { $$.parse = $2.parse; }
+ | INCLUDE_t { yymode( SCAN_PUNCT ); } list _SEMIC_t
+ { $$.parse = pincl( $3.parse ); yymode( SCAN_NORMAL ); }
+ | ARG { yymode( SCAN_PUNCT ); } lol _SEMIC_t
+ { $$.parse = prule( $1.string, $3.parse ); yymode( SCAN_NORMAL ); }
+ | arg assign { yymode( SCAN_PUNCT ); } list _SEMIC_t
+ { $$.parse = pset( $1.parse, $4.parse, $2.number ); yymode( SCAN_NORMAL ); }
+ | arg ON_t { yymode( SCAN_ASSIGN ); } list assign { yymode( SCAN_PUNCT ); } list _SEMIC_t
+ { $$.parse = pset1( $1.parse, $4.parse, $7.parse, $5.number ); yymode( SCAN_NORMAL ); }
+ | RETURN_t { yymode( SCAN_PUNCT ); } list _SEMIC_t
+ { $$.parse = preturn( $3.parse ); yymode( SCAN_NORMAL ); }
+ | BREAK_t _SEMIC_t
+ { $$.parse = pbreak(); }
+ | CONTINUE_t _SEMIC_t
+ { $$.parse = pcontinue(); }
+ | FOR_t local_opt ARG IN_t { yymode( SCAN_PUNCT ); } list _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t
+ { $$.parse = pfor( $3.string, $6.parse, $9.parse, $2.number ); }
+ | SWITCH_t { yymode( SCAN_PUNCT ); } list _LBRACE_t { yymode( SCAN_NORMAL ); } cases _RBRACE_t
+ { $$.parse = pswitch( $3.parse, $6.parse ); }
+ | IF_t { yymode( SCAN_CONDB ); } expr _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t else_opt
+ { $$.parse = pif( $3.parse, $6.parse, $8.parse ); }
+ | MODULE_t { yymode( SCAN_PUNCT ); } list _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t
+ { $$.parse = pmodule( $3.parse, $6.parse ); }
+ | CLASS_t { yymode( SCAN_PUNCT ); } lol _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t
+ { $$.parse = pclass( $3.parse, $6.parse ); }
+ | WHILE_t { yymode( SCAN_CONDB ); } expr { yymode( SCAN_NORMAL ); } _LBRACE_t block _RBRACE_t
+ { $$.parse = pwhile( $3.parse, $6.parse ); }
+ | local_opt RULE_t { yymode( SCAN_PUNCT ); } ARG { yymode( SCAN_PARAMS ); } arglist_opt { yymode( SCAN_NORMAL ); } rule
+ { $$.parse = psetc( $4.string, $8.parse, $6.parse, $1.number ); }
+ | ON_t arg rule
+ { $$.parse = pon( $2.parse, $3.parse ); }
+ | ACTIONS_t eflags ARG bindlist _LBRACE_t
+ { yymode( SCAN_STRING ); }
+ STRING
+ { yymode( SCAN_NORMAL ); }
+ _RBRACE_t
+ { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); }
+ ;
+
+/*
+ * assign - = or +=
+ */
+
+assign : _EQUALS_t
+ { $$.number = ASSIGN_SET; }
+ | _PLUS_EQUALS_t
+ { $$.number = ASSIGN_APPEND; }
+ | _QUESTION_EQUALS_t
+ { $$.number = ASSIGN_DEFAULT; }
+ | DEFAULT_t _EQUALS_t
+ { $$.number = ASSIGN_DEFAULT; }
+ ;
+
+/*
+ * expr - an expression for if
+ */
+expr : arg
+ { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); yymode( SCAN_COND ); }
+ | expr _EQUALS_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_EQUALS, $1.parse, $4.parse ); }
+ | expr _BANG_EQUALS_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_NOTEQ, $1.parse, $4.parse ); }
+ | expr _LANGLE_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_LESS, $1.parse, $4.parse ); }
+ | expr _LANGLE_EQUALS_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_LESSEQ, $1.parse, $4.parse ); }
+ | expr _RANGLE_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_MORE, $1.parse, $4.parse ); }
+ | expr _RANGLE_EQUALS_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_MOREEQ, $1.parse, $4.parse ); }
+ | expr _AMPER_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); }
+ | expr _AMPERAMPER_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); }
+ | expr _BAR_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); }
+ | expr _BARBAR_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); }
+ | arg IN_t { yymode( SCAN_PUNCT ); } list
+ { $$.parse = peval( EXPR_IN, $1.parse, $4.parse ); yymode( SCAN_COND ); }
+ | _BANG_t { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_NOT, $3.parse, pnull() ); }
+ | _LPAREN_t { yymode( SCAN_CONDB ); } expr _RPAREN_t
+ { $$.parse = $3.parse; }
+ ;
+
+
+/*
+ * cases - action elements inside a 'switch'
+ * case - a single action element inside a 'switch'
+ * right-recursive rule so cases can be examined in order.
+ */
+
+cases : /* empty */
+ { $$.parse = P0; }
+ | case cases
+ { $$.parse = pnode( $1.parse, $2.parse ); }
+ ;
+
+case : CASE_t { yymode( SCAN_CASE ); } ARG _COLON_t { yymode( SCAN_NORMAL ); } block
+ { $$.parse = psnode( $3.string, $6.parse ); }
+ ;
+
+/*
+ * lol - list of lists
+ * right-recursive rule so that lists can be added in order.
+ */
+
+lol : list
+ { $$.parse = pnode( P0, $1.parse ); }
+ | list _COLON_t lol
+ { $$.parse = pnode( $3.parse, $1.parse ); }
+ ;
+
+/*
+ * list - zero or more args in a LIST
+ * listp - list (in punctuation only mode)
+ * arg - one ARG or function call
+ */
+
+list : listp
+ { $$.parse = $1.parse; }
+ ;
+
+listp : /* empty */
+ { $$.parse = pnull(); }
+ | listp arg
+ { $$.parse = pappend( $1.parse, $2.parse ); }
+ ;
+
+arg : ARG
+ { $$.parse = plist( $1.string ); }
+ | _LBRACKET_t { $$.number = yymode( SCAN_CALL ); } func _RBRACKET_t
+ { $$.parse = $3.parse; yymode( $2.number ); }
+ ;
+
+/*
+ * func - a function call (inside [])
+ * This needs to be split cleanly out of 'rule'
+ */
+
+func : ARG { yymode( SCAN_PUNCT ); } lol
+ { $$.parse = prule( $1.string, $3.parse ); }
+ | ON_t arg ARG { yymode( SCAN_PUNCT ); } lol
+ { $$.parse = pon( $2.parse, prule( $3.string, $5.parse ) ); }
+ | ON_t arg RETURN_t { yymode( SCAN_PUNCT ); } list
+ { $$.parse = pon( $2.parse, $5.parse ); }
+ ;
+
+
+/*
+ * eflags - zero or more modifiers to 'executes'
+ * eflag - a single modifier to 'executes'
+ */
+
+eflags : /* empty */
+ { $$.number = 0; }
+ | eflags eflag
+ { $$.number = $1.number | $2.number; }
+ ;
+
+eflag : UPDATED_t
+ { $$.number = EXEC_UPDATED; }
+ | TOGETHER_t
+ { $$.number = EXEC_TOGETHER; }
+ | IGNORE_t
+ { $$.number = EXEC_IGNORE; }
+ | QUIETLY_t
+ { $$.number = EXEC_QUIETLY; }
+ | PIECEMEAL_t
+ { $$.number = EXEC_PIECEMEAL; }
+ | EXISTING_t
+ { $$.number = EXEC_EXISTING; }
+ ;
+
+
+/*
+ * bindlist - list of variables to bind for an action
+ */
+
+bindlist : /* empty */
+ { $$.parse = pnull(); }
+ | BIND_t { yymode( SCAN_PUNCT ); } list
+ { $$.parse = $3.parse; }
+ ;
+
+
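Every semantic action in the grammar above builds its parse tree through the thin `p*` wrappers around `parse_make()` defined in the prologue, so each action expands mechanically into one `parse_make()` call. A small sketch of two such expansions, using only macros defined above (`left`, `right` and `str` are stand-in names for the corresponding `$n` values):

    /* peval( EXPR_EQUALS, left, right ), as used in the expr rules, expands to: */
    parse_make( PARSE_EVAL, left, right, P0, S0, S0, EXPR_EQUALS );

    /* plist( str ), as used for a bare ARG, expands to: */
    parse_make( PARSE_LIST, P0, P0, P0, str, S0, 0 );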
diff --git a/src/boost/tools/build/src/engine/jamgram.yy b/src/boost/tools/build/src/engine/jamgram.yy
new file mode 100644
index 000000000..d6158f771
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jamgram.yy
@@ -0,0 +1,340 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jamgram.yy - jam grammar
+ *
+ * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
+ * 06/01/94 (seiwald) - new 'actions existing' does existing sources
+ * 08/23/94 (seiwald) - Support for '+=' (append to variable)
+ * 08/31/94 (seiwald) - Allow ?= as alias for "default =".
+ * 09/15/94 (seiwald) - if conditionals take only single arguments, so
+ * that 'if foo == bar' gives syntax error (use =).
+ * 02/11/95 (seiwald) - when scanning arguments to rules, only treat
+ * punctuation keywords as keywords. All arg lists
+ * are terminated with punctuation keywords.
+ *
+ * 09/11/00 (seiwald) - Support for function calls:
+ *
+ * Rules now return lists (LIST *), rather than void.
+ *
+ * New "[ rule ]" syntax evals rule into a LIST.
+ *
+ * Lists are now generated by compile_list() and
+ * compile_append(), and any other rule that indirectly
+ * makes a list, rather than being built directly here,
+ * so that list values can contain rule evaluations.
+ *
+ * New 'return' rule sets the return value, though
+ * other statements also may have return values.
+ *
+ * 'run' production split from 'block' production so
+ * that empty blocks can be handled separately.
+ */
+
+%token ARG STRING
+
+%left `||` `|`
+%left `&&` `&`
+%left `=` `!=` `in`
+%left `<` `<=` `>` `>=`
+%left `!`
+
+%{
+#include "jam.h"
+
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "compile.h"
+#include "object.h"
+#include "rules.h"
+
+# define YYINITDEPTH 5000 /* for C++ parsing */
+# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */
+
+# define F0 -1
+# define P0 (PARSE *)0
+# define S0 (OBJECT *)0
+
+# define pappend( l,r ) parse_make( PARSE_APPEND,l,r,P0,S0,S0,0 )
+# define peval( c,l,r ) parse_make( PARSE_EVAL,l,r,P0,S0,S0,c )
+# define pfor( s,l,r,x ) parse_make( PARSE_FOREACH,l,r,P0,s,S0,x )
+# define pif( l,r,t ) parse_make( PARSE_IF,l,r,t,S0,S0,0 )
+# define pincl( l ) parse_make( PARSE_INCLUDE,l,P0,P0,S0,S0,0 )
+# define plist( s ) parse_make( PARSE_LIST,P0,P0,P0,s,S0,0 )
+# define plocal( l,r,t ) parse_make( PARSE_LOCAL,l,r,t,S0,S0,0 )
+# define pmodule( l,r ) parse_make( PARSE_MODULE,l,r,P0,S0,S0,0 )
+# define pclass( l,r ) parse_make( PARSE_CLASS,l,r,P0,S0,S0,0 )
+# define pnull() parse_make( PARSE_NULL,P0,P0,P0,S0,S0,0 )
+# define pon( l,r ) parse_make( PARSE_ON,l,r,P0,S0,S0,0 )
+# define prule( s,p ) parse_make( PARSE_RULE,p,P0,P0,s,S0,0 )
+# define prules( l,r ) parse_make( PARSE_RULES,l,r,P0,S0,S0,0 )
+# define pset( l,r,a ) parse_make( PARSE_SET,l,r,P0,S0,S0,a )
+# define pset1( l,r,t,a ) parse_make( PARSE_SETTINGS,l,r,t,S0,S0,a )
+# define psetc( s,p,a,l ) parse_make( PARSE_SETCOMP,p,a,P0,s,S0,l )
+# define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f )
+# define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 )
+# define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 )
+# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 )
+# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 )
+# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 )
+
+# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 )
+# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 )
+
+%}
+
+%%
+
+run : /* empty */
+ /* do nothing */
+ | rules
+ { parse_save( $1.parse ); }
+ ;
+
+/*
+ * block - zero or more rules
+ * rules - one or more rules
+ * rule - any one of jam's rules
+ * right-recursive so rules execute in order.
+ */
+
+block : null
+ { $$.parse = $1.parse; }
+ | rules
+ { $$.parse = $1.parse; }
+ ;
+
+rules : rule
+ { $$.parse = $1.parse; }
+ | rule rules
+ { $$.parse = prules( $1.parse, $2.parse ); }
+ | `local` { yymode( SCAN_ASSIGN ); } list assign_list_opt `;` { yymode( SCAN_NORMAL ); } block
+ { $$.parse = plocal( $3.parse, $4.parse, $7.parse ); }
+ ;
+
+null : /* empty */
+ { $$.parse = pnull(); }
+ ;
+
+assign_list_opt : `=` { yymode( SCAN_PUNCT ); } list
+ { $$.parse = $3.parse; $$.number = ASSIGN_SET; }
+ | null
+ { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; }
+ ;
+
+arglist_opt : `(` lol `)`
+ { $$.parse = $2.parse; }
+ |
+ { $$.parse = P0; }
+ ;
+
+local_opt : `local`
+ { $$.number = 1; }
+ | /* empty */
+ { $$.number = 0; }
+ ;
+
+else_opt : `else` rule
+ { $$.parse = $2.parse; }
+ | /* empty */
+ { $$.parse = pnull(); }
+
+rule : `{` block `}`
+ { $$.parse = $2.parse; }
+ | `include` { yymode( SCAN_PUNCT ); } list `;`
+ { $$.parse = pincl( $3.parse ); yymode( SCAN_NORMAL ); }
+ | ARG { yymode( SCAN_PUNCT ); } lol `;`
+ { $$.parse = prule( $1.string, $3.parse ); yymode( SCAN_NORMAL ); }
+ | arg assign { yymode( SCAN_PUNCT ); } list `;`
+ { $$.parse = pset( $1.parse, $4.parse, $2.number ); yymode( SCAN_NORMAL ); }
+ | arg `on` { yymode( SCAN_ASSIGN ); } list assign { yymode( SCAN_PUNCT ); } list `;`
+ { $$.parse = pset1( $1.parse, $4.parse, $7.parse, $5.number ); yymode( SCAN_NORMAL ); }
+ | `return` { yymode( SCAN_PUNCT ); } list `;`
+ { $$.parse = preturn( $3.parse ); yymode( SCAN_NORMAL ); }
+ | `break` `;`
+ { $$.parse = pbreak(); }
+ | `continue` `;`
+ { $$.parse = pcontinue(); }
+ | `for` local_opt ARG `in` { yymode( SCAN_PUNCT ); } list `{` { yymode( SCAN_NORMAL ); } block `}`
+ { $$.parse = pfor( $3.string, $6.parse, $9.parse, $2.number ); }
+ | `switch` { yymode( SCAN_PUNCT ); } list `{` { yymode( SCAN_NORMAL ); } cases `}`
+ { $$.parse = pswitch( $3.parse, $6.parse ); }
+ | `if` { yymode( SCAN_CONDB ); } expr `{` { yymode( SCAN_NORMAL ); } block `}` else_opt
+ { $$.parse = pif( $3.parse, $6.parse, $8.parse ); }
+ | `module` { yymode( SCAN_PUNCT ); } list `{` { yymode( SCAN_NORMAL ); } block `}`
+ { $$.parse = pmodule( $3.parse, $6.parse ); }
+ | `class` { yymode( SCAN_PUNCT ); } lol `{` { yymode( SCAN_NORMAL ); } block `}`
+ { $$.parse = pclass( $3.parse, $6.parse ); }
+ | `while` { yymode( SCAN_CONDB ); } expr { yymode( SCAN_NORMAL ); } `{` block `}`
+ { $$.parse = pwhile( $3.parse, $6.parse ); }
+ | local_opt `rule` { yymode( SCAN_PUNCT ); } ARG { yymode( SCAN_PARAMS ); } arglist_opt { yymode( SCAN_NORMAL ); } rule
+ { $$.parse = psetc( $4.string, $8.parse, $6.parse, $1.number ); }
+ | `on` arg rule
+ { $$.parse = pon( $2.parse, $3.parse ); }
+ | `actions` eflags ARG bindlist `{`
+ { yymode( SCAN_STRING ); }
+ STRING
+ { yymode( SCAN_NORMAL ); }
+ `}`
+ { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); }
+ ;
+
+/*
+ * assign - = or +=
+ */
+
+assign : `=`
+ { $$.number = ASSIGN_SET; }
+ | `+=`
+ { $$.number = ASSIGN_APPEND; }
+ | `?=`
+ { $$.number = ASSIGN_DEFAULT; }
+ | `default` `=`
+ { $$.number = ASSIGN_DEFAULT; }
+ ;
+
+/*
+ * expr - an expression for if
+ */
+expr : arg
+ { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); yymode( SCAN_COND ); }
+ | expr `=` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_EQUALS, $1.parse, $4.parse ); }
+ | expr `!=` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_NOTEQ, $1.parse, $4.parse ); }
+ | expr `<` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_LESS, $1.parse, $4.parse ); }
+ | expr `<=` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_LESSEQ, $1.parse, $4.parse ); }
+ | expr `>` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_MORE, $1.parse, $4.parse ); }
+ | expr `>=` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_MOREEQ, $1.parse, $4.parse ); }
+ | expr `&` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); }
+ | expr `&&` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); }
+ | expr `|` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); }
+ | expr `||` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); }
+ | arg `in` { yymode( SCAN_PUNCT ); } list
+ { $$.parse = peval( EXPR_IN, $1.parse, $4.parse ); yymode( SCAN_COND ); }
+ | `!` { yymode( SCAN_CONDB ); } expr
+ { $$.parse = peval( EXPR_NOT, $3.parse, pnull() ); }
+ | `(` { yymode( SCAN_CONDB ); } expr `)`
+ { $$.parse = $3.parse; }
+ ;
+
+
+/*
+ * cases - action elements inside a 'switch'
+ * case - a single action element inside a 'switch'
+ * right-recursive rule so cases can be examined in order.
+ */
+
+cases : /* empty */
+ { $$.parse = P0; }
+ | case cases
+ { $$.parse = pnode( $1.parse, $2.parse ); }
+ ;
+
+case : `case` { yymode( SCAN_CASE ); } ARG `:` { yymode( SCAN_NORMAL ); } block
+ { $$.parse = psnode( $3.string, $6.parse ); }
+ ;
+
+/*
+ * lol - list of lists
+ * right-recursive rule so that lists can be added in order.
+ */
+
+lol : list
+ { $$.parse = pnode( P0, $1.parse ); }
+ | list `:` lol
+ { $$.parse = pnode( $3.parse, $1.parse ); }
+ ;
+
+/*
+ * list - zero or more args in a LIST
+ * listp - list (in punctuation only mode)
+ * arg - one ARG or function call
+ */
+
+list : listp
+ { $$.parse = $1.parse; }
+ ;
+
+listp : /* empty */
+ { $$.parse = pnull(); }
+ | listp arg
+ { $$.parse = pappend( $1.parse, $2.parse ); }
+ ;
+
+arg : ARG
+ { $$.parse = plist( $1.string ); }
+ | `[` { $$.number = yymode( SCAN_CALL ); } func `]`
+ { $$.parse = $3.parse; yymode( $2.number ); }
+ ;
+
+/*
+ * func - a function call (inside [])
+ * This needs to be split cleanly out of 'rule'
+ */
+
+func : ARG { yymode( SCAN_PUNCT ); } lol
+ { $$.parse = prule( $1.string, $3.parse ); }
+ | `on` arg ARG { yymode( SCAN_PUNCT ); } lol
+ { $$.parse = pon( $2.parse, prule( $3.string, $5.parse ) ); }
+ | `on` arg `return` { yymode( SCAN_PUNCT ); } list
+ { $$.parse = pon( $2.parse, $5.parse ); }
+ ;
+
+
+/*
+ * eflags - zero or more modifiers to 'executes'
+ * eflag - a single modifier to 'executes'
+ */
+
+eflags : /* empty */
+ { $$.number = 0; }
+ | eflags eflag
+ { $$.number = $1.number | $2.number; }
+ ;
+
+eflag : `updated`
+ { $$.number = EXEC_UPDATED; }
+ | `together`
+ { $$.number = EXEC_TOGETHER; }
+ | `ignore`
+ { $$.number = EXEC_IGNORE; }
+ | `quietly`
+ { $$.number = EXEC_QUIETLY; }
+ | `piecemeal`
+ { $$.number = EXEC_PIECEMEAL; }
+ | `existing`
+ { $$.number = EXEC_EXISTING; }
+ ;
+
+
+/*
+ * bindlist - list of variables to bind for an action
+ */
+
+bindlist : /* empty */
+ { $$.parse = pnull(); }
+ | `bind` { yymode( SCAN_PUNCT ); } list
+ { $$.parse = $3.parse; }
+ ;
+
+
diff --git a/src/boost/tools/build/src/engine/jamgramtab.h b/src/boost/tools/build/src/engine/jamgramtab.h
new file mode 100644
index 000000000..38a810871
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jamgramtab.h
@@ -0,0 +1,46 @@
+ { "!", _BANG_t },
+ { "!=", _BANG_EQUALS_t },
+ { "&", _AMPER_t },
+ { "&&", _AMPERAMPER_t },
+ { "(", _LPAREN_t },
+ { ")", _RPAREN_t },
+ { "+=", _PLUS_EQUALS_t },
+ { ":", _COLON_t },
+ { ";", _SEMIC_t },
+ { "<", _LANGLE_t },
+ { "<=", _LANGLE_EQUALS_t },
+ { "=", _EQUALS_t },
+ { ">", _RANGLE_t },
+ { ">=", _RANGLE_EQUALS_t },
+ { "?=", _QUESTION_EQUALS_t },
+ { "[", _LBRACKET_t },
+ { "]", _RBRACKET_t },
+ { "actions", ACTIONS_t },
+ { "bind", BIND_t },
+ { "break", BREAK_t },
+ { "case", CASE_t },
+ { "class", CLASS_t },
+ { "continue", CONTINUE_t },
+ { "default", DEFAULT_t },
+ { "else", ELSE_t },
+ { "existing", EXISTING_t },
+ { "for", FOR_t },
+ { "if", IF_t },
+ { "ignore", IGNORE_t },
+ { "in", IN_t },
+ { "include", INCLUDE_t },
+ { "local", LOCAL_t },
+ { "module", MODULE_t },
+ { "on", ON_t },
+ { "piecemeal", PIECEMEAL_t },
+ { "quietly", QUIETLY_t },
+ { "return", RETURN_t },
+ { "rule", RULE_t },
+ { "switch", SWITCH_t },
+ { "together", TOGETHER_t },
+ { "updated", UPDATED_t },
+ { "while", WHILE_t },
+ { "{", _LBRACE_t },
+ { "|", _BAR_t },
+ { "||", _BARBAR_t },
+ { "}", _RBRACE_t },
diff --git a/src/boost/tools/build/src/engine/lists.cpp b/src/boost/tools/build/src/engine/lists.cpp
new file mode 100644
index 000000000..af602b504
--- /dev/null
+++ b/src/boost/tools/build/src/engine/lists.cpp
@@ -0,0 +1,474 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * lists.c - maintain lists of objects
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "output.h"
+
+#include <assert.h>
+
+static LIST * freelist[ 32 ]; /* junkpile for list_dealloc() */
+
+static unsigned get_bucket( unsigned size )
+{
+ unsigned bucket = 0;
+ while ( size > ( 1u << bucket ) ) ++bucket;
+ return bucket;
+}
+
+static LIST * list_alloc( unsigned const size )
+{
+ unsigned const bucket = get_bucket( size );
+ if ( freelist[ bucket ] )
+ {
+ LIST * result = freelist[ bucket ];
+ freelist[ bucket ] = result->impl.next;
+ return result;
+ }
+ return (LIST *)BJAM_MALLOC( sizeof( LIST ) + ( 1u << bucket ) *
+ sizeof( OBJECT * ) );
+}
+
+static void list_dealloc( LIST * l )
+{
+ unsigned size = list_length( l );
+ unsigned bucket;
+ LIST * node = l;
+
+ if ( size == 0 ) return;
+
+ bucket = get_bucket( size );
+
+#ifdef BJAM_NO_MEM_CACHE
+ BJAM_FREE( node );
+#else
+ node->impl.next = freelist[ bucket ];
+ freelist[ bucket ] = node;
+#endif
+}
+
+/*
+ * list_append() - append a list onto another one, returning total
+ */
+
+LIST * list_append( LIST * l, LIST * nl )
+{
+ if ( list_empty( l ) )
+ return nl;
+ if ( !list_empty( nl ) )
+ {
+ unsigned int const l_size = list_length( l );
+ int const nl_size = list_length( nl );
+ int const size = l_size + nl_size;
+ unsigned const bucket = get_bucket( size );
+
+ /* Do we need to reallocate? */
+ if ( l_size <= ( 1u << ( bucket - 1 ) ) )
+ {
+ LIST * result = list_alloc( size );
+ memcpy( list_begin( result ), list_begin( l ), l_size * sizeof(
+ OBJECT * ) );
+ list_dealloc( l );
+ l = result;
+ }
+
+ l->impl.size = size;
+ memcpy( list_begin( l ) + l_size, list_begin( nl ), nl_size * sizeof(
+ OBJECT * ) );
+ list_dealloc( nl );
+ }
+ return l;
+}
+
+LISTITER list_begin( LIST * l )
+{
+ return l ? (LISTITER)( (char *)l + sizeof( LIST ) ) : 0;
+}
+
+LISTITER list_end( LIST * l )
+{
+ return l ? list_begin( l ) + l->impl.size : 0;
+}
+
+LIST * list_new( OBJECT * value )
+{
+ LIST * const head = list_alloc( 1 );
+ head->impl.size = 1;
+ list_begin( head )[ 0 ] = value;
+ return head;
+}
+
+/*
+ * list_push_back() - tack a string onto the end of a list of strings
+ */
+
+LIST * list_push_back( LIST * head, OBJECT * value )
+{
+ unsigned int size = list_length( head );
+
+ if ( DEBUG_LISTS )
+ out_printf( "list > %s <\n", object_str( value ) );
+
+ /* If the size is a power of 2, reallocate. */
+ if ( size == 0 )
+ {
+ head = list_alloc( 1 );
+ }
+ else if ( ( ( size - 1 ) & size ) == 0 )
+ {
+ LIST * l = list_alloc( size + 1 );
+ memcpy( l, head, sizeof( LIST ) + size * sizeof( OBJECT * ) );
+ list_dealloc( head );
+ head = l;
+ }
+
+ list_begin( head )[ size ] = value;
+ head->impl.size = size + 1;
+
+ return head;
+}
+
+
+/*
+ * list_copy() - copy a whole list of strings.
+ */
+
+LIST * list_copy( LIST * l )
+{
+ int size = list_length( l );
+ int i;
+ LIST * result;
+
+ if ( size == 0 ) return L0;
+
+ result = list_alloc( size );
+ result->impl.size = size;
+ for ( i = 0; i < size; ++i )
+ list_begin( result )[ i ] = object_copy( list_begin( l )[ i ] );
+ return result;
+}
+
+
+LIST * list_copy_range( LIST * l, LISTITER first, LISTITER last )
+{
+ if ( first == last )
+ return L0;
+ else
+ {
+ int size = last - first;
+ LIST * result = list_alloc( size );
+ LISTITER dest = list_begin( result );
+ result->impl.size = size;
+ for ( ; first != last; ++first, ++dest )
+ *dest = object_copy( *first );
+ return result;
+ }
+}
+
+
+/*
+ * list_sublist() - copy a subset of a list of strings.
+ */
+
+LIST * list_sublist( LIST * l, int start, int count )
+{
+ int end = start + count;
+ int size = list_length( l );
+ if ( start >= size ) return L0;
+ if ( end > size ) end = size;
+ return list_copy_range( l, list_begin( l ) + start, list_begin( l ) + end );
+}
+
+
+static int str_ptr_compare( void const * va, void const * vb )
+{
+ OBJECT * a = *( (OBJECT * *)va );
+ OBJECT * b = *( (OBJECT * *)vb );
+ return strcmp( object_str( a ), object_str( b ) );
+}
+
+
+LIST * list_sort( LIST * l )
+{
+ int len;
+ LIST * result;
+
+ if ( !l )
+ return L0;
+
+ len = list_length( l );
+ result = list_copy( l );
+
+ qsort( list_begin( result ), len, sizeof( OBJECT * ), str_ptr_compare );
+
+ return result;
+}
+
+
+/*
+ * list_free() - free a list of strings
+ */
+
+void list_free( LIST * head )
+{
+ if ( !list_empty( head ) )
+ {
+ LISTITER iter = list_begin( head );
+ LISTITER const end = list_end( head );
+ for ( ; iter != end; iter = list_next( iter ) )
+ object_free( list_item( iter ) );
+ list_dealloc( head );
+ }
+}
+
+
+/*
+ * list_pop_front() - remove the front element from a list of strings
+ */
+
+LIST * list_pop_front( LIST * l )
+{
+ unsigned size = list_length( l );
+ assert( size );
+ --size;
+ object_free( list_front( l ) );
+
+ if ( size == 0 )
+ {
+ list_dealloc( l );
+ return L0;
+ }
+
+ if ( ( ( size - 1 ) & size ) == 0 )
+ {
+ LIST * const nl = list_alloc( size );
+ nl->impl.size = size;
+ memcpy( list_begin( nl ), list_begin( l ) + 1, size * sizeof( OBJECT * )
+ );
+ list_dealloc( l );
+ return nl;
+ }
+
+ l->impl.size = size;
+ memmove( list_begin( l ), list_begin( l ) + 1, size * sizeof( OBJECT * ) );
+ return l;
+}
+
+LIST * list_reverse( LIST * l )
+{
+ int size = list_length( l );
+ if ( size == 0 ) return L0;
+ {
+ LIST * const result = list_alloc( size );
+ int i;
+ result->impl.size = size;
+ for ( i = 0; i < size; ++i )
+ list_begin( result )[ i ] = object_copy( list_begin( l )[ size - i -
+ 1 ] );
+ return result;
+ }
+}
+
+int list_cmp( LIST * t, LIST * s )
+{
+ int status = 0;
+ LISTITER t_it = list_begin( t );
+ LISTITER const t_end = list_end( t );
+ LISTITER s_it = list_begin( s );
+ LISTITER const s_end = list_end( s );
+
+ while ( !status && ( t_it != t_end || s_it != s_end ) )
+ {
+ char const * st = t_it != t_end ? object_str( list_item( t_it ) ) : "";
+ char const * ss = s_it != s_end ? object_str( list_item( s_it ) ) : "";
+
+ status = strcmp( st, ss );
+
+ t_it = t_it != t_end ? list_next( t_it ) : t_it;
+ s_it = s_it != s_end ? list_next( s_it ) : s_it;
+ }
+
+ return status;
+}
+
+int list_is_sublist( LIST * sub, LIST * l )
+{
+ LISTITER iter = list_begin( sub );
+ LISTITER const end = list_end( sub );
+ for ( ; iter != end; iter = list_next( iter ) )
+ if ( !list_in( l, list_item( iter ) ) )
+ return 0;
+ return 1;
+}
+
+/*
+ * list_print() - print a list of strings to stdout
+ */
+
+void list_print( LIST * l )
+{
+ LISTITER iter = list_begin( l ), end = list_end( l );
+ if ( iter != end )
+ {
+ out_printf( "%s", object_str( list_item( iter ) ) );
+ iter = list_next( iter );
+ for ( ; iter != end; iter = list_next( iter ) )
+ out_printf( " %s", object_str( list_item( iter ) ) );
+ }
+}
+
+
+/*
+ * list_length() - return the number of items in the list
+ */
+
+int list_length( LIST * l )
+{
+ return l ? l->impl.size : 0;
+}
+
+
+int list_in( LIST * l, OBJECT * value )
+{
+ LISTITER iter = list_begin( l );
+ LISTITER end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ if ( object_equal( list_item( iter ), value ) )
+ return 1;
+ return 0;
+}
+
+
+LIST * list_unique( LIST * sorted_list )
+{
+ LIST * result = L0;
+ OBJECT * last_added = 0;
+
+ LISTITER iter = list_begin( sorted_list ), end = list_end( sorted_list );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( !last_added || !object_equal( list_item( iter ), last_added ) )
+ {
+ result = list_push_back( result, object_copy( list_item( iter ) ) );
+ last_added = list_item( iter );
+ }
+ }
+ return result;
+}
+
+void list_done()
+{
+ unsigned int i;
+ for ( i = 0; i < sizeof( freelist ) / sizeof( freelist[ 0 ] ); ++i )
+ {
+ LIST * l = freelist[ i ];
+ while ( l )
+ {
+ LIST * const tmp = l;
+ l = l->impl.next;
+ BJAM_FREE( tmp );
+ }
+ }
+}
+
+
+/*
+ * lol_init() - initialize a LOL (list of lists).
+ */
+
+void lol_init( LOL * lol )
+{
+ lol->count = 0;
+}
+
+
+/*
+ * lol_add() - append a LIST onto an LOL.
+ */
+
+void lol_add( LOL * lol, LIST * l )
+{
+ if ( lol->count < LOL_MAX )
+ lol->list[ lol->count++ ] = l;
+}
+
+
+/*
+ * lol_free() - free the LOL and its LISTs.
+ */
+
+void lol_free( LOL * lol )
+{
+ int i;
+ for ( i = 0; i < lol->count; ++i )
+ list_free( lol->list[ i ] );
+ lol->count = 0;
+}
+
+
+/*
+ * lol_get() - return one of the LISTs in the LOL.
+ */
+
+LIST * lol_get( LOL * lol, int i )
+{
+ return i < lol->count ? lol->list[ i ] : L0;
+}
+
+
+/*
+ * lol_print() - debug print LISTS separated by ":".
+ */
+
+void lol_print( LOL * lol )
+{
+ int i;
+ for ( i = 0; i < lol->count; ++i )
+ {
+ if ( i )
+ out_printf( " : " );
+ list_print( lol->list[ i ] );
+ }
+}
+
+#ifdef HAVE_PYTHON
+
+PyObject * list_to_python( LIST * l )
+{
+ PyObject * result = PyList_New( 0 );
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ PyObject * s = PyString_FromString( object_str( list_item( iter ) ) );
+ PyList_Append( result, s );
+ Py_DECREF( s );
+ }
+
+ return result;
+}
+
+LIST * list_from_python( PyObject * l )
+{
+ LIST * result = L0;
+
+ Py_ssize_t n = PySequence_Size( l );
+ Py_ssize_t i;
+ for ( i = 0; i < n; ++i )
+ {
+ PyObject * v = PySequence_GetItem( l, i );
+ result = list_push_back( result, object_new( PyString_AsString( v ) ) );
+ Py_DECREF( v );
+ }
+
+ return result;
+}
+
+#endif
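The allocator above gives every LIST a power-of-two capacity: `get_bucket()` returns the smallest bucket whose capacity `1 << bucket` holds `size` objects, freed nodes are recycled per bucket through `freelist[]`, and `list_push_back()` / `list_append()` only copy into a larger node when growth crosses a power-of-two boundary (the `((size - 1) & size) == 0` test detects an exact power of two). A small sketch of that arithmetic, mirroring `get_bucket()` above, to make the reallocation points concrete:

    /* illustrative only: the same loop as get_bucket() in lists.cpp */
    static unsigned bucket_for( unsigned size )
    {
        unsigned b = 0;
        while ( size > ( 1u << b ) ) ++b;
        return b;
    }

    /* bucket_for(1) == 0, bucket_for(2) == 1, bucket_for(3) == 2,
     * bucket_for(4) == 2, bucket_for(5) == 3: a 4-element list still fits
     * bucket 2, so appending a 5th element copies it into a bucket-3 node. */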
diff --git a/src/boost/tools/build/src/engine/lists.h b/src/boost/tools/build/src/engine/lists.h
new file mode 100644
index 000000000..7ddb668a4
--- /dev/null
+++ b/src/boost/tools/build/src/engine/lists.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * lists.h - the LIST structure and routines to manipulate them
+ *
+ * The whole of jam relies on lists of objects as a datatype. This module, in
+ * conjunction with object.c, handles these relatively efficiently.
+ *
+ * Structures defined:
+ *
+ * LIST - list of OBJECTs
+ * LOL - list of LISTs
+ *
+ * External routines:
+ *
+ * list_append() - append a list onto another one, returning total
+ * list_new() - create a one-element list holding the given object
+ * list_copy() - copy a whole list of objects
+ * list_sublist() - copy a subset of a list of objects
+ * list_free() - free a list of objects
+ * list_print() - print a list of objects to stdout
+ * list_length() - return the number of items in the list
+ *
+ * lol_init() - initialize a LOL (list of lists)
+ * lol_add() - append a LIST onto an LOL
+ * lol_free() - free the LOL and its LISTs
+ * lol_get() - return one of the LISTs in the LOL
+ * lol_print() - debug print LISTS separated by ":"
+ */
+
+#ifndef LISTS_DWA20011022_H
+#define LISTS_DWA20011022_H
+
+#include "config.h"
+#include "object.h"
+
+#ifdef HAVE_PYTHON
+# include <Python.h>
+#endif
+
+/*
+ * LIST - list of strings
+ */
+
+typedef struct _list {
+ union {
+ int size;
+ struct _list * next;
+ OBJECT * align;
+ } impl;
+} LIST;
+
+typedef OBJECT * * LISTITER;
+
+/*
+ * LOL - list of LISTs
+ */
+
+#define LOL_MAX 19
+typedef struct _lol {
+ int count;
+ LIST * list[ LOL_MAX ];
+} LOL;
+
+LIST * list_new( OBJECT * value );
+LIST * list_append( LIST * destination, LIST * source );
+LIST * list_copy( LIST * );
+LIST * list_copy_range( LIST * destination, LISTITER first, LISTITER last );
+void list_free( LIST * head );
+LIST * list_push_back( LIST * head, OBJECT * value );
+void list_print( LIST * );
+int list_length( LIST * );
+LIST * list_sublist( LIST *, int start, int count );
+LIST * list_pop_front( LIST * );
+LIST * list_sort( LIST * );
+LIST * list_unique( LIST * sorted_list );
+int list_in( LIST *, OBJECT * value );
+LIST * list_reverse( LIST * );
+int list_cmp( LIST * lhs, LIST * rhs );
+int list_is_sublist( LIST * sub, LIST * l );
+void list_done();
+
+LISTITER list_begin( LIST * );
+LISTITER list_end( LIST * );
+#define list_next( it ) ((it) + 1)
+#define list_item( it ) (*(it))
+#define list_empty( l ) ((l) == L0)
+#define list_front( l ) list_item( list_begin( l ) )
+
+#define L0 ((LIST *)0)
+
+void lol_add( LOL *, LIST * );
+void lol_init( LOL * );
+void lol_free( LOL * );
+LIST * lol_get( LOL *, int i );
+void lol_print( LOL * );
+void lol_build( LOL *, char const * * elements );
+
+#ifdef HAVE_PYTHON
+PyObject * list_to_python( LIST * );
+LIST * list_from_python( PyObject * );
+#endif
+
+#endif
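Taken together, the declarations above give the usual build / iterate / free pattern for a LIST. A minimal usage sketch; `object_new()` and `object_str()` are assumed to have their usual jam signatures (create an OBJECT from a C string, return its C string) and live in object.h, which this header includes but which is outside this excerpt, and the function below is only an illustration, not part of the engine:

    #include <stdio.h>
    #include "lists.h"
    #include "object.h"

    static void list_demo()
    {
        LIST * l = L0;                                    /* start empty */
        l = list_push_back( l, object_new( "alpha" ) );
        l = list_push_back( l, object_new( "beta" ) );

        LISTITER it = list_begin( l );
        LISTITER const end = list_end( l );
        for ( ; it != end; it = list_next( it ) )
            printf( "%s\n", object_str( list_item( it ) ) );

        list_free( l );   /* frees each OBJECT and recycles the node */
    }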
diff --git a/src/boost/tools/build/src/engine/make.cpp b/src/boost/tools/build/src/engine/make.cpp
new file mode 100644
index 000000000..470626e56
--- /dev/null
+++ b/src/boost/tools/build/src/engine/make.cpp
@@ -0,0 +1,941 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * make.c - bring a target up to date, once rules are in place.
+ *
+ * This module controls the execution of rules to bring a target and its
+ * dependencies up to date. It is invoked after the targets, rules, et al.
+ * described in rules.h are created by interpreting the jam files.
+ *
+ * This file contains the main make() entry point and the first pass make0().
+ * The second pass, make1(), which actually does the command execution, is in
+ * make1.c.
+ *
+ * External routines:
+ * make() - make a target, given its name
+ *
+ * Internal routines:
+ * make0() - bind and scan everything to make a TARGET
+ * make0sort() - reorder TARGETS chain by their time (newest to oldest)
+ */
+
+#include "jam.h"
+#include "make.h"
+
+#include "command.h"
+#ifdef OPT_HEADER_CACHE_EXT
+# include "hcache.h"
+#endif
+#include "headers.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "timestamp.h"
+#include "variable.h"
+#include "execcmd.h"
+#include "output.h"
+
+#include <assert.h>
+
+#ifndef max
+# define max(a,b) ((a)>(b)?(a):(b))
+#endif
+
+static TARGETS * make0sort( TARGETS * c );
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ static void dependGraphOutput( TARGET * t, int depth );
+#endif
+
+static char const * target_fate[] =
+{
+ "init", /* T_FATE_INIT */
+ "making", /* T_FATE_MAKING */
+ "stable", /* T_FATE_STABLE */
+ "newer", /* T_FATE_NEWER */
+ "temp", /* T_FATE_ISTMP */
+ "touched", /* T_FATE_TOUCHED */
+ "rebuild", /* T_FATE_REBUILD */
+ "missing", /* T_FATE_MISSING */
+ "needtmp", /* T_FATE_NEEDTMP */
+ "old", /* T_FATE_OUTDATED */
+ "update", /* T_FATE_UPDATE */
+ "nofind", /* T_FATE_CANTFIND */
+ "nomake" /* T_FATE_CANTMAKE */
+};
+
+static char const * target_bind[] =
+{
+ "unbound",
+ "missing",
+ "parents",
+ "exists",
+};
+
+#define spaces(x) ( ((const char *)" ") + ( x > 20 ? 0 : 20-x ) )
+
+
+/*
+ * make() - make a target, given its name.
+ */
+
+int make( LIST * targets, int anyhow )
+{
+ COUNTS counts[ 1 ];
+ int status = 0; /* 1 if anything fails */
+
+#ifdef OPT_HEADER_CACHE_EXT
+ hcache_init();
+#endif
+
+ memset( (char *)counts, 0, sizeof( *counts ) );
+
+ /* Make sure that the tables are set up correctly. */
+ exec_init();
+
+ /* First bind all targets with LOCATE_TARGET setting. This is needed to
+ * correctly handle dependencies on generated headers.
+ */
+ bind_explicitly_located_targets();
+
+ {
+ LISTITER iter, end;
+ PROFILE_ENTER( MAKE_MAKE0 );
+ for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
+ {
+ TARGET * t = bindtarget( list_item( iter ) );
+ if ( t->fate == T_FATE_INIT )
+ make0( t, 0, 0, counts, anyhow, 0 );
+ }
+ PROFILE_EXIT( MAKE_MAKE0 );
+ }
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_GRAPH )
+ {
+ LISTITER iter, end;
+ for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
+ dependGraphOutput( bindtarget( list_item( iter ) ), 0 );
+ }
+#endif
+
+ if ( DEBUG_MAKE )
+ {
+ if ( counts->targets )
+ out_printf( "...found %d target%s...\n", counts->targets,
+ counts->targets > 1 ? "s" : "" );
+ if ( counts->temp )
+ out_printf( "...using %d temp target%s...\n", counts->temp,
+ counts->temp > 1 ? "s" : "" );
+ if ( counts->updating )
+ out_printf( "...updating %d target%s...\n", counts->updating,
+ counts->updating > 1 ? "s" : "" );
+ if ( counts->cantfind )
+ out_printf( "...can't find %d target%s...\n", counts->cantfind,
+ counts->cantfind > 1 ? "s" : "" );
+ if ( counts->cantmake )
+ out_printf( "...can't make %d target%s...\n", counts->cantmake,
+ counts->cantmake > 1 ? "s" : "" );
+ }
+
+ status = counts->cantfind || counts->cantmake;
+
+ {
+ PROFILE_ENTER( MAKE_MAKE1 );
+ status |= make1( targets );
+ PROFILE_EXIT( MAKE_MAKE1 );
+ }
+
+ return status;
+}
+
+
+/* Force any dependants of t that make0() has already at least begun visiting
+ * to be updated.
+ */
+
+static void force_rebuilds( TARGET * t );
+
+static void update_dependants( TARGET * t )
+{
+ TARGETS * q;
+
+ for ( q = t->dependants; q; q = q->next )
+ {
+ TARGET * p = q->target;
+ char fate0 = p->fate;
+
+ /* If we have already at least begun visiting it and we are not already
+ * rebuilding it for other reasons.
+ */
+ if ( ( fate0 != T_FATE_INIT ) && ( fate0 < T_FATE_BUILD ) )
+ {
+ p->fate = T_FATE_UPDATE;
+
+ if ( DEBUG_FATE )
+ {
+ out_printf( "fate change %s from %s to %s (as dependent of %s)\n",
+ object_str( p->name ), target_fate[ (int) fate0 ], target_fate[ (int) p->fate ], object_str( t->name ) );
+ }
+
+ /* If we are done visiting it, go back and make sure its dependants
+ * get rebuilt.
+ */
+ if ( fate0 > T_FATE_MAKING )
+ update_dependants( p );
+ }
+ }
+ /* Make sure that rebuilds can be chained. */
+ force_rebuilds( t );
+}
+
+
+/*
+ * Make sure that all of t's rebuilds get rebuilt.
+ */
+
+static void force_rebuilds( TARGET * t )
+{
+ TARGETS * d;
+ for ( d = t->rebuilds; d; d = d->next )
+ {
+ TARGET * r = d->target;
+
+ /* If it is not already being rebuilt for other reasons. */
+ if ( r->fate < T_FATE_BUILD )
+ {
+ if ( DEBUG_FATE )
+ out_printf( "fate change %s from %s to %s (by rebuild)\n",
+ object_str( r->name ), target_fate[ (int) r->fate ], target_fate[ T_FATE_REBUILD ] );
+
+ /* Force rebuild it. */
+ r->fate = T_FATE_REBUILD;
+
+ /* And make sure its dependants are updated too. */
+ update_dependants( r );
+ }
+ }
+}
+
+
+int make0rescan( TARGET * t, TARGET * rescanning )
+{
+ int result = 0;
+ TARGETS * c;
+
+ /* Check whether we have already found a cycle. */
+ if ( target_scc( t ) == rescanning )
+ return 1;
+
+ /* If we have already visited this node, ignore it. */
+ if ( t->rescanning == rescanning )
+ return 0;
+
+ /* If t is already updated, ignore it. */
+ if ( t->scc_root == NULL && t->progress > T_MAKE_ACTIVE )
+ return 0;
+
+ t->rescanning = rescanning;
+ for ( c = t->depends; c; c = c->next )
+ {
+ TARGET * dependency = c->target;
+ /* Always start at the root of each new strongly connected component. */
+ if ( target_scc( dependency ) != target_scc( t ) )
+ dependency = target_scc( dependency );
+ result |= make0rescan( dependency, rescanning );
+
+ /* Make sure that we pick up the new include node. */
+ if ( c->target->includes == rescanning )
+ result = 1;
+ }
+ if ( result && t->scc_root == NULL )
+ {
+ t->scc_root = rescanning;
+ rescanning->depends = targetentry( rescanning->depends, t );
+ }
+ return result;
+}
+
+
+/*
+ * make0() - bind and scan everything to make a TARGET.
+ *
+ * Recursively binds a target, searches for #included headers, calls itself on
+ * those headers and any dependencies.
+ */
+
+void make0
+(
+ TARGET * t,
+ TARGET * p, /* parent */
+ int depth, /* for display purposes */
+ COUNTS * counts, /* for reporting */
+ int anyhow, /* forcibly touch all (real) targets */
+ TARGET * rescanning
+)
+{
+ TARGETS * c;
+ TARGET * ptime = t;
+ TARGET * located_target = 0;
+ timestamp last;
+ timestamp leaf;
+ timestamp hlast;
+ int fate;
+ char const * flag = "";
+ SETTINGS * s;
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ int savedFate;
+ int oldTimeStamp;
+#endif
+
+ if ( DEBUG_MAKEPROG )
+ out_printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) );
+
+ /*
+ * Step 1: Initialize.
+ */
+
+ t->fate = T_FATE_MAKING;
+ t->depth = depth;
+
+ /*
+ * Step 2: Under the influence of "on target" variables, bind the target and
+ * search for headers.
+ */
+
+ /* Step 2a: Set "on target" variables. */
+ s = copysettings( t->settings );
+ pushsettings( root_module(), s );
+
+ /* Step 2b: Find and timestamp the target file (if it is a file). */
+ if ( ( t->binding == T_BIND_UNBOUND ) && !( t->flags & T_FLAG_NOTFILE ) )
+ {
+ OBJECT * another_target;
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, &another_target,
+ t->flags & T_FLAG_ISFILE );
+ /* If it was detected that this target refers to an already existing and
+ * bound target, we add a dependency so that every target depending on
+ * us will depend on that other target as well.
+ */
+ if ( another_target )
+ located_target = bindtarget( another_target );
+
+ t->binding = timestamp_empty( &t->time )
+ ? T_BIND_MISSING
+ : T_BIND_EXISTS;
+ }
+
+ /* INTERNAL, NOTFILE header nodes have the time of their parents. */
+ if ( p && ( t->flags & T_FLAG_INTERNAL ) )
+ ptime = p;
+
+ /* If temp file does not exist but parent does, use parent. */
+ if ( p && ( t->flags & T_FLAG_TEMP ) &&
+ ( t->binding == T_BIND_MISSING ) &&
+ ( p->binding != T_BIND_MISSING ) )
+ {
+ t->binding = T_BIND_PARENTS;
+ ptime = p;
+ }
+
+#ifdef OPT_SEMAPHORE
+ {
+ LIST * var = var_get( root_module(), constant_JAM_SEMAPHORE );
+ if ( !list_empty( var ) )
+ {
+ TARGET * const semaphore = bindtarget( list_front( var ) );
+ semaphore->progress = T_MAKE_SEMAPHORE;
+ t->semaphore = semaphore;
+ }
+ }
+#endif
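+
+ /* Illustrative sketch (not taken from this source): a Jamfile can request
+ * this serialization by setting JAM_SEMAPHORE on the targets that must not
+ * build concurrently, naming a shared semaphore target, e.g.
+ *
+ *     JAM_SEMAPHORE on a.obj b.obj = pch-semaphore ;
+ *
+ * The target and semaphore names above are hypothetical; make1 then runs
+ * the updates of a.obj and b.obj one at a time.
+ */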
+
+ /* Step 2c: If it is a file, search for headers. */
+ if ( t->binding == T_BIND_EXISTS )
+ headers( t );
+
+ /* Step 2d: reset "on target" variables. */
+ popsettings( root_module(), s );
+ freesettings( s );
+
+ /*
+ * Pause for a little progress reporting.
+ */
+
+ if ( DEBUG_BIND )
+ {
+ if ( !object_equal( t->name, t->boundname ) )
+ out_printf( "bind\t--\t%s%s: %s\n", spaces( depth ),
+ object_str( t->name ), object_str( t->boundname ) );
+
+ switch ( t->binding )
+ {
+ case T_BIND_UNBOUND:
+ case T_BIND_MISSING:
+ case T_BIND_PARENTS:
+ out_printf( "time\t--\t%s%s: %s\n", spaces( depth ),
+ object_str( t->name ), target_bind[ (int)t->binding ] );
+ break;
+
+ case T_BIND_EXISTS:
+ out_printf( "time\t--\t%s%s: %s\n", spaces( depth ),
+ object_str( t->name ), timestamp_str( &t->time ) );
+ break;
+ }
+ }
+
+ /*
+ * Step 3: Recursively make0() dependencies & headers.
+ */
+
+ /* Step 3a: Recursively make0() dependencies. */
+ for ( c = t->depends; c; c = c->next )
+ {
+ int const internal = t->flags & T_FLAG_INTERNAL;
+
+ /* Warn about circular deps, except for includes, which include each
+ * other a lot.
+ */
+ if ( c->target->fate == T_FATE_INIT )
+ make0( c->target, ptime, depth + 1, counts, anyhow, rescanning );
+ else if ( c->target->fate == T_FATE_MAKING && !internal )
+ out_printf( "warning: %s depends on itself\n", object_str(
+ c->target->name ) );
+ else if ( c->target->fate != T_FATE_MAKING && rescanning )
+ make0rescan( c->target, rescanning );
+ if ( rescanning && c->target->includes && c->target->includes->fate !=
+ T_FATE_MAKING )
+ make0rescan( target_scc( c->target->includes ), rescanning );
+ }
+
+ if ( located_target )
+ {
+ if ( located_target->fate == T_FATE_INIT )
+ make0( located_target, ptime, depth + 1, counts, anyhow, rescanning
+ );
+ else if ( located_target->fate != T_FATE_MAKING && rescanning )
+ make0rescan( located_target, rescanning );
+ }
+
+ /* Step 3b: Recursively make0() internal includes node. */
+ if ( t->includes )
+ make0( t->includes, p, depth + 1, counts, anyhow, rescanning );
+
+ /* Step 3c: Add dependencies' includes to our direct dependencies. */
+ {
+ TARGETS * incs = 0;
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->includes )
+ incs = targetentry( incs, c->target->includes );
+ t->depends = targetchain( t->depends, incs );
+ }
+
+ if ( located_target )
+ t->depends = targetentry( t->depends, located_target );
+
+ /* Step 3d: Detect cycles. */
+ {
+ int cycle_depth = depth;
+ for ( c = t->depends; c; c = c->next )
+ {
+ TARGET * scc_root = target_scc( c->target );
+ if ( scc_root->fate == T_FATE_MAKING &&
+ ( !scc_root->includes ||
+ scc_root->includes->fate != T_FATE_MAKING ) )
+ {
+ if ( scc_root->depth < cycle_depth )
+ {
+ cycle_depth = scc_root->depth;
+ t->scc_root = scc_root;
+ }
+ }
+ }
+ }
+
+ /*
+ * Step 4: Compute time & fate.
+ */
+
+ /* Step 4a: Pick up dependencies' time and fate. */
+ timestamp_clear( &last );
+ timestamp_clear( &leaf );
+ fate = T_FATE_STABLE;
+ for ( c = t->depends; c; c = c->next )
+ {
+ /* If we are in a different strongly connected component, pull
+ * timestamps from the root.
+ */
+ if ( c->target->scc_root )
+ {
+ TARGET * const scc_root = target_scc( c->target );
+ if ( scc_root != t->scc_root )
+ {
+ timestamp_max( &c->target->leaf, &c->target->leaf,
+ &scc_root->leaf );
+ timestamp_max( &c->target->time, &c->target->time,
+ &scc_root->time );
+ c->target->fate = max( c->target->fate, scc_root->fate );
+ }
+ }
+
+ /* If LEAVES has been applied, we only heed the timestamps of the leaf
+ * source nodes.
+ */
+ timestamp_max( &leaf, &leaf, &c->target->leaf );
+ if ( t->flags & T_FLAG_LEAVES )
+ {
+ timestamp_copy( &last, &leaf );
+ continue;
+ }
+ timestamp_max( &last, &last, &c->target->time );
+ fate = max( fate, c->target->fate );
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ if ( fate < c->target->fate )
+ out_printf( "fate change %s from %s to %s by dependency %s\n",
+ object_str( t->name ), target_fate[ (int)fate ],
+ target_fate[ (int)c->target->fate ], object_str(
+ c->target->name ) );
+#endif
+ }
+
+ /* Step 4b: Pick up included headers time. */
+
+ /*
+ * If a header is newer than a temp source that includes it, the temp source
+ * will need building.
+ */
+ if ( t->includes )
+ timestamp_copy( &hlast, &t->includes->time );
+ else
+ timestamp_clear( &hlast );
+
+ /* Step 4c: handle NOUPDATE oddity.
+ *
+ * If a NOUPDATE file exists, mark it as having eternally old dependencies.
+ * Do not inherit our fate from our dependencies. Decide fate based only on
+ * other flags and our binding (done later).
+ */
+ if ( t->flags & T_FLAG_NOUPDATE )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ if ( fate != T_FATE_STABLE )
+ out_printf( "fate change %s back to stable, NOUPDATE.\n",
+ object_str( t->name ) );
+#endif
+
+ timestamp_clear( &last );
+ timestamp_clear( &t->time );
+
+ /* Do not inherit our fate from our dependencies. Decide fate based only
+ * upon other flags and our binding (done later).
+ */
+ fate = T_FATE_STABLE;
+ }
+
+ /* Step 4d: Determine fate: rebuild target or what? */
+
+ /*
+ In English:
+ If can not find or make child, can not make target.
+ If children changed, make target.
+ If target missing, make it.
+ If children newer, make target.
+ If temp's children newer than parent, make temp.
+ If temp's headers newer than parent, make temp.
+ If deliberately touched, make it.
+ If up-to-date temp file present, use it.
+ If target newer than non-notfile parent, mark target newer.
+ Otherwise, stable!
+
+ Note this block runs from least to most stable: the further down the
+ list we get, the more stable the target's fate becomes.
+ */
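+
+ /* Worked example (illustrative only): an existing object file whose source
+ * dependency carries a newer timestamp yields timestamp_cmp( &last, &t->time )
+ * > 0, so the chain below picks T_FATE_OUTDATED and the target is rebuilt.
+ */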
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ savedFate = fate;
+ oldTimeStamp = 0;
+#endif
+
+ if ( fate >= T_FATE_BROKEN )
+ {
+ fate = T_FATE_CANTMAKE;
+ }
+ else if ( fate >= T_FATE_SPOIL )
+ {
+ fate = T_FATE_UPDATE;
+ }
+ else if ( t->binding == T_BIND_MISSING )
+ {
+ fate = T_FATE_MISSING;
+ }
+ else if ( t->binding == T_BIND_EXISTS && timestamp_cmp( &last, &t->time ) >
+ 0 )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_OUTDATED;
+ }
+ else if ( t->binding == T_BIND_PARENTS && timestamp_cmp( &last, &p->time ) >
+ 0 )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_NEEDTMP;
+ }
+ else if ( t->binding == T_BIND_PARENTS && timestamp_cmp( &hlast, &p->time )
+ > 0 )
+ {
+ fate = T_FATE_NEEDTMP;
+ }
+ else if ( t->flags & T_FLAG_TOUCHED )
+ {
+ fate = T_FATE_TOUCHED;
+ }
+ else if ( anyhow && !( t->flags & T_FLAG_NOUPDATE ) )
+ {
+ fate = T_FATE_TOUCHED;
+ }
+ else if ( t->binding == T_BIND_EXISTS && ( t->flags & T_FLAG_TEMP ) )
+ {
+ fate = T_FATE_ISTMP;
+ }
+ else if ( t->binding == T_BIND_EXISTS && p && p->binding != T_BIND_UNBOUND
+ && timestamp_cmp( &t->time, &p->time ) > 0 )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_NEWER;
+ }
+ else
+ {
+ fate = T_FATE_STABLE;
+ }
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE && ( fate != savedFate ) )
+ {
+ if ( savedFate == T_FATE_STABLE )
+ out_printf( "fate change %s set to %s%s\n", object_str( t->name ),
+ target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" );
+ else
+ out_printf( "fate change %s from %s to %s%s\n", object_str( t->name ),
+ target_fate[ savedFate ], target_fate[ fate ], oldTimeStamp ?
+ " (by timestamp)" : "" );
+ }
+#endif
+
+ /* Step 4e: Handle missing files. */
+ /* If it is missing and there are no actions to create it, boom. */
+ /* If we can not make a target we do not care about, that is okay. */
+ /* We could insist that there are updating actions for all missing */
+ /* files, but if they have dependencies we just pretend it is a NOTFILE. */
+
+ if ( ( fate == T_FATE_MISSING ) && !t->actions && !t->depends )
+ {
+ if ( t->flags & T_FLAG_NOCARE )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ out_printf( "fate change %s to STABLE from %s, "
+ "no actions, no dependencies and do not care\n",
+ object_str( t->name ), target_fate[ fate ] );
+#endif
+ fate = T_FATE_STABLE;
+ }
+ else
+ {
+ out_printf( "don't know how to make %s\n", object_str( t->name ) );
+ fate = T_FATE_CANTFIND;
+ }
+ }
+
+ /* Step 4f: Propagate dependencies' time & fate. */
+ /* Set leaf time to be our time only if this is a leaf. */
+
+ timestamp_max( &t->time, &t->time, &last );
+ timestamp_copy( &t->leaf, timestamp_empty( &leaf ) ? &t->time : &leaf );
+ /* This target's fate may have been updated by virtue of following some
+ * target's rebuilds list, so only allow it to be increased to the fate we
+ * have calculated. Otherwise, grab its new fate.
+ */
+ if ( fate > t->fate )
+ t->fate = fate;
+ else
+ fate = t->fate;
+
+ /*
+ * Step 4g: If this target needs to be built, make0 all targets
+ * that are updated by the same actions used to update this target.
+ * These have already been marked as REBUILDS, and make1 has
+ * special handling for them. We just need to make sure that
+ * they get make0ed.
+ */
+ if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
+ {
+ ACTIONS * a;
+ TARGETS * c;
+ for ( a = t->actions; a; a = a->next )
+ {
+ for ( c = a->action->targets; c; c = c->next )
+ {
+ if ( c->target->fate == T_FATE_INIT )
+ {
+ make0( c->target, ptime, depth + 1, counts, anyhow, rescanning );
+ }
+ }
+ }
+ }
+
+ /* Step 4h: If this target needs to be built, force rebuild everything in
+ * its rebuilds list.
+ */
+ if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
+ force_rebuilds( t );
+
+ /*
+ * Step 5: Sort dependencies by their update time.
+ */
+
+ if ( globs.newestfirst )
+ t->depends = make0sort( t->depends );
+
+ /*
+ * Step 6: A little harmless tabulating for tracing purposes.
+ */
+
+ /* Do not count or report internal includes nodes. */
+ if ( t->flags & T_FLAG_INTERNAL )
+ return;
+
+ if ( counts )
+ {
+#ifdef OPT_IMPROVED_PATIENCE_EXT
+ ++counts->targets;
+#else
+ if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE )
+ {
+ out_printf( "...patience...\n" );
+ out_flush();
+ }
+#endif
+
+ if ( fate == T_FATE_ISTMP )
+ ++counts->temp;
+ else if ( fate == T_FATE_CANTFIND )
+ ++counts->cantfind;
+ else if ( ( fate == T_FATE_CANTMAKE ) && t->actions )
+ ++counts->cantmake;
+ else if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) &&
+ t->actions )
+ ++counts->updating;
+ }
+
+ if ( !( t->flags & T_FLAG_NOTFILE ) && ( fate >= T_FATE_SPOIL ) )
+ flag = "+";
+ else if ( t->binding == T_BIND_EXISTS && p && timestamp_cmp( &t->time,
+ &p->time ) > 0 )
+ flag = "*";
+
+ if ( DEBUG_MAKEPROG )
+ out_printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int)t->fate ],
+ spaces( depth ), object_str( t->name ) );
+}
+
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+
+static char const * target_name( TARGET * t )
+{
+ static char buf[ 1000 ];
+ if ( t->flags & T_FLAG_INTERNAL )
+ {
+ sprintf( buf, "%s (internal node)", object_str( t->name ) );
+ return buf;
+ }
+ return object_str( t->name );
+}
+
+
+/*
+ * dependGraphOutput() - output the DG after make0 has run.
+ */
+
+static void dependGraphOutput( TARGET * t, int depth )
+{
+ TARGETS * c;
+
+ if ( ( t->flags & T_FLAG_VISITED ) || !t->name || !t->boundname )
+ return;
+
+ t->flags |= T_FLAG_VISITED;
+
+ switch ( t->fate )
+ {
+ case T_FATE_TOUCHED:
+ case T_FATE_MISSING:
+ case T_FATE_OUTDATED:
+ case T_FATE_UPDATE:
+ out_printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t
+ ) );
+ break;
+ default:
+ out_printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t
+ ) );
+ break;
+ }
+
+ if ( !object_equal( t->name, t->boundname ) )
+ out_printf( " %s Loc: %s\n", spaces( depth ), object_str( t->boundname )
+ );
+
+ switch ( t->fate )
+ {
+ case T_FATE_STABLE:
+ out_printf( " %s : Stable\n", spaces( depth ) );
+ break;
+ case T_FATE_NEWER:
+ out_printf( " %s : Newer\n", spaces( depth ) );
+ break;
+ case T_FATE_ISTMP:
+ out_printf( " %s : Up to date temp file\n", spaces( depth ) );
+ break;
+ case T_FATE_NEEDTMP:
+ out_printf( " %s : Temporary file, to be updated\n", spaces( depth )
+ );
+ break;
+ case T_FATE_TOUCHED:
+ out_printf( " %s : Been touched, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_MISSING:
+ out_printf( " %s : Missing, creating it\n", spaces( depth ) );
+ break;
+ case T_FATE_OUTDATED:
+ out_printf( " %s : Outdated, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_REBUILD:
+ out_printf( " %s : Rebuild, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_UPDATE:
+ out_printf( " %s : Updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_CANTFIND:
+ out_printf( " %s : Can not find it\n", spaces( depth ) );
+ break;
+ case T_FATE_CANTMAKE:
+ out_printf( " %s : Can not make it\n", spaces( depth ) );
+ break;
+ }
+
+ if ( t->flags & ~T_FLAG_VISITED )
+ {
+ out_printf( " %s : ", spaces( depth ) );
+ if ( t->flags & T_FLAG_TEMP ) out_printf( "TEMPORARY " );
+ if ( t->flags & T_FLAG_NOCARE ) out_printf( "NOCARE " );
+ if ( t->flags & T_FLAG_NOTFILE ) out_printf( "NOTFILE " );
+ if ( t->flags & T_FLAG_TOUCHED ) out_printf( "TOUCHED " );
+ if ( t->flags & T_FLAG_LEAVES ) out_printf( "LEAVES " );
+ if ( t->flags & T_FLAG_NOUPDATE ) out_printf( "NOUPDATE " );
+ out_printf( "\n" );
+ }
+
+ for ( c = t->depends; c; c = c->next )
+ {
+ out_printf( " %s : Depends on %s (%s)", spaces( depth ),
+ target_name( c->target ), target_fate[ (int)c->target->fate ] );
+ if ( !timestamp_cmp( &c->target->time, &t->time ) )
+ out_printf( " (max time)");
+ out_printf( "\n" );
+ }
+
+ for ( c = t->depends; c; c = c->next )
+ dependGraphOutput( c->target, depth + 1 );
+}
+#endif
+
+
+/*
+ * make0sort() - reorder TARGETS chain by their time (newest to oldest).
+ *
+ * We walk chain, taking each item and inserting it on the sorted result, with
+ * newest items at the front. This involves updating each of the TARGETS'
+ * c->next and c->tail. Note that we make c->tail a valid prev pointer for every
+ * entry. Normally, it is only valid at the head, where prev == tail. Note also
+ * that while tail is a loop, next ends at the end of the chain.
+ */
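+
+/* Worked example (illustrative only): for a chain whose targets carry update
+ * times 5 -> 9 -> 7, the insertion loop below yields 9 -> 7 -> 5, i.e. newest
+ * first, which is what the globs.newestfirst option expects.
+ */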
+
+static TARGETS * make0sort( TARGETS * chain )
+{
+ PROFILE_ENTER( MAKE_MAKE0SORT );
+
+ TARGETS * result = 0;
+
+ /* Walk the current target list. */
+ while ( chain )
+ {
+ TARGETS * c = chain;
+ TARGETS * s = result;
+
+ chain = chain->next;
+
+ /* Find point s in result for c. */
+ while ( s && timestamp_cmp( &s->target->time, &c->target->time ) > 0 )
+ s = s->next;
+
+ /* Insert c in front of s (might be 0). */
+ c->next = s; /* good even if s = 0 */
+ if ( result == s ) result = c; /* new head of chain? */
+ if ( !s ) s = result; /* wrap to ensure a next */
+ if ( result != c ) s->tail->next = c; /* not head? be prev's next */
+ c->tail = s->tail; /* take on next's prev */
+ s->tail = c; /* make next's prev us */
+ }
+
+ PROFILE_EXIT( MAKE_MAKE0SORT );
+ return result;
+}
+
+
+static LIST * targets_to_update_ = L0;
+
+
+void mark_target_for_updating( OBJECT * target )
+{
+ targets_to_update_ = list_push_back( targets_to_update_, object_copy(
+ target ) );
+}
+
+
+LIST * targets_to_update()
+{
+ return targets_to_update_;
+}
+
+
+void clear_targets_to_update()
+{
+ list_free( targets_to_update_ );
+ targets_to_update_ = L0;
+}
diff --git a/src/boost/tools/build/src/engine/make.h b/src/boost/tools/build/src/engine/make.h
new file mode 100644
index 000000000..537b9e98f
--- /dev/null
+++ b/src/boost/tools/build/src/engine/make.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * make.h - bring a target up to date, once rules are in place
+ */
+
+#ifndef MAKE_SW20111118_H
+#define MAKE_SW20111118_H
+
+#include "config.h"
+#include "lists.h"
+#include "object.h"
+#include "rules.h"
+
+int make( LIST * targets, int anyhow );
+int make1( LIST * t );
+
+typedef struct {
+ int temp;
+ int updating;
+ int cantfind;
+ int cantmake;
+ int targets;
+ int made;
+} COUNTS ;
+
+
+void make0( TARGET * t, TARGET * p, int depth, COUNTS * counts, int anyhow,
+ TARGET * rescanning );
+
+
+/* Specifies that the target should be updated. */
+void mark_target_for_updating( OBJECT * target );
+
+/* Returns targets previously passed to mark_target_for_updating(). */
+LIST * targets_to_update();
+
+/* Clears/unmarks all targets currently marked for update. */
+void clear_targets_to_update();
+
+#endif
diff --git a/src/boost/tools/build/src/engine/make1.cpp b/src/boost/tools/build/src/engine/make1.cpp
new file mode 100644
index 000000000..61f0614a1
--- /dev/null
+++ b/src/boost/tools/build/src/engine/make1.cpp
@@ -0,0 +1,1515 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * make1.c - execute commands to bring targets up to date
+ *
+ * This module contains make1(), the entry point called by make() to recursively
+ * descend the dependency graph executing update actions as marked by make0().
+ *
+ * External routines:
+ * make1() - execute commands to update a TARGET and all of its dependencies
+ *
+ * Internal routines, the recursive/asynchronous command executors:
+ * make1a() - recursively schedules dependency builds and then goes to
+ * MAKE1B
+ * make1b() - if nothing is blocking this target's build, proceed to
+ * MAKE1C
+ * make1c() - launch target's next command, or go to parents' MAKE1B
+ * if none
+ * make1c_closure() - handle command execution completion and go to MAKE1C
+ *
+ * Internal support routines:
+ * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>)
+ * make1settings() - for vars with bound values, build up replacement lists
+ * make1bind() - bind targets that weren't bound in dependency analysis
+ */
+
+#include "jam.h"
+#include "make.h"
+
+#include "command.h"
+#include "compile.h"
+#include "execcmd.h"
+#include "headers.h"
+#include "lists.h"
+#include "object.h"
+#include "output.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "variable.h"
+#include "output.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#if !defined( NT ) || defined( __GNUC__ )
+ #include <unistd.h> /* for unlink */
+#endif
+
+static CMD * make1cmds ( TARGET * );
+static LIST * make1list ( LIST *, TARGETS *, int flags );
+static SETTINGS * make1settings ( struct module_t *, LIST * vars );
+static void make1bind ( TARGET * );
+static void push_cmds( CMDLIST * cmds, int status );
+static int cmd_sem_lock( TARGET * t );
+static void cmd_sem_unlock( TARGET * t );
+
+static int targets_contains( TARGETS * l, TARGET * t );
+static int targets_equal( TARGETS * l1, TARGETS * l2 );
+
+/* Ugly static - it is too hard to carry it through the callbacks. */
+
+static struct
+{
+ int failed;
+ int skipped;
+ int total;
+ int made;
+} counts[ 1 ];
+
+/* Target state. */
+#define T_STATE_MAKE1A 0 /* make1a() should be called */
+#define T_STATE_MAKE1B 1 /* make1b() should be called */
+#define T_STATE_MAKE1C 2 /* make1c() should be called */
+
+typedef struct _state state;
+struct _state
+{
+ state * prev; /* previous state on stack */
+ TARGET * t; /* current target */
+ TARGET * parent; /* parent argument necessary for MAKE1A */
+ int curstate; /* current state */
+};
+
+static void make1a( state * const );
+static void make1b( state * const );
+static void make1c( state const * const );
+
+static void make1c_closure( void * const closure, int status,
+ timing_info const * const, char const * const cmd_stdout,
+ char const * const cmd_stderr, int const cmd_exit_reason );
+
+typedef struct _stack
+{
+ state * stack;
+} stack;
+
+static stack state_stack = { NULL };
+
+static state * state_freelist = NULL;
+
+/* Currently running command counter. */
+static int cmdsrunning;
+
+
+static state * alloc_state()
+{
+ if ( state_freelist )
+ {
+ state * const pState = state_freelist;
+ state_freelist = pState->prev;
+ memset( pState, 0, sizeof( state ) );
+ return pState;
+ }
+ return (state *)BJAM_MALLOC( sizeof( state ) );
+}
+
+
+static void free_state( state * const pState )
+{
+ pState->prev = state_freelist;
+ state_freelist = pState;
+}
+
+
+static void clear_state_freelist()
+{
+ while ( state_freelist )
+ {
+ state * const pState = state_freelist;
+ state_freelist = state_freelist->prev;
+ BJAM_FREE( pState );
+ }
+}
+
+
+static state * current_state( stack * const pStack )
+{
+ return pStack->stack;
+}
+
+
+static void pop_state( stack * const pStack )
+{
+ if ( pStack->stack )
+ {
+ state * const pState = pStack->stack->prev;
+ free_state( pStack->stack );
+ pStack->stack = pState;
+ }
+}
+
+
+static state * push_state( stack * const pStack, TARGET * const t,
+ TARGET * const parent, int const curstate )
+{
+ state * const pState = alloc_state();
+ pState->t = t;
+ pState->parent = parent;
+ pState->prev = pStack->stack;
+ pState->curstate = curstate;
+ return pStack->stack = pState;
+}
+
+
+/*
+ * Pushes a stack onto another stack, effectively reversing the order.
+ */
+
+static void push_stack_on_stack( stack * const pDest, stack * const pSrc )
+{
+ while ( pSrc->stack )
+ {
+ state * const pState = pSrc->stack;
+ pSrc->stack = pState->prev;
+ pState->prev = pDest->stack;
+ pDest->stack = pState;
+ }
+}
+
+
+/*
+ * make1() - execute commands to update a list of targets and all of their dependencies
+ */
+
+static int intr = 0;
+static int quit = 0;
+
+int make1( LIST * targets )
+{
+ state * pState;
+ int status = 0;
+
+ memset( (char *)counts, 0, sizeof( *counts ) );
+
+ {
+ LISTITER iter, end;
+ stack temp_stack = { NULL };
+ for ( iter = list_begin( targets ), end = list_end( targets );
+ iter != end; iter = list_next( iter ) )
+ push_state( &temp_stack, bindtarget( list_item( iter ) ), NULL, T_STATE_MAKE1A );
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+
+ /* Clear any state left over from the past */
+ quit = 0;
+
+ /* Recursively make the target and its dependencies. */
+
+ while ( 1 )
+ {
+ while ( ( pState = current_state( &state_stack ) ) )
+ {
+ if ( quit )
+ pop_state( &state_stack );
+
+ switch ( pState->curstate )
+ {
+ case T_STATE_MAKE1A: make1a( pState ); break;
+ case T_STATE_MAKE1B: make1b( pState ); break;
+ case T_STATE_MAKE1C: make1c( pState ); break;
+ default:
+ assert( !"make1(): Invalid state detected." );
+ }
+ }
+ if ( !cmdsrunning )
+ break;
+ /* Wait for outstanding commands to finish running. */
+ exec_wait();
+ }
+
+ clear_state_freelist();
+
+ /* Talk about it. */
+ if ( counts->failed )
+ out_printf( "...failed updating %d target%s...\n", counts->failed,
+ counts->failed > 1 ? "s" : "" );
+ if ( DEBUG_MAKE && counts->skipped )
+ out_printf( "...skipped %d target%s...\n", counts->skipped,
+ counts->skipped > 1 ? "s" : "" );
+ if ( DEBUG_MAKE && counts->made )
+ out_printf( "...updated %d target%s...\n", counts->made,
+ counts->made > 1 ? "s" : "" );
+
+ /* If we were interrupted, exit now that all child processes
+ have finished. */
+ if ( intr )
+ exit( EXITBAD );
+
+ {
+ LISTITER iter, end;
+ for ( iter = list_begin( targets ), end = list_end( targets );
+ iter != end; iter = list_next( iter ) )
+ {
+ /* Check that the target was updated and that the
+ update succeeded. */
+ TARGET * t = bindtarget( list_item( iter ) );
+ if (t->progress == T_MAKE_DONE)
+ {
+ if (t->status != EXEC_CMD_OK)
+ status = 1;
+ }
+ else if ( ! ( t->progress == T_MAKE_NOEXEC_DONE && globs.noexec ) )
+ {
+ status = 1;
+ }
+ }
+ }
+ return status;
+}
+
+
+/*
+ * make1a() - recursively schedules dependency builds and then goes to MAKE1B
+ *
+ * Called to start processing a specified target. Does nothing if the target is
+ * already being processed or otherwise starts processing all of its
+ * dependencies.
+ */
+
+static void make1a( state * const pState )
+{
+ TARGET * t = pState->t;
+ TARGET * const scc_root = target_scc( t );
+
+ if ( !pState->parent || target_scc( pState->parent ) != scc_root )
+ pState->t = t = scc_root;
+
+ /* If the parent is the first to try to build this target or this target is
+ * in the MAKE1C quagmire, arrange for the parent to be notified when this
+ * target has been built.
+ */
+ if ( pState->parent && t->progress <= T_MAKE_RUNNING )
+ {
+ TARGET * const parent_scc = target_scc( pState->parent );
+ if ( t != parent_scc )
+ {
+ t->parents = targetentry( t->parents, parent_scc );
+ ++parent_scc->asynccnt;
+ }
+ }
+
+ /* If the target has been previously updated with -n in effect, and we are
+ * now ignoring -n, update it for real. E.g. if the UPDATE_NOW rule was
+ * called for it twice - first with the -n option and then without.
+ */
+ if ( !globs.noexec && t->progress == T_MAKE_NOEXEC_DONE )
+ t->progress = T_MAKE_INIT;
+
+ /* If this target is already being processed then do nothing. There is no
+ * need to start processing the same target all over again.
+ */
+ if ( t->progress != T_MAKE_INIT )
+ {
+ pop_state( &state_stack );
+ return;
+ }
+
+ /* Guard against circular dependencies. */
+ t->progress = T_MAKE_ONSTACK;
+
+ /* 'asynccnt' counts the dependencies preventing this target from proceeding
+ * to MAKE1C for actual building. We start off with a count of 1 to prevent
+ * anything from happening until we can notify all dependencies that they
+ * are needed. This 1 is then accounted for when we enter MAKE1B ourselves,
+ * below. Without this if a dependency gets built before we finish
+ * processing all of our other dependencies our build might be triggered
+ * prematurely.
+ */
+ t->asynccnt = 1;
+
+ /* Push dependency build requests (to be executed in the natural order). */
+ {
+ stack temp_stack = { NULL };
+ TARGETS * c;
+ for ( c = t->depends; c && !quit; c = c->next )
+ push_state( &temp_stack, c->target, t, T_STATE_MAKE1A );
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+
+ t->progress = T_MAKE_ACTIVE;
+
+ /* Once all of our dependencies have started getting processed we can move
+ * onto MAKE1B.
+ */
+ /* Implementation note:
+ * In theory this would be done by popping this state before pushing
+ * dependency target build requests but as a slight optimization we simply
+ * modify our current state and leave it on the stack instead.
+ */
+ pState->curstate = T_STATE_MAKE1B;
+}
+
+
+/*
+ * make1b() - if nothing is blocking this target's build, proceed to MAKE1C
+ *
+ * Called after something stops blocking this target's build, e.g. that all of
+ * its dependencies have started being processed, one of its dependencies has
+ * been built or a semaphore this target has been waiting for is free again.
+ */
+
+static void make1b( state * const pState )
+{
+ TARGET * const t = pState->t;
+ TARGET * failed = 0;
+ char const * failed_name = "dependencies";
+
+ pop_state( &state_stack );
+
+ /* If any dependencies are still outstanding, wait until they signal their
+ * completion by pushing this same state for their parent targets.
+ */
+ if ( --t->asynccnt )
+ {
+ return;
+ }
+
+ /* Now ready to build target 't', if dependencies built OK. */
+
+ /* Collect status from dependencies. If -n was passed then act as though all
+ * dependencies built correctly (the only way they can fail is if UPDATE_NOW
+ * was called). If the dependencies can not be found or we got an interrupt,
+ * we can not get here.
+ */
+ if ( !globs.noexec )
+ {
+ TARGETS * c;
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->status > t->status && !( c->target->flags &
+ T_FLAG_NOCARE ) )
+ {
+ failed = c->target;
+ t->status = c->target->status;
+ }
+ }
+
+ /* If an internal header node failed to build, we want to output the target
+ * that it failed on.
+ */
+ if ( failed )
+ failed_name = failed->flags & T_FLAG_INTERNAL
+ ? failed->failed
+ : object_str( failed->name );
+ t->failed = failed_name;
+
+ /* If actions for building any of the dependencies have failed, bail.
+ * Otherwise, execute all actions to make the current target.
+ */
+ if ( ( t->status == EXEC_CMD_FAIL ) && t->actions )
+ {
+ ++counts->skipped;
+ if ( ( t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD )
+ {
+ if ( !unlink( object_str( t->boundname ) ) )
+ out_printf( "...removing outdated %s\n", object_str( t->boundname )
+ );
+ }
+ else
+ out_printf( "...skipped %s for lack of %s...\n", object_str( t->name ),
+ failed_name );
+ }
+
+ if ( t->status == EXEC_CMD_OK )
+ switch ( t->fate )
+ {
+ case T_FATE_STABLE:
+ case T_FATE_NEWER:
+ break;
+
+ case T_FATE_CANTFIND:
+ case T_FATE_CANTMAKE:
+ t->status = EXEC_CMD_FAIL;
+ break;
+
+ case T_FATE_ISTMP:
+ if ( DEBUG_MAKE )
+ out_printf( "...using %s...\n", object_str( t->name ) );
+ break;
+
+ case T_FATE_TOUCHED:
+ case T_FATE_MISSING:
+ case T_FATE_NEEDTMP:
+ case T_FATE_OUTDATED:
+ case T_FATE_UPDATE:
+ case T_FATE_REBUILD:
+ /* Prepare commands for executing actions scheduled for this target.
+ * Commands have their embedded variables automatically expanded,
+ * including making use of any "on target" variables.
+ */
+ if ( t->actions )
+ {
+ ++counts->total;
+ if ( DEBUG_MAKE && !( counts->total % 100 ) )
+ out_printf( "...on %dth target...\n", counts->total );
+
+ t->cmds = (char *)make1cmds( t );
+ /* Update the target's "progress" so MAKE1C processing counts it
+ * among its successes/failures.
+ */
+ t->progress = T_MAKE_RUNNING;
+ }
+ break;
+
+ /* All valid fates should have been accounted for by now. */
+ default:
+ err_printf( "ERROR: %s has bad fate %d", object_str( t->name ),
+ t->fate );
+ abort();
+ }
+
+ /* Proceed to MAKE1C to begin executing the chain of commands prepared for
+ * building the target. If we are not going to build the target (e.g. due to
+ * dependency failures or no commands needing to be run) the chain will be
+ * empty and MAKE1C processing will directly signal the target's completion.
+ */
+
+ if ( t->cmds == NULL || --( ( CMD * )t->cmds )->asynccnt == 0 )
+ push_state( &state_stack, t, NULL, T_STATE_MAKE1C );
+ else if ( DEBUG_EXECCMD )
+ {
+ CMD * cmd = ( CMD * )t->cmds;
+ out_printf( "Delaying %s %s: %d targets not ready\n", object_str( cmd->rule->name ), object_str( t->boundname ), cmd->asynccnt );
+ }
+}
+
+
+/*
+ * make1c() - launch target's next command, or go to parents' MAKE1B if none
+ *
+ * If there are (more) commands to run to build this target (and we have not hit
+ * an error running earlier commands) we launch the command using exec_cmd().
+ * Command execution signals its completion in exec_wait() by calling our
+ * make1c_closure() callback.
+ *
+ * If there are no more commands to run, we collect the status from all the
+ * actions and report our completion to all the parents.
+ */
+
+static void make1c( state const * const pState )
+{
+ TARGET * const t = pState->t;
+ CMD * const cmd = (CMD *)t->cmds;
+ int exec_flags = 0;
+
+ if ( cmd )
+ {
+ /* Pop state first in case something below (e.g. exec_cmd(), exec_wait()
+ * or make1c_closure()) pushes a new state. Note that we must not access
+ * the popped state data after this as the same stack node might have
+ * been reused internally for some newly pushed state.
+ */
+ pop_state( &state_stack );
+
+ if ( cmd->status != EXEC_CMD_OK )
+ {
+ t->cmds = NULL;
+ push_cmds( cmd->next, cmd->status );
+ cmd_free( cmd );
+ return;
+ }
+
+#ifdef OPT_SEMAPHORE
+ if ( ! cmd_sem_lock( t ) )
+ {
+ return;
+ }
+#endif
+
+ /* Increment the jobs running counter. */
+ ++cmdsrunning;
+
+ if ( ( globs.jobs == 1 ) && ( DEBUG_MAKEQ ||
+ ( DEBUG_MAKE && !( cmd->rule->actions->flags & RULE_QUIETLY ) ) ) )
+ {
+ OBJECT * action = cmd->rule->name;
+ OBJECT * target = list_front( lol_get( (LOL *)&cmd->args, 0 ) );
+
+ out_printf( "%s %s\n", object_str( action ), object_str( target ) );
+
+ /* Print out the command executed if given -d+2. */
+ if ( DEBUG_EXEC )
+ {
+ out_puts( cmd->buf->value );
+ out_putc( '\n' );
+ }
+
+ /* We only need to flush the streams if there's likely to
+ * be a wait before it finishes.
+ */
+ if ( ! globs.noexec && ! cmd->noop )
+ {
+ out_flush();
+ err_flush();
+ }
+ }
+ else
+ {
+ exec_flags |= EXEC_CMD_QUIET;
+ }
+
+ /* Execute the actual build command or fake it if no-op. */
+ if ( globs.noexec || cmd->noop )
+ {
+ timing_info time_info = { 0 };
+ timestamp_current( &time_info.start );
+ timestamp_copy( &time_info.end, &time_info.start );
+ make1c_closure( t, EXEC_CMD_OK, &time_info, "", "", EXIT_OK );
+ }
+ else
+ {
+ exec_cmd( cmd->buf, exec_flags, make1c_closure, t, cmd->shell );
+
+ /* Wait until under the concurrent command count limit. */
+ /* FIXME: This wait could be skipped here and moved to just before
+ * trying to execute a command that would cross the command count
+ * limit. Note though that this might affect the order in which
+ * unrelated targets get built and would thus require that all
+ * affected Boost Build tests be updated.
+ */
+ assert( 0 < globs.jobs );
+ while ( cmdsrunning >= globs.jobs )
+ exec_wait();
+ }
+ }
+ else
+ {
+ /* Tally success/failure for those we tried to update. */
+ if ( t->progress == T_MAKE_RUNNING )
+ {
+ /* Invert OK/FAIL target status when FAIL_EXPECTED has been applied. */
+ if ( t->flags & T_FLAG_FAIL_EXPECTED && !globs.noexec )
+ {
+ switch ( t->status )
+ {
+ case EXEC_CMD_FAIL: t->status = EXEC_CMD_OK; break;
+ case EXEC_CMD_OK: t->status = EXEC_CMD_FAIL; break;
+ }
+
+ /* Printing failure has to be delayed until the last
+ * action is completed for FAIL_EXPECTED targets.
+ * Do it here.
+ */
+ if ( t->status == EXEC_CMD_FAIL )
+ {
+ out_printf( "...failed %s ", object_str( t->actions->action->rule->name ) );
+ out_printf( "%s", object_str( t->boundname ) );
+ out_printf( "...\n" );
+ }
+
+ /* Handle -q */
+ if ( t->status == EXEC_CMD_FAIL && globs.quitquick )
+ ++quit;
+
+ /* Delete the target on failure. */
+ if ( !( t->flags & ( T_FLAG_PRECIOUS | T_FLAG_NOTFILE ) ) &&
+ !unlink( object_str( t->boundname ) ) )
+ out_printf( "...removing %s\n", object_str( t->boundname ) );
+ }
+ switch ( t->status )
+ {
+ case EXEC_CMD_OK: ++counts->made; break;
+ case EXEC_CMD_FAIL: ++counts->failed; break;
+ }
+ }
+
+ /* Tell parents their dependency has been built. */
+ {
+ TARGETS * c;
+ stack temp_stack = { NULL };
+ TARGET * additional_includes = NULL;
+
+ t->progress = globs.noexec ? T_MAKE_NOEXEC_DONE : T_MAKE_DONE;
+
+ /* Target has been updated so rescan it for dependencies. */
+ if ( t->fate >= T_FATE_MISSING && t->status == EXEC_CMD_OK &&
+ !( t->flags & T_FLAG_INTERNAL ) )
+ {
+ TARGET * saved_includes;
+ SETTINGS * s;
+
+ /* Clean current includes. */
+ saved_includes = t->includes;
+ t->includes = 0;
+
+ s = copysettings( t->settings );
+ pushsettings( root_module(), s );
+ headers( t );
+ popsettings( root_module(), s );
+ freesettings( s );
+
+ if ( t->includes )
+ {
+ /* Tricky. The parents have already been processed, but they
+ * have not seen the internal node, because it was just
+ * created. We need to:
+ * - push MAKE1A states that would have been pushed by the
+ * parents here
+ * - make sure all unprocessed parents will pick up the
+ * new includes
+ * - make sure processing the additional MAKE1A states is
+ * done before processing the MAKE1B state for our
+ * current target (which would mean this target has
+ * already been built), otherwise the parent would be
+ * considered built before the additional MAKE1A state
+ * processing even got a chance to start.
+ */
+ make0( t->includes, t->parents->target, 0, 0, 0, t->includes
+ );
+ /* Link the old includes on to make sure that it gets
+ * cleaned up correctly.
+ */
+ t->includes->includes = saved_includes;
+ for ( c = t->dependants; c; c = c->next )
+ c->target->depends = targetentry( c->target->depends,
+ t->includes );
+ /* Will be processed below. */
+ additional_includes = t->includes;
+ }
+ else
+ {
+ t->includes = saved_includes;
+ }
+ }
+
+ if ( additional_includes )
+ for ( c = t->parents; c; c = c->next )
+ push_state( &temp_stack, additional_includes, c->target,
+ T_STATE_MAKE1A );
+
+ if ( t->scc_root )
+ {
+ TARGET * const scc_root = target_scc( t );
+ assert( scc_root->progress < T_MAKE_DONE );
+ for ( c = t->parents; c; c = c->next )
+ {
+ if ( target_scc( c->target ) == scc_root )
+ push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B
+ );
+ else
+ scc_root->parents = targetentry( scc_root->parents,
+ c->target );
+ }
+ }
+ else
+ {
+ for ( c = t->parents; c; c = c->next )
+ push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B );
+ }
+
+ /* Must pop state before pushing any more. */
+ pop_state( &state_stack );
+
+ /* Using stacks reverses the order of execution. Reverse it back. */
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+ }
+}
+
+
+/*
+ * call_timing_rule() - Look up the __TIMING_RULE__ variable on the given
+ * target, and if non-empty, invoke the rule it names, passing the given
+ * timing_info.
+ */
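+
+/* Illustrative sketch (not taken from this source): a Jamfile could hook in
+ * roughly as follows, matching the argument list assembled below; the rule
+ * name, target name and extra argument are hypothetical.
+ *
+ *     rule report-timing ( args * : target : start end user system clock )
+ *     {
+ *         ECHO $(target) took $(clock) seconds ;
+ *     }
+ *     __TIMING_RULE__ on mytarget = report-timing extra-arg ;
+ */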
+
+static void call_timing_rule( TARGET * target, timing_info const * const time )
+{
+ LIST * timing_rule;
+
+ pushsettings( root_module(), target->settings );
+ timing_rule = var_get( root_module(), constant_TIMING_RULE );
+ popsettings( root_module(), target->settings );
+
+ if ( !list_empty( timing_rule ) )
+ {
+ /* rule timing-rule ( args * : target : start end user system clock ) */
+
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ OBJECT * rulename = list_front( timing_rule );
+ frame_init( frame );
+
+ /* args * :: $(__TIMING_RULE__[2-]) */
+ lol_add( frame->args, list_copy_range( timing_rule, list_next(
+ list_begin( timing_rule ) ), list_end( timing_rule ) ) );
+
+ /* target :: the name of the target */
+ lol_add( frame->args, list_new( object_copy( target->name ) ) );
+
+ /* start end user system clock :: info about the action command */
+ lol_add( frame->args, list_push_back( list_push_back( list_push_back( list_push_back( list_new(
+ outf_time( &time->start ) ),
+ outf_time( &time->end ) ),
+ outf_double( time->user ) ),
+ outf_double( time->system ) ),
+ outf_double( timestamp_delta_seconds(&time->start, &time->end) ) )
+ );
+
+ /* Call the rule. */
+ evaluate_rule( bindrule( rulename , root_module() ), rulename, frame );
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * call_action_rule() - Look up the __ACTION_RULE__ variable on the given
+ * target, and if non-empty, invoke the rule it names, passing the given info,
+ * timing_info, executed command and command output.
+ */
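+
+/* Illustrative sketch (not taken from this source), mirroring the argument
+ * list assembled below; all names here are hypothetical.
+ *
+ *     rule log-action ( args * : target : command status start end user system
+ *         : output ? )
+ *     {
+ *         ECHO $(target) exited with status $(status) ;
+ *     }
+ *     __ACTION_RULE__ on mytarget = log-action ;
+ */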
+
+static void call_action_rule
+(
+ TARGET * target,
+ int status,
+ timing_info const * time,
+ char const * executed_command,
+ char const * command_output
+)
+{
+ LIST * action_rule;
+
+ pushsettings( root_module(), target->settings );
+ action_rule = var_get( root_module(), constant_ACTION_RULE );
+ popsettings( root_module(), target->settings );
+
+ if ( !list_empty( action_rule ) )
+ {
+ /* rule action-rule (
+ args * :
+ target :
+ command status start end user system :
+ output ? ) */
+
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ OBJECT * rulename = list_front( action_rule );
+ frame_init( frame );
+
+ /* args * :: $(__ACTION_RULE__[2-]) */
+ lol_add( frame->args, list_copy_range( action_rule, list_next(
+ list_begin( action_rule ) ), list_end( action_rule ) ) );
+
+ /* target :: the name of the target */
+ lol_add( frame->args, list_new( object_copy( target->name ) ) );
+
+ /* command status start end user system :: info about the action command
+ */
+ lol_add( frame->args,
+ list_push_back( list_push_back( list_push_back( list_push_back( list_push_back( list_new(
+ object_new( executed_command ) ),
+ outf_int( status ) ),
+ outf_time( &time->start ) ),
+ outf_time( &time->end ) ),
+ outf_double( time->user ) ),
+ outf_double( time->system ) ) );
+
+ /* output ? :: the output of the action command */
+ if ( command_output )
+ {
+ OBJECT * command_output_obj = object_new( command_output );
+ char * output_i = (char*)object_str(command_output_obj);
+ /* Clean the output of control characters. */
+ for (; *output_i; ++output_i)
+ {
+ if (iscntrl(*output_i) && !isspace(*output_i)) *output_i = '?';
+ }
+ lol_add( frame->args, list_new( command_output_obj ) );
+ }
+ else
+ lol_add( frame->args, L0 );
+
+ /* Call the rule. */
+ evaluate_rule( bindrule( rulename, root_module() ), rulename, frame );
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * make1c_closure() - handle command execution completion and go to MAKE1C.
+ *
+ * Internal function passed as a notification callback for when a command
+ * finishes getting executed by the OS or called directly when faking that a
+ * command had been executed by the OS.
+ *
+ * Now all we need to do is fiddle with the command exit status and push a new
+ * MAKE1C state to execute the next command scheduled for building this target
+ * or close up the target's build process in case there are no more commands
+ * scheduled for it. On interrupts, we bail heavily.
+ */
+
+static void make1c_closure
+(
+ void * const closure,
+ int status_orig,
+ timing_info const * const time,
+ char const * const cmd_stdout,
+ char const * const cmd_stderr,
+ int const cmd_exit_reason
+)
+{
+ TARGET * const t = (TARGET *)closure;
+ CMD * const cmd = (CMD *)t->cmds;
+ char const * rule_name = 0;
+ char const * target_name = 0;
+
+ assert( cmd );
+
+ --cmdsrunning;
+
+ /* Calculate the target's status from the cmd execution result. */
+ {
+ /* Store the target's status. */
+ t->status = status_orig;
+
+ /* Ignore failures for actions marked as 'ignore'. */
+ if ( t->status == EXEC_CMD_FAIL && cmd->rule->actions->flags &
+ RULE_IGNORE )
+ t->status = EXEC_CMD_OK;
+ }
+
+ if ( DEBUG_MAKEQ ||
+ ( DEBUG_MAKE && !( cmd->rule->actions->flags & RULE_QUIETLY ) ) )
+ {
+ rule_name = object_str( cmd->rule->name );
+ target_name = object_str( list_front( lol_get( (LOL *)&cmd->args, 0 ) )
+ );
+ }
+
+ if ( rule_name == NULL || globs.jobs > 1 )
+ out_action( rule_name, target_name, cmd->buf->value, cmd_stdout,
+ cmd_stderr, cmd_exit_reason );
+
+ /* If the process expired, make the user aware with an explicit message, but do
+ * this only for non-quiet actions.
+ */
+ if ( cmd_exit_reason == EXIT_TIMEOUT && target_name )
+ out_printf( "%ld second time limit exceeded\n", globs.timeout );
+
+ out_flush();
+ err_flush();
+
+ if ( !globs.noexec )
+ {
+ call_timing_rule( t, time );
+ if ( DEBUG_EXECCMD )
+ out_printf( "%f sec system; %f sec user; %f sec clock\n",
+ time->system, time->user,
+ timestamp_delta_seconds(&time->start, &time->end) );
+
+ /* Assume -p0 is in effect, i.e. cmd_stdout contains merged output. */
+ call_action_rule( t, status_orig, time, cmd->buf->value, cmd_stdout );
+ }
+
+ /* Print command text on failure. */
+ if ( t->status == EXEC_CMD_FAIL && DEBUG_MAKE &&
+ ! ( t->flags & T_FLAG_FAIL_EXPECTED ) )
+ {
+ if ( !DEBUG_EXEC )
+ out_printf( "%s\n", cmd->buf->value );
+
+ out_printf( "...failed %s ", object_str( cmd->rule->name ) );
+ list_print( lol_get( (LOL *)&cmd->args, 0 ) );
+ out_printf( "...\n" );
+ }
+
+ /* On interrupt, set quit so _everything_ fails. Do the same for failed
+ * commands if we were asked to stop the build in case of any errors.
+ */
+ if ( t->status == EXEC_CMD_INTR )
+ {
+ ++intr;
+ ++quit;
+ }
+ if ( t->status == EXEC_CMD_FAIL && globs.quitquick &&
+ ! ( t->flags & T_FLAG_FAIL_EXPECTED ) )
+ ++quit;
+
+ /* If the command was not successful remove all of its targets not marked as
+ * "precious".
+ */
+ if ( t->status != EXEC_CMD_OK )
+ {
+ LIST * const targets = lol_get( (LOL *)&cmd->args, 0 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ char const * const filename = object_str( list_item( iter ) );
+ TARGET const * const t = bindtarget( list_item( iter ) );
+ if ( !( t->flags & T_FLAG_PRECIOUS ) && !unlink( filename ) )
+ out_printf( "...removing %s\n", filename );
+ }
+ }
+
+#ifdef OPT_SEMAPHORE
+ /* Release any semaphores used by this action. */
+ cmd_sem_unlock( t );
+#endif
+
+ /* Free this command and push the MAKE1C state to execute the next one
+ * scheduled for building this same target.
+ */
+ t->cmds = NULL;
+ push_cmds( cmd->next, t->status );
+ cmd_free( cmd );
+}
+
+/* Push the next MAKE1C state after a command is run. */
+static void push_cmds( CMDLIST * cmds, int status )
+{
+ CMDLIST * cmd_iter;
+ for( cmd_iter = cmds; cmd_iter; cmd_iter = cmd_iter->next )
+ {
+ if ( cmd_iter->iscmd )
+ {
+ CMD * next_cmd = cmd_iter->impl.cmd;
+ /* Propagate the command status. */
+ if ( next_cmd->status < status )
+ next_cmd->status = status;
+ if ( --next_cmd->asynccnt == 0 )
+ {
+ /* Select the first target associated with the action.
+ * This is safe because sibling CMDs cannot have targets
+ * in common.
+ */
+ TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) );
+ first_target->cmds = (char *)next_cmd;
+ push_state( &state_stack, first_target, NULL, T_STATE_MAKE1C );
+ }
+ else if ( DEBUG_EXECCMD )
+ {
+ TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) );
+ out_printf( "Delaying %s %s: %d targets not ready\n", object_str( next_cmd->rule->name ), object_str( first_target->boundname ), next_cmd->asynccnt );
+ }
+ }
+ else
+ {
+ /* This is a target that we're finished updating */
+ TARGET * updated_target = cmd_iter->impl.t;
+ if ( updated_target->status < status )
+ updated_target->status = status;
+ updated_target->cmds = NULL;
+ push_state( &state_stack, updated_target, NULL, T_STATE_MAKE1C );
+ }
+ }
+}
+
+
+/*
+ * swap_settings() - replace the settings from the current module and target
+ * with those from the new module and target
+ */
+
+static void swap_settings
+(
+ module_t * * current_module,
+ TARGET * * current_target,
+ module_t * new_module,
+ TARGET * new_target
+)
+{
+ if ( ( new_target == *current_target ) &&
+ ( new_module == *current_module ) )
+ return;
+
+ if ( *current_target )
+ popsettings( *current_module, (*current_target)->settings );
+
+ if ( new_target )
+ pushsettings( new_module, new_target->settings );
+
+ *current_module = new_module;
+ *current_target = new_target;
+}
+
+
+/*
+ * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
+ *
+ * Essentially copies a chain of ACTIONs to a chain of CMDs, grouping
+ * RULE_TOGETHER actions, splitting RULE_PIECEMEAL actions, and handling
+ * RULE_NEWSRCS actions. The result is a chain of CMDs which has already had all
+ * of its embedded variable references expanded and can now be executed using
+ * exec_cmd().
+ */
+
+static CMD * make1cmds( TARGET * t )
+{
+ CMD * cmds = 0;
+ CMD * last_cmd;
+ LIST * shell = L0;
+ module_t * settings_module = 0;
+ TARGET * settings_target = 0;
+ ACTIONS * a0;
+ int const running_flag = globs.noexec ? A_RUNNING_NOEXEC : A_RUNNING;
+
+ /* Step through actions.
+ */
+ for ( a0 = t->actions; a0; a0 = a0->next )
+ {
+ RULE * rule = a0->action->rule;
+ rule_actions * actions = rule->actions;
+ SETTINGS * boundvars;
+ LIST * nt;
+ LIST * ns;
+ ACTIONS * a1;
+
+ /* Only do rules with commands to execute.
+ */
+ if ( !actions )
+ continue;
+
+ if ( a0->action->running >= running_flag )
+ {
+ CMD * first;
+ /* If this action was skipped either because it was
+ * combined with another action by RULE_TOGETHER, or
+ * because all of its sources were filtered out,
+ * then we don't have anything to do here.
+ */
+ if ( a0->action->first_cmd == NULL )
+ continue;
+ /* This action has already been processed for another target.
+ * Just set up the dependency graph correctly and move on.
+ */
+ first = (CMD *)a0->action->first_cmd;
+ if( cmds )
+ {
+ last_cmd->next = cmdlist_append_cmd( last_cmd->next, first );
+ }
+ else
+ {
+ cmds = first;
+ }
+ last_cmd = (CMD *)a0->action->last_cmd;
+ continue;
+ }
+
+ a0->action->running = running_flag;
+
+ /* Make LISTS of targets and sources. If `execute together` has been
+ * specified for this rule, tack on sources from each instance of this
+ * rule for this target.
+ */
+ nt = make1list( L0, a0->action->targets, 0 );
+ ns = make1list( L0, a0->action->sources, actions->flags );
+ if ( actions->flags & RULE_TOGETHER )
+ for ( a1 = a0->next; a1; a1 = a1->next )
+ if ( a1->action->rule == rule &&
+ a1->action->running < running_flag &&
+ targets_equal( a0->action->targets, a1->action->targets ) )
+ {
+ ns = make1list( ns, a1->action->sources, actions->flags );
+ a1->action->running = running_flag;
+ }
+
+ /* If doing only updated (or existing) sources, but none have been
+ * updated (or exist), skip this action.
+ */
+ if ( list_empty( ns ) &&
+ ( actions->flags & ( RULE_NEWSRCS | RULE_EXISTING ) ) )
+ {
+ list_free( nt );
+ continue;
+ }
+
+ swap_settings( &settings_module, &settings_target, rule->module, t );
+ if ( list_empty( shell ) )
+ {
+ /* shell is per-target */
+ shell = var_get( rule->module, constant_JAMSHELL );
+ }
+
+ /* If we had 'actions xxx bind vars' we bind the vars now. */
+ boundvars = make1settings( rule->module, actions->bindlist );
+ pushsettings( rule->module, boundvars );
+
+ /*
+ * Build command, starting with all source args.
+ *
+ * For actions that allow PIECEMEAL commands, if the constructed command
+ * string is too long, we retry constructing it with a reduced number of
+ * source arguments presented.
+ *
+ * While reducing slowly takes a bit of compute time to get things just
+ * right, it is worth it to get as close to maximum allowed command
+ * string length as possible, because launching the commands we are
+ * executing is likely to be much more compute intensive.
+ *
+ * Note that we loop through at least once, for sourceless actions.
+ */
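+ /* Illustrative example: with 1000 sources and a command exec_check()
+ * reports as too long, 'chunk' shrinks 1000 -> 900 -> 810 -> ... (times
+ * 9/10, integer division) until the command fits; the sources that were
+ * left out are then emitted in subsequent chunks.
+ */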
+ {
+ int const length = list_length( ns );
+ int start = 0;
+ int chunk = length;
+ int cmd_count = 0;
+ TARGETS * semaphores = NULL;
+ TARGETS * targets_iter;
+ int unique_targets;
+ do
+ {
+ CMD * cmd;
+ int cmd_check_result;
+ int cmd_error_length;
+ int cmd_error_max_length;
+ int retry = 0;
+ int accept_command = 0;
+
+ /* Build cmd: cmd_new() takes ownership of its lists. */
+ cmd = cmd_new( rule, list_copy( nt ), list_sublist( ns, start,
+ chunk ), list_copy( shell ) );
+
+ cmd_check_result = exec_check( cmd->buf, &cmd->shell,
+ &cmd_error_length, &cmd_error_max_length );
+
+ if ( cmd_check_result == EXEC_CHECK_OK )
+ {
+ accept_command = 1;
+ }
+ else if ( cmd_check_result == EXEC_CHECK_NOOP )
+ {
+ accept_command = 1;
+ cmd->noop = 1;
+ }
+ else if ( ( actions->flags & RULE_PIECEMEAL ) && ( chunk > 1 ) )
+ {
+ /* Too long but splittable. Reduce chunk size slowly and
+ * retry.
+ */
+ assert( cmd_check_result == EXEC_CHECK_TOO_LONG ||
+ cmd_check_result == EXEC_CHECK_LINE_TOO_LONG );
+ chunk = chunk * 9 / 10;
+ retry = 1;
+ }
+ else
+ {
+ /* Too long and not splittable. */
+ char const * const error_message = cmd_check_result ==
+ EXEC_CHECK_TOO_LONG
+ ? "is too long"
+ : "contains a line that is too long";
+ assert( cmd_check_result == EXEC_CHECK_TOO_LONG ||
+ cmd_check_result == EXEC_CHECK_LINE_TOO_LONG );
+ out_printf( "%s action %s (%d, max %d):\n", object_str(
+ rule->name ), error_message, cmd_error_length,
+ cmd_error_max_length );
+
+ /* Tell the user what did not fit. */
+ out_puts( cmd->buf->value );
+ exit( EXITBAD );
+ }
+
+ assert( !retry || !accept_command );
+
+ if ( accept_command )
+ {
+ /* Chain it up. */
+ if ( cmds )
+ {
+ last_cmd->next = cmdlist_append_cmd( last_cmd->next, cmd );
+ last_cmd = cmd;
+ }
+ else
+ {
+ cmds = last_cmd = cmd;
+ }
+
+ if ( cmd_count++ == 0 )
+ {
+ a0->action->first_cmd = cmd;
+ }
+ }
+ else
+ {
+ cmd_free( cmd );
+ }
+
+ if ( !retry )
+ start += chunk;
+ }
+ while ( start < length );
+
+ /* Record the end of the actions cmds */
+ a0->action->last_cmd = last_cmd;
+
+ unique_targets = 0;
+ for ( targets_iter = a0->action->targets; targets_iter; targets_iter = targets_iter->next )
+ {
+ if ( targets_contains( targets_iter->next, targets_iter->target ) )
+ continue;
+ /* Add all targets produced by the action to the update list. */
+ push_state( &state_stack, targets_iter->target, NULL, T_STATE_MAKE1A );
+ ++unique_targets;
+ }
+ /* We need to wait until all the targets agree that
+ * it's okay to run this action.
+ */
+ ( ( CMD * )a0->action->first_cmd )->asynccnt = unique_targets;
+
+#if OPT_SEMAPHORE
+ /* Collect semaphores */
+ for ( targets_iter = a0->action->targets; targets_iter; targets_iter = targets_iter->next )
+ {
+ TARGET * sem = targets_iter->target->semaphore;
+ if ( sem )
+ {
+ if ( ! targets_contains( semaphores, sem ) )
+ semaphores = targetentry( semaphores, sem );
+ }
+ }
+ ( ( CMD * )a0->action->first_cmd )->lock = semaphores;
+ ( ( CMD * )a0->action->last_cmd )->unlock = semaphores;
+#endif
+ }
+
+ /* These were always copied when used. */
+ list_free( nt );
+ list_free( ns );
+
+ /* Free variables with values bound by 'actions xxx bind vars'. */
+ popsettings( rule->module, boundvars );
+ freesettings( boundvars );
+ }
+
+ if ( cmds )
+ {
+ last_cmd->next = cmdlist_append_target( last_cmd->next, t );
+ }
+
+ swap_settings( &settings_module, &settings_target, 0, 0 );
+ return cmds;
+}
+
+
+/*
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>)
+ */
+
+static LIST * make1list( LIST * l, TARGETS * targets, int flags )
+{
+ for ( ; targets; targets = targets->next )
+ {
+ TARGET * t = targets->target;
+
+ if ( t->binding == T_BIND_UNBOUND )
+ make1bind( t );
+
+ if ( ( flags & RULE_EXISTING ) && ( flags & RULE_NEWSRCS ) )
+ {
+ if ( ( t->binding != T_BIND_EXISTS ) &&
+ ( t->fate <= T_FATE_STABLE ) )
+ continue;
+ }
+ else if ( flags & RULE_EXISTING )
+ {
+ if ( t->binding != T_BIND_EXISTS )
+ continue;
+ }
+ else if ( flags & RULE_NEWSRCS )
+ {
+ if ( t->fate <= T_FATE_STABLE )
+ continue;
+ }
+
+ /* Prohibit duplicates for RULE_TOGETHER. */
+ if ( flags & RULE_TOGETHER )
+ {
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ if ( object_equal( list_item( iter ), t->boundname ) )
+ break;
+ if ( iter != end )
+ continue;
+ }
+
+ /* Build new list. */
+ l = list_push_back( l, object_copy( t->boundname ) );
+ }
+
+ return l;
+}
+
+
+/*
+ * make1settings() - for vars with bound values, build up replacement lists
+ */
+
+static SETTINGS * make1settings( struct module_t * module, LIST * vars )
+{
+ SETTINGS * settings = 0;
+
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) )
+ {
+ LIST * const l = var_get( module, list_item( vars_iter ) );
+ LIST * nl = L0;
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+
+ /* Make sure the target is bound. */
+ if ( t->binding == T_BIND_UNBOUND )
+ make1bind( t );
+
+ /* Build a new list. */
+ nl = list_push_back( nl, object_copy( t->boundname ) );
+ }
+
+ /* Add to settings chain. */
+ settings = addsettings( settings, VAR_SET, list_item( vars_iter ), nl );
+ }
+
+ return settings;
+}
+
+
+/*
+ * make1bind() - bind targets that were not bound during dependency analysis
+ *
+ * Spot the kludge! If a target is not in the dependency tree, it did not get
+ * bound by make0(), so we have to do it here. Ugly.
+ */
+
+static void make1bind( TARGET * t )
+{
+ if ( t->flags & T_FLAG_NOTFILE )
+ return;
+
+ pushsettings( root_module(), t->settings );
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, 0, t->flags & T_FLAG_ISFILE );
+ t->binding = timestamp_empty( &t->time ) ? T_BIND_MISSING : T_BIND_EXISTS;
+ popsettings( root_module(), t->settings );
+}
+
+
+static int targets_contains( TARGETS * l, TARGET * t )
+{
+ for ( ; l; l = l->next )
+ {
+ if ( t == l->target )
+ {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+static int targets_equal( TARGETS * l1, TARGETS * l2 )
+{
+ for ( ; l1 && l2; l1 = l1->next, l2 = l2->next )
+ {
+ if ( l1->target != l2->target )
+ return 0;
+ }
+ return !l1 && !l2;
+}
+
+
+#ifdef OPT_SEMAPHORE
+
+static int cmd_sem_lock( TARGET * t )
+{
+ CMD * cmd = (CMD *)t->cmds;
+ TARGETS * iter;
+ /* Check whether all the semaphores required for updating
+ * this target are free.
+ */
+ for ( iter = cmd->lock; iter; iter = iter->next )
+ {
+ if ( iter->target->asynccnt > 0 )
+ {
+ if ( DEBUG_EXECCMD )
+ out_printf( "SEM: %s is busy, delaying launch of %s\n",
+ object_str( iter->target->name ), object_str( t->name ) );
+ iter->target->parents = targetentry( iter->target->parents, t );
+ return 0;
+ }
+ }
+ /* Lock the semaphores. */
+ for ( iter = cmd->lock; iter; iter = iter->next )
+ {
+ ++iter->target->asynccnt;
+ if ( DEBUG_EXECCMD )
+ out_printf( "SEM: %s now used by %s\n", object_str( iter->target->name
+ ), object_str( t->name ) );
+ }
+ /* A cmd only needs to be locked around its execution.
+ * Clearing cmd->lock here makes it safe to call cmd_sem_lock
+ * twice.
+ */
+ cmd->lock = NULL;
+ return 1;
+}
+
+static void cmd_sem_unlock( TARGET * t )
+{
+ CMD * cmd = ( CMD * )t->cmds;
+ TARGETS * iter;
+ /* Release the semaphores. */
+ for ( iter = cmd->unlock; iter; iter = iter->next )
+ {
+ if ( DEBUG_EXECCMD )
+ out_printf( "SEM: %s is now free\n", object_str(
+ iter->target->name ) );
+ --iter->target->asynccnt;
+ assert( iter->target->asynccnt <= 0 );
+ }
+ for ( iter = cmd->unlock; iter; iter = iter->next )
+ {
+ /* Find a waiting target that's ready */
+ while ( iter->target->parents )
+ {
+ TARGETS * first = iter->target->parents;
+ TARGET * t1 = first->target;
+
+ /* Pop the first waiting CMD */
+ if ( first->next )
+ first->next->tail = first->tail;
+ iter->target->parents = first->next;
+ BJAM_FREE( first );
+
+ if ( cmd_sem_lock( t1 ) )
+ {
+ push_state( &state_stack, t1, NULL, T_STATE_MAKE1C );
+ break;
+ }
+ }
+ }
+}
+
+#endif
diff --git a/src/boost/tools/build/src/engine/md5.cpp b/src/boost/tools/build/src/engine/md5.cpp
new file mode 100644
index 000000000..c35d96c5e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/md5.cpp
@@ -0,0 +1,381 @@
+/*
+ Copyright (C) 1999, 2000, 2002 Aladdin Enterprises. All rights reserved.
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ L. Peter Deutsch
+ ghost@aladdin.com
+
+ */
+/* $Id: md5.c,v 1.6 2002/04/13 19:20:28 lpd Exp $ */
+/*
+ Independent implementation of MD5 (RFC 1321).
+
+ This code implements the MD5 Algorithm defined in RFC 1321, whose
+ text is available at
+ http://www.ietf.org/rfc/rfc1321.txt
+ The code is derived from the text of the RFC, including the test suite
+ (section A.5) but excluding the rest of Appendix A. It does not include
+ any code or documentation that is identified in the RFC as being
+ copyrighted.
+
+ The original and principal author of md5.c is L. Peter Deutsch
+ <ghost@aladdin.com>. Other authors are noted in the change history
+ that follows (in reverse chronological order):
+
+ 2002-04-13 lpd Clarified derivation from RFC 1321; now handles byte order
+ either statically or dynamically; added missing #include <string.h>
+ in library.
+ 2002-03-11 lpd Corrected argument list for main(), and added int return
+ type, in test program and T value program.
+ 2002-02-21 lpd Added missing #include <stdio.h> in test program.
+ 2000-07-03 lpd Patched to eliminate warnings about "constant is
+ unsigned in ANSI C, signed in traditional"; made test program
+ self-checking.
+ 1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+ 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5).
+ 1999-05-03 lpd Original version.
+ */
+
+#include "md5.h"
+#include <string.h>
+
+#undef BYTE_ORDER /* 1 = big-endian, -1 = little-endian, 0 = unknown */
+#ifdef ARCH_IS_BIG_ENDIAN
+# define BYTE_ORDER (ARCH_IS_BIG_ENDIAN ? 1 : -1)
+#else
+# define BYTE_ORDER 0
+#endif
+
+#define T_MASK ((md5_word_t)~0)
+#define T1 /* 0xd76aa478 */ (T_MASK ^ 0x28955b87)
+#define T2 /* 0xe8c7b756 */ (T_MASK ^ 0x173848a9)
+#define T3 0x242070db
+#define T4 /* 0xc1bdceee */ (T_MASK ^ 0x3e423111)
+#define T5 /* 0xf57c0faf */ (T_MASK ^ 0x0a83f050)
+#define T6 0x4787c62a
+#define T7 /* 0xa8304613 */ (T_MASK ^ 0x57cfb9ec)
+#define T8 /* 0xfd469501 */ (T_MASK ^ 0x02b96afe)
+#define T9 0x698098d8
+#define T10 /* 0x8b44f7af */ (T_MASK ^ 0x74bb0850)
+#define T11 /* 0xffff5bb1 */ (T_MASK ^ 0x0000a44e)
+#define T12 /* 0x895cd7be */ (T_MASK ^ 0x76a32841)
+#define T13 0x6b901122
+#define T14 /* 0xfd987193 */ (T_MASK ^ 0x02678e6c)
+#define T15 /* 0xa679438e */ (T_MASK ^ 0x5986bc71)
+#define T16 0x49b40821
+#define T17 /* 0xf61e2562 */ (T_MASK ^ 0x09e1da9d)
+#define T18 /* 0xc040b340 */ (T_MASK ^ 0x3fbf4cbf)
+#define T19 0x265e5a51
+#define T20 /* 0xe9b6c7aa */ (T_MASK ^ 0x16493855)
+#define T21 /* 0xd62f105d */ (T_MASK ^ 0x29d0efa2)
+#define T22 0x02441453
+#define T23 /* 0xd8a1e681 */ (T_MASK ^ 0x275e197e)
+#define T24 /* 0xe7d3fbc8 */ (T_MASK ^ 0x182c0437)
+#define T25 0x21e1cde6
+#define T26 /* 0xc33707d6 */ (T_MASK ^ 0x3cc8f829)
+#define T27 /* 0xf4d50d87 */ (T_MASK ^ 0x0b2af278)
+#define T28 0x455a14ed
+#define T29 /* 0xa9e3e905 */ (T_MASK ^ 0x561c16fa)
+#define T30 /* 0xfcefa3f8 */ (T_MASK ^ 0x03105c07)
+#define T31 0x676f02d9
+#define T32 /* 0x8d2a4c8a */ (T_MASK ^ 0x72d5b375)
+#define T33 /* 0xfffa3942 */ (T_MASK ^ 0x0005c6bd)
+#define T34 /* 0x8771f681 */ (T_MASK ^ 0x788e097e)
+#define T35 0x6d9d6122
+#define T36 /* 0xfde5380c */ (T_MASK ^ 0x021ac7f3)
+#define T37 /* 0xa4beea44 */ (T_MASK ^ 0x5b4115bb)
+#define T38 0x4bdecfa9
+#define T39 /* 0xf6bb4b60 */ (T_MASK ^ 0x0944b49f)
+#define T40 /* 0xbebfbc70 */ (T_MASK ^ 0x4140438f)
+#define T41 0x289b7ec6
+#define T42 /* 0xeaa127fa */ (T_MASK ^ 0x155ed805)
+#define T43 /* 0xd4ef3085 */ (T_MASK ^ 0x2b10cf7a)
+#define T44 0x04881d05
+#define T45 /* 0xd9d4d039 */ (T_MASK ^ 0x262b2fc6)
+#define T46 /* 0xe6db99e5 */ (T_MASK ^ 0x1924661a)
+#define T47 0x1fa27cf8
+#define T48 /* 0xc4ac5665 */ (T_MASK ^ 0x3b53a99a)
+#define T49 /* 0xf4292244 */ (T_MASK ^ 0x0bd6ddbb)
+#define T50 0x432aff97
+#define T51 /* 0xab9423a7 */ (T_MASK ^ 0x546bdc58)
+#define T52 /* 0xfc93a039 */ (T_MASK ^ 0x036c5fc6)
+#define T53 0x655b59c3
+#define T54 /* 0x8f0ccc92 */ (T_MASK ^ 0x70f3336d)
+#define T55 /* 0xffeff47d */ (T_MASK ^ 0x00100b82)
+#define T56 /* 0x85845dd1 */ (T_MASK ^ 0x7a7ba22e)
+#define T57 0x6fa87e4f
+#define T58 /* 0xfe2ce6e0 */ (T_MASK ^ 0x01d3191f)
+#define T59 /* 0xa3014314 */ (T_MASK ^ 0x5cfebceb)
+#define T60 0x4e0811a1
+#define T61 /* 0xf7537e82 */ (T_MASK ^ 0x08ac817d)
+#define T62 /* 0xbd3af235 */ (T_MASK ^ 0x42c50dca)
+#define T63 0x2ad7d2bb
+#define T64 /* 0xeb86d391 */ (T_MASK ^ 0x14792c6e)
+
+
+static void
+md5_process(md5_state_t *pms, const md5_byte_t *data /*[64]*/)
+{
+ md5_word_t
+ a = pms->abcd[0], b = pms->abcd[1],
+ c = pms->abcd[2], d = pms->abcd[3];
+ md5_word_t t;
+#if BYTE_ORDER > 0
+ /* Define storage only for big-endian CPUs. */
+ md5_word_t X[16];
+#else
+ /* Define storage for little-endian or both types of CPUs. */
+ md5_word_t xbuf[16];
+ const md5_word_t *X;
+#endif
+
+ {
+#if BYTE_ORDER == 0
+ /*
+ * Determine dynamically whether this is a big-endian or
+ * little-endian machine, since we can use a more efficient
+ * algorithm on the latter.
+ */
+ static const int w = 1;
+
+ if (*((const md5_byte_t *)&w)) /* dynamic little-endian */
+#endif
+#if BYTE_ORDER <= 0 /* little-endian */
+ {
+ /*
+ * On little-endian machines, we can process properly aligned
+ * data without copying it.
+ */
+ if (!((data - (const md5_byte_t *)0) & 3)) {
+ /* data are properly aligned */
+ X = (const md5_word_t *)data;
+ } else {
+ /* not aligned */
+ memcpy(xbuf, data, 64);
+ X = xbuf;
+ }
+ }
+#endif
+#if BYTE_ORDER == 0
+ else /* dynamic big-endian */
+#endif
+#if BYTE_ORDER >= 0 /* big-endian */
+ {
+ /*
+ * On big-endian machines, we must arrange the bytes in the
+ * right order.
+ */
+ const md5_byte_t *xp = data;
+ int i;
+
+# if BYTE_ORDER == 0
+ X = xbuf; /* (dynamic only) */
+# else
+# define xbuf X /* (static only) */
+# endif
+ for (i = 0; i < 16; ++i, xp += 4)
+ xbuf[i] = xp[0] + (xp[1] << 8) + (xp[2] << 16) + (xp[3] << 24);
+ }
+#endif
+ }
+
+#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32 - (n))))
+
+ /* Round 1. */
+ /* Let [abcd k s i] denote the operation
+ a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s). */
+#define F(x, y, z) (((x) & (y)) | (~(x) & (z)))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + F(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 0, 7, T1);
+ SET(d, a, b, c, 1, 12, T2);
+ SET(c, d, a, b, 2, 17, T3);
+ SET(b, c, d, a, 3, 22, T4);
+ SET(a, b, c, d, 4, 7, T5);
+ SET(d, a, b, c, 5, 12, T6);
+ SET(c, d, a, b, 6, 17, T7);
+ SET(b, c, d, a, 7, 22, T8);
+ SET(a, b, c, d, 8, 7, T9);
+ SET(d, a, b, c, 9, 12, T10);
+ SET(c, d, a, b, 10, 17, T11);
+ SET(b, c, d, a, 11, 22, T12);
+ SET(a, b, c, d, 12, 7, T13);
+ SET(d, a, b, c, 13, 12, T14);
+ SET(c, d, a, b, 14, 17, T15);
+ SET(b, c, d, a, 15, 22, T16);
+#undef SET
+
+ /* Round 2. */
+ /* Let [abcd k s i] denote the operation
+ a = b + ((a + G(b,c,d) + X[k] + T[i]) <<< s). */
+#define G(x, y, z) (((x) & (z)) | ((y) & ~(z)))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + G(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 1, 5, T17);
+ SET(d, a, b, c, 6, 9, T18);
+ SET(c, d, a, b, 11, 14, T19);
+ SET(b, c, d, a, 0, 20, T20);
+ SET(a, b, c, d, 5, 5, T21);
+ SET(d, a, b, c, 10, 9, T22);
+ SET(c, d, a, b, 15, 14, T23);
+ SET(b, c, d, a, 4, 20, T24);
+ SET(a, b, c, d, 9, 5, T25);
+ SET(d, a, b, c, 14, 9, T26);
+ SET(c, d, a, b, 3, 14, T27);
+ SET(b, c, d, a, 8, 20, T28);
+ SET(a, b, c, d, 13, 5, T29);
+ SET(d, a, b, c, 2, 9, T30);
+ SET(c, d, a, b, 7, 14, T31);
+ SET(b, c, d, a, 12, 20, T32);
+#undef SET
+
+ /* Round 3. */
+ /* Let [abcd k s t] denote the operation
+ a = b + ((a + H(b,c,d) + X[k] + T[i]) <<< s). */
+#define H(x, y, z) ((x) ^ (y) ^ (z))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + H(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 5, 4, T33);
+ SET(d, a, b, c, 8, 11, T34);
+ SET(c, d, a, b, 11, 16, T35);
+ SET(b, c, d, a, 14, 23, T36);
+ SET(a, b, c, d, 1, 4, T37);
+ SET(d, a, b, c, 4, 11, T38);
+ SET(c, d, a, b, 7, 16, T39);
+ SET(b, c, d, a, 10, 23, T40);
+ SET(a, b, c, d, 13, 4, T41);
+ SET(d, a, b, c, 0, 11, T42);
+ SET(c, d, a, b, 3, 16, T43);
+ SET(b, c, d, a, 6, 23, T44);
+ SET(a, b, c, d, 9, 4, T45);
+ SET(d, a, b, c, 12, 11, T46);
+ SET(c, d, a, b, 15, 16, T47);
+ SET(b, c, d, a, 2, 23, T48);
+#undef SET
+
+ /* Round 4. */
+ /* Let [abcd k s t] denote the operation
+ a = b + ((a + I(b,c,d) + X[k] + T[i]) <<< s). */
+#define I(x, y, z) ((y) ^ ((x) | ~(z)))
+#define SET(a, b, c, d, k, s, Ti)\
+ t = a + I(b,c,d) + X[k] + Ti;\
+ a = ROTATE_LEFT(t, s) + b
+ /* Do the following 16 operations. */
+ SET(a, b, c, d, 0, 6, T49);
+ SET(d, a, b, c, 7, 10, T50);
+ SET(c, d, a, b, 14, 15, T51);
+ SET(b, c, d, a, 5, 21, T52);
+ SET(a, b, c, d, 12, 6, T53);
+ SET(d, a, b, c, 3, 10, T54);
+ SET(c, d, a, b, 10, 15, T55);
+ SET(b, c, d, a, 1, 21, T56);
+ SET(a, b, c, d, 8, 6, T57);
+ SET(d, a, b, c, 15, 10, T58);
+ SET(c, d, a, b, 6, 15, T59);
+ SET(b, c, d, a, 13, 21, T60);
+ SET(a, b, c, d, 4, 6, T61);
+ SET(d, a, b, c, 11, 10, T62);
+ SET(c, d, a, b, 2, 15, T63);
+ SET(b, c, d, a, 9, 21, T64);
+#undef SET
+
+ /* Then perform the following additions. (That is increment each
+ of the four registers by the value it had before this block
+ was started.) */
+ pms->abcd[0] += a;
+ pms->abcd[1] += b;
+ pms->abcd[2] += c;
+ pms->abcd[3] += d;
+}
+
+void
+md5_init(md5_state_t *pms)
+{
+ pms->count[0] = pms->count[1] = 0;
+ pms->abcd[0] = 0x67452301;
+ pms->abcd[1] = /*0xefcdab89*/ T_MASK ^ 0x10325476;
+ pms->abcd[2] = /*0x98badcfe*/ T_MASK ^ 0x67452301;
+ pms->abcd[3] = 0x10325476;
+}
+
+void
+md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes)
+{
+ const md5_byte_t *p = data;
+ int left = nbytes;
+ int offset = (pms->count[0] >> 3) & 63;
+ md5_word_t nbits = (md5_word_t)(nbytes << 3);
+
+ if (nbytes <= 0)
+ return;
+
+ /* Update the message length. */
+ pms->count[1] += nbytes >> 29;
+ pms->count[0] += nbits;
+ if (pms->count[0] < nbits)
+ pms->count[1]++;
+
+ /* Process an initial partial block. */
+ if (offset) {
+ int copy = (offset + nbytes > 64 ? 64 - offset : nbytes);
+
+ memcpy(pms->buf + offset, p, copy);
+ if (offset + copy < 64)
+ return;
+ p += copy;
+ left -= copy;
+ md5_process(pms, pms->buf);
+ }
+
+ /* Process full blocks. */
+ for (; left >= 64; p += 64, left -= 64)
+ md5_process(pms, p);
+
+ /* Process a final partial block. */
+ if (left)
+ memcpy(pms->buf, p, left);
+}
+
+void
+md5_finish(md5_state_t *pms, md5_byte_t digest[16])
+{
+ static const md5_byte_t pad[64] = {
+ 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ };
+ md5_byte_t data[8];
+ int i;
+
+ /* Save the length before padding. */
+ for (i = 0; i < 8; ++i)
+ data[i] = (md5_byte_t)(pms->count[i >> 2] >> ((i & 3) << 3));
+ /* Pad to 56 bytes mod 64. */
+ md5_append(pms, pad, ((55 - (pms->count[0] >> 3)) & 63) + 1);
+ /* Append the length. */
+ md5_append(pms, data, 8);
+ for (i = 0; i < 16; ++i)
+ digest[i] = (md5_byte_t)(pms->abcd[i >> 2] >> ((i & 3) << 3));
+}
diff --git a/src/boost/tools/build/src/engine/md5.h b/src/boost/tools/build/src/engine/md5.h
new file mode 100644
index 000000000..698c995d8
--- /dev/null
+++ b/src/boost/tools/build/src/engine/md5.h
@@ -0,0 +1,91 @@
+/*
+ Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved.
+
+ This software is provided 'as-is', without any express or implied
+ warranty. In no event will the authors be held liable for any damages
+ arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose,
+ including commercial applications, and to alter it and redistribute it
+ freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not
+ claim that you wrote the original software. If you use this software
+ in a product, an acknowledgment in the product documentation would be
+ appreciated but is not required.
+ 2. Altered source versions must be plainly marked as such, and must not be
+ misrepresented as being the original software.
+ 3. This notice may not be removed or altered from any source distribution.
+
+ L. Peter Deutsch
+ ghost@aladdin.com
+
+ */
+/* $Id: md5.h,v 1.4 2002/04/13 19:20:28 lpd Exp $ */
+/*
+ Independent implementation of MD5 (RFC 1321).
+
+ This code implements the MD5 Algorithm defined in RFC 1321, whose
+ text is available at
+ http://www.ietf.org/rfc/rfc1321.txt
+ The code is derived from the text of the RFC, including the test suite
+ (section A.5) but excluding the rest of Appendix A. It does not include
+ any code or documentation that is identified in the RFC as being
+ copyrighted.
+
+ The original and principal author of md5.h is L. Peter Deutsch
+ <ghost@aladdin.com>. Other authors are noted in the change history
+ that follows (in reverse chronological order):
+
+ 2002-04-13 lpd Removed support for non-ANSI compilers; removed
+ references to Ghostscript; clarified derivation from RFC 1321;
+ now handles byte order either statically or dynamically.
+ 1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+ 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5);
+ added conditionalization for C++ compilation from Martin
+ Purschke <purschke@bnl.gov>.
+ 1999-05-03 lpd Original version.
+ */
+
+#ifndef md5_INCLUDED
+# define md5_INCLUDED
+
+/*
+ * This package supports both compile-time and run-time determination of CPU
+ * byte order. If ARCH_IS_BIG_ENDIAN is defined as 0, the code will be
+ * compiled to run only on little-endian CPUs; if ARCH_IS_BIG_ENDIAN is
+ * defined as non-zero, the code will be compiled to run only on big-endian
+ * CPUs; if ARCH_IS_BIG_ENDIAN is not defined, the code will be compiled to
+ * run on either big- or little-endian CPUs, but will run slightly less
+ * efficiently on either one than if ARCH_IS_BIG_ENDIAN is defined.
+ */
+
+typedef unsigned char md5_byte_t; /* 8-bit byte */
+typedef unsigned int md5_word_t; /* 32-bit word */
+
+/* Define the state of the MD5 Algorithm. */
+typedef struct md5_state_s {
+ md5_word_t count[2]; /* message length in bits, lsw first */
+ md5_word_t abcd[4]; /* digest buffer */
+ md5_byte_t buf[64]; /* accumulate block */
+} md5_state_t;
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/* Initialize the algorithm. */
+void md5_init(md5_state_t *pms);
+
+/* Append a string to the message. */
+void md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes);
+
+/* Finish the message and return the digest. */
+void md5_finish(md5_state_t *pms, md5_byte_t digest[16]);
+
+#ifdef __cplusplus
+} /* end extern "C" */
+#endif
+
+#endif /* md5_INCLUDED */
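The header above is the complete public interface: initialize a state, append bytes any number of times, then finish to obtain the 16-byte digest. A minimal sketch of a caller, assuming only the declarations above (the input string is illustrative):

    #include "md5.h"
    #include <stdio.h>
    #include <string.h>

    static void print_md5_of( const char * text )
    {
        md5_state_t state;
        md5_byte_t digest[ 16 ];
        int i;

        md5_init( &state );
        md5_append( &state, (const md5_byte_t *)text, (int)strlen( text ) );
        md5_finish( &state, digest );

        for ( i = 0; i < 16; ++i )
            printf( "%02x", digest[ i ] );  /* hex-encode the 16 digest bytes */
        printf( "\n" );
    }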
diff --git a/src/boost/tools/build/src/engine/mem.cpp b/src/boost/tools/build/src/engine/mem.cpp
new file mode 100644
index 000000000..65c63f754
--- /dev/null
+++ b/src/boost/tools/build/src/engine/mem.cpp
@@ -0,0 +1,8 @@
+/*
+Copyright Rene Rivera 2006.
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include "jam.h"
diff --git a/src/boost/tools/build/src/engine/mem.h b/src/boost/tools/build/src/engine/mem.h
new file mode 100644
index 000000000..076808504
--- /dev/null
+++ b/src/boost/tools/build/src/engine/mem.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2006. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef BJAM_MEM_H
+#define BJAM_MEM_H
+
+#include "config.h"
+
+/* Standard C memory allocation. */
+#include <stdlib.h>
+
+#define bjam_malloc_x(s) malloc(s)
+#define bjam_calloc_x(n,s) calloc(n,s)
+#define bjam_realloc_x(p,s) realloc(p,s)
+#define bjam_free_x(p) free(p)
+
+#ifndef bjam_malloc_atomic_x
+ #define bjam_malloc_atomic_x(s) bjam_malloc_x(s)
+#endif
+#ifndef bjam_calloc_atomic_x
+ #define bjam_calloc_atomic_x(n,s) bjam_calloc_x(n,s)
+#endif
+#ifndef bjam_mem_init_x
+ #define bjam_mem_init_x()
+#endif
+#ifndef bjam_mem_close_x
+ #define bjam_mem_close_x()
+#endif
+#ifndef bjam_malloc_raw_x
+ #define bjam_malloc_raw_x(s) bjam_malloc_x(s)
+#endif
+#ifndef bjam_calloc_raw_x
+ #define bjam_calloc_raw_x(n,s) bjam_calloc_x(n,s)
+#endif
+#ifndef bjam_realloc_raw_x
+ #define bjam_realloc_raw_x(p,s) bjam_realloc_x(p,s)
+#endif
+#ifndef bjam_free_raw_x
+ #define bjam_free_raw_x(p) bjam_free_x(p)
+#endif
+
+#ifdef OPT_DEBUG_PROFILE
+ /* Profile tracing of memory allocations. */
+ #include "debug.h"
+
+ #define BJAM_MALLOC(s) (profile_memory(s), bjam_malloc_x(s))
+ #define BJAM_MALLOC_ATOMIC(s) (profile_memory(s), bjam_malloc_atomic_x(s))
+ #define BJAM_CALLOC(n,s) (profile_memory(n*s), bjam_calloc_x(n,s))
+ #define BJAM_CALLOC_ATOMIC(n,s) (profile_memory(n*s), bjam_calloc_atomic_x(n,s))
+ #define BJAM_REALLOC(p,s) (profile_memory(s), bjam_realloc_x(p,s))
+
+ #define BJAM_MALLOC_RAW(s) (profile_memory(s), bjam_malloc_raw_x(s))
+ #define BJAM_CALLOC_RAW(n,s) (profile_memory(n*s), bjam_calloc_raw_x(n,s))
+ #define BJAM_REALLOC_RAW(p,s) (profile_memory(s), bjam_realloc_raw_x(p,s))
+#else
+ /* No mem tracing. */
+ #define BJAM_MALLOC(s) bjam_malloc_x(s)
+ #define BJAM_MALLOC_ATOMIC(s) bjam_malloc_atomic_x(s)
+ #define BJAM_CALLOC(n,s) bjam_calloc_x(n,s)
+ #define BJAM_CALLOC_ATOMIC(n,s) bjam_calloc_atomic_x(n,s)
+ #define BJAM_REALLOC(p,s) bjam_realloc_x(p,s)
+
+ #define BJAM_MALLOC_RAW(s) bjam_malloc_raw_x(s)
+ #define BJAM_CALLOC_RAW(n,s) bjam_calloc_raw_x(n,s)
+ #define BJAM_REALLOC_RAW(p,s) bjam_realloc_raw_x(p,s)
+#endif
+
+#define BJAM_MEM_INIT() bjam_mem_init_x()
+#define BJAM_MEM_CLOSE() bjam_mem_close_x()
+
+#define BJAM_FREE(p) bjam_free_x(p)
+#define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
+
+#endif
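Engine code is expected to allocate through these macros rather than calling malloc()/free() directly, so that OPT_DEBUG_PROFILE can account for every allocation in one place. An illustrative fragment, assuming only the macros above (the helper names are made up):

    #include "mem.h"

    static int * make_counters( int n )
    {
        /* Zero-initialized array; tracked by the profiler when enabled. */
        int * counters = (int *)BJAM_CALLOC( n, sizeof( int ) );
        return counters;
    }

    static void drop_counters( int * counters )
    {
        BJAM_FREE( counters );
    }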
diff --git a/src/boost/tools/build/src/engine/mkjambase.cpp b/src/boost/tools/build/src/engine/mkjambase.cpp
new file mode 100644
index 000000000..bc49a2b16
--- /dev/null
+++ b/src/boost/tools/build/src/engine/mkjambase.cpp
@@ -0,0 +1,123 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * mkjambase.c - turn Jambase into a big C structure
+ *
+ * Usage: mkjambase jambase.c Jambase ...
+ *
+ * Results look like this:
+ *
+ * char *jambase[] = {
+ * "...\n",
+ * ...
+ * 0 };
+ *
+ * Handles \'s and "'s specially; knows to delete blank and comment lines.
+ *
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+
+int main( int argc, char * * argv, char * * envp )
+{
+ char buf[ 1024 ];
+ FILE * fin;
+ FILE * fout;
+ char * p;
+ int doDotC = 0;
+
+ if ( argc < 3 )
+ {
+ fprintf( stderr, "usage: %s jambase.c Jambase ...\n", argv[ 0 ] );
+ return -1;
+ }
+
+ if ( !( fout = fopen( argv[1], "w" ) ) )
+ {
+ perror( argv[ 1 ] );
+ return -1;
+ }
+
+ /* If the file ends in .c generate a C source file. */
+ if ( ( p = strrchr( argv[1], '.' ) ) && ( !strcmp( p, ".c" ) || !strcmp( p, ".cpp" ) ) )
+ doDotC++;
+
+ /* Now process the files. */
+
+ argc -= 2;
+ argv += 2;
+
+ if ( doDotC )
+ {
+ fprintf( fout, "/* Generated by mkjambase from Jambase */\n" );
+ fprintf( fout, "const char *jambase[] = {\n" );
+ }
+
+ for ( ; argc--; ++argv )
+ {
+ if ( !( fin = fopen( *argv, "r" ) ) )
+ {
+ perror( *argv );
+ return -1;
+ }
+
+ if ( doDotC )
+ fprintf( fout, "/* %s */\n", *argv );
+ else
+ fprintf( fout, "### %s ###\n", *argv );
+
+ while ( fgets( buf, sizeof( buf ), fin ) )
+ {
+ if ( doDotC )
+ {
+ char * p = buf;
+
+ /* Strip leading whitespace. */
+ while ( ( *p == ' ' ) || ( *p == '\t' ) || ( *p == '\n' ) )
+ ++p;
+
+ /* Drop comments and empty lines. */
+ if ( ( *p == '#' ) || !*p )
+ continue;
+
+ /* Copy. */
+ putc( '"', fout );
+ for ( ; *p && ( *p != '\n' ); ++p )
+ switch ( *p )
+ {
+ case '\\': putc( '\\', fout ); putc( '\\', fout ); break;
+ case '"' : putc( '\\', fout ); putc( '"' , fout ); break;
+ case '\r': break;
+ default: putc( *p, fout ); break;
+ }
+
+ fprintf( fout, "\\n\",\n" );
+ }
+ else
+ {
+ fprintf( fout, "%s", buf );
+ }
+ }
+
+ fclose( fin );
+ }
+
+ if ( doDotC )
+ fprintf( fout, "0 };\n" );
+
+ fclose( fout );
+
+ return 0;
+}
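To make the transformation concrete: in C mode every surviving Jambase line becomes one escaped string literal, comments and blank lines are dropped, and the array is closed with a null sentinel. For an illustrative one-line input file containing rule bar { ECHO "hi" ; } the generated output would look roughly like this:

    /* Generated by mkjambase from Jambase */
    const char *jambase[] = {
    /* Jambase */
    "rule bar { ECHO \"hi\" ; }\n",
    0 };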
diff --git a/src/boost/tools/build/src/engine/modules.cpp b/src/boost/tools/build/src/engine/modules.cpp
new file mode 100644
index 000000000..d30c40bc4
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules.cpp
@@ -0,0 +1,431 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "modules.h"
+
+#include "hash.h"
+#include "lists.h"
+#include "native.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "jam_strings.h"
+#include "variable.h"
+
+#include <assert.h>
+#include <string.h>
+
+static struct hash * module_hash = 0;
+static module_t root;
+
+
+module_t * bindmodule( OBJECT * name )
+{
+ if ( !name )
+ return &root;
+
+ {
+ PROFILE_ENTER( BINDMODULE );
+
+ module_t * m;
+ int found;
+
+ if ( !module_hash )
+ module_hash = hashinit( sizeof( module_t ), "modules" );
+
+ m = (module_t *)hash_insert( module_hash, name, &found );
+ if ( !found )
+ {
+ m->name = object_copy( name );
+ m->variables = 0;
+ m->variable_indices = 0;
+ m->num_fixed_variables = 0;
+ m->fixed_variables = 0;
+ m->rules = 0;
+ m->imported_modules = 0;
+ m->class_module = 0;
+ m->native_rules = 0;
+ m->user_module = 0;
+ }
+
+ PROFILE_EXIT( BINDMODULE );
+
+ return m;
+ }
+}
+
+
+/*
+ * demand_rules() - Get the module's "rules" hash on demand.
+ */
+struct hash * demand_rules( module_t * m )
+{
+ if ( !m->rules )
+ m->rules = hashinit( sizeof( RULE ), "rules" );
+ return m->rules;
+}
+
+
+/*
+ * delete_module() - wipe out the module's rules and variables.
+ */
+
+static void delete_rule_( void * xrule, void * data )
+{
+ rule_free( (RULE *)xrule );
+}
+
+
+static void delete_native_rule( void * xrule, void * data )
+{
+ native_rule_t * rule = (native_rule_t *)xrule;
+ object_free( rule->name );
+ if ( rule->procedure )
+ function_free( rule->procedure );
+}
+
+
+static void delete_imported_modules( void * xmodule_name, void * data )
+{
+ object_free( *(OBJECT * *)xmodule_name );
+}
+
+
+static void free_fixed_variable( void * xvar, void * data );
+
+void delete_module( module_t * m )
+{
+ /* Clear out all the rules. */
+ if ( m->rules )
+ {
+ hashenumerate( m->rules, delete_rule_, (void *)0 );
+ hash_free( m->rules );
+ m->rules = 0;
+ }
+
+ if ( m->native_rules )
+ {
+ hashenumerate( m->native_rules, delete_native_rule, (void *)0 );
+ hash_free( m->native_rules );
+ m->native_rules = 0;
+ }
+
+ if ( m->variables )
+ {
+ var_done( m );
+ m->variables = 0;
+ }
+
+ if ( m->fixed_variables )
+ {
+ int i;
+ for ( i = 0; i < m->num_fixed_variables; ++i )
+ {
+ list_free( m->fixed_variables[ i ] );
+ }
+ BJAM_FREE( m->fixed_variables );
+ m->fixed_variables = 0;
+ }
+
+ if ( m->variable_indices )
+ {
+ hashenumerate( m->variable_indices, &free_fixed_variable, (void *)0 );
+ hash_free( m->variable_indices );
+ m->variable_indices = 0;
+ }
+
+ if ( m->imported_modules )
+ {
+ hashenumerate( m->imported_modules, delete_imported_modules, (void *)0 );
+ hash_free( m->imported_modules );
+ m->imported_modules = 0;
+ }
+}
+
+
+struct module_stats
+{
+ OBJECT * module_name;
+ struct hashstats rules_stats[ 1 ];
+ struct hashstats variables_stats[ 1 ];
+ struct hashstats variable_indices_stats[ 1 ];
+ struct hashstats imported_modules_stats[ 1 ];
+};
+
+
+static void module_stat( struct hash * hp, OBJECT * module, const char * name )
+{
+ if ( hp )
+ {
+ struct hashstats stats[ 1 ];
+ string id[ 1 ];
+ hashstats_init( stats );
+ string_new( id );
+ string_append( id, object_str( module ) );
+ string_push_back( id, ' ' );
+ string_append( id, name );
+
+ hashstats_add( stats, hp );
+ hashstats_print( stats, id->value );
+
+ string_free( id );
+ }
+}
+
+
+static void class_module_stat( struct hashstats * stats, OBJECT * module, const char * name )
+{
+ if ( stats->item_size )
+ {
+ string id[ 1 ];
+ string_new( id );
+ string_append( id, object_str( module ) );
+ string_append( id, " object " );
+ string_append( id, name );
+
+ hashstats_print( stats, id->value );
+
+ string_free( id );
+ }
+}
+
+
+static void stat_module( void * xmodule, void * data )
+{
+ module_t *m = (module_t *)xmodule;
+
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ {
+ struct hash * class_info = (struct hash *)data;
+ if ( m->class_module )
+ {
+ int found;
+ struct module_stats * ms = (struct module_stats *)hash_insert( class_info, m->class_module->name, &found );
+ if ( !found )
+ {
+ ms->module_name = m->class_module->name;
+ hashstats_init( ms->rules_stats );
+ hashstats_init( ms->variables_stats );
+ hashstats_init( ms->variable_indices_stats );
+ hashstats_init( ms->imported_modules_stats );
+ }
+
+ hashstats_add( ms->rules_stats, m->rules );
+ hashstats_add( ms->variables_stats, m->variables );
+ hashstats_add( ms->variable_indices_stats, m->variable_indices );
+ hashstats_add( ms->imported_modules_stats, m->imported_modules );
+ }
+ else
+ {
+ module_stat( m->rules, m->name, "rules" );
+ module_stat( m->variables, m->name, "variables" );
+ module_stat( m->variable_indices, m->name, "fixed variables" );
+ module_stat( m->imported_modules, m->name, "imported modules" );
+ }
+ }
+
+ delete_module( m );
+ object_free( m->name );
+}
+
+static void print_class_stats( void * xstats, void * data )
+{
+ struct module_stats * stats = (struct module_stats *)xstats;
+ class_module_stat( stats->rules_stats, stats->module_name, "rules" );
+ class_module_stat( stats->variables_stats, stats->module_name, "variables" );
+ class_module_stat( stats->variable_indices_stats, stats->module_name, "fixed variables" );
+ class_module_stat( stats->imported_modules_stats, stats->module_name, "imported modules" );
+}
+
+
+static void delete_module_( void * xmodule, void * data )
+{
+ module_t *m = (module_t *)xmodule;
+
+ delete_module( m );
+ object_free( m->name );
+}
+
+
+void modules_done()
+{
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ {
+ struct hash * class_hash = hashinit( sizeof( struct module_stats ), "object info" );
+ hashenumerate( module_hash, stat_module, (void *)class_hash );
+ hashenumerate( class_hash, print_class_stats, (void *)0 );
+ hash_free( class_hash );
+ }
+ hashenumerate( module_hash, delete_module_, (void *)0 );
+ hashdone( module_hash );
+ module_hash = 0;
+ delete_module( &root );
+}
+
+module_t * root_module()
+{
+ return &root;
+}
+
+
+void import_module( LIST * module_names, module_t * target_module )
+{
+ PROFILE_ENTER( IMPORT_MODULE );
+
+ struct hash * h;
+ LISTITER iter;
+ LISTITER end;
+
+ if ( !target_module->imported_modules )
+ target_module->imported_modules = hashinit( sizeof( char * ), "imported"
+ );
+ h = target_module->imported_modules;
+
+ iter = list_begin( module_names );
+ end = list_end( module_names );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ int found;
+ OBJECT * const s = list_item( iter );
+ OBJECT * * const ss = (OBJECT * *)hash_insert( h, s, &found );
+ if ( !found )
+ *ss = object_copy( s );
+ }
+
+ PROFILE_EXIT( IMPORT_MODULE );
+}
+
+
+static void add_module_name( void * r_, void * result_ )
+{
+ OBJECT * * const r = (OBJECT * *)r_;
+ LIST * * const result = (LIST * *)result_;
+ *result = list_push_back( *result, object_copy( *r ) );
+}
+
+
+LIST * imported_modules( module_t * module )
+{
+ LIST * result = L0;
+ if ( module->imported_modules )
+ hashenumerate( module->imported_modules, add_module_name, &result );
+ return result;
+}
+
+
+FUNCTION * function_bind_variables( FUNCTION *, module_t *, int * counter );
+FUNCTION * function_unbind_variables( FUNCTION * );
+
+struct fixed_variable
+{
+ OBJECT * key;
+ int n;
+};
+
+struct bind_vars_t
+{
+ module_t * module;
+ int counter;
+};
+
+
+static void free_fixed_variable( void * xvar, void * data )
+{
+ object_free( ( (struct fixed_variable *)xvar )->key );
+}
+
+
+static void bind_variables_for_rule( void * xrule, void * xdata )
+{
+ RULE * rule = (RULE *)xrule;
+ struct bind_vars_t * data = (struct bind_vars_t *)xdata;
+ if ( rule->procedure && rule->module == data->module )
+ rule->procedure = function_bind_variables( rule->procedure,
+ data->module, &data->counter );
+}
+
+
+void module_bind_variables( struct module_t * m )
+{
+ if ( m != root_module() && m->rules )
+ {
+ struct bind_vars_t data;
+ data.module = m;
+ data.counter = m->num_fixed_variables;
+ hashenumerate( m->rules, &bind_variables_for_rule, &data );
+ module_set_fixed_variables( m, data.counter );
+ }
+}
+
+
+int module_add_fixed_var( struct module_t * m, OBJECT * name, int * counter )
+{
+ struct fixed_variable * v;
+ int found;
+
+ assert( !m->class_module );
+
+ if ( !m->variable_indices )
+ m->variable_indices = hashinit( sizeof( struct fixed_variable ), "variable index table" );
+
+ v = (struct fixed_variable *)hash_insert( m->variable_indices, name, &found );
+ if ( !found )
+ {
+ v->key = object_copy( name );
+ v->n = (*counter)++;
+ }
+
+ return v->n;
+}
+
+
+LIST * var_get_and_clear_raw( module_t * m, OBJECT * name );
+
+static void load_fixed_variable( void * xvar, void * data )
+{
+ struct fixed_variable * var = (struct fixed_variable *)xvar;
+ struct module_t * m = (struct module_t *)data;
+ if ( var->n >= m->num_fixed_variables )
+ m->fixed_variables[ var->n ] = var_get_and_clear_raw( m, var->key );
+}
+
+
+void module_set_fixed_variables( struct module_t * m, int n_variables )
+{
+ /* Reallocate */
+ struct hash * variable_indices;
+ LIST * * fixed_variables = (LIST * *)BJAM_MALLOC( n_variables * sizeof( LIST * ) );
+ if ( m->fixed_variables )
+ {
+ memcpy( fixed_variables, m->fixed_variables, m->num_fixed_variables * sizeof( LIST * ) );
+ BJAM_FREE( m->fixed_variables );
+ }
+ m->fixed_variables = fixed_variables;
+ variable_indices = m->class_module
+ ? m->class_module->variable_indices
+ : m->variable_indices;
+ if ( variable_indices )
+ hashenumerate( variable_indices, &load_fixed_variable, m );
+ m->num_fixed_variables = n_variables;
+}
+
+
+int module_get_fixed_var( struct module_t * m_, OBJECT * name )
+{
+ struct fixed_variable * v;
+ struct module_t * m = m_;
+
+ if ( m->class_module )
+ m = m->class_module;
+
+ if ( !m->variable_indices )
+ return -1;
+
+ v = (struct fixed_variable *)hash_find( m->variable_indices, name );
+ return v && v->n < m_->num_fixed_variables ? v->n : -1;
+}
diff --git a/src/boost/tools/build/src/engine/modules.h b/src/boost/tools/build/src/engine/modules.h
new file mode 100644
index 000000000..acad633bf
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+#ifndef MODULES_DWA10182001_H
+#define MODULES_DWA10182001_H
+
+#include "config.h"
+#include "lists.h"
+#include "object.h"
+
+typedef struct module_t module_t ;
+struct module_t
+{
+ OBJECT * name;
+ struct hash * rules;
+ struct hash * variables;
+ struct hash * variable_indices;
+ int num_fixed_variables;
+ LIST * * fixed_variables;
+ struct hash * imported_modules;
+ module_t * class_module;
+ struct hash * native_rules;
+ int user_module;
+};
+
+module_t * bindmodule( OBJECT * name );
+module_t * root_module();
+void delete_module( module_t * );
+
+void import_module( LIST * module_names, module_t * target_module );
+LIST * imported_modules( module_t * );
+
+struct hash * demand_rules( module_t * );
+
+void module_bind_variables( module_t * );
+
+/*
+ * After calling module_add_fixed_var, module_set_fixed_variables must be called
+ * before accessing any variables in the module.
+ */
+int module_add_fixed_var( module_t *, OBJECT * name, int * n );
+void module_set_fixed_variables( module_t *, int n );
+
+/*
+ * Returns the index of the variable or -1 if none exists.
+ */
+int module_get_fixed_var( module_t *, OBJECT * name );
+
+void modules_done();
+
+#endif
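A short sketch of the fixed-variable protocol described in the comments above: indices are handed out by module_add_fixed_var while a caller-supplied counter advances, and module_set_fixed_variables must then be called with the final count before any slot is read back. The caller below is hypothetical and assumes a module that is not a class instance:

    #include "modules.h"

    #include <assert.h>

    static void bind_two_fixed_vars( module_t * m, OBJECT * first, OBJECT * second )
    {
        int counter = m->num_fixed_variables;
        int const i1 = module_add_fixed_var( m, first, &counter );
        int const i2 = module_add_fixed_var( m, second, &counter );

        /* Allocate the slots and load current values for every index. */
        module_set_fixed_variables( m, counter );

        /* The indices remain valid for direct slot lookups afterwards. */
        assert( module_get_fixed_var( m, first ) == i1 );
        assert( module_get_fixed_var( m, second ) == i2 );
    }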
diff --git a/src/boost/tools/build/src/engine/modules/order.cpp b/src/boost/tools/build/src/engine/modules/order.cpp
new file mode 100644
index 000000000..38209b889
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/order.cpp
@@ -0,0 +1,159 @@
+/* Copyright 2004. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../lists.h"
+#include "../mem.h"
+#include "../native.h"
+#include "../object.h"
+#include "../jam_strings.h"
+#include "../variable.h"
+
+
+/* Use a quite kludgy approach: when we add an order dependency from 'a' to
+ * 'b', just append 'b' to the value of variable 'a'.
+ */
+LIST * add_pair( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+ var_set( frame->module, list_item( iter ), list_copy_range( arg, list_next(
+ iter ), end ), VAR_APPEND );
+ return L0;
+}
+
+
+/* Given a list and a value, returns position of that value in the list, or -1
+ * if not found.
+ */
+int list_index( LIST * list, OBJECT * value )
+{
+ int result = 0;
+ LISTITER iter = list_begin( list );
+ LISTITER const end = list_end( list );
+ for ( ; iter != end; iter = list_next( iter ), ++result )
+ if ( object_equal( list_item( iter ), value ) )
+ return result;
+ return -1;
+}
+
+enum colors { white, gray, black };
+
+
+/* Main routine for topological sort. Calls itself recursively on all adjacent
+ * vertices which were not yet visited. After that, 'current_vertex' is added to
+ * '*result_ptr'.
+ */
+void do_ts( int * * graph, int current_vertex, int * colors, int * * result_ptr
+ )
+{
+ int i;
+
+ colors[ current_vertex ] = gray;
+ for ( i = 0; graph[ current_vertex ][ i ] != -1; ++i )
+ {
+ int adjacent_vertex = graph[ current_vertex ][ i ];
+ if ( colors[ adjacent_vertex ] == white )
+ do_ts( graph, adjacent_vertex, colors, result_ptr );
+ /* The vertex is either black, in which case we do not have to do
+ * anything, or gray, in which case we have a loop. If we have a loop,
+ * it is not clear what useful diagnostic we can emit, so we emit
+ * nothing.
+ */
+ }
+ colors[ current_vertex ] = black;
+ **result_ptr = current_vertex;
+ ( *result_ptr )++;
+}
+
+
+void topological_sort( int * * graph, int num_vertices, int * result )
+{
+ int i;
+ int * colors = ( int * )BJAM_CALLOC( num_vertices, sizeof( int ) );
+ for ( i = 0; i < num_vertices; ++i )
+ colors[ i ] = white;
+
+ for ( i = num_vertices - 1; i >= 0; --i )
+ if ( colors[ i ] == white )
+ do_ts( graph, i, colors, &result );
+
+ BJAM_FREE( colors );
+}
+
+
+LIST * order( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ int src;
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+
+ /* We need to create a graph of order dependencies between the passed
+ * objects. We assume there are no duplicates passed to 'add_pair'.
+ */
+ int length = list_length( arg );
+ int * * graph = ( int * * )BJAM_CALLOC( length, sizeof( int * ) );
+ int * order = ( int * )BJAM_MALLOC( ( length + 1 ) * sizeof( int ) );
+
+ for ( src = 0; iter != end; iter = list_next( iter ), ++src )
+ {
+ /* For all objects this one depends upon, add elements to 'graph'. */
+ LIST * dependencies = var_get( frame->module, list_item( iter ) );
+ int index = 0;
+ LISTITER dep_iter = list_begin( dependencies );
+ LISTITER const dep_end = list_end( dependencies );
+
+ graph[ src ] = ( int * )BJAM_CALLOC( list_length( dependencies ) + 1,
+ sizeof( int ) );
+ for ( ; dep_iter != dep_end; dep_iter = list_next( dep_iter ) )
+ {
+ int const dst = list_index( arg, list_item( dep_iter ) );
+ if ( dst != -1 )
+ graph[ src ][ index++ ] = dst;
+ }
+ graph[ src ][ index ] = -1;
+ }
+
+ topological_sort( graph, length, order );
+
+ {
+ int index = length - 1;
+ for ( ; index >= 0; --index )
+ {
+ int i;
+ LISTITER iter = list_begin( arg );
+ for ( i = 0; i < order[ index ]; ++i, iter = list_next( iter ) );
+ result = list_push_back( result, object_copy( list_item( iter ) ) );
+ }
+ }
+
+ /* Clean up */
+ {
+ int i;
+ for ( i = 0; i < length; ++i )
+ BJAM_FREE( graph[ i ] );
+ BJAM_FREE( graph );
+ BJAM_FREE( order );
+ }
+
+ return result;
+}
+
+
+void init_order()
+{
+ {
+ char const * args[] = { "first", "second", 0 };
+ declare_native_rule( "class@order", "add-pair", args, add_pair, 1 );
+ }
+
+ {
+ char const * args[] = { "objects", "*", 0 };
+ declare_native_rule( "class@order", "order", args, order, 1 );
+ }
+}
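The graph handed to topological_sort above uses one int row per vertex, holding the indices of adjacent vertices and terminated by -1; the result buffer receives a depth-first post-order, which order() then reads back to front. A tiny illustration with made-up vertices, assuming the functions above:

    /* Edges: 0 -> 1, 0 -> 2, 1 -> 2. */
    static void topological_sort_example()
    {
        int row0[] = { 1, 2, -1 };
        int row1[] = { 2, -1 };
        int row2[] = { -1 };
        int * graph[] = { row0, row1, row2 };
        int result[ 3 ];

        topological_sort( graph, 3, result );
        /* result now holds the post-order { 2, 1, 0 }; reading it back to
         * front, as order() does, yields 0 1 2 -- every vertex before the
         * vertices it points at. */
    }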
diff --git a/src/boost/tools/build/src/engine/modules/path.cpp b/src/boost/tools/build/src/engine/modules/path.cpp
new file mode 100644
index 000000000..8f7624839
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/path.cpp
@@ -0,0 +1,25 @@
+/* Copyright Vladimir Prus 2003.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../constants.h"
+#include "../frames.h"
+#include "../lists.h"
+#include "../native.h"
+#include "../filesys.h"
+
+
+LIST * path_exists( FRAME * frame, int flags )
+{
+ return file_query( list_front( lol_get( frame->args, 0 ) ) ) ?
+ list_new( object_copy( constant_true ) ) : L0;
+}
+
+
+void init_path()
+{
+ char const * args[] = { "location", 0 };
+ declare_native_rule( "path", "exists", args, path_exists, 1 );
+}
diff --git a/src/boost/tools/build/src/engine/modules/property-set.cpp b/src/boost/tools/build/src/engine/modules/property-set.cpp
new file mode 100644
index 000000000..400859723
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/property-set.cpp
@@ -0,0 +1,330 @@
+/*
+ * Copyright 2013 Steven Watanabe
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../object.h"
+#include "../lists.h"
+#include "../modules.h"
+#include "../rules.h"
+#include "../variable.h"
+#include "../native.h"
+#include "../compile.h"
+#include "../mem.h"
+#include "../constants.h"
+#include "string.h"
+
+struct ps_map_entry
+{
+ struct ps_map_entry * next;
+ LIST * key;
+ OBJECT * value;
+};
+
+struct ps_map
+{
+ struct ps_map_entry * * table;
+ size_t table_size;
+ size_t num_elems;
+};
+
+static unsigned list_hash(LIST * key)
+{
+ unsigned int hash = 0;
+ LISTITER iter = list_begin( key ), end = list_end( key );
+ for ( ; iter != end; ++iter )
+ {
+ hash = hash * 2147059363 + object_hash( list_item( iter ) );
+ }
+ return hash;
+}
+
+static int list_equal( LIST * lhs, LIST * rhs )
+{
+ LISTITER lhs_iter, lhs_end, rhs_iter;
+ if ( list_length( lhs ) != list_length( rhs ) )
+ {
+ return 0;
+ }
+ lhs_iter = list_begin( lhs );
+ lhs_end = list_end( lhs );
+ rhs_iter = list_begin( rhs );
+ for ( ; lhs_iter != lhs_end; ++lhs_iter, ++rhs_iter )
+ {
+ if ( ! object_equal( list_item( lhs_iter ), list_item( rhs_iter ) ) )
+ {
+ return 0;
+ }
+ }
+ return 1;
+}
+
+static void ps_map_init( struct ps_map * map )
+{
+ size_t i;
+ map->table_size = 2;
+ map->num_elems = 0;
+ map->table = (struct ps_map_entry * *)BJAM_MALLOC( map->table_size * sizeof( struct ps_map_entry * ) );
+ for ( i = 0; i < map->table_size; ++i )
+ {
+ map->table[ i ] = NULL;
+ }
+}
+
+static void ps_map_destroy( struct ps_map * map )
+{
+ size_t i;
+ for ( i = 0; i < map->table_size; ++i )
+ {
+ struct ps_map_entry * pos;
+ for ( pos = map->table[ i ]; pos; )
+ {
+ struct ps_map_entry * tmp = pos->next;
+ object_free( pos->value );
+ BJAM_FREE( pos );
+ pos = tmp;
+ }
+ }
+ BJAM_FREE( map->table );
+}
+
+static void ps_map_rehash( struct ps_map * map )
+{
+ struct ps_map old = *map;
+ size_t i;
+ map->table = (struct ps_map_entry * *)BJAM_MALLOC( map->table_size * 2 * sizeof( struct ps_map_entry * ) );
+ map->table_size *= 2;
+ for ( i = 0; i < map->table_size; ++i )
+ {
+ map->table[ i ] = NULL;
+ }
+ for ( i = 0; i < old.table_size; ++i )
+ {
+ struct ps_map_entry * pos;
+ for ( pos = old.table[ i ]; pos; )
+ {
+ struct ps_map_entry * tmp = pos->next;
+
+ unsigned hash_val = list_hash( pos->key );
+ unsigned bucket = hash_val % map->table_size;
+ pos->next = map->table[ bucket ];
+ map->table[ bucket ] = pos;
+
+ pos = tmp;
+ }
+ }
+ BJAM_FREE( old.table );
+}
+
+static struct ps_map_entry * ps_map_insert(struct ps_map * map, LIST * key)
+{
+ unsigned hash_val = list_hash( key );
+ unsigned bucket = hash_val % map->table_size;
+ struct ps_map_entry * pos;
+ for ( pos = map->table[bucket]; pos ; pos = pos->next )
+ {
+ if ( list_equal( pos->key, key ) )
+ return pos;
+ }
+
+ if ( map->num_elems >= map->table_size )
+ {
+ ps_map_rehash( map );
+ bucket = hash_val % map->table_size;
+ }
+ pos = (struct ps_map_entry *)BJAM_MALLOC( sizeof( struct ps_map_entry ) );
+ pos->next = map->table[bucket];
+ pos->key = key;
+ pos->value = 0;
+ map->table[bucket] = pos;
+ ++map->num_elems;
+ return pos;
+}
+
+static struct ps_map all_property_sets;
+
+LIST * property_set_create( FRAME * frame, int flags )
+{
+ LIST * properties = lol_get( frame->args, 0 );
+ LIST * sorted = list_sort( properties );
+ LIST * unique = list_unique( sorted );
+ struct ps_map_entry * pos = ps_map_insert( &all_property_sets, unique );
+ list_free( sorted );
+ if ( pos->value )
+ {
+ list_free( unique );
+ return list_new( object_copy( pos->value ) );
+ }
+ else
+ {
+ OBJECT * rulename = object_new( "new" );
+ OBJECT * varname = object_new( "self.raw" );
+ LIST * val = call_rule( rulename, frame,
+ list_new( object_new( "property-set" ) ), 0 );
+ LISTITER iter, end;
+ object_free( rulename );
+ pos->value = object_copy( list_front( val ) );
+ var_set( bindmodule( pos->value ), varname, unique, VAR_SET );
+ object_free( varname );
+
+ for ( iter = list_begin( unique ), end = list_end( unique ); iter != end; ++iter )
+ {
+ const char * str = object_str( list_item( iter ) );
+ if ( str[ 0 ] != '<' || ! strchr( str, '>' ) )
+ {
+ string message[ 1 ];
+ string_new( message );
+ string_append( message, "Invalid property: '" );
+ string_append( message, str );
+ string_append( message, "'" );
+ rulename = object_new( "errors.error" );
+ call_rule( rulename, frame,
+ list_new( object_new( message->value ) ), 0 );
+ /* unreachable */
+ string_free( message );
+ object_free( rulename );
+ }
+ }
+
+ return val;
+ }
+}
+
+/* binary search for the property value */
+LIST * property_set_get( FRAME * frame, int flags )
+{
+ OBJECT * varname = object_new( "self.raw" );
+ LIST * props = var_get( frame->module, varname );
+ const char * name = object_str( list_front( lol_get( frame->args, 0 ) ) );
+ size_t name_len = strlen( name );
+ LISTITER begin, end;
+ LIST * result = L0;
+ object_free( varname );
+
+ /* Assumes random access */
+ begin = list_begin( props ), end = list_end( props );
+
+ while ( 1 )
+ {
+ ptrdiff_t diff = (end - begin);
+ LISTITER mid = begin + diff / 2;
+ int res;
+ if ( diff == 0 )
+ {
+ return L0;
+ }
+ res = strncmp( object_str( list_item( mid ) ), name, name_len );
+ if ( res < 0 )
+ {
+ begin = mid + 1;
+ }
+ else if ( res > 0 )
+ {
+ end = mid;
+ }
+ else /* We've found the property */
+ {
+ /* Find the beginning of the group */
+ LISTITER tmp = mid;
+ while ( tmp > begin )
+ {
+ --tmp;
+ res = strncmp( object_str( list_item( tmp ) ), name, name_len );
+ if ( res != 0 )
+ {
+ ++tmp;
+ break;
+ }
+ }
+ begin = tmp;
+ /* Find the end of the group */
+ tmp = mid + 1;
+ while ( tmp < end )
+ {
+ res = strncmp( object_str( list_item( tmp ) ), name, name_len );
+ if ( res != 0 ) break;
+ ++tmp;
+ }
+ end = tmp;
+ break;
+ }
+ }
+
+ for ( ; begin != end; ++begin )
+ {
+ result = list_push_back( result,
+ object_new( object_str( list_item( begin ) ) + name_len ) );
+ }
+
+ return result;
+}
+
+/* binary search for the property value */
+LIST * property_set_contains_features( FRAME * frame, int flags )
+{
+ OBJECT * varname = object_new( "self.raw" );
+ LIST * props = var_get( frame->module, varname );
+ LIST * features = lol_get( frame->args, 0 );
+ LISTITER features_iter = list_begin( features );
+ LISTITER features_end = list_end( features ) ;
+ object_free( varname );
+
+ for ( ; features_iter != features_end; ++features_iter )
+ {
+ const char * name = object_str( list_item( features_iter ) );
+ size_t name_len = strlen( name );
+ LISTITER begin, end;
+ /* Assumes random access */
+ begin = list_begin( props ), end = list_end( props );
+
+ while ( 1 )
+ {
+ ptrdiff_t diff = (end - begin);
+ LISTITER mid = begin + diff / 2;
+ int res;
+ if ( diff == 0 )
+ {
+ /* The feature is missing */
+ return L0;
+ }
+ res = strncmp( object_str( list_item( mid ) ), name, name_len );
+ if ( res < 0 )
+ {
+ begin = mid + 1;
+ }
+ else if ( res > 0 )
+ {
+ end = mid;
+ }
+ else /* We've found the property */
+ {
+ break;
+ }
+ }
+ }
+ return list_new( object_copy( constant_true ) );
+}
+
+void init_property_set()
+{
+ {
+ char const * args[] = { "raw-properties", "*", 0 };
+ declare_native_rule( "property-set", "create", args, property_set_create, 1 );
+ }
+ {
+ char const * args[] = { "feature", 0 };
+ declare_native_rule( "class@property-set", "get", args, property_set_get, 1 );
+ }
+ {
+ char const * args[] = { "features", "*", 0 };
+ declare_native_rule( "class@property-set", "contains-features", args, property_set_contains_features, 1 );
+ }
+ ps_map_init( &all_property_sets );
+}
+
+void property_set_done()
+{
+ ps_map_destroy( &all_property_sets );
+}
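property-set.get above relies on self.raw being a sorted list of "<feature>value" strings: a binary search locates any entry whose leading characters match the feature name, the block of entries sharing that prefix is widened in both directions, and only the value part after the prefix is returned. The same idea on a plain array of strings, with illustrative data and function name:

    #include <stdio.h>
    #include <string.h>

    /* Print the value part of every entry in a sorted array whose prefix
     * matches the given feature, e.g. "<toolset>". */
    static void print_feature_values( const char * const * props, int n,
        const char * feature )
    {
        size_t name_len = strlen( feature );
        int lo = 0, hi = n;
        while ( lo < hi )
        {
            int mid = lo + ( hi - lo ) / 2;
            int res = strncmp( props[ mid ], feature, name_len );
            if ( res < 0 )
                lo = mid + 1;
            else if ( res > 0 )
                hi = mid;
            else
            {
                /* Widen to the whole group of entries sharing the prefix. */
                int b = mid, e = mid + 1;
                while ( b > lo && !strncmp( props[ b - 1 ], feature, name_len ) )
                    --b;
                while ( e < hi && !strncmp( props[ e ], feature, name_len ) )
                    ++e;
                for ( ; b != e; ++b )
                    printf( "%s\n", props[ b ] + name_len );
                return;
            }
        }
    }

    /* With raw[] = { "<optimization>speed", "<toolset>gcc", "<variant>release" },
     * print_feature_values( raw, 3, "<toolset>" ) prints "gcc". */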
diff --git a/src/boost/tools/build/src/engine/modules/readme.txt b/src/boost/tools/build/src/engine/modules/readme.txt
new file mode 100644
index 000000000..2a08ba10e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/readme.txt
@@ -0,0 +1,3 @@
+
+This directory contains sources which declare native
+rules for B2 modules. \ No newline at end of file
diff --git a/src/boost/tools/build/src/engine/modules/regex.cpp b/src/boost/tools/build/src/engine/modules/regex.cpp
new file mode 100644
index 000000000..fa5355858
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/regex.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright 2003. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../mem.h"
+#include "../native.h"
+#include "../jam_strings.h"
+#include "../subst.h"
+
+/*
+rule split ( string separator )
+{
+ local result ;
+ local s = $(string) ;
+
+ local match = 1 ;
+ while $(match)
+ {
+ match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
+ if $(match)
+ {
+ match += "" ; # in case 3rd item was empty - works around MATCH bug
+ result = $(match[3]) $(result) ;
+ s = $(match[1]) ;
+ }
+ }
+ return $(s) $(result) ;
+}
+*/
+
+LIST * regex_split( FRAME * frame, int flags )
+{
+ LIST * args = lol_get( frame->args, 0 );
+ OBJECT * s;
+ OBJECT * separator;
+ regexp * re;
+ const char * pos, * prev;
+ LIST * result = L0;
+ LISTITER iter = list_begin( args );
+ s = list_item( iter );
+ separator = list_item( list_next( iter ) );
+
+ re = regex_compile( separator );
+
+ prev = pos = object_str( s );
+ while ( regexec( re, pos ) )
+ {
+ result = list_push_back( result, object_new_range( prev, re->startp[ 0 ] - prev ) );
+ prev = re->endp[ 0 ];
+ /* Handle empty matches */
+ if ( *pos == '\0' )
+ break;
+ else if ( pos == re->endp[ 0 ] )
+ pos++;
+ else
+ pos = re->endp[ 0 ];
+ }
+
+ result = list_push_back( result, object_new( pos ) );
+
+ return result;
+}
+
+/*
+rule replace (
+ string # The string to modify.
+ match # The characters to replace.
+ replacement # The string to replace with.
+ )
+{
+ local result = "" ;
+ local parts = 1 ;
+ while $(parts)
+ {
+ parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
+ if $(parts)
+ {
+ parts += "" ;
+ result = "$(replacement)$(parts[3])$(result)" ;
+ string = $(parts[1]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(string)$(result)" ;
+ return $(result) ;
+}
+*/
+
+LIST * regex_replace( FRAME * frame, int flags )
+{
+ LIST * args = lol_get( frame->args, 0 );
+ OBJECT * s;
+ OBJECT * match;
+ OBJECT * replacement;
+ regexp * re;
+ const char * pos;
+ string buf[ 1 ];
+ LIST * result;
+ LISTITER iter = list_begin( args );
+ s = list_item( iter );
+ iter = list_next( iter );
+ match = list_item( iter );
+ iter = list_next( iter );
+ replacement = list_item(iter );
+
+ re = regex_compile( match );
+
+ string_new( buf );
+
+ pos = object_str( s );
+ while ( regexec( re, pos ) )
+ {
+ string_append_range( buf, pos, re->startp[ 0 ] );
+ string_append( buf, object_str( replacement ) );
+ /* Handle empty matches */
+ if ( *pos == '\0' )
+ break;
+ else if ( pos == re->endp[ 0 ] )
+ string_push_back( buf, *pos++ );
+ else
+ pos = re->endp[ 0 ];
+ }
+ string_append( buf, pos );
+
+ result = list_new( object_new( buf->value ) );
+
+ string_free( buf );
+
+ return result;
+}
+
+/*
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+*/
+
+LIST * regex_transform( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LIST * const pattern = lol_get( frame->args, 1 );
+ LIST * const indices_list = lol_get( frame->args, 2 );
+ int * indices = 0;
+ int size;
+ LIST * result = L0;
+
+ if ( !list_empty( indices_list ) )
+ {
+ int * p;
+ LISTITER iter = list_begin( indices_list );
+ LISTITER const end = list_end( indices_list );
+ size = list_length( indices_list );
+ indices = (int *)BJAM_MALLOC( size * sizeof( int ) );
+ for ( p = indices; iter != end; iter = list_next( iter ) )
+ *p++ = atoi( object_str( list_item( iter ) ) );
+ }
+ else
+ {
+ size = 1;
+ indices = (int *)BJAM_MALLOC( sizeof( int ) );
+ *indices = 1;
+ }
+
+ {
+ /* Result is cached and intentionally never freed */
+ regexp * const re = regex_compile( list_front( pattern ) );
+
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+
+ string buf[ 1 ];
+ string_new( buf );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( regexec( re, object_str( list_item( iter ) ) ) )
+ {
+ int i = 0;
+ for ( ; i < size; ++i )
+ {
+ int const index = indices[ i ];
+ /* Skip empty submatches. Not sure it is right in all cases,
+ * but surely is right for the case for which this routine
+ * is optimized -- header scanning.
+ */
+ if ( re->startp[ index ] != re->endp[ index ] )
+ {
+ string_append_range( buf, re->startp[ index ],
+ re->endp[ index ] );
+ result = list_push_back( result, object_new( buf->value
+ ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ }
+ string_free( buf );
+ }
+
+ BJAM_FREE( indices );
+ return result;
+}
+
+
+void init_regex()
+{
+ {
+ char const * args[] = { "string", "separator", 0 };
+ declare_native_rule( "regex", "split", args, regex_split, 1 );
+ }
+ {
+ char const * args[] = { "string", "match", "replacement", 0 };
+ declare_native_rule( "regex", "replace", args, regex_replace, 1 );
+ }
+ {
+ char const * args[] = { "list", "*", ":", "pattern", ":", "indices", "*", 0 };
+ declare_native_rule( "regex", "transform", args, regex_transform, 2 );
+ }
+}
diff --git a/src/boost/tools/build/src/engine/modules/sequence.cpp b/src/boost/tools/build/src/engine/modules/sequence.cpp
new file mode 100644
index 000000000..cece2cd5b
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/sequence.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright Vladimir Prus 2003.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../native.h"
+#include "../object.h"
+#include "../lists.h"
+#include "../compile.h"
+
+#include <stdlib.h>
+
+
+#ifndef max
+# define max(a,b) ((a)>(b)?(a):(b))
+#endif
+
+
+LIST * sequence_select_highest_ranked( FRAME * frame, int flags )
+{
+ /* Returns all of 'elements' for which corresponding element in parallel */
+ /* list 'rank' is equal to the maximum value in 'rank'. */
+
+ LIST * const elements = lol_get( frame->args, 0 );
+ LIST * const rank = lol_get( frame->args, 1 );
+
+ LIST * result = L0;
+ int highest_rank = -1;
+
+ {
+ LISTITER iter = list_begin( rank );
+ LISTITER const end = list_end( rank );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ int const current = atoi( object_str( list_item( iter ) ) );
+ highest_rank = max( highest_rank, current );
+ }
+ }
+
+ {
+ LISTITER iter = list_begin( rank );
+ LISTITER const end = list_end( rank );
+ LISTITER elements_iter = list_begin( elements );
+ for ( ; iter != end; iter = list_next( iter ), elements_iter =
+ list_next( elements_iter ) )
+ if ( atoi( object_str( list_item( iter ) ) ) == highest_rank )
+ result = list_push_back( result, object_copy( list_item(
+ elements_iter ) ) );
+ }
+
+ return result;
+}
+
+LIST * sequence_transform( FRAME * frame, int flags )
+{
+ LIST * function = lol_get( frame->args, 0 );
+ LIST * sequence = lol_get( frame->args, 1 );
+ LIST * result = L0;
+ OBJECT * function_name = list_front( function );
+ LISTITER args_begin = list_next( list_begin( function ) ), args_end = list_end( function );
+ LISTITER iter = list_begin( sequence ), end = list_end( sequence );
+ RULE * rule = bindrule( function_name, frame->prev->module );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ FRAME inner[ 1 ];
+
+ frame_init( inner );
+ inner->prev = frame;
+ inner->prev_user = frame->prev_user;
+ inner->module = frame->prev->module;
+
+ lol_add( inner->args, list_push_back( list_copy_range( function, args_begin, args_end ), object_copy( list_item( iter ) ) ) );
+ result = list_append( result, evaluate_rule( rule, function_name, inner ) );
+
+ frame_free( inner );
+ }
+
+ return result;
+}
+
+void init_sequence()
+{
+ {
+ char const * args[] = { "elements", "*", ":", "rank", "*", 0 };
+ declare_native_rule( "sequence", "select-highest-ranked", args,
+ sequence_select_highest_ranked, 1 );
+ }
+ {
+ char const * args[] = { "function", "+", ":", "sequence", "*", 0 };
+ declare_native_rule( "sequence", "transform", args,
+ sequence_transform, 1 );
+ }
+}
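
sequence.select-highest-ranked walks a list of elements and a parallel list of numeric ranks and keeps only the elements whose rank equals the maximum. A minimal stand-alone C++ sketch of that selection (the toolset names are invented for the example):

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    // Keep every element whose parallel rank equals the maximum rank,
    // mirroring sequence_select_highest_ranked() above.
    std::vector<std::string> select_highest_ranked(const std::vector<std::string>& elements,
                                                   const std::vector<int>& rank)
    {
        std::vector<std::string> result;
        if (elements.empty() || rank.size() != elements.size())
            return result;
        int const highest = *std::max_element(rank.begin(), rank.end());
        for (std::size_t i = 0; i < elements.size(); ++i)
            if (rank[i] == highest)
                result.push_back(elements[i]);
        return result;
    }

    int main()
    {
        for (const std::string& e :
             select_highest_ranked({ "gcc", "msvc", "clang" }, { 2, 1, 2 }))
            std::cout << e << "\n";   // prints: gcc, clang
    }
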
diff --git a/src/boost/tools/build/src/engine/modules/set.cpp b/src/boost/tools/build/src/engine/modules/set.cpp
new file mode 100644
index 000000000..77a314d57
--- /dev/null
+++ b/src/boost/tools/build/src/engine/modules/set.cpp
@@ -0,0 +1,43 @@
+/* Copyright Vladimir Prus 2003. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "../native.h"
+#include "../object.h"
+
+/*
+ local result = ;
+ local element ;
+ for element in $(B)
+ {
+ if ! ( $(element) in $(A) )
+ {
+ result += $(element) ;
+ }
+ }
+ return $(result) ;
+*/
+LIST *set_difference( FRAME *frame, int flags )
+{
+
+ LIST* b = lol_get( frame->args, 0 );
+ LIST* a = lol_get( frame->args, 1 );
+
+ LIST* result = L0;
+ LISTITER iter = list_begin( b ), end = list_end( b );
+ for( ; iter != end; iter = list_next( iter ) )
+ {
+ if (!list_in(a, list_item(iter)))
+ result = list_push_back(result, object_copy(list_item(iter)));
+ }
+ return result;
+}
+
+void init_set()
+{
+ {
+ const char* args[] = { "B", "*", ":", "A", "*", 0 };
+ declare_native_rule("set", "difference", args, set_difference, 1);
+ }
+
+}
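
Note that set.difference B : A keeps B's original order and any duplicates in B, unlike std::set_difference, which requires sorted ranges. A small stand-alone sketch of the same semantics:

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    // set.difference B : A -- every element of B that does not occur in A,
    // preserving B's order, as set_difference() above does with Jam lists.
    std::vector<std::string> difference(const std::vector<std::string>& b,
                                        const std::vector<std::string>& a)
    {
        std::vector<std::string> result;
        for (const std::string& item : b)
            if (std::find(a.begin(), a.end(), item) == a.end())
                result.push_back(item);
        return result;
    }

    int main()
    {
        for (const std::string& e : difference({ "a.o", "b.o", "c.o" }, { "b.o" }))
            std::cout << e << "\n";   // prints: a.o, c.o
    }
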
diff --git a/src/boost/tools/build/src/engine/native.cpp b/src/boost/tools/build/src/engine/native.cpp
new file mode 100644
index 000000000..68828aa31
--- /dev/null
+++ b/src/boost/tools/build/src/engine/native.cpp
@@ -0,0 +1,34 @@
+/* Copyright 2003. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "native.h"
+
+#include "hash.h"
+
+#include <assert.h>
+
+
+void declare_native_rule( char const * module, char const * rule,
+ char const * * args, LIST * (*f)( FRAME *, int ), int version )
+{
+ OBJECT * const module_obj = module ? object_new( module ) : 0 ;
+ module_t * m = bindmodule( module_obj );
+ if ( module_obj )
+ object_free( module_obj );
+ if ( !m->native_rules )
+ m->native_rules = hashinit( sizeof( native_rule_t ), "native rules" );
+
+ {
+ OBJECT * const name = object_new( rule );
+ int found;
+ native_rule_t * const np = (native_rule_t *)hash_insert(
+ m->native_rules, name, &found );
+ np->name = name;
+ assert( !found );
+ np->procedure = function_builtin( f, 0, args );
+ np->version = version;
+ }
+}
diff --git a/src/boost/tools/build/src/engine/native.h b/src/boost/tools/build/src/engine/native.h
new file mode 100644
index 000000000..f80b0e0f0
--- /dev/null
+++ b/src/boost/tools/build/src/engine/native.h
@@ -0,0 +1,35 @@
+/* Copyright 2003. David Abrahams
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef NATIVE_H_VP_2003_12_09
+#define NATIVE_H_VP_2003_12_09
+
+#include "config.h"
+#include "function.h"
+#include "frames.h"
+#include "lists.h"
+#include "object.h"
+
+typedef struct native_rule_t
+{
+ OBJECT * name;
+ FUNCTION * procedure;
+
+    /* Version of the interface that the native rule provides. It is possible
+     * that we may want to change the set of parameters for an existing native
+     * rule. In that case, the version number should be incremented so that B2
+     * can check for the version it relies on.
+ *
+ * Versions are numbered from 1.
+ */
+ int version;
+} native_rule_t;
+/* MSVC debugger gets confused unless the native_rule_t typedef is provided. */
+
+void declare_native_rule( char const * module, char const * rule,
+ char const * * args, LIST * (*f)( FRAME *, int ), int version );
+
+#endif
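
Putting native.h and the modules above together, a new native rule is exposed by writing a LIST * (*)( FRAME *, int ) implementation and registering it from an init function wired into engine startup the same way init_set() is. The sketch below follows that pattern; the "echo" module and its rule are hypothetical, and version 1 would only be bumped if the parameter list changed:

    #include "../native.h"   /* pulls in frames.h, lists.h and object.h */

    /* Hypothetical native rule: returns its arguments unchanged. */
    static LIST * echo_values( FRAME * frame, int flags )
    {
        /* The caller owns the returned list, so hand back a copy. */
        return list_copy( lol_get( frame->args, 0 ) );
    }

    void init_echo()
    {
        char const * args[] = { "values", "*", 0 };
        declare_native_rule( "echo", "values", args, echo_values, 1 );
    }
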
diff --git a/src/boost/tools/build/src/engine/object.cpp b/src/boost/tools/build/src/engine/object.cpp
new file mode 100644
index 000000000..07866baf4
--- /dev/null
+++ b/src/boost/tools/build/src/engine/object.cpp
@@ -0,0 +1,397 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * object.c - object manipulation routines
+ *
+ * External functions:
+ * object_new() - create an object from a string
+ * object_new_range() - create an object from a string of given length
+ * object_copy() - return a copy of an object
+ * object_free() - free an object
+ * object_str() - get the string value of an object
+ * object_done() - free string tables
+ *
+ * This implementation builds a hash table of all strings, so that multiple
+ * calls of object_new() on the same string allocate memory for the string once.
+ * Strings are never actually freed.
+ */
+
+#include "jam.h"
+#include "object.h"
+#include "output.h"
+
+#include <assert.h>
+#include <stddef.h>
+#include <stdlib.h>
+
+
+#define OBJECT_MAGIC 0xa762e0e3u
+
+#ifndef object_copy
+
+struct hash_header
+{
+#ifndef NDEBUG
+ unsigned int magic;
+#endif
+ unsigned int hash;
+ struct hash_item * next;
+};
+
+#endif
+
+struct hash_item
+{
+ struct hash_header header;
+ char data[ 1 ];
+};
+
+#define ALLOC_ALIGNMENT (sizeof(struct hash_item) - sizeof(struct hash_header))
+
+typedef struct string_set
+{
+ unsigned int num;
+ unsigned int size;
+ struct hash_item * * data;
+} string_set;
+
+static string_set strhash;
+static int strtotal = 0;
+static int strcount_in = 0;
+static int strcount_out = 0;
+
+
+/*
+ * Immortal string allocator implementation speeds string allocation and cuts
+ * down on internal fragmentation.
+ */
+
+#define STRING_BLOCK 4096
+typedef struct strblock
+{
+ struct strblock * next;
+ char data[ STRING_BLOCK ];
+} strblock;
+
+static strblock * strblock_chain = 0;
+
+/* Storage remaining in the current strblock */
+static char * storage_start = 0;
+static char * storage_finish = 0;
+
+
+/*
+ * allocate() - Allocate n bytes of immortal string storage.
+ */
+
+static char * allocate( size_t n )
+{
+#ifdef BJAM_NEWSTR_NO_ALLOCATE
+ return (char *)BJAM_MALLOC( n );
+#else
+ /* See if we can grab storage from an existing block. */
+ size_t remaining = storage_finish - storage_start;
+ n = ( ( n + ALLOC_ALIGNMENT - 1 ) / ALLOC_ALIGNMENT ) * ALLOC_ALIGNMENT;
+ if ( remaining >= n )
+ {
+ char * result = storage_start;
+ storage_start += n;
+ return result;
+ }
+ else /* Must allocate a new block. */
+ {
+ strblock * new_block;
+ size_t nalloc = n;
+ if ( nalloc < STRING_BLOCK )
+ nalloc = STRING_BLOCK;
+
+ /* Allocate a new block and link into the chain. */
+ new_block = (strblock *)BJAM_MALLOC( offsetof( strblock, data[ 0 ] ) +
+ nalloc * sizeof( new_block->data[ 0 ] ) );
+ if ( new_block == 0 )
+ return 0;
+ new_block->next = strblock_chain;
+ strblock_chain = new_block;
+
+ /* Take future allocations out of the larger remaining space. */
+ if ( remaining < nalloc - n )
+ {
+ storage_start = new_block->data + n;
+ storage_finish = new_block->data + nalloc;
+ }
+ return new_block->data;
+ }
+#endif
+}
+
+
+static unsigned int hash_keyval( char const * key, int const size )
+{
+ unsigned int const magic = 2147059363;
+ unsigned int hash = 0;
+
+ unsigned int i;
+ for ( i = 0; i < size / sizeof( unsigned int ); ++i )
+ {
+ unsigned int val;
+ memcpy( &val, key, sizeof( unsigned int ) );
+ hash = hash * magic + val;
+ key += sizeof( unsigned int );
+ }
+
+ {
+ unsigned int val = 0;
+ memcpy( &val, key, size % sizeof( unsigned int ) );
+ hash = hash * magic + val;
+ }
+
+ return hash + ( hash >> 17 );
+}
+
+
+static void string_set_init( string_set * set )
+{
+ set->size = 0;
+ set->num = 4;
+ set->data = (struct hash_item * *)BJAM_MALLOC( set->num * sizeof( struct hash_item * ) );
+ memset( set->data, 0, set->num * sizeof( struct hash_item * ) );
+}
+
+
+static void string_set_done( string_set * set )
+{
+ BJAM_FREE( set->data );
+}
+
+
+static void string_set_resize( string_set * set )
+{
+ unsigned i;
+ string_set new_set;
+ new_set.num = set->num * 2;
+ new_set.size = set->size;
+ new_set.data = (struct hash_item * *)BJAM_MALLOC( sizeof( struct hash_item *
+ ) * new_set.num );
+ memset( new_set.data, 0, sizeof( struct hash_item * ) * new_set.num );
+ for ( i = 0; i < set->num; ++i )
+ {
+ while ( set->data[ i ] )
+ {
+ struct hash_item * temp = set->data[ i ];
+ unsigned pos = temp->header.hash % new_set.num;
+ set->data[ i ] = temp->header.next;
+ temp->header.next = new_set.data[ pos ];
+ new_set.data[ pos ] = temp;
+ }
+ }
+ BJAM_FREE( set->data );
+ *set = new_set;
+}
+
+
+static char const * string_set_insert( string_set * set, char const * string,
+ int const size )
+{
+ unsigned hash = hash_keyval( string, size );
+ unsigned pos = hash % set->num;
+
+ struct hash_item * result;
+
+ for ( result = set->data[ pos ]; result; result = result->header.next )
+ if ( !strncmp( result->data, string, size ) && !result->data[ size ] )
+ return result->data;
+
+ if ( set->size >= set->num )
+ {
+ string_set_resize( set );
+ pos = hash % set->num;
+ }
+
+ result = (struct hash_item *)allocate( sizeof( struct hash_header ) + size +
+ 1 );
+ result->header.hash = hash;
+ result->header.next = set->data[ pos ];
+#ifndef NDEBUG
+ result->header.magic = OBJECT_MAGIC;
+#endif
+ memcpy( result->data, string, size );
+ result->data[ size ] = '\0';
+ assert( hash_keyval( result->data, size ) == result->header.hash );
+ set->data[ pos ] = result;
+ strtotal += size + 1;
+ ++set->size;
+
+ return result->data;
+}
+
+
+static struct hash_item * object_get_item( OBJECT * obj )
+{
+ return (struct hash_item *)( (char *)obj - offsetof( struct hash_item, data
+ ) );
+}
+
+
+static void object_validate( OBJECT * obj )
+{
+ assert( obj );
+ assert( object_get_item( obj )->header.magic == OBJECT_MAGIC );
+}
+
+
+/*
+ * object_new_range() - create an object from a string of given length
+ */
+
+OBJECT * object_new_range( char const * const string, int const size )
+{
+ ++strcount_in;
+
+#ifdef BJAM_NO_MEM_CACHE
+ {
+ struct hash_item * const m = (struct hash_item *)BJAM_MALLOC( sizeof(
+ struct hash_header ) + size + 1 );
+ strtotal += size + 1;
+ memcpy( m->data, string, size );
+ m->data[ size ] = '\0';
+#ifndef NDEBUG
+ m->header.magic = OBJECT_MAGIC;
+#endif
+ return (OBJECT *)m->data;
+ }
+#else
+ if ( !strhash.data )
+ string_set_init( &strhash );
+ return (OBJECT *)string_set_insert( &strhash, string, size );
+#endif
+}
+
+
+/*
+ * object_new() - create an object from a string
+ */
+
+OBJECT * object_new( char const * const string )
+{
+ return object_new_range( string, strlen( string ) );
+}
+
+
+#ifndef object_copy
+
+/*
+ * object_copy() - return a copy of an object
+ */
+
+OBJECT * object_copy( OBJECT * obj )
+{
+ object_validate( obj );
+#ifdef BJAM_NO_MEM_CACHE
+ return object_new( object_str( obj ) );
+#else
+ ++strcount_in;
+ return obj;
+#endif
+}
+
+
+/*
+ * object_free() - free an object
+ */
+
+void object_free( OBJECT * obj )
+{
+ object_validate( obj );
+#ifdef BJAM_NO_MEM_CACHE
+ BJAM_FREE( object_get_item( obj ) );
+#endif
+ ++strcount_out;
+}
+
+
+/*
+ * object_str() - return the OBJECT's internal C string
+ */
+
+char const * object_str( OBJECT * obj )
+{
+ object_validate( obj );
+ return (char const *)obj;
+}
+
+
+/*
+ * object_equal() - compare two objects
+ */
+
+int object_equal( OBJECT * lhs, OBJECT * rhs )
+{
+ object_validate( lhs );
+ object_validate( rhs );
+#ifdef BJAM_NO_MEM_CACHE
+ return !strcmp( object_str( lhs ), object_str( rhs ) );
+#else
+ assert( ( lhs == rhs ) == !strcmp( object_str( lhs ), object_str( rhs ) ) );
+ return lhs == rhs;
+#endif
+}
+
+
+/*
+ * object_hash() - returns the hash value of an object
+ */
+
+unsigned int object_hash( OBJECT * obj )
+{
+ object_validate( obj );
+#ifdef BJAM_NO_MEM_CACHE
+ return hash_keyval( object_str( obj ), strlen( object_str( obj ) ) );
+#else
+ return object_get_item( obj )->header.hash;
+#endif
+}
+
+#endif
+
+/*
+ * object_done() - free string tables.
+ */
+
+void object_done()
+{
+#ifdef BJAM_NEWSTR_NO_ALLOCATE
+ unsigned i;
+ for ( i = 0; i < strhash.num; ++i )
+ {
+ while ( strhash.data[ i ] )
+ {
+ struct hash_item * item = strhash.data[ i ];
+ strhash.data[ i ] = item->header.next;
+ BJAM_FREE( item );
+ }
+ }
+#else
+ /* Reclaim string blocks. */
+ while ( strblock_chain )
+ {
+ strblock * const n = strblock_chain->next;
+ BJAM_FREE( strblock_chain );
+ strblock_chain = n;
+ }
+#endif
+
+ string_set_done( &strhash );
+
+ if ( DEBUG_MEM )
+ {
+ out_printf( "%dK in strings\n", strtotal / 1024 );
+ if ( strcount_in != strcount_out )
+ out_printf( "--- %d strings of %d dangling\n", strcount_in -
+ strcount_out, strcount_in );
+ }
+}
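
Because every distinct string is interned once into the immortal allocator, object_new() on equal text returns the same handle in the default (cached) build, object_equal() degenerates into a pointer comparison, and object_free() only maintains the dangling-string counters. A sketch of that observable contract, assuming the engine objects are linked in and BJAM_NO_MEM_CACHE is not defined:

    #include <assert.h>
    #include "object.h"

    int main()
    {
        OBJECT * a = object_new( "include" );
        OBJECT * b = object_new( "include" );  /* reuses the interned copy */

        assert( object_equal( a, b ) );
        assert( object_str( a ) == object_str( b ) );  /* same immortal storage */

        object_free( a );  /* bookkeeping only -- the string itself is never freed */
        object_free( b );
        object_done();     /* releases the shared string blocks at shutdown */
        return 0;
    }
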
diff --git a/src/boost/tools/build/src/engine/object.h b/src/boost/tools/build/src/engine/object.h
new file mode 100644
index 000000000..03fc0692b
--- /dev/null
+++ b/src/boost/tools/build/src/engine/object.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * object.h - object manipulation routines
+ */
+
+#ifndef BOOST_JAM_OBJECT_H
+#define BOOST_JAM_OBJECT_H
+
+#include "config.h"
+
+typedef struct _object OBJECT;
+
+OBJECT * object_new( char const * const );
+OBJECT * object_new_range( char const * const, int const size );
+void object_done( void );
+
+#if defined(NDEBUG) && !defined(BJAM_NO_MEM_CACHE)
+
+struct hash_header
+{
+ unsigned int hash;
+ struct hash_item * next;
+};
+
+#define object_str( obj ) ((char const *)(obj))
+#define object_copy( obj ) (obj)
+#define object_free( obj ) ((void)0)
+#define object_equal( lhs, rhs ) ((lhs) == (rhs))
+#define object_hash( obj ) (((struct hash_header *)((char *)(obj) - sizeof(struct hash_header)))->hash)
+
+#else
+
+char const * object_str ( OBJECT * );
+OBJECT * object_copy ( OBJECT * );
+void object_free ( OBJECT * );
+int object_equal( OBJECT *, OBJECT * );
+unsigned int object_hash ( OBJECT * );
+
+#endif
+
+#endif
diff --git a/src/boost/tools/build/src/engine/option.cpp b/src/boost/tools/build/src/engine/option.cpp
new file mode 100644
index 000000000..fa9246b02
--- /dev/null
+++ b/src/boost/tools/build/src/engine/option.cpp
@@ -0,0 +1,94 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+# include "jam.h"
+# include "option.h"
+
+/*
+ * option.c - command line option processing
+ *
+ * {o >o
+ * \<>) "Process command line options as defined in <option.h>.
+ * Return the number of argv[] elements used up by options,
+ *      or -1 if an invalid option flag was given or no argument
+ *      was supplied for an option that requires one."
+ */
+
+int getoptions( int argc, char * * argv, const char * opts, bjam_option * optv )
+{
+ int i;
+ int optc = N_OPTS;
+
+ memset( (char *)optv, '\0', sizeof( *optv ) * N_OPTS );
+
+ for ( i = 0; i < argc; ++i )
+ {
+ char *arg;
+
+ if ( ( argv[ i ][ 0 ] != '-' ) ||
+ ( ( argv[ i ][ 1 ] != '-' ) && !isalpha( argv[ i ][ 1 ] ) ) )
+ continue;
+
+ if ( !optc-- )
+ {
+ printf( "too many options (%d max)\n", N_OPTS );
+ return -1;
+ }
+
+ for ( arg = &argv[ i ][ 1 ]; *arg; ++arg )
+ {
+ const char * f;
+
+ for ( f = opts; *f; ++f )
+ if ( *f == *arg )
+ break;
+
+ if ( !*f )
+ {
+ printf( "Invalid option: -%c\n", *arg );
+ return -1;
+ }
+
+ optv->flag = *f;
+
+ if ( f[ 1 ] != ':' )
+ {
+ optv++->val = (char *)"true";
+ }
+ else if ( arg[ 1 ] )
+ {
+ optv++->val = &arg[1];
+ break;
+ }
+ else if ( ++i < argc )
+ {
+ optv++->val = argv[ i ];
+ break;
+ }
+ else
+ {
+ printf( "option: -%c needs argument\n", *f );
+ return -1;
+ }
+ }
+ }
+
+ return i;
+}
+
+
+/*
+ * Name: getoptval() - find an option given its character.
+ */
+
+char * getoptval( bjam_option * optv, char opt, int subopt )
+{
+ int i;
+ for ( i = 0; i < N_OPTS; ++i, ++optv )
+ if ( ( optv->flag == opt ) && !subopt-- )
+ return optv->val;
+ return 0;
+}
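
getoptions() scans argv for '-' options described by the opts string (a letter followed by ':' takes a value; anything else is a flag stored as the literal "true"), and getoptval() retrieves the n-th occurrence of a given flag. A hedged usage sketch with a made-up three-option subset rather than the engine's real option string:

    #include <stdio.h>
    #include "option.h"

    int main( int argc, char * * argv )
    {
        bjam_option optv[ N_OPTS ];

        /* "d:" and "j:" take a value, "n" is a plain flag. */
        if ( getoptions( argc - 1, argv + 1, "d:j:n", optv ) < 0 )
            return 1;

        {
            char * const jobs = getoptval( optv, 'j', 0 );  /* first -j value, or NULL */
            if ( jobs )
                printf( "parallel jobs: %s\n", jobs );
            if ( getoptval( optv, 'n', 0 ) )                /* flags are stored as "true" */
                printf( "dry run requested\n" );
        }
        return 0;
    }
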
diff --git a/src/boost/tools/build/src/engine/option.h b/src/boost/tools/build/src/engine/option.h
new file mode 100644
index 000000000..df3063105
--- /dev/null
+++ b/src/boost/tools/build/src/engine/option.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * option.h - command line option processing
+ *
+ * {o >o
+ * \ -) "Command line option."
+ */
+
+#include "config.h"
+
+typedef struct bjam_option
+{
+    char flag; /* filled in by getoptions() */
+    char * val; /* the option's argument, or "true" for plain flags */
+} bjam_option;
+
+#define N_OPTS 256
+
+int getoptions( int argc, char * * argv, const char * opts, bjam_option * optv );
+char * getoptval( bjam_option * optv, char opt, int subopt );
diff --git a/src/boost/tools/build/src/engine/output.cpp b/src/boost/tools/build/src/engine/output.cpp
new file mode 100644
index 000000000..395d6c6df
--- /dev/null
+++ b/src/boost/tools/build/src/engine/output.cpp
@@ -0,0 +1,159 @@
+/*
+ Copyright 2007 Rene Rivera
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include "jam.h"
+#include "output.h"
+
+#include <stdio.h>
+#include <stdarg.h>
+
+
+#define bjam_out (stdout)
+#define bjam_err (stderr)
+
+static void out_( char const * data, FILE * const io )
+{
+ while ( *data )
+ {
+ size_t const len = strcspn( data, "\r" );
+ data += fwrite( data, 1, len, io );
+ if ( *data == '\r' ) ++data;
+ }
+}
+
+
+void out_flush()
+{
+ fflush( bjam_out );
+ if ( globs.out ) fflush( globs.out );
+}
+void err_flush()
+{
+ fflush( bjam_err );
+ if ( globs.out ) fflush( globs.out );
+}
+void out_puts(char const * const s)
+{
+ fputs( s, bjam_out );
+ if ( globs.out ) fputs( s, globs.out );
+}
+void err_puts(char const * const s)
+{
+ fputs( s, bjam_err );
+ if ( globs.out ) fputs( s, globs.out );
+}
+void out_putc(const char c)
+{
+ fputc( c, bjam_out );
+ if ( globs.out ) fputc( c, globs.out );
+}
+void err_putc(const char c)
+{
+ fputc( c, bjam_err );
+ if ( globs.out ) fputc( c, globs.out );
+}
+void out_data(char const * const s)
+{
+ out_( s, bjam_out );
+ if ( globs.out ) out_( s, globs.out );
+}
+void err_data(char const * const s)
+{
+ out_( s, bjam_err );
+ if ( globs.out ) out_( s, globs.out );
+}
+void out_printf(char const * const f, ...)
+{
+ {
+ va_list args;
+ va_start( args, f );
+ vfprintf( bjam_out, f, args );
+ va_end( args );
+ }
+ if ( globs.out )
+ {
+ va_list args;
+ va_start( args, f );
+ vfprintf( globs.out, f, args );
+ va_end( args );
+ }
+}
+void err_printf(char const * const f, ...)
+{
+ {
+ va_list args;
+ va_start( args, f );
+ vfprintf( bjam_err, f, args );
+ va_end( args );
+ }
+ if ( globs.out )
+ {
+ va_list args;
+ va_start( args, f );
+ vfprintf( globs.out, f, args );
+ va_end( args );
+ }
+}
+
+
+void out_action
+(
+ char const * const action,
+ char const * const target,
+ char const * const command,
+ char const * const out_d,
+ char const * const err_d,
+ int const exit_reason
+)
+{
+    /* Print out the action + target line. For a quiet action, the action
+     * name should be null.
+ */
+ if ( action )
+ out_printf( "%s %s\n", action, target );
+
+ /* Print out the command executed if given -d+2. */
+ if ( DEBUG_EXEC )
+ {
+ out_puts( command );
+ out_putc( '\n' );
+ }
+
+ /* Print out the command output, if requested, or if the program failed, but
+ * only output for non-quiet actions.
+ */
+ if ( action || exit_reason != EXIT_OK )
+ {
+ if ( out_d &&
+ ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) ||
+ ( globs.pipe_action == 0 ) ) )
+ out_data( out_d );
+ if ( err_d && ( globs.pipe_action & 2 /* STDERR_FILENO */ ) )
+ err_data( err_d );
+ }
+}
+
+
+OBJECT * outf_int( int const value )
+{
+ char buffer[ 50 ];
+ sprintf( buffer, "%i", value );
+ return object_new( buffer );
+}
+
+
+OBJECT * outf_double( double const value )
+{
+ char buffer[ 50 ];
+ sprintf( buffer, "%f", value );
+ return object_new( buffer );
+}
+
+
+OBJECT * outf_time( timestamp const * const time )
+{
+ return object_new( timestamp_str( time ) );
+}
diff --git a/src/boost/tools/build/src/engine/output.h b/src/boost/tools/build/src/engine/output.h
new file mode 100644
index 000000000..a85edaaf2
--- /dev/null
+++ b/src/boost/tools/build/src/engine/output.h
@@ -0,0 +1,42 @@
+/*
+ Copyright 2007 Rene Rivera
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifndef BJAM_OUTPUT_H
+#define BJAM_OUTPUT_H
+
+#include "config.h"
+#include "object.h"
+#include "timestamp.h"
+
+#define EXIT_OK 0
+#define EXIT_FAIL 1
+#define EXIT_TIMEOUT 2
+
+void out_action(
+ char const * const action,
+ char const * const target,
+ char const * const command,
+ char const * const out_data,
+ char const * const err_data,
+ int const exit_reason
+);
+
+void out_flush();
+void err_flush();
+void out_puts(char const * const s);
+void err_puts(char const * const s);
+void out_putc(const char c);
+void err_putc(const char c);
+void out_data(char const * const s);
+void err_data(char const * const s);
+void out_printf(char const * const f, ...);
+void err_printf(char const * const f, ...);
+
+OBJECT * outf_int( int const value );
+OBJECT * outf_double( double const value );
+OBJECT * outf_time( timestamp const * const value );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/parse.cpp b/src/boost/tools/build/src/engine/parse.cpp
new file mode 100644
index 000000000..42f64f06e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/parse.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "object.h"
+#include "modules.h"
+#include "frames.h"
+#include "function.h"
+
+/*
+ * parse.c - make and destroy parse trees as driven by the parser
+ *
+ * 09/07/00 (seiwald) - ref count on PARSE to avoid freeing when used,
+ * as per Matt Armstrong.
+ * 09/11/00 (seiwald) - structure reworked to reflect that (*func)()
+ * returns a LIST *.
+ */
+
+static PARSE * yypsave;
+
+static void parse_impl( FRAME * frame )
+{
+
+ /* Now parse each block of rules and execute it. Execute it outside of the
+ * parser so that recursive calls to yyrun() work (no recursive yyparse's).
+ */
+
+ for ( ; ; )
+ {
+ PARSE * p;
+ FUNCTION * func;
+
+ /* Filled by yyparse() calling parse_save(). */
+ yypsave = 0;
+
+ /* If parse error or empty parse, outta here. */
+ if ( yyparse() || !( p = yypsave ) )
+ break;
+
+ /* Run the parse tree. */
+ func = function_compile( p );
+ parse_free( p );
+ list_free( function_run( func, frame, stack_global() ) );
+ function_free( func );
+ }
+
+ yyfdone();
+}
+
+
+void parse_file( OBJECT * f, FRAME * frame )
+{
+ /* Suspend scan of current file and push this new file in the stream. */
+ yyfparse( f );
+
+ parse_impl( frame );
+}
+
+
+void parse_string( OBJECT * name, const char * * lines, FRAME * frame )
+{
+ yysparse( name, lines );
+ parse_impl( frame );
+}
+
+
+void parse_save( PARSE * p )
+{
+ yypsave = p;
+}
+
+
+PARSE * parse_make(
+ int type,
+ PARSE * left,
+ PARSE * right,
+ PARSE * third,
+ OBJECT * string,
+ OBJECT * string1,
+ int num )
+{
+ PARSE * p = (PARSE *)BJAM_MALLOC( sizeof( PARSE ) );
+
+ p->type = type;
+ p->left = left;
+ p->right = right;
+ p->third = third;
+ p->string = string;
+ p->string1 = string1;
+ p->num = num;
+ p->refs = 1;
+ p->rulename = 0;
+
+ if ( left )
+ {
+ p->file = object_copy( left->file );
+ p->line = left->line;
+ }
+ else
+ {
+ yyinput_last_read_token( &p->file, &p->line );
+ p->file = object_copy( p->file );
+ }
+
+ return p;
+}
+
+
+void parse_refer( PARSE * p )
+{
+ ++p->refs;
+}
+
+
+void parse_free( PARSE * p )
+{
+ if ( --p->refs )
+ return;
+
+ if ( p->string )
+ object_free( p->string );
+ if ( p->string1 )
+ object_free( p->string1 );
+ if ( p->left )
+ parse_free( p->left );
+ if ( p->right )
+ parse_free( p->right );
+ if ( p->third )
+ parse_free( p->third );
+ if ( p->rulename )
+ object_free( p->rulename );
+ if ( p->file )
+ object_free( p->file );
+
+ BJAM_FREE( (char *)p );
+}
diff --git a/src/boost/tools/build/src/engine/parse.h b/src/boost/tools/build/src/engine/parse.h
new file mode 100644
index 000000000..7dc2e660d
--- /dev/null
+++ b/src/boost/tools/build/src/engine/parse.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * parse.h - make and destroy parse trees as driven by the parser.
+ */
+
+#ifndef PARSE_DWA20011020_H
+#define PARSE_DWA20011020_H
+
+#include "config.h"
+#include "frames.h"
+#include "lists.h"
+#include "modules.h"
+
+
+#define PARSE_APPEND 0
+#define PARSE_FOREACH 1
+#define PARSE_IF 2
+#define PARSE_EVAL 3
+#define PARSE_INCLUDE 4
+#define PARSE_LIST 5
+#define PARSE_LOCAL 6
+#define PARSE_MODULE 7
+#define PARSE_CLASS 8
+#define PARSE_NULL 9
+#define PARSE_ON 10
+#define PARSE_RULE 11
+#define PARSE_RULES 12
+#define PARSE_SET 13
+#define PARSE_SETCOMP 14
+#define PARSE_SETEXEC 15
+#define PARSE_SETTINGS 16
+#define PARSE_SWITCH 17
+#define PARSE_WHILE 18
+#define PARSE_RETURN 19
+#define PARSE_BREAK 20
+#define PARSE_CONTINUE 21
+
+
+/*
+ * Parse tree node.
+ */
+
+typedef struct _PARSE PARSE;
+
+struct _PARSE {
+ int type;
+ PARSE * left;
+ PARSE * right;
+ PARSE * third;
+ OBJECT * string;
+ OBJECT * string1;
+ int num;
+ int refs;
+ OBJECT * rulename;
+ OBJECT * file;
+ int line;
+};
+
+void parse_file( OBJECT *, FRAME * );
+void parse_string( OBJECT * name, const char * * lines, FRAME * frame );
+void parse_save( PARSE * );
+
+PARSE * parse_make( int type, PARSE * left, PARSE * right, PARSE * third,
+ OBJECT * string, OBJECT * string1, int num );
+
+void parse_refer( PARSE * );
+void parse_free( PARSE * );
+LIST * parse_evaluate( PARSE *, FRAME * );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/patchlevel.h b/src/boost/tools/build/src/engine/patchlevel.h
new file mode 100644
index 000000000..494883da6
--- /dev/null
+++ b/src/boost/tools/build/src/engine/patchlevel.h
@@ -0,0 +1,16 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+/*
+This file is ALSO:
+Copyright 2018-2019 Rene Rivera
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+
+#define VERSION_MAJOR 4
+#define VERSION_MINOR 2
+#define VERSION_PATCH 0
diff --git a/src/boost/tools/build/src/engine/pathnt.cpp b/src/boost/tools/build/src/engine/pathnt.cpp
new file mode 100644
index 000000000..5b0cc4659
--- /dev/null
+++ b/src/boost/tools/build/src/engine/pathnt.cpp
@@ -0,0 +1,409 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * pathnt.c - NT specific path manipulation support
+ */
+
+#include "jam.h"
+#include "pathsys.h"
+#include "hash.h"
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+
+#ifdef OS_CYGWIN
+# include <cygwin/version.h>
+# include <sys/cygwin.h>
+# ifdef CYGWIN_VERSION_CYGWIN_CONV
+# include <errno.h>
+# endif
+# include <windows.h>
+#endif
+
+#include <assert.h>
+#include <stdlib.h>
+
+
+/* The definition of this in winnt.h is not ANSI-C compatible. */
+#undef INVALID_FILE_ATTRIBUTES
+#define INVALID_FILE_ATTRIBUTES ((DWORD)-1)
+
+
+typedef struct path_key_entry
+{
+ OBJECT * path;
+ OBJECT * key;
+ int exists;
+} path_key_entry;
+
+static struct hash * path_key_cache;
+
+
+/*
+ * path_get_process_id_()
+ */
+
+unsigned long path_get_process_id_( void )
+{
+ return GetCurrentProcessId();
+}
+
+
+/*
+ * path_get_temp_path_()
+ */
+
+void path_get_temp_path_( string * buffer )
+{
+ DWORD pathLength = GetTempPathA( 0, NULL );
+ string_reserve( buffer, pathLength );
+ pathLength = GetTempPathA( pathLength, buffer->value );
+ buffer->value[ pathLength - 1 ] = '\0';
+ buffer->size = pathLength - 1;
+}
+
+
+/*
+ * canonicWindowsPath() - convert a given path into its canonic/long format
+ *
+ * Appends the canonic path to the end of the given 'string' object.
+ *
+ * FIXME: This function is still work-in-progress as it originally did not
+ * necessarily return the canonic path format (could return slightly different
+ * results for certain equivalent path strings) and could accept paths pointing
+ * to non-existing file system entities as well.
+ *
+ * Caches results internally, automatically caching any parent paths it has to
+ * convert to their canonic format in the process.
+ *
+ * Prerequisites:
+ * - path given in normalized form, i.e. all of its folder separators have
+ * already been converted into '\\'
+ * - path_key_cache path/key mapping cache object already initialized
+ */
+
+static int canonicWindowsPath( char const * const path, int const path_length,
+ string * const out )
+{
+ char const * last_element;
+ unsigned long saved_size;
+ char const * p;
+ int missing_parent;
+
+ /* This is only called via path_key(), which initializes the cache. */
+ assert( path_key_cache );
+
+ if ( !path_length )
+ return 1;
+
+ if ( path_length == 1 && path[ 0 ] == '\\' )
+ {
+ string_push_back( out, '\\' );
+ return 1;
+ }
+
+ if ( path[ 1 ] == ':' &&
+ ( path_length == 2 ||
+ ( path_length == 3 && path[ 2 ] == '\\' ) ) )
+ {
+ string_push_back( out, toupper( path[ 0 ] ) );
+ string_push_back( out, ':' );
+ string_push_back( out, '\\' );
+ return 1;
+ }
+
+ /* Find last '\\'. */
+ for ( p = path + path_length - 1; p >= path && *p != '\\'; --p );
+ last_element = p + 1;
+
+ /* Special case '\' && 'D:\' - include trailing '\'. */
+ if ( p == path ||
+ (p == path + 2 && path[ 1 ] == ':') )
+ ++p;
+
+ missing_parent = 0;
+
+ if ( p >= path )
+ {
+ char const * const dir = path;
+ int const dir_length = p - path;
+ OBJECT * const dir_obj = object_new_range( dir, dir_length );
+ int found;
+ path_key_entry * const result = (path_key_entry *)hash_insert(
+ path_key_cache, dir_obj, &found );
+ if ( !found )
+ {
+ result->path = dir_obj;
+ if ( canonicWindowsPath( dir, dir_length, out ) )
+ result->exists = 1;
+ else
+ result->exists = 0;
+ result->key = object_new( out->value );
+ }
+ else
+ {
+ object_free( dir_obj );
+ string_append( out, object_str( result->key ) );
+ }
+ if ( !result->exists )
+ missing_parent = 1;
+ }
+
+ if ( out->size && out->value[ out->size - 1 ] != '\\' )
+ string_push_back( out, '\\' );
+
+ saved_size = out->size;
+ string_append_range( out, last_element, path + path_length );
+
+ if ( !missing_parent )
+ {
+ char const * const n = last_element;
+ int const n_length = path + path_length - n;
+ if ( !( n_length == 1 && n[ 0 ] == '.' )
+ && !( n_length == 2 && n[ 0 ] == '.' && n[ 1 ] == '.' ) )
+ {
+ WIN32_FIND_DATAA fd;
+ HANDLE const hf = FindFirstFileA( out->value, &fd );
+ if ( hf != INVALID_HANDLE_VALUE )
+ {
+ string_truncate( out, saved_size );
+ string_append( out, fd.cFileName );
+ FindClose( hf );
+ return 1;
+ }
+ }
+ else
+ {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+
+/*
+ * normalize_path() - 'normalizes' the given path for the path-key mapping
+ *
+ * The resulting string has nothing to do with 'normalized paths' as used in
+ * Boost Jam build scripts and the built-in NORMALIZE_PATH rule. It is intended
+ * to be used solely as an intermediate step when mapping an arbitrary path to
+ * its canonical representation.
+ *
+ * When choosing the intermediate string, the important things are that it is
+ * inexpensive to calculate and that any two paths with different canonical
+ * representations also get different calculated intermediate string
+ * representations. Any implemented additional rules serve only to simplify
+ * constructing the canonical path representation from the calculated
+ * intermediate string.
+ *
+ * Implemented returned path rules:
+ * - use backslashes as path separators
+ * - lowercase only (since all Windows file systems are case insensitive)
+ * - trim trailing path separator except in case of a root path, i.e. 'X:\'
+ */
+
+static void normalize_path( string * path )
+{
+ char * s;
+ for ( s = path->value; s < path->value + path->size; ++s )
+ *s = *s == '/' ? '\\' : tolower( *s );
+ /* Strip trailing "/". */
+ if ( path->size && path->size != 3 && path->value[ path->size - 1 ] == '\\'
+ )
+ string_pop_back( path );
+}
+
+
+static path_key_entry * path_key( OBJECT * const path,
+ int const known_to_be_canonic )
+{
+ path_key_entry * result;
+ int found;
+
+ if ( !path_key_cache )
+ path_key_cache = hashinit( sizeof( path_key_entry ), "path to key" );
+
+ result = (path_key_entry *)hash_insert( path_key_cache, path, &found );
+ if ( !found )
+ {
+ OBJECT * normalized;
+ int normalized_size;
+ path_key_entry * nresult;
+ result->path = path;
+ {
+ string buf[ 1 ];
+ string_copy( buf, object_str( path ) );
+ normalize_path( buf );
+ normalized = object_new( buf->value );
+ normalized_size = buf->size;
+ string_free( buf );
+ }
+ nresult = (path_key_entry *)hash_insert( path_key_cache, normalized,
+ &found );
+ if ( !found || nresult == result )
+ {
+ nresult->path = normalized;
+ if ( known_to_be_canonic )
+ {
+ nresult->key = object_copy( path );
+ nresult->exists = 1;
+ }
+ else
+ {
+ string canonic_path[ 1 ];
+ string_new( canonic_path );
+ if ( canonicWindowsPath( object_str( normalized ), normalized_size,
+ canonic_path ) )
+ nresult->exists = 1;
+ else
+ nresult->exists = 0;
+ nresult->key = object_new( canonic_path->value );
+ string_free( canonic_path );
+ }
+ }
+ else
+ object_free( normalized );
+ if ( nresult != result )
+ {
+ result->path = object_copy( path );
+ result->key = object_copy( nresult->key );
+ result->exists = nresult->exists;
+ }
+ }
+
+ return result;
+}
+
+
+/*
+ * translate_path_cyg2win() - conversion of a cygwin to a Windows path.
+ *
+ * FIXME: skip grist
+ */
+
+#ifdef OS_CYGWIN
+static int translate_path_cyg2win( string * path )
+{
+ int translated = 0;
+
+#ifdef CYGWIN_VERSION_CYGWIN_CONV
+ /* Use new Cygwin API added with Cygwin 1.7. Old one had no error
+ * handling and has been deprecated.
+ */
+ char * dynamicBuffer = 0;
+ char buffer[ MAX_PATH + 1001 ];
+ char const * result = buffer;
+ cygwin_conv_path_t const conv_type = CCP_POSIX_TO_WIN_A | CCP_RELATIVE;
+ ssize_t const apiResult = cygwin_conv_path( conv_type, path->value,
+ buffer, sizeof( buffer ) / sizeof( *buffer ) );
+ assert( apiResult == 0 || apiResult == -1 );
+ assert( apiResult || strlen( result ) < sizeof( buffer ) / sizeof(
+ *buffer ) );
+ if ( apiResult )
+ {
+ result = 0;
+ if ( errno == ENOSPC )
+ {
+ ssize_t const size = cygwin_conv_path( conv_type, path->value,
+ NULL, 0 );
+ assert( size >= -1 );
+ if ( size > 0 )
+ {
+ dynamicBuffer = (char *)BJAM_MALLOC_ATOMIC( size );
+ if ( dynamicBuffer )
+ {
+ ssize_t const apiResult = cygwin_conv_path( conv_type,
+ path->value, dynamicBuffer, size );
+ assert( apiResult == 0 || apiResult == -1 );
+ if ( !apiResult )
+ {
+ result = dynamicBuffer;
+ assert( strlen( result ) < size );
+ }
+ }
+ }
+ }
+ }
+#else /* CYGWIN_VERSION_CYGWIN_CONV */
+ /* Use old Cygwin API deprecated with Cygwin 1.7. */
+ char result[ MAX_PATH + 1 ];
+ cygwin_conv_to_win32_path( path->value, result );
+ assert( strlen( result ) <= MAX_PATH );
+#endif /* CYGWIN_VERSION_CYGWIN_CONV */
+
+ if ( result )
+ {
+ string_truncate( path, 0 );
+ string_append( path, result );
+ translated = 1;
+ }
+
+#ifdef CYGWIN_VERSION_CYGWIN_CONV
+ if ( dynamicBuffer )
+ BJAM_FREE( dynamicBuffer );
+#endif
+
+ return translated;
+}
+#endif /* OS_CYGWIN */
+
+
+/*
+ * path_translate_to_os_()
+ */
+
+int path_translate_to_os_( char const * f, string * file )
+{
+ int translated = 0;
+
+ /* by default, pass on the original path */
+ string_copy( file, f );
+
+#ifdef OS_CYGWIN
+ translated = translate_path_cyg2win( file );
+#endif
+
+ return translated;
+}
+
+
+void path_register_key( OBJECT * canonic_path )
+{
+ path_key( canonic_path, 1 );
+}
+
+
+OBJECT * path_as_key( OBJECT * path )
+{
+ return object_copy( path_key( path, 0 )->key );
+}
+
+
+static void free_path_key_entry( void * xentry, void * const data )
+{
+ path_key_entry * const entry = (path_key_entry *)xentry;
+ object_free( entry->path );
+ object_free( entry->key );
+}
+
+
+void path_done( void )
+{
+ if ( path_key_cache )
+ {
+ hashenumerate( path_key_cache, &free_path_key_entry, 0 );
+ hashdone( path_key_cache );
+ }
+}
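
normalize_path() above produces the cheap intermediate cache key: forward slashes become backslashes, everything is lowercased, and a trailing separator is stripped unless the path is a three-character root such as "x:\". A stand-alone C++ mirror of just those rules (not the engine code, and not the canonical long-name conversion done by canonicWindowsPath()):

    #include <cctype>
    #include <iostream>
    #include <string>

    // Same rules as normalize_path(): backslashes, lowercase, and no trailing
    // separator except for a drive root like "x:\".
    std::string normalize_for_key(std::string path)
    {
        for (char& c : path)
            c = (c == '/') ? '\\'
                           : static_cast<char>(std::tolower(static_cast<unsigned char>(c)));
        if (!path.empty() && path.size() != 3 && path.back() == '\\')
            path.pop_back();
        return path;
    }

    int main()
    {
        std::cout << normalize_for_key("C:/Tools/B2/") << "\n";  // c:\tools\b2
        std::cout << normalize_for_key("D:/") << "\n";           // d:\ (root kept)
    }
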
diff --git a/src/boost/tools/build/src/engine/pathsys.cpp b/src/boost/tools/build/src/engine/pathsys.cpp
new file mode 100644
index 000000000..61bacd6b1
--- /dev/null
+++ b/src/boost/tools/build/src/engine/pathsys.cpp
@@ -0,0 +1,302 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * pathsys.c - platform independent path manipulation support
+ *
+ * External routines:
+ * path_build() - build a filename given dir/base/suffix/member
+ * path_parent() - make a PATHNAME point to its parent dir
+ * path_parse() - split a file name into dir/base/suffix/member
+ * path_tmpdir() - returns the system dependent temporary folder path
+ * path_tmpfile() - returns a new temporary path
+ * path_tmpnam() - returns a new temporary name
+ *
+ * path_parse() and path_build() just manipulate a string and a structure;
+ * they do not make system calls.
+ */
+
+#include "jam.h"
+#include "pathsys.h"
+
+#include "filesys.h"
+
+#include <stdlib.h>
+#include <time.h>
+
+
+/* Internal OS specific implementation details - have names ending with an
+ * underscore and are expected to be implemented in an OS specific pathXXX.c
+ * module.
+ */
+unsigned long path_get_process_id_( void );
+void path_get_temp_path_( string * buffer );
+int path_translate_to_os_( char const * f, string * file );
+
+
+/*
+ * path_parse() - split a file name into dir/base/suffix/member
+ */
+
+void path_parse( char const * file, PATHNAME * f )
+{
+ char const * p;
+ char const * q;
+ char const * end;
+
+ memset( (char *)f, 0, sizeof( *f ) );
+
+ /* Look for '<grist>'. */
+
+ if ( ( file[ 0 ] == '<' ) && ( p = strchr( file, '>' ) ) )
+ {
+ f->f_grist.ptr = file;
+ f->f_grist.len = p - file;
+ file = p + 1;
+ }
+
+ /* Look for 'dir/'. */
+
+ p = strrchr( file, '/' );
+
+#if PATH_DELIM == '\\'
+ /* On NT, look for dir\ as well */
+ {
+ char const * p1 = strrchr( p ? p + 1 : file, '\\' );
+ if ( p1 ) p = p1;
+ }
+#endif
+
+ if ( p )
+ {
+ f->f_dir.ptr = file;
+ f->f_dir.len = p - file;
+
+ /* Special case for / - dirname is /, not "" */
+ if ( !f->f_dir.len )
+ ++f->f_dir.len;
+
+#if PATH_DELIM == '\\'
+ /* Special case for D:/ - dirname is D:/, not "D:" */
+ if ( f->f_dir.len == 2 && file[ 1 ] == ':' )
+ ++f->f_dir.len;
+#endif
+
+ file = p + 1;
+ }
+
+ end = file + strlen( file );
+
+ /* Look for '(member)'. */
+ if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
+ {
+ f->f_member.ptr = p + 1;
+ f->f_member.len = end - p - 2;
+ end = p;
+ }
+
+ /* Look for '.suffix'. This would be memrchr(). */
+ p = 0;
+ for ( q = file; ( q = (char *)memchr( q, '.', end - q ) ); ++q )
+ p = q;
+ if ( p )
+ {
+ f->f_suffix.ptr = p;
+ f->f_suffix.len = end - p;
+ end = p;
+ }
+
+ /* Leaves base. */
+ f->f_base.ptr = file;
+ f->f_base.len = end - file;
+}
+
+
+/*
+ * is_path_delim() - true iff c is a path delimiter
+ */
+
+static int is_path_delim( char const c )
+{
+ return c == PATH_DELIM
+#if PATH_DELIM == '\\'
+ || c == '/'
+#endif
+ ;
+}
+
+
+/*
+ * as_path_delim() - convert c to a path delimiter if it is not one already
+ */
+
+static char as_path_delim( char const c )
+{
+ return is_path_delim( c ) ? c : PATH_DELIM;
+}
+
+
+/*
+ * path_build() - build a filename given dir/base/suffix/member
+ *
+ * To avoid changing slash direction on NT when reconstituting paths, instead of
+ * unconditionally appending PATH_DELIM we check the past-the-end character of
+ * the previous path element. If it is a path delimiter, we append that, and
+ * only append PATH_DELIM as a last resort. This heuristic is based on the fact
+ * that PATHNAME objects are usually the result of calling path_parse, which
+ * leaves the original slashes in the past-the-end position. Correctness depends
+ * on the assumption that all strings are zero terminated, so a past-the-end
+ * character will always be available.
+ *
+ * As an attendant patch, we had to ensure that backslashes are used explicitly
+ * in 'timestamp.c'.
+ */
+
+void path_build( PATHNAME * f, string * file )
+{
+ int check_f;
+ int check_f_pos;
+
+ file_build1( f, file );
+
+ /* Do not prepend root if it is '.' or the directory is rooted. */
+ check_f = (f->f_root.len
+ && !( f->f_root.len == 1 && f->f_root.ptr[ 0 ] == '.')
+ && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '/' ));
+#if PATH_DELIM == '\\'
+ check_f = (check_f
+ && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '\\' )
+ && !( f->f_dir.len && f->f_dir.ptr[ 1 ] == ':' ));
+#endif
+ if (check_f)
+ {
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len
+ );
+ /* If 'root' already ends with a path delimiter, do not add another one.
+ */
+ if ( !is_path_delim( f->f_root.ptr[ f->f_root.len - 1 ] ) )
+ string_push_back( file, as_path_delim( f->f_root.ptr[ f->f_root.len
+ ] ) );
+ }
+
+ if ( f->f_dir.len )
+ string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
+
+ /* Put path separator between dir and file. */
+ /* Special case for root dir: do not add another path separator. */
+ check_f_pos = (f->f_dir.len && ( f->f_base.len || f->f_suffix.len ));
+#if PATH_DELIM == '\\'
+ check_f_pos = (check_f_pos && !( f->f_dir.len == 3 && f->f_dir.ptr[ 1 ] == ':' ));
+#endif
+ check_f_pos = (check_f_pos && !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[ 0 ])));
+ if (check_f_pos)
+ string_push_back( file, as_path_delim( f->f_dir.ptr[ f->f_dir.len ] ) );
+
+ if ( f->f_base.len )
+ string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len
+ );
+
+ if ( f->f_suffix.len )
+ string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr +
+ f->f_suffix.len );
+
+ if ( f->f_member.len )
+ {
+ string_push_back( file, '(' );
+ string_append_range( file, f->f_member.ptr, f->f_member.ptr +
+ f->f_member.len );
+ string_push_back( file, ')' );
+ }
+}
+
+
+/*
+ * path_parent() - make a PATHNAME point to its parent dir
+ */
+
+void path_parent( PATHNAME * f )
+{
+ f->f_base.ptr = f->f_suffix.ptr = f->f_member.ptr = "";
+ f->f_base.len = f->f_suffix.len = f->f_member.len = 0;
+}
+
+
+/*
+ * path_tmpdir() - returns the system dependent temporary folder path
+ *
+ * Returned value is stored inside a static buffer and should not be modified.
+ * Returned value does *not* include a trailing path separator.
+ */
+
+string const * path_tmpdir()
+{
+ static string buffer[ 1 ];
+ static int have_result;
+ if ( !have_result )
+ {
+ string_new( buffer );
+ path_get_temp_path_( buffer );
+ have_result = 1;
+ }
+ return buffer;
+}
+
+
+/*
+ * path_tmpnam() - returns a new temporary name
+ */
+
+OBJECT * path_tmpnam( void )
+{
+ char name_buffer[ 64 ];
+ unsigned long const pid = path_get_process_id_();
+ static unsigned long t;
+ if ( !t ) t = time( 0 ) & 0xffff;
+ t += 1;
+ sprintf( name_buffer, "jam%lx%lx.000", pid, t );
+ return object_new( name_buffer );
+}
+
+
+/*
+ * path_tmpfile() - returns a new temporary path
+ */
+
+OBJECT * path_tmpfile( void )
+{
+ OBJECT * result;
+ OBJECT * tmpnam;
+
+ string file_path[ 1 ];
+ string_copy( file_path, path_tmpdir()->value );
+ string_push_back( file_path, PATH_DELIM );
+ tmpnam = path_tmpnam();
+ string_append( file_path, object_str( tmpnam ) );
+ object_free( tmpnam );
+ result = object_new( file_path->value );
+ string_free( file_path );
+
+ return result;
+}
+
+
+/*
+ * path_translate_to_os() - translate filename to OS-native path
+ *
+ */
+
+int path_translate_to_os( char const * f, string * file )
+{
+ return path_translate_to_os_( f, file );
+}
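
path_parse() splits a name into the PATHNAME parts and path_build() reassembles them, reusing whatever separator followed each part in the original string (the past-the-end trick described above), so forward slashes survive a round trip on Windows. A sketch of swapping a suffix with this API, assuming the engine sources are linked in:

    #include <stdio.h>
    #include "pathsys.h"   /* also brings in jam_strings.h for 'string' */

    int main()
    {
        PATHNAME f;
        string out[ 1 ];

        path_parse( "src/engine/pathsys.cpp", &f );

        /* Replace the suffix; dir/base still point into the original literal. */
        f.f_suffix.ptr = ".o";
        f.f_suffix.len = 2;

        string_new( out );
        path_build( &f, out );
        printf( "%s\n", out->value );   /* src/engine/pathsys.o */
        string_free( out );
        return 0;
    }
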
diff --git a/src/boost/tools/build/src/engine/pathsys.h b/src/boost/tools/build/src/engine/pathsys.h
new file mode 100644
index 000000000..839476e94
--- /dev/null
+++ b/src/boost/tools/build/src/engine/pathsys.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * pathsys.h - PATHNAME struct
+ */
+
+/*
+ * PATHNAME - a name of a file, broken into <grist>dir/base/suffix(member)
+ *
+ * <grist> - salt to distinguish between targets that would otherwise have the
+ * same name - it never appears in the bound name of a target.
+ *
+ * (member) - archive member name: the syntax is arbitrary, but must agree in
+ * path_parse(), path_build() and the Jambase.
+ */
+
+#ifndef PATHSYS_VP_20020211_H
+#define PATHSYS_VP_20020211_H
+
+#include "config.h"
+#include "object.h"
+#include "jam_strings.h"
+
+
+typedef struct _pathpart
+{
+ char const * ptr;
+ int len;
+} PATHPART;
+
+typedef struct _pathname
+{
+ PATHPART part[ 6 ];
+
+#define f_grist part[ 0 ]
+#define f_root part[ 1 ]
+#define f_dir part[ 2 ]
+#define f_base part[ 3 ]
+#define f_suffix part[ 4 ]
+#define f_member part[ 5 ]
+} PATHNAME;
+
+
+void path_build( PATHNAME *, string * file );
+void path_parse( char const * file, PATHNAME * );
+void path_parent( PATHNAME * );
+int path_translate_to_os( char const *, string * file );
+
+/* Given a path, returns an object containing an equivalent path in canonical
+ * format that can be used as a unique key for that path. Equivalent paths such
+ * as a/b, A\B, and a\B on NT all yield the same key.
+ */
+OBJECT * path_as_key( OBJECT * path );
+
+/* Called as an optimization when we know we have a path that is already in its
+ * canonical/long/key form. Avoids the need for some subsequent path_as_key()
+ * call to do a potentially expensive path conversion requiring access to the
+ * actual underlying file system.
+ */
+void path_register_key( OBJECT * canonic_path );
+
+/* Returns a static pointer to the system dependent path to the temporary
+ * directory. NOTE: Does *not* include a trailing path separator.
+ */
+string const * path_tmpdir( void );
+
+/* Returns a new temporary name. */
+OBJECT * path_tmpnam( void );
+
+/* Returns a new temporary path. */
+OBJECT * path_tmpfile( void );
+
+/* Given the first argument to 'main', returns a full path to our executable.
+ * Returns null in the unlikely case it cannot be determined. Caller is
+ * responsible for freeing the string.
+ *
+ * Implemented in jam.c
+ */
+char * executable_path( char const * argv0 );
+
+void path_done( void );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/pathunix.cpp b/src/boost/tools/build/src/engine/pathunix.cpp
new file mode 100644
index 000000000..2b2347c87
--- /dev/null
+++ b/src/boost/tools/build/src/engine/pathunix.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * pathunix.c - UNIX specific path manipulation support
+ */
+
+#include "pathsys.h"
+
+#include <stdlib.h>
+#include <unistd.h> /* needed for getpid() */
+
+
+/*
+ * path_get_process_id_()
+ */
+
+unsigned long path_get_process_id_( void )
+{
+ return getpid();
+}
+
+
+/*
+ * path_get_temp_path_()
+ */
+
+void path_get_temp_path_( string * buffer )
+{
+ char const * t = getenv( "TMPDIR" );
+ string_append( buffer, t ? t : "/tmp" );
+}
+
+
+/*
+ * path_translate_to_os_()
+ */
+
+int path_translate_to_os_( char const * f, string * file )
+{
+ int translated = 0;
+
+ /* by default, pass on the original path */
+ string_copy( file, f );
+
+ return translated;
+}
+
+
+/*
+ * path_register_key()
+ */
+
+void path_register_key( OBJECT * path )
+{
+}
+
+
+/*
+ * path_as_key()
+ */
+
+OBJECT * path_as_key( OBJECT * path )
+{
+ return object_copy( path );
+}
+
+
+/*
+ * path_done()
+ */
+
+void path_done( void )
+{
+}
diff --git a/src/boost/tools/build/src/engine/pathvms.cpp b/src/boost/tools/build/src/engine/pathvms.cpp
new file mode 100644
index 000000000..da6619777
--- /dev/null
+++ b/src/boost/tools/build/src/engine/pathvms.cpp
@@ -0,0 +1,254 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Copyright 2015 Artur Shepilko.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+
+/*
+ * pathvms.c - VMS-specific path manipulation support
+ *
+ * This implementation is based on POSIX-style path manipulation.
+ *
+ * The VMS CRTL directly supports both POSIX- and native VMS-style path
+ * expressions, with the POSIX-to-VMS path translation performed internally by
+ * the same set of functions. For the most part such processing is transparent,
+ * with a few differences mainly related to file versions (in POSIX mode only
+ * the most recent version is visible).
+ *
+ * This should allow us to some extent re-use pathunix.c implementation.
+ *
+ * Thus in jam-files the path references can also remain POSIX/UNIX-like on all
+ * levels EXCEPT in actions scope, where the path references must be translated
+ * to the native VMS-style. This approach is somewhat similar to jam CYGWIN
+ * handling.
+ *
+ *
+ * External routines:
+ * path_register_key()
+ * path_as_key()
+ * path_done()
+ *
+ * External routines called only via routines in pathsys.c:
+ * path_get_process_id_()
+ * path_get_temp_path_()
+ * path_translate_to_os_()
+ */
+
+
+#include "jam.h"
+
+#ifdef OS_VMS
+
+#include "pathsys.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <unistd.h> /* needed for getpid() */
+#include <unixlib.h> /* needed for decc$to_vms() */
+
+
+/*
+ * path_get_process_id_()
+ */
+
+unsigned long path_get_process_id_( void )
+{
+ return getpid();
+}
+
+
+/*
+ * path_get_temp_path_()
+ */
+
+void path_get_temp_path_( string * buffer )
+{
+ char const * t = getenv( "TMPDIR" );
+ string_append( buffer, t ? t : "/tmp" );
+}
+
+
+/*
+ * translate_path_posix2vms()
+ *
+ * POSIX-to-VMS file specification translation:
+ *
+ * Translation is performed with the decc$to_vms() CRTL routine (default decc$features).
+ * Some limitations apply:
+ * -- ODS-2 compliant file specs only (no spaces, punctuation chars etc.)
+ *
+ * -- wild-cards are not allowed
+ * In general decc$to_vms() can expand the wildcard for existing files,
+ * yet it cannot retain wild-cards in translated spec. Use GLOB for this.
+ *
+ * -- rooted path must refer to an existing/defined device or root-dir
+ * (e.g. /defconcealed/dir/file.ext or /existingrootdir/dir/file.ext )
+ *
+ *  -- POSIX dir/no-type-file path ambiguity (e.g. dir/newsubdir vs. dir/newfile)
+ *     is handled as follows:
+ *
+ * 1) first try as directory:
+ * -- if translated (may be a dir): means the file-path has no .type/suffix
+ * -- if not translated, then it may be a file (has .type) OR invalid spec
+ * 2) then try as file:
+ * -- if translated and also is a dir -- check if such file exists (stat)
+ * -- if not translated, but is a dir -- return as dir
+ *
+ * NOTE: on VMS it's possible to have both a file and a dir of the same name
+ * appear in the same directory. In such case _directory_ intent is assumed.
+ *
+ * It's preferable to avoid such naming ambiguity in this context, so
+ *      append an empty .type to specify a no-type file (e.g. "filename.")
+ *
+ */
+
+
+static string * m_vmsfilespec = NULL;
+
+/*
+ * copy_vmsfilespec() - decc$to_vms action routine for matched filenames
+ */
+
+static int copy_vmsfilespec( char * f, int type )
+{
+ assert ( NULL != m_vmsfilespec && "Must be bound to a valid object" );
+
+ string_copy( m_vmsfilespec, f );
+
+ /* 0:Exit on first match (1:Process all) */
+ return 0;
+}
+
+
+static int translate_path_posix2vms( string * path )
+{
+ int translated = 0;
+
+ string as_dir[ 1 ];
+ string as_file[ 1 ];
+ int dir_count;
+ int file_count;
+
+ unsigned char is_dir;
+ unsigned char is_file;
+ unsigned char is_ambiguous;
+
+ string_new( as_dir );
+ string_new( as_file );
+
+
+ m_vmsfilespec = as_dir;
+
+ /* MATCH 0:do not allow wildcards, 0:allow directories (2:dir only) */
+ dir_count = decc$to_vms( path->value, copy_vmsfilespec, 0, 2 );
+
+
+ m_vmsfilespec = as_file;
+
+ /* MATCH 0:do not allow wildcards, 0:allow directories (2:dir only) */
+ file_count = decc$to_vms( path->value, copy_vmsfilespec, 0, 0 );
+
+ m_vmsfilespec = NULL;
+
+
+ translated = ( file_count || dir_count );
+
+ if ( file_count && dir_count )
+ {
+ struct stat statbuf;
+
+ /* use as_file only when exists AND as_dir does not exist
+ * otherwise use as_dir
+ */
+ if ( stat(as_dir->value, &statbuf ) < 0
+ && stat(as_file->value, &statbuf ) > 0
+ && ( statbuf.st_mode & S_IFREG ) )
+ {
+ string_truncate( path, 0 );
+ string_append( path, as_file->value );
+ }
+ else
+ {
+ string_truncate( path, 0 );
+ string_append( path, as_dir->value );
+ }
+ }
+ else if ( file_count )
+ {
+ string_truncate( path, 0 );
+ string_append( path, as_file->value );
+ }
+ else if ( dir_count )
+ {
+ string_truncate( path, 0 );
+ string_append( path, as_dir->value );
+ }
+ else
+ {
+ /* error: unable to translate path to native format */
+ translated = 0;
+ }
+
+ string_free( as_dir );
+ string_free( as_file );
+
+ return translated;
+}
+
+
+/*
+ * path_translate_to_os_()
+ */
+
+int path_translate_to_os_( char const * f, string * file )
+{
+ int translated = 0;
+
+ /* by default, pass on the original path */
+ string_copy( file, f );
+
+ translated = translate_path_posix2vms( file );
+
+ return translated;
+}
+
+
+/*
+ * path_register_key()
+ */
+
+void path_register_key( OBJECT * path )
+{
+}
+
+
+/*
+ * path_as_key()
+ */
+
+OBJECT * path_as_key( OBJECT * path )
+{
+ return object_copy( path );
+}
+
+
+/*
+ * path_done()
+ */
+
+void path_done( void )
+{
+}
+
+#endif
+
diff --git a/src/boost/tools/build/src/engine/regexp.cpp b/src/boost/tools/build/src/engine/regexp.cpp
new file mode 100644
index 000000000..537bc828d
--- /dev/null
+++ b/src/boost/tools/build/src/engine/regexp.cpp
@@ -0,0 +1,1330 @@
+/*
+ * regcomp and regexec -- regsub and regerror are elsewhere
+ *
+ * Copyright (c) 1986 by University of Toronto.
+ * Written by Henry Spencer. Not derived from licensed software.
+ *
+ * Permission is granted to anyone to use this software for any
+ * purpose on any computer system, and to redistribute it freely,
+ * subject to the following restrictions:
+ *
+ * 1. The author is not responsible for the consequences of use of
+ * this software, no matter how awful, even if they arise
+ * from defects in it.
+ *
+ * 2. The origin of this software must not be misrepresented, either
+ * by explicit claim or by omission.
+ *
+ * 3. Altered versions must be plainly marked as such, and must not
+ * be misrepresented as being the original software.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 27 Dec 1986, to add \n as an alternative to |
+ *** to assist in implementing egrep.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 27 Dec 1986, to add \< and \> for word-matching
+ *** as in BSD grep and ex.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 28 Dec 1986, to optimize characters quoted with \.
+ *** THIS IS AN ALTERED VERSION. It was altered by James A. Woods,
+ *** ames!jaw, on 19 June 1987, to quash a regcomp() redundancy.
+ *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
+ *** seiwald@vix.com, on 28 August 1993, for use in jam. Regmagic.h
+ *** was moved into regexp.h, and the include of regexp.h now uses "'s
+ *** to avoid conflicting with the system regexp.h. Const, bless its
+ *** soul, was removed so it can compile everywhere. The declaration
+ *** of strchr() was in conflict on AIX, so it was removed (as it is
+ *** happily defined in string.h).
+ *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
+ *** seiwald@perforce.com, on 20 January 2000, to use function prototypes.
+ *
+ * Beware that some of this code is subtly aware of the way operator precedence
+ * is structured in regular expressions. Serious changes in regular-expression
+ * syntax might require a total rethink.
+ */
+
+
+#include "jam.h"
+#include "regexp.h"
+#include "output.h"
+
+#include <stdio.h>
+#include <ctype.h>
+#ifndef ultrix
+# include <stdlib.h>
+#endif
+#include <string.h>
+
+
+/*
+ * The "internal use only" fields in regexp.h are present to pass info from
+ * compile to execute that permits the execute phase to run lots faster on
+ * simple cases. They are:
+ *
+ * regstart char that must begin a match; '\0' if none obvious.
+ * reganch is the match anchored (at beginning-of-line only)?
+ * regmust string (pointer into program) that match must include, or NULL.
+ * regmlen length of regmust string.
+ *
+ * Regstart and reganch permit very fast decisions on suitable starting points
+ * for a match, cutting down the work a lot. Regmust permits fast rejection of
+ * lines that cannot possibly match. The regmust tests are costly enough that
+ * regcomp() supplies a regmust only if the r.e. contains something potentially
+ * expensive (at present, the only such thing detected is * or + at the start of
+ * the r.e., which can involve a lot of backup). Regmlen is supplied because the
+ * test in regexec() needs it and regcomp() is computing it anyway.
+ */
+
+/*
+ * Structure for regexp "program". This is essentially a linear encoding of a
+ * nondeterministic finite-state machine (aka syntax charts or "railroad normal
+ * form" in parsing technology). Each node is an opcode plus a "next" pointer,
+ * possibly plus an operand. "Next" pointers of all nodes except BRANCH
+ * implement concatenation; a "next" pointer with a BRANCH on both ends of it is
+ * connecting two alternatives. [Here we have one of the subtle syntax
+ * dependencies: an individual BRANCH, as opposed to a collection of them, is
+ * never concatenated with anything because of operator precedence.] The operand
+ * of some types of node is a literal string; for others, it is a node leading
+ * into a sub-FSM. In particular, the operand of a BRANCH node is the first node
+ * of the branch. [NB this is *not* a tree structure: the tail of the branch
+ * connects to the thing following the set of BRANCHes.] The opcodes are:
+ */
+
+/* definition number opnd? meaning */
+#define END 0 /* no End of program. */
+#define BOL 1 /* no Match "" at beginning of line. */
+#define EOL 2 /* no Match "" at end of line. */
+#define ANY 3 /* no Match any one character. */
+#define ANYOF 4 /* str Match any character in this string. */
+#define ANYBUT 5 /* str Match any character not in this string. */
+#define BRANCH 6 /* node Match this alternative, or the next... */
+#define BACK 7 /* no Match "", "next" ptr points backward. */
+#define EXACTLY 8 /* str Match this string. */
+#define NOTHING 9 /* no Match empty string. */
+#define STAR 10 /* node Match this (simple) thing 0 or more times. */
+#define PLUS 11 /* node Match this (simple) thing 1 or more times. */
+#define WORDA 12 /* no Match "" at wordchar, where prev is nonword */
+#define WORDZ 13 /* no Match "" at nonwordchar, where prev is word */
+#define OPEN 20 /* no Mark this point in input as start of #n. */
+ /* OPEN+1 is number 1, etc. */
+#define CLOSE 30 /* no Analogous to OPEN. */
+
+
+/*
+ * Opcode notes:
+ *
+ * BRANCH The set of branches constituting a single choice are hooked
+ * together with their "next" pointers, since precedence prevents
+ * anything being concatenated to any individual branch. The
+ * "next" pointer of the last BRANCH in a choice points to the
+ * thing following the whole choice. This is also where the
+ * final "next" pointer of each individual branch points; each
+ * branch starts with the operand node of a BRANCH node.
+ *
+ * BACK Normal "next" pointers all implicitly point forward; BACK
+ * exists to make loop structures possible.
+ *
+ * STAR,PLUS '?', and complex '*' and '+', are implemented as circular
+ * BRANCH structures using BACK. Simple cases (one character
+ * per match) are implemented with STAR and PLUS for speed
+ * and to minimize recursive plunges.
+ *
+ * OPEN,CLOSE ...are numbered at compile time.
+ */
+
+/*
+ * A node is one char of opcode followed by two chars of "next" pointer.
+ * "Next" pointers are stored as two 8-bit pieces, high order first. The
+ * value is a positive offset from the opcode of the node containing it.
+ * An operand, if any, simply follows the node. (Note that much of the
+ * code generation knows about this implicit relationship.)
+ *
+ * Using two bytes for the "next" pointer is vast overkill for most things,
+ * but allows patterns to get big without disasters.
+ */
+#define OP(p) (*(p))
+#define NEXT(p) (((*((p)+1)&0377)<<8) + (*((p)+2)&0377))
+#define OPERAND(p) ((p) + 3)
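+
+/*
+ * Illustrative sketch (not part of the original sources): reading one node by
+ * hand. Each node is three bytes -- an opcode followed by the "next" offset
+ * split into a high and a low byte -- so for a hypothetical buffer
+ *
+ *     char node[] = { BRANCH, 0x01, 0x04 };
+ *
+ * OP( node ) yields BRANCH, NEXT( node ) yields 0x0104 (i.e. 260), the next
+ * node starts at node + NEXT( node ) (backward only for BACK nodes), and any
+ * operand starts at OPERAND( node ), i.e. node + 3.
+ */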
+
+/*
+ * See regmagic.h for one further detail of program structure.
+ */
+
+
+/*
+ * Utility definitions.
+ */
+#ifndef CHARBITS
+#define UCHARAT(p) ((int)*(const unsigned char *)(p))
+#else
+#define UCHARAT(p) ((int)*(p)&CHARBITS)
+#endif
+
+#define FAIL(m) { regerror(m); return(NULL); }
+#define ISMULT(c) ((c) == '*' || (c) == '+' || (c) == '?')
+
+/*
+ * Flags to be passed up and down.
+ */
+#define HASWIDTH 01 /* Known never to match null string. */
+#define SIMPLE 02 /* Simple enough to be STAR/PLUS operand. */
+#define SPSTART 04 /* Starts with * or +. */
+#define WORST 0 /* Worst case. */
+
+/*
+ * Global work variables for regcomp().
+ */
+static char *regparse; /* Input-scan pointer. */
+static int regnpar; /* () count. */
+static char regdummy;
+static char *regcode; /* Code-emit pointer; &regdummy = don't. */
+static long regsize; /* Code size. */
+
+/*
+ * Forward declarations for regcomp()'s friends.
+ */
+#ifndef STATIC
+#define STATIC static
+#endif
+STATIC char *reg( int paren, int *flagp );
+STATIC char *regbranch( int *flagp );
+STATIC char *regpiece( int *flagp );
+STATIC char *regatom( int *flagp );
+STATIC char *regnode( int op );
+STATIC char *regnext( char *p );
+STATIC void regc( int b );
+STATIC void reginsert( char op, char *opnd );
+STATIC void regtail( char *p, char *val );
+STATIC void regoptail( char *p, char *val );
+#ifdef STRCSPN
+STATIC int strcspn();
+#endif
+
+/*
+ - regcomp - compile a regular expression into internal code
+ *
+ * We can't allocate space until we know how big the compiled form will be,
+ * but we can't compile it (and thus know how big it is) until we've got a
+ * place to put the code. So we cheat: we compile it twice, once with code
+ * generation turned off and size counting turned on, and once "for real".
+ * This also means that we don't allocate space until we are sure that the
+ * thing really will compile successfully, and we never have to move the
+ * code and thus invalidate pointers into it. (Note that it has to be in
+ * one piece because free() must be able to free it all.)
+ *
+ * Beware that the optimization-preparation code in here knows about some
+ * of the structure of the compiled regexp.
+ */
+regexp *
+regcomp( const char *exp )
+{
+ regexp *r;
+ char *scan;
+ char *longest;
+ unsigned len;
+ int flags;
+
+ if (exp == NULL)
+ FAIL("NULL argument");
+
+ /* First pass: determine size, legality. */
+#ifdef notdef
+ if (exp[0] == '.' && exp[1] == '*') exp += 2; /* aid grep */
+#endif
+ regparse = (char *)exp;
+ regnpar = 1;
+ regsize = 0L;
+ regcode = &regdummy;
+ regc(MAGIC);
+ if (reg(0, &flags) == NULL)
+ return(NULL);
+
+ /* Small enough for pointer-storage convention? */
+ if (regsize >= 32767L) /* Probably could be 65535L. */
+ FAIL("regexp too big");
+
+ /* Allocate space. */
+ r = (regexp *)BJAM_MALLOC(sizeof(regexp) + (unsigned)regsize);
+ if (r == NULL)
+ FAIL("out of space");
+
+ /* Second pass: emit code. */
+ regparse = (char *)exp;
+ regnpar = 1;
+ regcode = r->program;
+ regc(MAGIC);
+ if (reg(0, &flags) == NULL)
+ return(NULL);
+
+ /* Dig out information for optimizations. */
+ r->regstart = '\0'; /* Worst-case defaults. */
+ r->reganch = 0;
+ r->regmust = NULL;
+ r->regmlen = 0;
+ scan = r->program+1; /* First BRANCH. */
+ if (OP(regnext(scan)) == END) { /* Only one top-level choice. */
+ scan = OPERAND(scan);
+
+ /* Starting-point info. */
+ if (OP(scan) == EXACTLY)
+ r->regstart = *OPERAND(scan);
+ else if (OP(scan) == BOL)
+ r->reganch++;
+
+ /*
+ * If there's something expensive in the r.e., find the
+ * longest literal string that must appear and make it the
+ * regmust. Resolve ties in favor of later strings, since
+ * the regstart check works with the beginning of the r.e.
+ * and avoiding duplication strengthens checking. Not a
+ * strong reason, but sufficient in the absence of others.
+ */
+ if (flags&SPSTART) {
+ longest = NULL;
+ len = 0;
+ for (; scan != NULL; scan = regnext(scan))
+ if (OP(scan) == EXACTLY && strlen(OPERAND(scan)) >= len) {
+ longest = OPERAND(scan);
+ len = strlen(OPERAND(scan));
+ }
+ r->regmust = longest;
+ r->regmlen = len;
+ }
+ }
+
+ return(r);
+}
+
+/*
+ - reg - regular expression, i.e. main body or parenthesized thing
+ *
+ * Caller must absorb opening parenthesis.
+ *
+ * Combining parenthesis handling with the base level of regular expression
+ * is a trifle forced, but the need to tie the tails of the branches to what
+ * follows makes it hard to avoid.
+ */
+static char *
+reg(
+ int paren, /* Parenthesized? */
+ int *flagp )
+{
+ char *ret;
+ char *br;
+ char *ender;
+ int parno = 0;
+ int flags;
+
+ *flagp = HASWIDTH; /* Tentatively. */
+
+ /* Make an OPEN node, if parenthesized. */
+ if (paren) {
+ if (regnpar >= NSUBEXP)
+ FAIL("too many ()");
+ parno = regnpar;
+ regnpar++;
+ ret = regnode(OPEN+parno);
+ } else
+ ret = NULL;
+
+ /* Pick up the branches, linking them together. */
+ br = regbranch(&flags);
+ if (br == NULL)
+ return(NULL);
+ if (ret != NULL)
+ regtail(ret, br); /* OPEN -> first. */
+ else
+ ret = br;
+ if (!(flags&HASWIDTH))
+ *flagp &= ~HASWIDTH;
+ *flagp |= flags&SPSTART;
+ while (*regparse == '|' || *regparse == '\n') {
+ regparse++;
+ br = regbranch(&flags);
+ if (br == NULL)
+ return(NULL);
+ regtail(ret, br); /* BRANCH -> BRANCH. */
+ if (!(flags&HASWIDTH))
+ *flagp &= ~HASWIDTH;
+ *flagp |= flags&SPSTART;
+ }
+
+ /* Make a closing node, and hook it on the end. */
+ ender = regnode((paren) ? CLOSE+parno : END);
+ regtail(ret, ender);
+
+ /* Hook the tails of the branches to the closing node. */
+ for (br = ret; br != NULL; br = regnext(br))
+ regoptail(br, ender);
+
+ /* Check for proper termination. */
+ if (paren && *regparse++ != ')') {
+ FAIL("unmatched ()");
+ } else if (!paren && *regparse != '\0') {
+ if (*regparse == ')') {
+ FAIL("unmatched ()");
+ } else
+ FAIL("junk on end"); /* "Can't happen". */
+ /* NOTREACHED */
+ }
+
+ return(ret);
+}
+
+/*
+ - regbranch - one alternative of an | operator
+ *
+ * Implements the concatenation operator.
+ */
+static char *
+regbranch( int *flagp )
+{
+ char *ret;
+ char *chain;
+ char *latest;
+ int flags;
+
+ *flagp = WORST; /* Tentatively. */
+
+ ret = regnode(BRANCH);
+ chain = NULL;
+ while (*regparse != '\0' && *regparse != ')' &&
+ *regparse != '\n' && *regparse != '|') {
+ latest = regpiece(&flags);
+ if (latest == NULL)
+ return(NULL);
+ *flagp |= flags&HASWIDTH;
+ if (chain == NULL) /* First piece. */
+ *flagp |= flags&SPSTART;
+ else
+ regtail(chain, latest);
+ chain = latest;
+ }
+ if (chain == NULL) /* Loop ran zero times. */
+ (void) regnode(NOTHING);
+
+ return(ret);
+}
+
+/*
+ - regpiece - something followed by possible [*+?]
+ *
+ * Note that the branching code sequences used for ? and the general cases
+ * of * and + are somewhat optimized: they use the same NOTHING node as
+ * both the endmarker for their branch list and the body of the last branch.
+ * It might seem that this node could be dispensed with entirely, but the
+ * endmarker role is not redundant.
+ */
+static char *
+regpiece( int *flagp )
+{
+ char *ret;
+ char op;
+ char *next;
+ int flags;
+
+ ret = regatom(&flags);
+ if (ret == NULL)
+ return(NULL);
+
+ op = *regparse;
+ if (!ISMULT(op)) {
+ *flagp = flags;
+ return(ret);
+ }
+
+ if (!(flags&HASWIDTH) && op != '?')
+ FAIL("*+ operand could be empty");
+ *flagp = (op != '+') ? (WORST|SPSTART) : (WORST|HASWIDTH);
+
+ if (op == '*' && (flags&SIMPLE))
+ reginsert(STAR, ret);
+ else if (op == '*') {
+ /* Emit x* as (x&|), where & means "self". */
+ reginsert(BRANCH, ret); /* Either x */
+ regoptail(ret, regnode(BACK)); /* and loop */
+ regoptail(ret, ret); /* back */
+ regtail(ret, regnode(BRANCH)); /* or */
+ regtail(ret, regnode(NOTHING)); /* null. */
+ } else if (op == '+' && (flags&SIMPLE))
+ reginsert(PLUS, ret);
+ else if (op == '+') {
+ /* Emit x+ as x(&|), where & means "self". */
+ next = regnode(BRANCH); /* Either */
+ regtail(ret, next);
+ regtail(regnode(BACK), ret); /* loop back */
+ regtail(next, regnode(BRANCH)); /* or */
+ regtail(ret, regnode(NOTHING)); /* null. */
+ } else if (op == '?') {
+ /* Emit x? as (x|) */
+ reginsert(BRANCH, ret); /* Either x */
+ regtail(ret, regnode(BRANCH)); /* or */
+ next = regnode(NOTHING); /* null. */
+ regtail(ret, next);
+ regoptail(ret, next);
+ }
+ regparse++;
+ if (ISMULT(*regparse))
+ FAIL("nested *?+");
+
+ return(ret);
+}
+
+/*
+ - regatom - the lowest level
+ *
+ * Optimization: gobbles an entire sequence of ordinary characters so that
+ * it can turn them into a single node, which is smaller to store and
+ * faster to run. Backslashed characters are exceptions, each becoming a
+ * separate node; the code is simpler that way and it's not worth fixing.
+ */
+static char *
+regatom( int *flagp )
+{
+ char *ret;
+ int flags;
+
+ *flagp = WORST; /* Tentatively. */
+
+ switch (*regparse++) {
+ /* FIXME: these chars only have meaning at beg/end of pat? */
+ case '^':
+ ret = regnode(BOL);
+ break;
+ case '$':
+ ret = regnode(EOL);
+ break;
+ case '.':
+ ret = regnode(ANY);
+ *flagp |= HASWIDTH|SIMPLE;
+ break;
+ case '[': {
+ int classr;
+ int classend;
+
+ if (*regparse == '^') { /* Complement of range. */
+ ret = regnode(ANYBUT);
+ regparse++;
+ } else
+ ret = regnode(ANYOF);
+ if (*regparse == ']' || *regparse == '-')
+ regc(*regparse++);
+ while (*regparse != '\0' && *regparse != ']') {
+ if (*regparse == '-') {
+ regparse++;
+ if (*regparse == ']' || *regparse == '\0')
+ regc('-');
+ else {
+ classr = UCHARAT(regparse-2)+1;
+ classend = UCHARAT(regparse);
+ if (classr > classend+1)
+ FAIL("invalid [] range");
+ for (; classr <= classend; classr++)
+ regc(classr);
+ regparse++;
+ }
+ } else
+ regc(*regparse++);
+ }
+ regc('\0');
+ if (*regparse != ']')
+ FAIL("unmatched []");
+ regparse++;
+ *flagp |= HASWIDTH|SIMPLE;
+ }
+ break;
+ case '(':
+ ret = reg(1, &flags);
+ if (ret == NULL)
+ return(NULL);
+ *flagp |= flags&(HASWIDTH|SPSTART);
+ break;
+ case '\0':
+ case '|':
+ case '\n':
+ case ')':
+ FAIL("internal urp"); /* Supposed to be caught earlier. */
+ break;
+ case '?':
+ case '+':
+ case '*':
+ FAIL("?+* follows nothing");
+ break;
+ case '\\':
+ switch (*regparse++) {
+ case '\0':
+ FAIL("trailing \\");
+ break;
+ case '<':
+ ret = regnode(WORDA);
+ break;
+ case '>':
+ ret = regnode(WORDZ);
+ break;
+ /* FIXME: Someday handle \1, \2, ... */
+ default:
+ /* Handle general quoted chars in exact-match routine */
+ goto de_fault;
+ }
+ break;
+ de_fault:
+ default:
+ /*
+ * Encode a string of characters to be matched exactly.
+ *
+ * This is a bit tricky due to quoted chars and due to
+ * '*', '+', and '?' taking the SINGLE char previous
+ * as their operand.
+ *
+ * On entry, the char at regparse[-1] is going to go
+ * into the string, no matter what it is. (It could be
+ * following a \ if we are entered from the '\' case.)
+ *
+ * Basic idea is to pick up a good char in ch and
+ * examine the next char. If it's *+? then we twiddle.
+ * If it's \ then we frozzle. If it's other magic char
+ * we push ch and terminate the string. If none of the
+ * above, we push ch on the string and go around again.
+ *
+ * regprev is used to remember where "the current char"
+ * starts in the string, if due to a *+? we need to back
+ * up and put the current char in a separate, 1-char, string.
+ * When regprev is NULL, ch is the only char in the
+ * string; this is used in *+? handling, and in setting
+ * flags |= SIMPLE at the end.
+ */
+ {
+ char *regprev;
+ char ch;
+
+ regparse--; /* Look at cur char */
+ ret = regnode(EXACTLY);
+ for ( regprev = 0 ; ; ) {
+ ch = *regparse++; /* Get current char */
+ switch (*regparse) { /* look at next one */
+
+ default:
+ regc(ch); /* Add cur to string */
+ break;
+
+ case '.': case '[': case '(':
+ case ')': case '|': case '\n':
+ case '$': case '^':
+ case '\0':
+ /* FIXME, $ and ^ should not always be magic */
+ magic:
+ regc(ch); /* dump cur char */
+ goto done; /* and we are done */
+
+ case '?': case '+': case '*':
+ if (!regprev) /* If just ch in str, */
+ goto magic; /* use it */
+ /* End mult-char string one early */
+ regparse = regprev; /* Back up parse */
+ goto done;
+
+ case '\\':
+ regc(ch); /* Cur char OK */
+ switch (regparse[1]){ /* Look after \ */
+ case '\0':
+ case '<':
+ case '>':
+ /* FIXME: Someday handle \1, \2, ... */
+ goto done; /* Not quoted */
+ default:
+ /* Backup point is \, scan * point is after it. */
+ regprev = regparse;
+ regparse++;
+ continue; /* NOT break; */
+ }
+ }
+ regprev = regparse; /* Set backup point */
+ }
+ done:
+ regc('\0');
+ *flagp |= HASWIDTH;
+ if (!regprev) /* One char? */
+ *flagp |= SIMPLE;
+ }
+ break;
+ }
+
+ return(ret);
+}
+
+/*
+ - regnode - emit a node
+ */
+static char * /* Location. */
+regnode( int op )
+{
+ char *ret;
+ char *ptr;
+
+ ret = regcode;
+ if (ret == &regdummy) {
+ regsize += 3;
+ return(ret);
+ }
+
+ ptr = ret;
+ *ptr++ = op;
+ *ptr++ = '\0'; /* Null "next" pointer. */
+ *ptr++ = '\0';
+ regcode = ptr;
+
+ return(ret);
+}
+
+/*
+ - regc - emit (if appropriate) a byte of code
+ */
+static void
+regc( int b )
+{
+ if (regcode != &regdummy)
+ *regcode++ = b;
+ else
+ regsize++;
+}
+
+/*
+ - reginsert - insert an operator in front of already-emitted operand
+ *
+ * Means relocating the operand.
+ */
+static void
+reginsert(
+ char op,
+ char *opnd )
+{
+ char *src;
+ char *dst;
+ char *place;
+
+ if (regcode == &regdummy) {
+ regsize += 3;
+ return;
+ }
+
+ src = regcode;
+ regcode += 3;
+ dst = regcode;
+ while (src > opnd)
+ *--dst = *--src;
+
+ place = opnd; /* Op node, where operand used to be. */
+ *place++ = op;
+ *place++ = '\0';
+ *place++ = '\0';
+}
+
+/*
+ - regtail - set the next-pointer at the end of a node chain
+ */
+static void
+regtail(
+ char *p,
+ char *val )
+{
+ char *scan;
+ char *temp;
+ int offset;
+
+ if (p == &regdummy)
+ return;
+
+ /* Find last node. */
+ scan = p;
+ for (;;) {
+ temp = regnext(scan);
+ if (temp == NULL)
+ break;
+ scan = temp;
+ }
+
+ if (OP(scan) == BACK)
+ offset = scan - val;
+ else
+ offset = val - scan;
+ *(scan+1) = (offset>>8)&0377;
+ *(scan+2) = offset&0377;
+}
+
+/*
+ - regoptail - regtail on operand of first argument; nop if operandless
+ */
+
+static void
+regoptail(
+ char *p,
+ char *val )
+{
+ /* "Operandless" and "op != BRANCH" are synonymous in practice. */
+ if (p == NULL || p == &regdummy || OP(p) != BRANCH)
+ return;
+ regtail(OPERAND(p), val);
+}
+
+/*
+ * regexec and friends
+ */
+
+/*
+ * Global work variables for regexec().
+ */
+static const char *reginput; /* String-input pointer. */
+static const char *regbol; /* Beginning of input, for ^ check. */
+static const char **regstartp; /* Pointer to startp array. */
+static const char **regendp; /* Ditto for endp. */
+
+/*
+ * Forwards.
+ */
+STATIC int regtry( regexp *prog, const char *string );
+STATIC int regmatch( char *prog );
+STATIC int regrepeat( char *p );
+
+#ifdef DEBUG
+int regnarrate = 0;
+void regdump();
+STATIC char *regprop();
+#endif
+
+/*
+ - regexec - match a regexp against a string
+ */
+int
+regexec(
+ regexp *prog,
+ const char *string )
+{
+ char *s;
+
+ /* Be paranoid... */
+ if (prog == NULL || string == NULL) {
+ regerror("NULL parameter");
+ return(0);
+ }
+
+ /* Check validity of program. */
+ if (UCHARAT(prog->program) != MAGIC) {
+ regerror("corrupted program");
+ return(0);
+ }
+
+ /* If there is a "must appear" string, look for it. */
+ if ( prog->regmust != NULL )
+ {
+ s = (char *)string;
+ while ( ( s = strchr( s, prog->regmust[ 0 ] ) ) != NULL )
+ {
+ if ( !strncmp( s, prog->regmust, prog->regmlen ) )
+ break; /* Found it. */
+ ++s;
+ }
+ if ( s == NULL ) /* Not present. */
+ return 0;
+ }
+
+ /* Mark beginning of line for ^ . */
+ regbol = (char *)string;
+
+ /* Simplest case: anchored match need be tried only once. */
+ if ( prog->reganch )
+ return regtry( prog, string );
+
+ /* Messy cases: unanchored match. */
+ s = (char *)string;
+ if (prog->regstart != '\0')
+ /* We know what char it must start with. */
+ while ((s = strchr(s, prog->regstart)) != NULL) {
+ if (regtry(prog, s))
+ return(1);
+ s++;
+ }
+ else
+ /* We do not -- general case. */
+ do {
+ if ( regtry( prog, s ) )
+ return( 1 );
+ } while ( *s++ != '\0' );
+
+ /* Failure. */
+ return 0;
+}
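+
+/*
+ * Illustrative sketch (not part of the original sources): typical use of this
+ * API with a hypothetical pattern and subject string. startp[ 0 ]/endp[ 0 ]
+ * delimit the overall match; startp[ 1 ] .. startp[ 9 ] delimit the
+ * parenthesized subexpressions.
+ *
+ *     regexp * re = regcomp( "([a-z]+)\\.cpp" );
+ *     if ( re && regexec( re, "scan.cpp" ) )
+ *     {
+ *         int len = (int)( re->endp[ 1 ] - re->startp[ 1 ] );
+ *         printf( "%.*s\n", len, re->startp[ 1 ] );
+ *     }
+ *     BJAM_FREE( re );
+ *
+ * which prints "scan", the contents of the first (and only) capture group.
+ */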
+
+
+/*
+ * regtry() - try match at specific point.
+ */
+
+static int /* 0 failure, 1 success */
+regtry(
+ regexp *prog,
+ const char *string )
+{
+ int i;
+ const char * * sp;
+ const char * * ep;
+
+ reginput = string;
+ regstartp = prog->startp;
+ regendp = prog->endp;
+
+ sp = prog->startp;
+ ep = prog->endp;
+ for ( i = NSUBEXP; i > 0; --i )
+ {
+ *sp++ = NULL;
+ *ep++ = NULL;
+ }
+ if ( regmatch( prog->program + 1 ) )
+ {
+ prog->startp[ 0 ] = string;
+ prog->endp[ 0 ] = reginput;
+ return 1;
+ }
+ else
+ return 0;
+}
+
+
+/*
+ * regmatch() - main matching routine.
+ *
+ * Conceptually the strategy is simple: check to see whether the current node
+ * matches, call self recursively to see whether the rest matches, and then act
+ * accordingly. In practice we make some effort to avoid recursion, in
+ * particular by going through "ordinary" nodes (that do not need to know
+ * whether the rest of the match failed) by a loop instead of by recursion.
+ */
+
+static int /* 0 failure, 1 success */
+regmatch( char * prog )
+{
+ char * scan; /* Current node. */
+ char * next; /* Next node. */
+
+ scan = prog;
+#ifdef DEBUG
+ if (scan != NULL && regnarrate)
+ err_printf("%s(\n", regprop(scan));
+#endif
+ while (scan != NULL) {
+#ifdef DEBUG
+ if (regnarrate)
+ err_printf("%s...\n", regprop(scan));
+#endif
+ next = regnext(scan);
+
+ switch (OP(scan)) {
+ case BOL:
+ if (reginput != regbol)
+ return(0);
+ break;
+ case EOL:
+ if (*reginput != '\0')
+ return(0);
+ break;
+ case WORDA:
+ /* Must be looking at a letter, digit, or _ */
+ if ((!isalnum(*reginput)) && *reginput != '_')
+ return(0);
+ /* Prev must be BOL or nonword */
+ if (reginput > regbol &&
+ (isalnum(reginput[-1]) || reginput[-1] == '_'))
+ return(0);
+ break;
+ case WORDZ:
+ /* Must be looking at non letter, digit, or _ */
+ if (isalnum(*reginput) || *reginput == '_')
+ return(0);
+ /* We don't care what the previous char was */
+ break;
+ case ANY:
+ if (*reginput == '\0')
+ return(0);
+ reginput++;
+ break;
+ case EXACTLY: {
+ int len;
+ char *opnd;
+
+ opnd = OPERAND(scan);
+ /* Inline the first character, for speed. */
+ if (*opnd != *reginput)
+ return(0);
+ len = strlen(opnd);
+ if (len > 1 && strncmp(opnd, reginput, len) != 0)
+ return(0);
+ reginput += len;
+ }
+ break;
+ case ANYOF:
+ if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) == NULL)
+ return(0);
+ reginput++;
+ break;
+ case ANYBUT:
+ if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) != NULL)
+ return(0);
+ reginput++;
+ break;
+ case NOTHING:
+ break;
+ case BACK:
+ break;
+ case OPEN+1:
+ case OPEN+2:
+ case OPEN+3:
+ case OPEN+4:
+ case OPEN+5:
+ case OPEN+6:
+ case OPEN+7:
+ case OPEN+8:
+ case OPEN+9: {
+ int no;
+ const char *save;
+
+ no = OP(scan) - OPEN;
+ save = reginput;
+
+ if (regmatch(next)) {
+ /*
+ * Don't set startp if some later
+ * invocation of the same parentheses
+ * already has.
+ */
+ if (regstartp[no] == NULL)
+ regstartp[no] = save;
+ return(1);
+ } else
+ return(0);
+ }
+ break;
+ case CLOSE+1:
+ case CLOSE+2:
+ case CLOSE+3:
+ case CLOSE+4:
+ case CLOSE+5:
+ case CLOSE+6:
+ case CLOSE+7:
+ case CLOSE+8:
+ case CLOSE+9: {
+ int no;
+ const char *save;
+
+ no = OP(scan) - CLOSE;
+ save = reginput;
+
+ if (regmatch(next)) {
+ /*
+ * Don't set endp if some later
+ * invocation of the same parentheses
+ * already has.
+ */
+ if (regendp[no] == NULL)
+ regendp[no] = save;
+ return(1);
+ } else
+ return(0);
+ }
+ break;
+ case BRANCH: {
+ const char *save;
+
+ if (OP(next) != BRANCH) /* No choice. */
+ next = OPERAND(scan); /* Avoid recursion. */
+ else {
+ do {
+ save = reginput;
+ if (regmatch(OPERAND(scan)))
+ return(1);
+ reginput = save;
+ scan = regnext(scan);
+ } while (scan != NULL && OP(scan) == BRANCH);
+ return(0);
+ /* NOTREACHED */
+ }
+ }
+ break;
+ case STAR:
+ case PLUS: {
+ char nextch;
+ int no;
+ const char *save;
+ int min;
+
+ /*
+ * Lookahead to avoid useless match attempts
+ * when we know what character comes next.
+ */
+ nextch = '\0';
+ if (OP(next) == EXACTLY)
+ nextch = *OPERAND(next);
+ min = (OP(scan) == STAR) ? 0 : 1;
+ save = reginput;
+ no = regrepeat(OPERAND(scan));
+ while (no >= min) {
+ /* If it could work, try it. */
+ if (nextch == '\0' || *reginput == nextch)
+ if (regmatch(next))
+ return(1);
+ /* Couldn't or didn't -- back up. */
+ no--;
+ reginput = save + no;
+ }
+ return(0);
+ }
+ break;
+ case END:
+ return(1); /* Success! */
+ break;
+ default:
+ regerror("memory corruption");
+ return(0);
+ break;
+ }
+
+ scan = next;
+ }
+
+ /*
+ * We get here only if there's trouble -- normally "case END" is
+ * the terminating point.
+ */
+ regerror("corrupted pointers");
+ return(0);
+}
+
+/*
+ - regrepeat - repeatedly match something simple, report how many
+ */
+static int
+regrepeat( char *p )
+{
+ int count = 0;
+ const char *scan;
+ char *opnd;
+
+ scan = reginput;
+ opnd = OPERAND(p);
+ switch (OP(p)) {
+ case ANY:
+ count = strlen(scan);
+ scan += count;
+ break;
+ case EXACTLY:
+ while (*opnd == *scan) {
+ count++;
+ scan++;
+ }
+ break;
+ case ANYOF:
+ while (*scan != '\0' && strchr(opnd, *scan) != NULL) {
+ count++;
+ scan++;
+ }
+ break;
+ case ANYBUT:
+ while (*scan != '\0' && strchr(opnd, *scan) == NULL) {
+ count++;
+ scan++;
+ }
+ break;
+ default: /* Oh dear. Called inappropriately. */
+ regerror("internal foulup");
+ count = 0; /* Best compromise. */
+ break;
+ }
+ reginput = scan;
+
+ return(count);
+}
+
+/*
+ - regnext - dig the "next" pointer out of a node
+ */
+static char *
+regnext( char *p )
+{
+ int offset;
+
+ if (p == &regdummy)
+ return(NULL);
+
+ offset = NEXT(p);
+ if (offset == 0)
+ return(NULL);
+
+ if (OP(p) == BACK)
+ return(p-offset);
+ else
+ return(p+offset);
+}
+
+#ifdef DEBUG
+
+STATIC char *regprop();
+
+/*
+ - regdump - dump a regexp onto stdout in vaguely comprehensible form
+ */
+void
+regdump( regexp *r )
+{
+ char *s;
+ char op = EXACTLY; /* Arbitrary non-END op. */
+ char *next;
+
+
+ s = r->program + 1;
+ while (op != END) { /* While that wasn't END last time... */
+ op = OP(s);
+ out_printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */
+ next = regnext(s);
+ if (next == NULL) /* Next ptr. */
+ out_printf("(0)");
+ else
+ out_printf("(%d)", (s-r->program)+(next-s));
+ s += 3;
+ if (op == ANYOF || op == ANYBUT || op == EXACTLY) {
+ /* Literal string, where present. */
+ while (*s != '\0') {
+ out_putc(*s);
+ s++;
+ }
+ s++;
+ }
+ out_putc('\n');
+ }
+
+ /* Header fields of interest. */
+ if (r->regstart != '\0')
+ out_printf("start `%c' ", r->regstart);
+ if (r->reganch)
+ out_printf("anchored ");
+ if (r->regmust != NULL)
+ out_printf("must have \"%s\"", r->regmust);
+ out_printf("\n");
+}
+
+/*
+ - regprop - printable representation of opcode
+ */
+static char *
+regprop( char *op )
+{
+ char *p;
+ static char buf[50];
+
+ (void) strcpy(buf, ":");
+
+ switch (OP(op)) {
+ case BOL:
+ p = "BOL";
+ break;
+ case EOL:
+ p = "EOL";
+ break;
+ case ANY:
+ p = "ANY";
+ break;
+ case ANYOF:
+ p = "ANYOF";
+ break;
+ case ANYBUT:
+ p = "ANYBUT";
+ break;
+ case BRANCH:
+ p = "BRANCH";
+ break;
+ case EXACTLY:
+ p = "EXACTLY";
+ break;
+ case NOTHING:
+ p = "NOTHING";
+ break;
+ case BACK:
+ p = "BACK";
+ break;
+ case END:
+ p = "END";
+ break;
+ case OPEN+1:
+ case OPEN+2:
+ case OPEN+3:
+ case OPEN+4:
+ case OPEN+5:
+ case OPEN+6:
+ case OPEN+7:
+ case OPEN+8:
+ case OPEN+9:
+ sprintf(buf+strlen(buf), "OPEN%d", OP(op)-OPEN);
+ p = NULL;
+ break;
+ case CLOSE+1:
+ case CLOSE+2:
+ case CLOSE+3:
+ case CLOSE+4:
+ case CLOSE+5:
+ case CLOSE+6:
+ case CLOSE+7:
+ case CLOSE+8:
+ case CLOSE+9:
+ sprintf(buf+strlen(buf), "CLOSE%d", OP(op)-CLOSE);
+ p = NULL;
+ break;
+ case STAR:
+ p = "STAR";
+ break;
+ case PLUS:
+ p = "PLUS";
+ break;
+ case WORDA:
+ p = "WORDA";
+ break;
+ case WORDZ:
+ p = "WORDZ";
+ break;
+ default:
+ regerror("corrupted opcode");
+ break;
+ }
+ if (p != NULL)
+ (void) strcat(buf, p);
+ return(buf);
+}
+#endif
+
+/*
+ * The following is provided for those people who do not have strcspn() in
+ * their C libraries. They should get off their butts and do something
+ * about it; at least one public-domain implementation of those (highly
+ * useful) string routines has been published on Usenet.
+ */
+#ifdef STRCSPN
+/*
+ * strcspn - find length of initial segment of s1 consisting entirely
+ * of characters not from s2
+ */
+
+static int
+strcspn(
+ char *s1,
+ char *s2 )
+{
+ char *scan1;
+ char *scan2;
+ int count;
+
+ count = 0;
+ for (scan1 = s1; *scan1 != '\0'; scan1++) {
+ for (scan2 = s2; *scan2 != '\0';) /* ++ moved down. */
+ if (*scan1 == *scan2++)
+ return(count);
+ count++;
+ }
+ return(count);
+}
+#endif
diff --git a/src/boost/tools/build/src/engine/regexp.h b/src/boost/tools/build/src/engine/regexp.h
new file mode 100644
index 000000000..3a52ba3d6
--- /dev/null
+++ b/src/boost/tools/build/src/engine/regexp.h
@@ -0,0 +1,36 @@
+/*
+ * Definitions etc. for regexp(3) routines.
+ *
+ * Caveat: this is V8 regexp(3) [actually, a reimplementation thereof],
+ * not the System V one.
+ */
+#ifndef REGEXP_DWA20011023_H
+#define REGEXP_DWA20011023_H
+
+#include "config.h"
+
+#define NSUBEXP 10
+typedef struct regexp {
+ char const * startp[ NSUBEXP ];
+ char const * endp[ NSUBEXP ];
+ char regstart; /* Internal use only. */
+ char reganch; /* Internal use only. */
+ char * regmust; /* Internal use only. */
+ int regmlen; /* Internal use only. */
+ char program[ 1 ]; /* Unwarranted chumminess with compiler. */
+} regexp;
+
+
+regexp * regcomp( char const * exp );
+int regexec( regexp * prog, char const * string );
+void regerror( char const * s );
+
+
+/*
+ * The first byte of the regexp internal "program" is actually this magic
+ * number; the start node begins in the second byte.
+ */
+#define MAGIC 0234
+
+#endif
+
diff --git a/src/boost/tools/build/src/engine/rules.cpp b/src/boost/tools/build/src/engine/rules.cpp
new file mode 100644
index 000000000..3bbfc6574
--- /dev/null
+++ b/src/boost/tools/build/src/engine/rules.cpp
@@ -0,0 +1,739 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * rules.c - access to RULEs, TARGETs, and ACTIONs
+ *
+ * External routines:
+ * bindrule() - return pointer to RULE, creating it if necessary.
+ * bindtarget() - return pointer to TARGET, creating it if necessary.
+ * touch_target() - mark a target to simulate being new.
+ * targetlist() - turn list of target names into a TARGET chain.
+ * targetentry() - add a TARGET to a chain of TARGETS.
+ * actionlist() - append to an ACTION chain.
+ * addsettings() - add a deferred "set" command to a target.
+ * pushsettings() - set all target specific variables.
+ * popsettings() - reset target specific variables to their pre-push values.
+ * freesettings() - delete a settings list.
+ * rules_done() - free RULE and TARGET tables.
+ */
+
+#include "jam.h"
+#include "rules.h"
+
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "search.h"
+#include "variable.h"
+
+
+static void set_rule_actions( RULE *, rule_actions * );
+static void set_rule_body ( RULE *, FUNCTION * );
+
+static struct hash * targethash = 0;
+
+
+/*
+ * get_target_includes() - lazily creates a target's internal includes node
+ *
+ * The newly created node is not entered into the hash table as there should
+ * never be a need to bind it directly by target name. If you want to
+ * access an internal includes node by name, first access the actual target and
+ * then read the internal includes node from there.
+ */
+
+static TARGET * get_target_includes( TARGET * const t )
+{
+ if ( !t->includes )
+ {
+ TARGET * const i = (TARGET *)BJAM_MALLOC( sizeof( *t ) );
+ memset( (char *)i, '\0', sizeof( *i ) );
+ i->name = object_copy( t->name );
+ i->boundname = object_copy( i->name );
+ i->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL;
+ t->includes = i;
+ }
+ return t->includes;
+}
+
+
+/*
+ * target_include() - adds a target to the given target's 'included' list
+ * target_include_many() - adds targets to the given target's 'included' list
+ *
+ * Included targets are modeled as dependencies of the including target's
+ * internal include node.
+ */
+
+void target_include( TARGET * const including, TARGET * const included )
+{
+ TARGET * const internal = get_target_includes( including );
+ internal->depends = targetentry( internal->depends, included );
+}
+
+void target_include_many( TARGET * const including, LIST * const included_names
+ )
+{
+ TARGET * const internal = get_target_includes( including );
+ internal->depends = targetlist( internal->depends, included_names );
+}
+
+
+/*
+ * enter_rule() - return pointer to RULE, creating it if necessary in
+ * target_module.
+ */
+
+static RULE * enter_rule( OBJECT * rulename, module_t * target_module )
+{
+ int found;
+ RULE * const r = (RULE *)hash_insert( demand_rules( target_module ),
+ rulename, &found );
+ if ( !found )
+ {
+ r->name = object_copy( rulename );
+ r->procedure = 0;
+ r->module = 0;
+ r->actions = 0;
+ r->exported = 0;
+ r->module = target_module;
+ }
+ return r;
+}
+
+
+/*
+ * define_rule() - return pointer to RULE, creating it if necessary in
+ * target_module. Prepare it to accept a body or action originating in
+ * src_module.
+ */
+
+static RULE * define_rule( module_t * src_module, OBJECT * rulename,
+ module_t * target_module )
+{
+ RULE * const r = enter_rule( rulename, target_module );
+ if ( r->module != src_module )
+ {
+ /* If the rule was imported from elsewhere, clear it now. */
+ set_rule_body( r, 0 );
+ set_rule_actions( r, 0 );
+ /* r will be executed in the source module. */
+ r->module = src_module;
+ }
+ return r;
+}
+
+
+void rule_free( RULE * r )
+{
+ object_free( r->name );
+ r->name = 0;
+ if ( r->procedure )
+ function_free( r->procedure );
+ r->procedure = 0;
+ if ( r->actions )
+ actions_free( r->actions );
+ r->actions = 0;
+}
+
+
+/*
+ * bindtarget() - return pointer to TARGET, creating it if necessary.
+ */
+
+TARGET * bindtarget( OBJECT * const target_name )
+{
+ int found;
+ TARGET * t;
+
+ if ( !targethash )
+ targethash = hashinit( sizeof( TARGET ), "targets" );
+
+ t = (TARGET *)hash_insert( targethash, target_name, &found );
+ if ( !found )
+ {
+ memset( (char *)t, '\0', sizeof( *t ) );
+ t->name = object_copy( target_name );
+ t->boundname = object_copy( t->name ); /* default for T_FLAG_NOTFILE */
+ }
+
+ return t;
+}
+
+
+static void bind_explicitly_located_target( void * xtarget, void * data )
+{
+ TARGET * t = (TARGET *)xtarget;
+ if ( !( t->flags & T_FLAG_NOTFILE ) )
+ {
+ /* Check if there is a setting for LOCATE. */
+ SETTINGS * s = t->settings;
+ for ( ; s ; s = s->next )
+ {
+ if ( object_equal( s->symbol, constant_LOCATE ) && ! list_empty( s->value ) )
+ {
+ set_explicit_binding( t->name, list_front( s->value ) );
+ break;
+ }
+ }
+ }
+}
+
+
+void bind_explicitly_located_targets()
+{
+ if ( targethash )
+ hashenumerate( targethash, bind_explicitly_located_target, (void *)0 );
+}
+
+
+/*
+ * touch_target() - mark a target to simulate being new.
+ */
+
+void touch_target( OBJECT * const t )
+{
+ bindtarget( t )->flags |= T_FLAG_TOUCHED;
+}
+
+
+/*
+ * target_scc() - returns the root of a strongly connected component that this
+ * target is a part of.
+ */
+
+TARGET * target_scc( TARGET * t )
+{
+ TARGET * result = t;
+ while ( result->scc_root )
+ result = result->scc_root;
+ while ( t->scc_root )
+ {
+ TARGET * const tmp = t->scc_root;
+ t->scc_root = result;
+ t = tmp;
+ }
+ return result;
+}
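+
+/*
+ * Illustrative note (not part of the original sources): the scc_root links
+ * form a union-find style forest. The first loop walks up to the root; the
+ * second re-points every node visited directly at that root (path
+ * compression), so a hypothetical chain a -> b -> c -> root collapses to
+ * a -> root, b -> root, c -> root after a single target_scc( a ) call.
+ */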
+
+
+/*
+ * targetlist() - turn list of target names into a TARGET chain.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * targets list of target names
+ */
+
+TARGETS * targetlist( TARGETS * chain, LIST * target_names )
+{
+ LISTITER iter = list_begin( target_names );
+ LISTITER const end = list_end( target_names );
+ for ( ; iter != end; iter = list_next( iter ) )
+ chain = targetentry( chain, bindtarget( list_item( iter ) ) );
+ return chain;
+}
+
+
+/*
+ * targetentry() - add a TARGET to a chain of TARGETS.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * target new target to append
+ */
+
+TARGETS * targetentry( TARGETS * chain, TARGET * target )
+{
+ TARGETS * const c = (TARGETS *)BJAM_MALLOC( sizeof( TARGETS ) );
+ c->target = target;
+
+ if ( !chain ) chain = c;
+ else chain->tail->next = c;
+ chain->tail = c;
+ c->next = 0;
+
+ return chain;
+}
+
+
+/*
+ * targetchain() - append two TARGET chains.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ *  targets chain of TARGETS to append
+ */
+
+TARGETS * targetchain( TARGETS * chain, TARGETS * targets )
+{
+ if ( !targets ) return chain;
+ if ( !chain ) return targets;
+
+ chain->tail->next = targets;
+ chain->tail = targets->tail;
+ return chain;
+}
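+
+/*
+ * Illustrative note (not part of the original sources): only the head of a
+ * TARGETS chain keeps a meaningful 'tail' pointer, which is what lets
+ * targetentry() and targetchain() append in constant time. For a hypothetical
+ * sequence
+ *
+ *     TARGETS * c = 0;
+ *     c = targetentry( c, t1 );    (now c->tail == c)
+ *     c = targetentry( c, t2 );    (now c->tail is the node holding t2)
+ *
+ * no call ever walks the list.
+ */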
+
+/*
+ * action_free - decrement the ACTIONs reference count and (maybe) free it.
+ */
+
+void action_free( ACTION * action )
+{
+ if ( --action->refs == 0 )
+ {
+ freetargets( action->targets );
+ freetargets( action->sources );
+ BJAM_FREE( action );
+ }
+}
+
+
+/*
+ * actionlist() - append to an ACTION chain.
+ */
+
+ACTIONS * actionlist( ACTIONS * chain, ACTION * action )
+{
+ ACTIONS * const actions = (ACTIONS *)BJAM_MALLOC( sizeof( ACTIONS ) );
+ actions->action = action;
+ ++action->refs;
+ if ( !chain ) chain = actions;
+ else chain->tail->next = actions;
+ chain->tail = actions;
+ actions->next = 0;
+ return chain;
+}
+
+static SETTINGS * settings_freelist;
+
+
+/*
+ * addsettings() - add a deferred "set" command to a target.
+ *
+ * Adds a variable setting (varname=list) onto a chain of settings for a
+ * particular target. 'flag' controls the relationship between new and old
+ * values in the same way as in the var_set() function (see variable.c). Returns the
+ * head of the settings chain.
+ */
+
+SETTINGS * addsettings( SETTINGS * head, int flag, OBJECT * symbol,
+ LIST * value )
+{
+ SETTINGS * v;
+
+ /* Look for previous settings. */
+ for ( v = head; v; v = v->next )
+ if ( object_equal( v->symbol, symbol ) )
+ break;
+
+ /* If not previously set, alloc a new. */
+ /* If appending, do so. */
+ /* Else free old and set new. */
+ if ( !v )
+ {
+ v = settings_freelist;
+ if ( v )
+ settings_freelist = v->next;
+ else
+ v = (SETTINGS *)BJAM_MALLOC( sizeof( *v ) );
+
+ v->symbol = object_copy( symbol );
+ v->value = value;
+ v->next = head;
+ head = v;
+ }
+ else if ( flag == VAR_APPEND )
+ {
+ v->value = list_append( v->value, value );
+ }
+ else if ( flag != VAR_DEFAULT )
+ {
+ list_free( v->value );
+ v->value = value;
+ }
+ else
+ list_free( value );
+
+ /* Return (new) head of list. */
+ return head;
+}
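+
+/*
+ * Illustrative sketch (not part of the original sources): the 'flag' argument
+ * mirrors var_set() semantics. Assuming a symbol FOO is already present on
+ * the chain:
+ *
+ *     head = addsettings( head, VAR_SET,     FOO, value );  replaces the old list
+ *     head = addsettings( head, VAR_APPEND,  FOO, value );  appends to the old list
+ *     head = addsettings( head, VAR_DEFAULT, FOO, value );  keeps the old list; 'value' is freed
+ *
+ * In every case ownership of 'value' passes to the chain (or it is freed), so
+ * the caller must not free it again.
+ */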
+
+
+/*
+ * pushsettings() - set all target specific variables.
+ */
+
+void pushsettings( struct module_t * module, SETTINGS * v )
+{
+ for ( ; v; v = v->next )
+ v->value = var_swap( module, v->symbol, v->value );
+}
+
+
+/*
+ * popsettings() - reset target specific variables to their pre-push values.
+ */
+
+void popsettings( struct module_t * module, SETTINGS * v )
+{
+ pushsettings( module, v ); /* just swap again */
+}
+
+
+/*
+ * copysettings() - duplicate a settings list, returning the new copy.
+ */
+
+SETTINGS * copysettings( SETTINGS * head )
+{
+ SETTINGS * copy = 0;
+ SETTINGS * v;
+ for ( v = head; v; v = v->next )
+ copy = addsettings( copy, VAR_SET, v->symbol, list_copy( v->value ) );
+ return copy;
+}
+
+
+/*
+ * freetargets() - delete a targets list.
+ */
+
+void freetargets( TARGETS * chain )
+{
+ while ( chain )
+ {
+ TARGETS * const n = chain->next;
+ BJAM_FREE( chain );
+ chain = n;
+ }
+}
+
+
+/*
+ * freeactions() - delete an action list.
+ */
+
+void freeactions( ACTIONS * chain )
+{
+ while ( chain )
+ {
+ ACTIONS * const n = chain->next;
+ action_free( chain->action );
+ BJAM_FREE( chain );
+ chain = n;
+ }
+}
+
+
+/*
+ * freesettings() - delete a settings list.
+ */
+
+void freesettings( SETTINGS * v )
+{
+ while ( v )
+ {
+ SETTINGS * const n = v->next;
+ object_free( v->symbol );
+ list_free( v->value );
+ v->next = settings_freelist;
+ settings_freelist = v;
+ v = n;
+ }
+}
+
+
+static void freetarget( void * xt, void * data )
+{
+ TARGET * const t = (TARGET *)xt;
+ if ( t->name ) object_free ( t->name );
+ if ( t->boundname ) object_free ( t->boundname );
+ if ( t->settings ) freesettings( t->settings );
+ if ( t->depends ) freetargets ( t->depends );
+ if ( t->dependants ) freetargets ( t->dependants );
+ if ( t->parents ) freetargets ( t->parents );
+ if ( t->actions ) freeactions ( t->actions );
+ if ( t->includes )
+ {
+ freetarget( t->includes, (void *)0 );
+ BJAM_FREE( t->includes );
+ }
+}
+
+
+/*
+ * rules_done() - free RULE and TARGET tables.
+ */
+
+void rules_done()
+{
+ if ( targethash )
+ {
+ hashenumerate( targethash, freetarget, 0 );
+ hashdone( targethash );
+ }
+ while ( settings_freelist )
+ {
+ SETTINGS * const n = settings_freelist->next;
+ BJAM_FREE( settings_freelist );
+ settings_freelist = n;
+ }
+}
+
+
+/*
+ * actions_refer() - add a new reference to the given actions.
+ */
+
+void actions_refer( rule_actions * a )
+{
+ ++a->reference_count;
+}
+
+
+/*
+ * actions_free() - release a reference to given actions.
+ */
+
+void actions_free( rule_actions * a )
+{
+ if ( --a->reference_count <= 0 )
+ {
+ function_free( a->command );
+ list_free( a->bindlist );
+ BJAM_FREE( a );
+ }
+}
+
+
+/*
+ * set_rule_body() - set the procedure of the given rule.
+ */
+
+static void set_rule_body( RULE * rule, FUNCTION * procedure )
+{
+ if ( procedure )
+ function_refer( procedure );
+ if ( rule->procedure )
+ function_free( rule->procedure );
+ rule->procedure = procedure;
+}
+
+
+/*
+ * global_rule_name() - given a rule, return the name for a corresponding rule in the
+ * global module.
+ */
+
+static OBJECT * global_rule_name( RULE * r )
+{
+ if ( r->module == root_module() )
+ return object_copy( r->name );
+
+ {
+ char name[ 4096 ] = "";
+ if ( r->module->name )
+ {
+ strncat( name, object_str( r->module->name ), sizeof( name ) - 1 );
+ strncat( name, ".", sizeof( name ) - 1 );
+ }
+ strncat( name, object_str( r->name ), sizeof( name ) - 1 );
+ return object_new( name );
+ }
+}
+
+
+/*
+ * global_rule() - given a rule, produce a corresponding entry in the global
+ * module.
+ */
+
+static RULE * global_rule( RULE * r )
+{
+ if ( r->module == root_module() )
+ return r;
+
+ {
+ OBJECT * const name = global_rule_name( r );
+ RULE * const result = define_rule( r->module, name, root_module() );
+ object_free( name );
+ return result;
+ }
+}
+
+
+/*
+ * new_rule_body() - make a new rule named rulename in the given module, with
+ * the given procedure. If exported is true, the rule is
+ * exported to the global module as modulename.rulename.
+ */
+
+RULE * new_rule_body( module_t * m, OBJECT * rulename, FUNCTION * procedure,
+ int exported )
+{
+ RULE * const local = define_rule( m, rulename, m );
+ local->exported = exported;
+ set_rule_body( local, procedure );
+
+ /* Mark the procedure with the global rule name, regardless of whether the
+ * rule is exported. That gives us something reasonably identifiable that we
+ * can use, e.g. in profiling output. Only do this once, since this could be
+ * called multiple times with the same procedure.
+ */
+ if ( !function_rulename( procedure ) )
+ function_set_rulename( procedure, global_rule_name( local ) );
+
+ return local;
+}
+
+
+static void set_rule_actions( RULE * rule, rule_actions * actions )
+{
+ if ( actions )
+ actions_refer( actions );
+ if ( rule->actions )
+ actions_free( rule->actions );
+ rule->actions = actions;
+}
+
+
+static rule_actions * actions_new( FUNCTION * command, LIST * bindlist,
+ int flags )
+{
+ rule_actions * const result = (rule_actions *)BJAM_MALLOC( sizeof(
+ rule_actions ) );
+ function_refer( command );
+ result->command = command;
+ result->bindlist = bindlist;
+ result->flags = flags;
+ result->reference_count = 0;
+ return result;
+}
+
+
+RULE * new_rule_actions( module_t * m, OBJECT * rulename, FUNCTION * command,
+ LIST * bindlist, int flags )
+{
+ RULE * const local = define_rule( m, rulename, m );
+ RULE * const global = global_rule( local );
+ set_rule_actions( local, actions_new( command, bindlist, flags ) );
+ set_rule_actions( global, local->actions );
+ return local;
+}
+
+
+/*
+ * Looks for a rule in the specified module, and returns it, if found. First
+ * checks if the rule is present in the module's rule table. Second, if the
+ * rule's name is in the form name1.name2 and name1 is in the list of imported
+ * modules, look in module 'name1' for rule 'name2'.
+ */
+
+RULE * lookup_rule( OBJECT * rulename, module_t * m, int local_only )
+{
+ RULE * r;
+ RULE * result = 0;
+ module_t * original_module = m;
+
+ if ( m->class_module )
+ m = m->class_module;
+
+ if ( m->rules && ( r = (RULE *)hash_find( m->rules, rulename ) ) )
+ result = r;
+ else if ( !local_only && m->imported_modules )
+ {
+ /* Try splitting the name into module and rule. */
+ const char * p = strchr( object_str( rulename ), '.' ) ;
+ if ( p )
+ {
+            /* The text before the '.' is the module name; p + 1 points to the
+             * rule name.
+ */
+ OBJECT * rule_part = object_new( p + 1 );
+ OBJECT * module_part;
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append_range( buf, object_str( rulename ), p );
+ module_part = object_new( buf->value );
+ string_free( buf );
+ }
+ if ( hash_find( m->imported_modules, module_part ) )
+ result = lookup_rule( rule_part, bindmodule( module_part ), 1 );
+ object_free( module_part );
+ object_free( rule_part );
+ }
+ }
+
+ if ( result )
+ {
+ if ( local_only && !result->exported )
+ result = 0;
+ else if ( original_module != m )
+ {
+ /* Lookup started in class module. We have found a rule in class
+ * module, which is marked for execution in that module, or in some
+ * instance. Mark it for execution in the instance where we started
+ * the lookup.
+ */
+ int const execute_in_class = result->module == m;
+ int const execute_in_some_instance =
+ result->module->class_module == m;
+ if ( execute_in_class || execute_in_some_instance )
+ result->module = original_module;
+ }
+ }
+
+ return result;
+}
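+
+/*
+ * Illustrative sketch (not part of the original sources): looking up a
+ * hypothetical rule "utils.trim" in module m first checks m's own rule table
+ * for the literal name "utils.trim". Failing that, and provided "utils" is
+ * listed in m's imported_modules, it retries as
+ * lookup_rule( "trim", bindmodule( "utils" ), 1 ), i.e. a local-only lookup
+ * of "trim" inside module "utils" that only returns exported rules.
+ */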
+
+
+RULE * bindrule( OBJECT * rulename, module_t * m )
+{
+ RULE * result = lookup_rule( rulename, m, 0 );
+ if ( !result )
+ result = lookup_rule( rulename, root_module(), 0 );
+ /* We have only one caller, 'evaluate_rule', which will complain about
+ * calling an undefined rule. We could issue the error here, but we do not
+ * have the necessary information, such as frame.
+ */
+ if ( !result )
+ result = enter_rule( rulename, m );
+ return result;
+}
+
+
+RULE * import_rule( RULE * source, module_t * m, OBJECT * name )
+{
+ RULE * const dest = define_rule( source->module, name, m );
+ set_rule_body( dest, source->procedure );
+ set_rule_actions( dest, source->actions );
+ return dest;
+}
+
+
+void rule_localize( RULE * rule, module_t * m )
+{
+ rule->module = m;
+ if ( rule->procedure )
+ {
+ FUNCTION * procedure = function_unbind_variables( rule->procedure );
+ function_refer( procedure );
+ function_free( rule->procedure );
+ rule->procedure = procedure;
+ }
+}
diff --git a/src/boost/tools/build/src/engine/rules.h b/src/boost/tools/build/src/engine/rules.h
new file mode 100644
index 000000000..29889511e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/rules.h
@@ -0,0 +1,274 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * rules.h - targets, rules, and related information
+ *
+ * This file describes the structures holding the targets, rules, and related
+ * information accumulated by interpreting the statements of the jam files.
+ *
+ * The following are defined:
+ *
+ * RULE - a generic jam rule, the product of RULE and ACTIONS.
+ * ACTIONS - a chain of ACTIONs.
+ * ACTION - a RULE instance with targets and sources.
+ * SETTINGS - variables to set when executing a TARGET's ACTIONS.
+ * TARGETS - a chain of TARGETs.
+ * TARGET - an entity (e.g. a file) that can be built.
+ */
+
+#ifndef RULES_DWA_20011020_H
+#define RULES_DWA_20011020_H
+
+#include "config.h"
+#include "function.h"
+#include "modules.h"
+#include "timestamp.h"
+
+
+typedef struct _rule RULE;
+typedef struct _target TARGET;
+typedef struct _targets TARGETS;
+typedef struct _action ACTION;
+typedef struct _actions ACTIONS;
+typedef struct _settings SETTINGS ;
+
+/* RULE - a generic jam rule, the product of RULE and ACTIONS. */
+
+/* Build actions corresponding to a rule. */
+struct rule_actions
+{
+ int reference_count;
+ FUNCTION * command; /* command string from ACTIONS */
+ LIST * bindlist;
+ int flags; /* modifiers on ACTIONS */
+
+#define RULE_NEWSRCS 0x01 /* $(>) is updated sources only */
+#define RULE_TOGETHER 0x02 /* combine actions on single target */
+#define RULE_IGNORE 0x04 /* ignore return status of executes */
+#define RULE_QUIETLY 0x08 /* do not mention it unless verbose */
+#define RULE_PIECEMEAL 0x10 /* split exec so each $(>) is small */
+#define RULE_EXISTING 0x20 /* $(>) is pre-existing sources only */
+};
+
+typedef struct rule_actions rule_actions;
+typedef struct argument_list argument_list;
+
+struct _rule
+{
+ OBJECT * name;
+ FUNCTION * procedure;
+ rule_actions * actions; /* build actions, or NULL for no actions */
+ module_t * module; /* module in which this rule is executed */
+ int exported; /* nonzero if this rule is supposed to appear in
+ * the global module and be automatically
+ * imported into other modules
+ */
+};
+
+/* ACTIONS - a chain of ACTIONs. */
+struct _actions
+{
+ ACTIONS * next;
+ ACTIONS * tail; /* valid only for head */
+ ACTION * action;
+};
+
+/* ACTION - a RULE instance with targets and sources. */
+struct _action
+{
+ RULE * rule;
+ TARGETS * targets;
+ TARGETS * sources; /* aka $(>) */
+ char running; /* has been started */
+#define A_INIT 0
+#define A_RUNNING_NOEXEC 1
+#define A_RUNNING 2
+ int refs;
+
+ /* WARNING: These variables are used to pass state required by make1cmds and
+ * are not valid anywhere else.
+ */
+ void * first_cmd; /* Pointer to the first CMD created by this action */
+ void * last_cmd; /* Pointer to the last CMD created by this action */
+};
+
+/* SETTINGS - variables to set when executing a TARGET's ACTIONS. */
+struct _settings
+{
+ SETTINGS * next;
+ OBJECT * symbol; /* symbol name for var_set() */
+ LIST * value; /* symbol value for var_set() */
+};
+
+/* TARGETS - a chain of TARGETs. */
+struct _targets
+{
+ TARGETS * next;
+ TARGETS * tail; /* valid only for head */
+ TARGET * target;
+};
+
+/* TARGET - an entity (e.g. a file) that can be built. */
+struct _target
+{
+ OBJECT * name;
+ OBJECT * boundname; /* if search() relocates target */
+ ACTIONS * actions; /* rules to execute, if any */
+ SETTINGS * settings; /* variables to define */
+
+ TARGETS * depends; /* dependencies */
+ TARGETS * dependants; /* the inverse of dependencies */
+ TARGETS * rebuilds; /* targets that should be force-rebuilt
+ * whenever this one is
+ */
+ TARGET * includes; /* internal includes node */
+
+ timestamp time; /* update time */
+ timestamp leaf; /* update time of leaf sources */
+
+ short flags; /* status info */
+
+#define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */
+#define T_FLAG_NOCARE 0x0002 /* NOCARE applied */
+#define T_FLAG_NOTFILE 0x0004 /* NOTFILE applied */
+#define T_FLAG_TOUCHED 0x0008 /* ALWAYS applied or -t target */
+#define T_FLAG_LEAVES 0x0010 /* LEAVES applied */
+#define T_FLAG_NOUPDATE 0x0020 /* NOUPDATE applied */
+#define T_FLAG_VISITED 0x0040 /* CWM: Used in debugging */
+
+/* This flag has been added to support a new built-in rule named "RMBAD". It is
+ * used to force removal of outdated targets whose dependencies fail to build.
+ */
+#define T_FLAG_RMOLD 0x0080 /* RMBAD applied */
+
+/* This flag was added to support a new built-in rule named "FAIL_EXPECTED" used
+ * to indicate that the result of running a given action should be inverted,
+ * i.e. ok <=> fail. Useful for launching certain test runs from a Jamfile.
+ */
+#define T_FLAG_FAIL_EXPECTED 0x0100 /* FAIL_EXPECTED applied */
+
+#define T_FLAG_INTERNAL 0x0200 /* internal INCLUDES node */
+
+/* Indicates that the target must be a file. Prevents matching non-files, like
+ * directories, when a target is searched.
+ */
+#define T_FLAG_ISFILE 0x0400
+
+#define T_FLAG_PRECIOUS 0x0800
+
+ char binding; /* how target relates to a real file or
+ * folder
+ */
+
+#define T_BIND_UNBOUND 0 /* a disembodied name */
+#define T_BIND_MISSING 1 /* could not find real file */
+#define T_BIND_PARENTS 2 /* using parent's timestamp */
+#define T_BIND_EXISTS 3 /* real file, timestamp valid */
+
+ char fate; /* make0()'s diagnosis */
+
+#define T_FATE_INIT 0 /* nothing done to target */
+#define T_FATE_MAKING 1 /* make0(target) on stack */
+
+#define T_FATE_STABLE 2 /* target did not need updating */
+#define T_FATE_NEWER 3 /* target newer than parent */
+
+#define T_FATE_SPOIL 4 /* >= SPOIL rebuilds parents */
+#define T_FATE_ISTMP 4 /* unneeded temp target oddly present */
+
+#define T_FATE_BUILD 5 /* >= BUILD rebuilds target */
+#define T_FATE_TOUCHED 5 /* manually touched with -t */
+#define T_FATE_REBUILD 6
+#define T_FATE_MISSING 7 /* is missing, needs updating */
+#define T_FATE_NEEDTMP 8 /* missing temp that must be rebuilt */
+#define T_FATE_OUTDATED 9 /* is out of date, needs updating */
+#define T_FATE_UPDATE 10 /* deps updated, needs updating */
+
+#define T_FATE_BROKEN 11 /* >= BROKEN ruins parents */
+#define T_FATE_CANTFIND 11 /* no rules to make missing target */
+#define T_FATE_CANTMAKE 12 /* can not find dependencies */
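+
+/* Illustrative note (not part of the original sources): the T_FATE_* values
+ * are ordered by severity and compared numerically elsewhere in the engine:
+ * e.g. a target classified T_FATE_MISSING (7) satisfies fate >= T_FATE_BUILD
+ * and so gets rebuilt, while anything at or above T_FATE_BROKEN (11) also
+ * ruins its parents.
+ */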
+
+ char progress; /* tracks make1() progress */
+
+#define T_MAKE_INIT 0 /* make1(target) not yet called */
+#define T_MAKE_ONSTACK 1 /* make1(target) on stack */
+#define T_MAKE_ACTIVE 2 /* make1(target) in make1b() */
+#define T_MAKE_RUNNING 3 /* make1(target) running commands */
+#define T_MAKE_DONE 4 /* make1(target) done */
+#define T_MAKE_NOEXEC_DONE 5 /* make1(target) done with -n in effect */
+
+#ifdef OPT_SEMAPHORE
+ #define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */
+#endif
+
+ char status; /* exec_cmd() result */
+
+#ifdef OPT_SEMAPHORE
+ TARGET * semaphore; /* used in serialization */
+#endif
+
+ int asynccnt; /* child deps outstanding */
+ TARGETS * parents; /* used by make1() for completion */
+ TARGET * scc_root; /* used by make to resolve cyclic includes
+ */
+ TARGET * rescanning; /* used by make0 to mark visited targets
+ * when rescanning
+ */
+ int depth; /* The depth of the target in the make0
+ * stack.
+ */
+ char * cmds; /* type-punned command list */
+
+ char const * failed;
+};
+
+
+/* Action related functions. */
+void action_free ( ACTION * );
+ACTIONS * actionlist ( ACTIONS *, ACTION * );
+void freeactions ( ACTIONS * );
+SETTINGS * addsettings ( SETTINGS *, int flag, OBJECT * symbol, LIST * value );
+void pushsettings ( module_t *, SETTINGS * );
+void popsettings ( module_t *, SETTINGS * );
+SETTINGS * copysettings ( SETTINGS * );
+void freesettings ( SETTINGS * );
+void actions_refer( rule_actions * );
+void actions_free ( rule_actions * );
+
+/* Rule related functions. */
+RULE * bindrule ( OBJECT * rulename, module_t * );
+RULE * import_rule ( RULE * source, module_t *, OBJECT * name );
+void rule_localize ( RULE * rule, module_t * module );
+RULE * new_rule_body ( module_t *, OBJECT * rulename, FUNCTION * func, int exprt );
+RULE * new_rule_actions( module_t *, OBJECT * rulename, FUNCTION * command, LIST * bindlist, int flags );
+void rule_free ( RULE * );
+
+/* Target related functions. */
+void bind_explicitly_located_targets();
+TARGET * bindtarget ( OBJECT * const );
+void freetargets ( TARGETS * );
+TARGETS * targetchain ( TARGETS *, TARGETS * );
+TARGETS * targetentry ( TARGETS *, TARGET * );
+void target_include ( TARGET * const including,
+ TARGET * const included );
+void target_include_many ( TARGET * const including,
+ LIST * const included_names );
+TARGETS * targetlist ( TARGETS *, LIST * target_names );
+void touch_target ( OBJECT * const );
+void clear_includes ( TARGET * );
+TARGET * target_scc ( TARGET * );
+
+/* Final module cleanup. */
+void rules_done();
+
+#endif
diff --git a/src/boost/tools/build/src/engine/scan.cpp b/src/boost/tools/build/src/engine/scan.cpp
new file mode 100644
index 000000000..37fc5a1dc
--- /dev/null
+++ b/src/boost/tools/build/src/engine/scan.cpp
@@ -0,0 +1,743 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * scan.c - the jam yacc scanner
+ *
+ */
+
+#include "jam.h"
+#include "scan.h"
+#include "output.h"
+
+#include "constants.h"
+#include "jambase.h"
+#include "jamgram.hpp"
+
+
+struct keyword
+{
+ const char * word;
+ int type;
+} keywords[] =
+{
+#include "jamgramtab.h"
+ { 0, 0 }
+};
+
+typedef struct include include;
+struct include
+{
+ include * next; /* next serial include file */
+ char * string; /* pointer into current line */
+ char * * strings; /* for yyfparse() -- text to parse */
+ LISTITER pos; /* for yysparse() -- text to parse */
+ LIST * list; /* for yysparse() -- text to parse */
+ FILE * file; /* for yyfparse() -- file being read */
+ OBJECT * fname; /* for yyfparse() -- file name */
+ int line; /* line counter for error messages */
+ char buf[ 512 ]; /* for yyfparse() -- line buffer */
+};
+
+static include * incp = 0; /* current file; head of chain */
+
+static int scanmode = SCAN_NORMAL;
+static int anyerrors = 0;
+
+
+static char * symdump( YYSTYPE * );
+
+#define BIGGEST_TOKEN 10240 /* no single token can be larger */
+
+
+/*
+ * Set parser mode: normal, string, or keyword.
+ */
+
+int yymode( int n )
+{
+ int result = scanmode;
+ scanmode = n;
+ return result;
+}
+
+
+void yyerror( char const * s )
+{
+ /* We use yylval instead of incp to access the error location information as
+ * the incp pointer will already be reset to 0 in case the error occurred at
+ * EOF.
+ *
+ * The two may differ only if we ran into an unexpected EOF or get an error
+ * while reading a lexical token spanning multiple lines, e.g. a multi-line
+ * string literal or action body, in which case yylval location information
+ * will hold the information about where the token started while incp will
+ * hold the information about where reading it broke.
+ */
+ out_printf( "%s:%d: %s at %s\n", object_str( yylval.file ), yylval.line, s,
+ symdump( &yylval ) );
+ ++anyerrors;
+}
+
+
+int yyanyerrors()
+{
+ return anyerrors != 0;
+}
+
+
+void yyfparse( OBJECT * s )
+{
+ include * i = (include *)BJAM_MALLOC( sizeof( *i ) );
+
+ /* Push this onto the incp chain. */
+ i->string = (char*)"";
+ i->strings = 0;
+ i->file = 0;
+ i->fname = object_copy( s );
+ i->line = 0;
+ i->next = incp;
+ incp = i;
+
+ /* If the filename is "+", it means use the internal jambase. */
+ if ( !strcmp( object_str( s ), "+" ) )
+ i->strings = (char**)jambase;
+}
+
+
+void yysparse( OBJECT * name, const char * * lines )
+{
+ yyfparse( name );
+ incp->strings = (char * *)lines;
+}
+
+
+/*
+ * yyfdone() - cleanup after we're done parsing a file.
+ */
+void yyfdone( void )
+{
+ include * const i = incp;
+ incp = i->next;
+
+ /* Close file, free name. */
+ if(i->file && (i->file != stdin))
+ fclose(i->file);
+ object_free(i->fname);
+ BJAM_FREE((char *)i);
+}
+
+
+/*
+ * yyline() - read new line and return first character.
+ *
+ * Fabricates a continuous stream of characters across include files, returning
+ * EOF at the bitter end.
+ */
+
+int yyline()
+{
+ include * const i = incp;
+
+ if ( !incp )
+ return EOF;
+
+ /* Once we start reading from the input stream, we reset the include
+ * insertion point so that the next include file becomes the head of the
+ * list.
+ */
+
+ /* If there is more data in this line, return it. */
+ if ( *i->string )
+ return *i->string++;
+
+ /* If we are reading from an internal string list, go to the next string. */
+ if ( i->strings )
+ {
+ if ( *i->strings )
+ {
+ ++i->line;
+ i->string = *(i->strings++);
+ return *i->string++;
+ }
+ }
+ else
+ {
+ /* If necessary, open the file. */
+ if ( !i->file )
+ {
+ FILE * f = stdin;
+ if ( strcmp( object_str( i->fname ), "-" ) && !( f = fopen( object_str( i->fname ), "r" ) ) )
+ perror( object_str( i->fname ) );
+ i->file = f;
+ }
+
+ /* If there is another line in this file, start it. */
+ if ( i->file && fgets( i->buf, sizeof( i->buf ), i->file ) )
+ {
+ ++i->line;
+ i->string = i->buf;
+ return *i->string++;
+ }
+ }
+
+ /* This include is done. Return EOF so yyparse() returns to
+ * parse_file().
+ */
+
+ return EOF;
+}
+
+/* This allows us to get an extra character of lookahead.
+ * There are a few places where we need to look ahead two
+ * characters and yyprev only guarantees a single character
+ * of putback.
+ */
+int yypeek()
+{
+ if ( *incp->string )
+ {
+ return *incp->string;
+ }
+ else if ( incp->strings )
+ {
+ if ( *incp->strings )
+ return **incp->strings;
+ }
+ else if ( incp->file )
+ {
+ /* Don't bother opening the file. yypeek is
+ * only used in special cases and never at the
+ * beginning of a file.
+ */
+ int ch = fgetc( incp->file );
+ if ( ch != EOF )
+ ungetc( ch, incp->file );
+ return ch;
+ }
+ return EOF;
+}
+
+/*
+ * yylex() - set yylval to current token; return its type.
+ *
+ * Macros to move things along:
+ *
+ * yychar() - return and advance character; invalid after EOF.
+ * yyprev() - back up one character; invalid before yychar().
+ *
+ * yychar() returns a continuous stream of characters, until it hits the EOF of
+ * the current include file.
+ */
+
+#define yychar() ( *incp->string ? *incp->string++ : yyline() )
+#define yyprev() ( incp->string-- )
+
+static int use_new_scanner = 0;
+
+#define yystartkeyword() if(use_new_scanner) break; else token_warning()
+#define yyendkeyword() if(use_new_scanner) break; else if ( 1 ) { expect_whitespace = 1; continue; } else (void)0
+
+void do_token_warning()
+{
+ out_printf( "%s:%d: %s %s\n", object_str( yylval.file ), yylval.line, "Unescaped special character in",
+ symdump( &yylval ) );
+}
+
+#define token_warning() has_token_warning = 1
+
+int yylex()
+{
+ int c;
+ char buf[ BIGGEST_TOKEN ];
+ char * b = buf;
+
+ if ( !incp )
+ goto eof;
+
+ /* Get first character (whitespace or of token). */
+ c = yychar();
+
+ if ( scanmode == SCAN_STRING )
+ {
+ /* If scanning for a string (action's {}'s), look for the closing brace.
+ * Nested braces are handled, as long as they are balanced.
+ */
+
+ int nest = 1;
+
+ while ( ( c != EOF ) && ( b < buf + sizeof( buf ) ) )
+ {
+ if ( c == '{' )
+ ++nest;
+
+ if ( ( c == '}' ) && !--nest )
+ break;
+
+ *b++ = c;
+
+ c = yychar();
+
+ /* Turn trailing "\r\n" sequences into plain "\n" for Cygwin. */
+ if ( ( c == '\n' ) && ( b[ -1 ] == '\r' ) )
+ --b;
+ }
+
+ /* We ate the ending brace -- regurgitate it. */
+ if ( c != EOF )
+ yyprev();
+
+ /* Check for obvious errors. */
+ if ( b == buf + sizeof( buf ) )
+ {
+ yyerror( "action block too big" );
+ goto eof;
+ }
+
+ if ( nest )
+ {
+ yyerror( "unmatched {} in action block" );
+ goto eof;
+ }
+
+ *b = 0;
+ yylval.type = STRING;
+ yylval.string = object_new( buf );
+ yylval.file = incp->fname;
+ yylval.line = incp->line;
+ }
+ else
+ {
+ char * b = buf;
+ struct keyword * k;
+ int inquote = 0;
+ int notkeyword;
+ int hastoken = 0;
+ int hasquote = 0;
+ int ingrist = 0;
+ int invarexpand = 0;
+ int expect_whitespace = 0;
+ int has_token_warning = 0;
+
+ /* Eat white space. */
+ for ( ; ; )
+ {
+ /* Skip past white space. */
+ while ( ( c != EOF ) && isspace( c ) )
+ c = yychar();
+
+ /* Not a comment? */
+ if ( c != '#' )
+ break;
+
+ c = yychar();
+ if ( ( c != EOF ) && c == '|' )
+ {
+ /* Swallow up block comment. */
+ int c0 = yychar();
+ int c1 = yychar();
+ while ( ! ( c0 == '|' && c1 == '#' ) && ( c0 != EOF && c1 != EOF ) )
+ {
+ c0 = c1;
+ c1 = yychar();
+ }
+ c = yychar();
+ }
+ else
+ {
+ /* Swallow up comment line. */
+ while ( ( c != EOF ) && ( c != '\n' ) ) c = yychar();
+ }
+ }
+
+ /* c now points to the first character of a token. */
+ if ( c == EOF )
+ goto eof;
+
+ yylval.file = incp->fname;
+ yylval.line = incp->line;
+
+ /* While scanning the word, disqualify it for (expensive) keyword lookup
+ * when we can: $anything, "anything", \anything
+ */
+ notkeyword = c == '$';
+
+ /* Look for white space to delimit word. "'s get stripped but preserve
+ * white space. \ protects next character.
+ */
+ while
+ (
+ ( c != EOF ) &&
+ ( b < buf + sizeof( buf ) ) &&
+ ( inquote || invarexpand || !isspace( c ) )
+ )
+ {
+ if ( expect_whitespace || ( isspace( c ) && ! inquote ) )
+ {
+ token_warning();
+ expect_whitespace = 0;
+ }
+ if ( !inquote && !invarexpand )
+ {
+ if ( scanmode == SCAN_COND || scanmode == SCAN_CONDB )
+ {
+ if ( hastoken && ( c == '=' || c == '<' || c == '>' || c == '!' || c == '(' || c == ')' || c == '&' || c == '|' ) )
+ {
+ /* Don't treat > as special if we started with a grist. */
+ if ( ! ( scanmode == SCAN_CONDB && ingrist == 1 && c == '>' ) )
+ {
+ yystartkeyword();
+ }
+ }
+ else if ( c == '=' || c == '(' || c == ')' )
+ {
+ *b++ = c;
+ c = yychar();
+ yyendkeyword();
+ }
+ else if ( c == '!' || ( scanmode == SCAN_COND && ( c == '<' || c == '>' ) ) )
+ {
+ *b++ = c;
+ if ( ( c = yychar() ) == '=' )
+ {
+ *b++ = c;
+ c = yychar();
+ }
+ yyendkeyword();
+ }
+ else if ( c == '&' || c == '|' )
+ {
+ *b++ = c;
+ if ( yychar() == c )
+ {
+ *b++ = c;
+ c = yychar();
+ }
+ yyendkeyword();
+ }
+ }
+ else if ( scanmode == SCAN_PARAMS )
+ {
+ if ( c == '*' || c == '+' || c == '?' || c == '(' || c == ')' )
+ {
+ if ( !hastoken )
+ {
+ *b++ = c;
+ c = yychar();
+ yyendkeyword();
+ }
+ else
+ {
+ yystartkeyword();
+ }
+ }
+ }
+ else if ( scanmode == SCAN_XASSIGN && ! hastoken )
+ {
+ if ( c == '=' )
+ {
+ *b++ = c;
+ c = yychar();
+ yyendkeyword();
+ }
+ else if ( c == '+' || c == '?' )
+ {
+ if ( yypeek() == '=' )
+ {
+ *b++ = c;
+ *b++ = yychar();
+ c = yychar();
+ yyendkeyword();
+ }
+ }
+ }
+ else if ( scanmode == SCAN_NORMAL || scanmode == SCAN_ASSIGN )
+ {
+ if ( c == '=' )
+ {
+ if ( !hastoken )
+ {
+ *b++ = c;
+ c = yychar();
+ yyendkeyword();
+ }
+ else
+ {
+ yystartkeyword();
+ }
+ }
+ else if ( c == '+' || c == '?' )
+ {
+ if ( yypeek() == '=' )
+ {
+ if ( hastoken )
+ {
+ yystartkeyword();
+ }
+ else
+ {
+ *b++ = c;
+ *b++ = yychar();
+ c = yychar();
+ yyendkeyword();
+ }
+ }
+ }
+ }
+ if ( scanmode != SCAN_CASE && ( c == ';' || c == '{' || c == '}' ||
+ ( scanmode != SCAN_PARAMS && ( c == '[' || c == ']' ) ) ) )
+ {
+ if ( ! hastoken )
+ {
+ *b++ = c;
+ c = yychar();
+ yyendkeyword();
+ }
+ else
+ {
+ yystartkeyword();
+ }
+ }
+ else if ( c == ':' )
+ {
+ if ( ! hastoken )
+ {
+ *b++ = c;
+ c = yychar();
+ yyendkeyword();
+ break;
+ }
+ else if ( hasquote )
+ {
+ /* Special rules for ':' do not apply after we quote anything. */
+ yystartkeyword();
+ }
+ else if ( ingrist == 0 )
+ {
+ int next = yychar();
+ int is_win_path = 0;
+ int is_conditional = 0;
+ if ( next == '\\' )
+ {
+ if( yypeek() == '\\' )
+ {
+ is_win_path = 1;
+ }
+ }
+ else if ( next == '/' )
+ {
+ is_win_path = 1;
+ }
+ yyprev();
+ if ( is_win_path )
+ {
+ /* Accept windows paths iff they are at the start or immediately follow a grist. */
+ if ( b > buf && isalpha( b[ -1 ] ) && ( b == buf + 1 || b[ -2 ] == '>' ) )
+ {
+ is_win_path = 1;
+ }
+ else
+ {
+ is_win_path = 0;
+ }
+ }
+ if ( next == '<' )
+ {
+ /* Accept conditionals only for tokens that start with "<" or "!<" */
+ if ( ( (b > buf) && (buf[ 0 ] == '<') ) ||
+ ( (b > (buf + 1)) && (buf[ 0 ] == '!') && (buf[ 1 ] == '<') ))
+ {
+ is_conditional = 1;
+ }
+ }
+ if ( !is_conditional && !is_win_path )
+ {
+ yystartkeyword();
+ }
+ }
+ }
+ }
+ hastoken = 1;
+ if ( c == '"' )
+ {
+ /* begin or end " */
+ inquote = !inquote;
+ hasquote = 1;
+ notkeyword = 1;
+ }
+ else if ( c != '\\' )
+ {
+ if ( !invarexpand && c == '<' )
+ {
+ if ( ingrist == 0 ) ingrist = 1;
+ else ingrist = -1;
+ }
+ else if ( !invarexpand && c == '>' )
+ {
+ if ( ingrist == 1 ) ingrist = 0;
+ else ingrist = -1;
+ }
+ else if ( c == '$' )
+ {
+ if ( ( c = yychar() ) == EOF )
+ {
+ *b++ = '$';
+ break;
+ }
+ else if ( c == '(' )
+ {
+ /* inside $(), we only care about quotes */
+ *b++ = '$';
+ c = '(';
+ ++invarexpand;
+ }
+ else
+ {
+ c = '$';
+ yyprev();
+ }
+ }
+ else if ( c == '@' )
+ {
+ if ( ( c = yychar() ) == EOF )
+ {
+ *b++ = '@';
+ break;
+ }
+ else if ( c == '(' )
+ {
+ /* inside @(), we only care about quotes */
+ *b++ = '@';
+ c = '(';
+ ++invarexpand;
+ }
+ else
+ {
+ c = '@';
+ yyprev();
+ }
+ }
+ else if ( invarexpand && c == '(' )
+ {
+ ++invarexpand;
+ }
+ else if ( invarexpand && c == ')' )
+ {
+ --invarexpand;
+ }
+ /* normal char */
+ *b++ = c;
+ }
+ else if ( ( c = yychar() ) != EOF )
+ {
+ /* \c */
+ if (c == 'n')
+ c = '\n';
+ else if (c == 'r')
+ c = '\r';
+ else if (c == 't')
+ c = '\t';
+ *b++ = c;
+ notkeyword = 1;
+ }
+ else
+ {
+ /* \EOF */
+ break;
+ }
+
+ c = yychar();
+ }
+
+ /* Automatically switch modes after reading the token. */
+ if ( scanmode == SCAN_CONDB )
+ scanmode = SCAN_COND;
+
+ /* Check obvious errors. */
+ if ( b == buf + sizeof( buf ) )
+ {
+ yyerror( "string too big" );
+ goto eof;
+ }
+
+ if ( inquote )
+ {
+ yyerror( "unmatched \" in string" );
+ goto eof;
+ }
+
+ /* We looked ahead a character - back up. */
+ if ( c != EOF )
+ yyprev();
+
+ /* Scan token table. Do not scan if it is obviously not a keyword or if
+ * it is an alphabetic when we're looking for punctuation.
+ */
+
+ *b = 0;
+ yylval.type = ARG;
+
+ if ( !notkeyword && !( isalpha( *buf ) && ( scanmode == SCAN_PUNCT || scanmode == SCAN_PARAMS || scanmode == SCAN_ASSIGN ) ) )
+ for ( k = keywords; k->word; ++k )
+ if ( ( *buf == *k->word ) && !strcmp( k->word, buf ) )
+ {
+ yylval.type = k->type;
+ yylval.keyword = k->word; /* used by symdump */
+ break;
+ }
+
+ if ( yylval.type == ARG )
+ yylval.string = object_new( buf );
+
+ if ( scanmode == SCAN_NORMAL && yylval.type == ARG )
+ scanmode = SCAN_XASSIGN;
+
+ if ( has_token_warning )
+ do_token_warning();
+ }
+
+ if ( DEBUG_SCAN )
+ out_printf( "scan %s\n", symdump( &yylval ) );
+
+ return yylval.type;
+
+eof:
+ /* We do not reset yylval.file & yylval.line here so unexpected EOF error
+ * messages would include correct error location information.
+ */
+ yylval.type = EOF;
+ return yylval.type;
+}
+
+
+static char * symdump( YYSTYPE * s )
+{
+ static char buf[ BIGGEST_TOKEN + 20 ];
+ switch ( s->type )
+ {
+ case EOF : sprintf( buf, "EOF" ); break;
+ case 0 : sprintf( buf, "unknown symbol %s", object_str( s->string ) ); break;
+ case ARG : sprintf( buf, "argument %s" , object_str( s->string ) ); break;
+ case STRING: sprintf( buf, "string \"%s\"" , object_str( s->string ) ); break;
+ default : sprintf( buf, "keyword %s" , s->keyword ); break;
+ }
+ return buf;
+}
+
+
+/*
+ * Get information about the current file and line, for those epsilon
+ * transitions that produce a parse.
+ */
+
+void yyinput_last_read_token( OBJECT * * name, int * line )
+{
+ /* TODO: Consider whether and when we might want to report where the last
+ * read token ended, e.g. EOF errors inside string literals.
+ */
+ *name = yylval.file;
+ *line = yylval.line;
+}
diff --git a/src/boost/tools/build/src/engine/scan.h b/src/boost/tools/build/src/engine/scan.h
new file mode 100644
index 000000000..2ad736ad6
--- /dev/null
+++ b/src/boost/tools/build/src/engine/scan.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * scan.h - the jam yacc scanner
+ *
+ * External functions:
+ * yyerror( char *s ) - print a parsing error message.
+ * yyfparse( char *s ) - scan include file s.
+ * yylex() - parse the next token, returning its type.
+ * yymode() - adjust lexicon of scanner.
+ * yyparse() - declaration for yacc parser.
+ * yyanyerrors() - indicate if any parsing errors occurred.
+ *
+ * The yymode() function is for the parser to adjust the lexicon of the scanner.
+ * Aside from normal keyword scanning, there is a mode to handle action strings
+ * (look only for the closing }) and a mode to ignore most keywords when looking
+ * for a punctuation keyword. This allows non-punctuation keywords to be used in
+ * lists without quoting.
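+ *
+ * Typical (illustrative) save/restore use from the parser side:
+ *
+ *   int prev = yymode( SCAN_STRING );  scan an action body up to '}'
+ *   ...                                parse the body
+ *   yymode( prev );                    restore the previous scanning mode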
+ */
+
+#include "config.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+
+
+/*
+ * YYSTYPE - value of a lexical token
+ */
+
+#define YYSTYPE YYSYMBOL
+
+typedef struct _YYSTYPE
+{
+ int type;
+ OBJECT * string;
+ PARSE * parse;
+ LIST * list;
+ int number;
+ OBJECT * file;
+ int line;
+ char const * keyword;
+} YYSTYPE;
+
+extern YYSTYPE yylval;
+
+int yymode( int n );
+void yyerror( char const * s );
+int yyanyerrors();
+void yyfparse( OBJECT * s );
+void yyfdone( void );
+void yysparse( OBJECT * name, const char * * lines );
+int yyline();
+int yylex();
+int yyparse();
+void yyinput_last_read_token( OBJECT * * name, int * line );
+
+#define SCAN_NORMAL 0 /* normal parsing */
+#define SCAN_STRING 1 /* look only for matching } */
+#define SCAN_PUNCT 2 /* only punctuation keywords */
+#define SCAN_COND 3 /* look for operators that can appear in conditions. */
+#define SCAN_PARAMS 4 /* The parameters of a rule "()*?+" */
+#define SCAN_CALL 5 /* Inside a rule call. [].*/
+#define SCAN_CASE 6 /* A case statement. We only recognize ':' as special. */
+#define SCAN_CONDB 7 /* The beginning of a condition (ignores leading comparison operators, so that if <x> in $(y) works.)*/
+#define SCAN_ASSIGN 8 /* The list may be terminated by an assignment operator. */
+#define SCAN_XASSIGN 9 /* The next token might be an assignment, but the token afterwards cannot. */
diff --git a/src/boost/tools/build/src/engine/search.cpp b/src/boost/tools/build/src/engine/search.cpp
new file mode 100644
index 000000000..7529e02dc
--- /dev/null
+++ b/src/boost/tools/build/src/engine/search.cpp
@@ -0,0 +1,275 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "search.h"
+
+#include "compile.h"
+#include "filesys.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+#include "timestamp.h"
+#include "variable.h"
+#include "output.h"
+
+#include <string.h>
+
+
+typedef struct _binding
+{
+ OBJECT * binding;
+ OBJECT * target;
+} BINDING;
+
+static struct hash * explicit_bindings = 0;
+
+
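+/* call_bind_rule() - invoke the rule named by the global BINDRULE variable,
+ * if set, passing it the target name and the bound file name. Illustratively,
+ * a Jamfile could do:
+ *
+ *   rule my-bind-rule ( target : boundname ) { ECHO bound $(target) to $(boundname) ; }
+ *   BINDRULE = my-bind-rule ;
+ */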
+void call_bind_rule( OBJECT * target_, OBJECT * boundname_ )
+{
+ LIST * const bind_rule = var_get( root_module(), constant_BINDRULE );
+ if ( !list_empty( bind_rule ) )
+ {
+ OBJECT * target = object_copy( target_ );
+ OBJECT * boundname = object_copy( boundname_ );
+ if ( boundname && target )
+ {
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ frame_init( frame );
+
+ /* First argument is the target name. */
+ lol_add( frame->args, list_new( target ) );
+
+ lol_add( frame->args, list_new( boundname ) );
+ if ( lol_get( frame->args, 1 ) )
+ {
+ OBJECT * rulename = list_front( bind_rule );
+ list_free( evaluate_rule( bindrule( rulename, root_module() ), rulename, frame ) );
+ }
+
+ /* Clean up */
+ frame_free( frame );
+ }
+ else
+ {
+ if ( boundname )
+ object_free( boundname );
+ if ( target )
+ object_free( target );
+ }
+ }
+}
+
+/* Records the binding of a target with an explicit LOCATE. */
+void set_explicit_binding( OBJECT * target, OBJECT * locate )
+{
+ OBJECT * boundname;
+ OBJECT * key;
+ PATHNAME f[ 1 ];
+ string buf[ 1 ];
+ int found;
+ BINDING * ba;
+
+ if ( !explicit_bindings )
+ explicit_bindings = hashinit( sizeof( BINDING ), "explicitly specified "
+ "locations" );
+
+ string_new( buf );
+
+ /* Parse the filename. */
+ path_parse( object_str( target ), f );
+
+ /* Ignore the grist. */
+ f->f_grist.ptr = 0;
+ f->f_grist.len = 0;
+
+ /* Root the target path at the given location. */
+ f->f_root.ptr = object_str( locate );
+ f->f_root.len = strlen( object_str( locate ) );
+
+ path_build( f, buf );
+ boundname = object_new( buf->value );
+ if ( DEBUG_SEARCH )
+ out_printf( "explicit locate %s: %s\n", object_str( target ), buf->value );
+ string_free( buf );
+ key = path_as_key( boundname );
+ object_free( boundname );
+
+ ba = (BINDING *)hash_insert( explicit_bindings, key, &found );
+ if ( !found )
+ {
+ ba->binding = key;
+ ba->target = target;
+ }
+ else
+ object_free( key );
+}
+
+/*
+ * search.c - find a target along $(SEARCH) or $(LOCATE).
+ *
+ * First, check if LOCATE is set. If so, use it to determine the location of
+ * target and return, regardless of whether anything exists at that location.
+ *
+ * Second, examine all directories in SEARCH. If the file exists there or there
+ * is another target with the same name already placed at this location via the
+ * LOCATE setting, stop and return the location. In case of a previous target,
+ * return its name via the 'another_target' argument.
+ *
+ * This behaviour allows handling dependencies on generated files.
+ *
+ * If the caller does not expect the target to be generated, 0 can be passed as
+ * 'another_target'.
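+ *
+ * Illustrative Jam-side examples: with "LOCATE on t = out ;" the target t is
+ * bound to out/t whether or not that file exists; with "SEARCH on t = a b ;"
+ * the first of a/t or b/t that exists (or already has an explicitly located
+ * target bound there) is used, and the plain name t is the fallback.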
+ */
+
+OBJECT * search( OBJECT * target, timestamp * const time,
+ OBJECT * * another_target, int const file )
+{
+ PATHNAME f[ 1 ];
+ LIST * varlist;
+ string buf[ 1 ];
+ int found = 0;
+ OBJECT * boundname = 0;
+
+ if ( another_target )
+ *another_target = 0;
+
+ if ( !explicit_bindings )
+ explicit_bindings = hashinit( sizeof( BINDING ), "explicitly specified "
+ "locations" );
+
+ string_new( buf );
+
+ /* Parse the filename. */
+ path_parse( object_str( target ), f );
+
+ f->f_grist.ptr = 0;
+ f->f_grist.len = 0;
+
+ varlist = var_get( root_module(), constant_LOCATE );
+ if ( !list_empty( varlist ) )
+ {
+ OBJECT * key;
+ f->f_root.ptr = object_str( list_front( varlist ) );
+ f->f_root.len = strlen( object_str( list_front( varlist ) ) );
+
+ path_build( f, buf );
+
+ if ( DEBUG_SEARCH )
+ out_printf( "locate %s: %s\n", object_str( target ), buf->value );
+
+ key = object_new( buf->value );
+ timestamp_from_path( time, key );
+ object_free( key );
+ found = 1;
+ }
+ else if ( varlist = var_get( root_module(), constant_SEARCH ),
+ !list_empty( varlist ) )
+ {
+ LISTITER iter = list_begin( varlist );
+ LISTITER const end = list_end( varlist );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ BINDING * ba;
+ file_info_t * ff;
+ OBJECT * key;
+ OBJECT * test_path;
+
+ f->f_root.ptr = object_str( list_item( iter ) );
+ f->f_root.len = strlen( object_str( list_item( iter ) ) );
+
+ string_truncate( buf, 0 );
+ path_build( f, buf );
+
+ if ( DEBUG_SEARCH )
+ out_printf( "search %s: %s\n", object_str( target ), buf->value );
+
+ test_path = object_new( buf->value );
+ key = path_as_key( test_path );
+ object_free( test_path );
+ ff = file_query( key );
+ timestamp_from_path( time, key );
+
+ if ( ( ba = (BINDING *)hash_find( explicit_bindings, key ) ) )
+ {
+ if ( DEBUG_SEARCH )
+ out_printf(" search %s: found explicitly located target %s\n",
+ object_str( target ), object_str( ba->target ) );
+ if ( another_target )
+ *another_target = ba->target;
+ found = 1;
+ object_free( key );
+ break;
+ }
+ else if ( ff )
+ {
+ if ( !file || ff->is_file )
+ {
+ found = 1;
+ object_free( key );
+ break;
+ }
+ }
+ object_free( key );
+ }
+ }
+
+ if ( !found )
+ {
+ /* Look for the obvious. */
+ /* This is a questionable move. Should we look in the obvious place if
+ * SEARCH is set?
+ */
+ OBJECT * key;
+
+ f->f_root.ptr = 0;
+ f->f_root.len = 0;
+
+ string_truncate( buf, 0 );
+ path_build( f, buf );
+
+ if ( DEBUG_SEARCH )
+ out_printf( "search %s: %s\n", object_str( target ), buf->value );
+
+ key = object_new( buf->value );
+ timestamp_from_path( time, key );
+ object_free( key );
+ }
+
+ boundname = object_new( buf->value );
+ string_free( buf );
+
+ /* Prepare a call to BINDRULE if the variable is set. */
+ call_bind_rule( target, boundname );
+
+ return boundname;
+}
+
+
+static void free_binding( void * xbinding, void * data )
+{
+ object_free( ( (BINDING *)xbinding )->binding );
+}
+
+
+void search_done( void )
+{
+ if ( explicit_bindings )
+ {
+ hashenumerate( explicit_bindings, free_binding, 0 );
+ hashdone( explicit_bindings );
+ }
+}
diff --git a/src/boost/tools/build/src/engine/search.h b/src/boost/tools/build/src/engine/search.h
new file mode 100644
index 000000000..80d69fa79
--- /dev/null
+++ b/src/boost/tools/build/src/engine/search.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * search.h - find a target along $(SEARCH) or $(LOCATE)
+ */
+
+#ifndef SEARCH_SW20111118_H
+#define SEARCH_SW20111118_H
+
+#include "config.h"
+#include "object.h"
+#include "timestamp.h"
+
+void set_explicit_binding( OBJECT * target, OBJECT * locate );
+OBJECT * search( OBJECT * target, timestamp * const time,
+ OBJECT * * another_target, int const file );
+void search_done( void );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/subst.cpp b/src/boost/tools/build/src/engine/subst.cpp
new file mode 100644
index 000000000..a5fcee08c
--- /dev/null
+++ b/src/boost/tools/build/src/engine/subst.cpp
@@ -0,0 +1,116 @@
+#include "jam.h"
+#include "subst.h"
+
+#include "builtins.h"
+#include "frames.h"
+#include "hash.h"
+#include "lists.h"
+
+#include <stddef.h>
+
+
+typedef struct regex_entry
+{
+ OBJECT * pattern;
+ regexp * regex;
+} regex_entry;
+
+static struct hash * regex_hash;
+
+
+regexp * regex_compile( OBJECT * pattern )
+{
+ int found;
+ regex_entry * e ;
+
+ if ( !regex_hash )
+ regex_hash = hashinit( sizeof( regex_entry ), "regex" );
+
+ e = (regex_entry *)hash_insert( regex_hash, pattern, &found );
+ if ( !found )
+ {
+ e->pattern = object_copy( pattern );
+ e->regex = regcomp( (char *)pattern );
+ }
+
+ return e->regex;
+}
+
+
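+/*
+ * builtin_subst() - regex substitution built-in.
+ *
+ * Its first argument list holds a source string, a regex pattern and one or
+ * more replacement templates. If the pattern matches the source, each
+ * template is emitted with '$N' or '\N' expanded to the N-th captured group.
+ * Illustrative example: source "abc", pattern "a(b)c" and template "$1$1"
+ * produce the single result "bb".
+ */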
+LIST * builtin_subst( FRAME * frame, int flags )
+{
+ LIST * result = L0;
+ LIST * const arg1 = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( arg1 );
+ LISTITER const end = list_end( arg1 );
+
+ if ( iter != end && list_next( iter ) != end && list_next( list_next( iter )
+ ) != end )
+ {
+ char const * const source = object_str( list_item( iter ) );
+ OBJECT * const pattern = list_item( list_next( iter ) );
+ regexp * const repat = regex_compile( pattern );
+
+ if ( regexec( repat, (char *)source) )
+ {
+ LISTITER subst = list_next( iter );
+
+ while ( ( subst = list_next( subst ) ) != end )
+ {
+#define BUFLEN 4096
+ char buf[ BUFLEN + 1 ];
+ char const * in = object_str( list_item( subst ) );
+ char * out = buf;
+
+ for ( ; *in && out < buf + BUFLEN; ++in )
+ {
+ if ( *in == '\\' || *in == '$' )
+ {
+ ++in;
+ if ( *in == 0 )
+ break;
+ if ( *in >= '0' && *in <= '9' )
+ {
+ unsigned int const n = *in - '0';
+ size_t const srclen = repat->endp[ n ] -
+ repat->startp[ n ];
+ size_t const remaining = buf + BUFLEN - out;
+ size_t const len = srclen < remaining
+ ? srclen
+ : remaining;
+ memcpy( out, repat->startp[ n ], len );
+ out += len;
+ continue;
+ }
+ /* fall through and copy the next character */
+ }
+ *out++ = *in;
+ }
+ *out = 0;
+
+ result = list_push_back( result, object_new( buf ) );
+#undef BUFLEN
+ }
+ }
+ }
+
+ return result;
+}
+
+
+static void free_regex( void * xregex, void * data )
+{
+ regex_entry * const regex = (regex_entry *)xregex;
+ object_free( regex->pattern );
+ BJAM_FREE( regex->regex );
+}
+
+
+void regex_done()
+{
+ if ( regex_hash )
+ {
+ hashenumerate( regex_hash, free_regex, (void *)0 );
+ hashdone( regex_hash );
+ }
+}
diff --git a/src/boost/tools/build/src/engine/subst.h b/src/boost/tools/build/src/engine/subst.h
new file mode 100644
index 000000000..6e43aa024
--- /dev/null
+++ b/src/boost/tools/build/src/engine/subst.h
@@ -0,0 +1,15 @@
+/* Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef SUBST_JG20120722_H
+#define SUBST_JG20120722_H
+
+#include "config.h"
+#include "object.h"
+#include "regexp.h"
+
+regexp * regex_compile( OBJECT * pattern );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/sysinfo.cpp b/src/boost/tools/build/src/engine/sysinfo.cpp
new file mode 100644
index 000000000..c3257e71e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/sysinfo.cpp
@@ -0,0 +1,137 @@
+/* Copyright 2019 Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "sysinfo.h"
+#include "jam.h"
+#include "output.h"
+
+#include <thread>
+
+#if defined(OS_MACOSX)
+#include <sys/types.h>
+#include <sys/sysctl.h>
+#endif
+
+#if !defined(OS_NT)
+#include <unistd.h>
+#endif
+
+#if defined(OS_LINUX)
+// Need to define this in case it's not already defined, as that's the only way
+// to get the sched_* APIs.
+#ifndef _GNU_SOURCE
+#define _GNU_SOURCE
+#endif
+#include <sched.h>
+#endif
+
+
+b2::system_info::system_info()
+{
+}
+
+namespace
+{
+ unsigned int macosx_physicalcpu()
+ {
+ #if defined(OS_MACOSX)
+ int out_hw_ncpu = 0;
+ size_t len_hw_ncpu = sizeof(out_hw_ncpu);
+ int result = ::sysctlbyname(
+ "hw.physicalcpu", &out_hw_ncpu, &len_hw_ncpu, nullptr, 0);
+ if (result == 0) return out_hw_ncpu;
+ #endif
+ return 0;
+ }
+
+ unsigned int macosx_logicalcpu()
+ {
+ #if defined(OS_MACOSX)
+ int out_hw_ncpu = 0;
+ size_t len_hw_ncpu = sizeof(out_hw_ncpu);
+ int result = ::sysctlbyname(
+ "hw.logicalcpu", &out_hw_ncpu, &len_hw_ncpu, nullptr, 0);
+ if (result == 0) return out_hw_ncpu;
+ #endif
+ return 0;
+ }
+
+ unsigned int sched_affinity_cpu_count()
+ {
+ #if defined(CPU_COUNT_S)
+ ::cpu_set_t cpu_set;
+ if (::sched_getaffinity(0, sizeof(cpu_set_t), &cpu_set) == 0)
+ {
+ return CPU_COUNT_S(sizeof(cpu_set_t), &cpu_set);
+ }
+ #endif
+ return 0;
+ }
+
+ unsigned int sysconf_nprocs_configured()
+ {
+ #if defined(_SC_NPROCESSORS_CONF)
+ return ::sysconf(_SC_NPROCESSORS_CONF);
+ #else
+ return 0;
+ #endif
+ }
+
+ unsigned int sysconf_nprocs_online()
+ {
+ #if defined(_SC_NPROCESSORS_ONLN)
+ return ::sysconf(_SC_NPROCESSORS_ONLN);
+ #else
+ return 0;
+ #endif
+ }
+
+ unsigned int std_thread_hardware_concurrency()
+ {
+ return std::thread::hardware_concurrency();
+ }
+}
+
+unsigned int b2::system_info::cpu_core_count()
+{
+ if (cpu_core_count_ == 0)
+ {
+ cpu_core_count_ = macosx_physicalcpu();
+ }
+ if (cpu_core_count_ == 0)
+ {
+ cpu_core_count_ = sysconf_nprocs_configured();
+ }
+ if (cpu_core_count_ <= 0)
+ {
+ cpu_core_count_ = 1;
+ }
+ return cpu_core_count_;
+}
+
+unsigned int b2::system_info::cpu_thread_count()
+{
+ if (cpu_thread_count_ == 0)
+ {
+ cpu_thread_count_ = macosx_logicalcpu();
+ }
+ if (cpu_thread_count_ == 0)
+ {
+ cpu_thread_count_ = sched_affinity_cpu_count();
+ }
+ if (cpu_thread_count_ == 0)
+ {
+ cpu_thread_count_ = sysconf_nprocs_online();
+ }
+ if (cpu_thread_count_ == 0)
+ {
+ cpu_thread_count_ = std_thread_hardware_concurrency();
+ }
+ if (cpu_thread_count_ == 0)
+ {
+ cpu_thread_count_ = cpu_core_count();
+ }
+ return cpu_thread_count_;
+}
diff --git a/src/boost/tools/build/src/engine/sysinfo.h b/src/boost/tools/build/src/engine/sysinfo.h
new file mode 100644
index 000000000..28c42558f
--- /dev/null
+++ b/src/boost/tools/build/src/engine/sysinfo.h
@@ -0,0 +1,46 @@
+/* Copyright 2019 Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef B2_SYSINFO_H
+#define B2_SYSINFO_H
+
+# include "config.h"
+
+namespace b2
+{
+ /*
+ Provides information about the system (hardware and software) we are
+ running on.
+ */
+ class system_info
+ {
+ public:
+
+ system_info();
+
+ /*
+ Returns the number of physical CPU cores if available. Otherwise
+ returns 1.
+
+ Currently implemented for: OS_MACOSX.
+ */
+ unsigned int cpu_core_count();
+
+ /*
+ Returns the number of logical CPU threads if available. Otherwise
+ returns `cpu_core_count()`.
+
+ Currently implemented for: OS_MACOSX.
+ */
+ unsigned int cpu_thread_count();
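+
+ /*
+ Typical (illustrative) use, e.g. when choosing a default parallel job
+ count:
+
+ b2::system_info info;
+ unsigned int jobs = info.cpu_thread_count();
+ */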
+
+ private:
+
+ unsigned int cpu_core_count_ = 0;
+ unsigned int cpu_thread_count_ = 0;
+ };
+}
+
+#endif
diff --git a/src/boost/tools/build/src/engine/timestamp.cpp b/src/boost/tools/build/src/engine/timestamp.cpp
new file mode 100644
index 000000000..67090ca55
--- /dev/null
+++ b/src/boost/tools/build/src/engine/timestamp.cpp
@@ -0,0 +1,230 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * timestamp.c - get the timestamp of a file or archive member
+ *
+ * External routines:
+ * timestamp_from_path() - return timestamp for a path, if present
+ * timestamp_done() - free timestamp tables
+ *
+ * Internal routines:
+ * free_timestamps() - worker function for freeing timestamp table contents
+ */
+
+#include "jam.h"
+#include "timestamp.h"
+
+#include "filesys.h"
+#include "hash.h"
+#include "object.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+#include "output.h"
+
+
+/*
+ * BINDING - all known files
+ */
+
+typedef struct _binding
+{
+ OBJECT * name;
+ short flags;
+
+#define BIND_SCANNED 0x01 /* if directory or arch, has been scanned */
+
+ short progress;
+
+#define BIND_INIT 0 /* never seen */
+#define BIND_NOENTRY 1 /* timestamp requested but file never found */
+#define BIND_SPOTTED 2 /* file found but not timed yet */
+#define BIND_MISSING 3 /* file found but can not get timestamp */
+#define BIND_FOUND 4 /* file found and time stamped */
+
+ /* update time - cleared if there is nothing to bind */
+ timestamp time;
+} BINDING;
+
+static struct hash * bindhash = 0;
+
+
+#ifdef OS_NT
+/*
+ * timestamp_from_filetime() - Windows FILETIME --> timestamp conversion
+ *
+ * Lifted shamelessly from the CPython implementation.
+ */
+
+void timestamp_from_filetime( timestamp * const t, FILETIME const * const ft )
+{
+ /* Seconds between 1.1.1601 and 1.1.1970 */
+ static __int64 const secs_between_epochs = 11644473600;
+
+ /* We can not simply cast and dereference a FILETIME, since it might not be
+ * aligned properly. __int64 type variables are expected to be aligned to an
+ * 8 byte boundary while FILETIME structures may be aligned to any 4 byte
+ * boundary. Using an incorrectly aligned __int64 variable may cause a
+ * performance penalty on some platforms or even exceptions on others
+ * (documented on MSDN).
+ */
+ __int64 in;
+ memcpy( &in, ft, sizeof( in ) );
+
+ /* FILETIME resolution: 100ns. */
+ timestamp_init( t, (time_t)( ( in / 10000000 ) - secs_between_epochs ),
+ (int)( in % 10000000 ) * 100 );
+}
+#endif /* OS_NT */
+
+
+void timestamp_clear( timestamp * const time )
+{
+ time->secs = time->nsecs = 0;
+}
+
+
+int timestamp_cmp( timestamp const * const lhs, timestamp const * const rhs )
+{
+ return int(
+ lhs->secs == rhs->secs
+ ? lhs->nsecs - rhs->nsecs
+ : lhs->secs - rhs->secs );
+}
+
+
+void timestamp_copy( timestamp * const target, timestamp const * const source )
+{
+ target->secs = source->secs;
+ target->nsecs = source->nsecs;
+}
+
+
+void timestamp_current( timestamp * const t )
+{
+#ifdef OS_NT
+ /* GetSystemTimeAsFileTime()'s resolution seems to be about 15 ms on Windows
+ * XP and under a millisecond on Windows 7.
+ */
+ FILETIME ft;
+ GetSystemTimeAsFileTime( &ft );
+ timestamp_from_filetime( t, &ft );
+#elif defined(_POSIX_TIMERS) && defined(CLOCK_REALTIME) && \
+ (!defined(__GLIBC__) || (__GLIBC__ > 2) || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 17))
+ /* Some older versions of XCode define _POSIX_TIMERS, but don't actually
+ * have clock_gettime. Check CLOCK_REALTIME as well. Prior to glibc 2.17,
+ * clock_gettime requires -lrt. This is a non-critical feature, so
+ * we just disable it to keep bootstrapping simple.
+ */
+ struct timespec ts;
+ clock_gettime( CLOCK_REALTIME, &ts );
+ timestamp_init( t, ts.tv_sec, ts.tv_nsec );
+#else /* OS_NT */
+ timestamp_init( t, time( 0 ), 0 );
+#endif /* OS_NT */
+}
+
+
+int timestamp_empty( timestamp const * const time )
+{
+ return !time->secs && !time->nsecs;
+}
+
+
+/*
+ * timestamp_from_path() - return timestamp for a path, if present
+ */
+
+void timestamp_from_path( timestamp * const time, OBJECT * const path )
+{
+ PROFILE_ENTER( timestamp );
+
+ if ( file_time( path, time ) < 0 )
+ timestamp_clear( time );
+
+ PROFILE_EXIT( timestamp );
+}
+
+
+void timestamp_init( timestamp * const time, time_t const secs, int const nsecs
+ )
+{
+ time->secs = secs;
+ time->nsecs = nsecs;
+}
+
+
+void timestamp_max( timestamp * const max, timestamp const * const lhs,
+ timestamp const * const rhs )
+{
+ if ( timestamp_cmp( lhs, rhs ) > 0 )
+ timestamp_copy( max, lhs );
+ else
+ timestamp_copy( max, rhs );
+}
+
+
+static char const * timestamp_formatstr( timestamp const * const time,
+ char const * const format )
+{
+ static char result1[ 500 ];
+ static char result2[ 500 ];
+ strftime( result1, sizeof( result1 ) / sizeof( *result1 ), format, gmtime(
+ &time->secs ) );
+ sprintf( result2, result1, time->nsecs );
+ return result2;
+}
+
+
+char const * timestamp_str( timestamp const * const time )
+{
+ return timestamp_formatstr( time, "%Y-%m-%d %H:%M:%S.%%09d +0000" );
+}
+
+
+char const * timestamp_timestr( timestamp const * const time )
+{
+ return timestamp_formatstr( time, "%H:%M:%S.%%09d" );
+}
+
+
+/*
+ * free_timestamps() - worker function for freeing timestamp table contents
+ */
+
+static void free_timestamps( void * xbinding, void * data )
+{
+ object_free( ( (BINDING *)xbinding )->name );
+}
+
+
+/*
+ * timestamp_done() - free timestamp tables
+ */
+
+void timestamp_done()
+{
+ if ( bindhash )
+ {
+ hashenumerate( bindhash, free_timestamps, 0 );
+ hashdone( bindhash );
+ }
+}
+
+/*
+ * timestamp_delta_seconds() - seconds from time a to b.
+ */
+double timestamp_delta_seconds( timestamp const * const a , timestamp const * const b )
+{
+ return ((b->secs*1000000000.0+b->nsecs)-(a->secs*1000000000.0+a->nsecs))/1000000000.0;
+}
diff --git a/src/boost/tools/build/src/engine/timestamp.h b/src/boost/tools/build/src/engine/timestamp.h
new file mode 100644
index 000000000..2a05d2b1c
--- /dev/null
+++ b/src/boost/tools/build/src/engine/timestamp.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * timestamp.h - get the timestamp of a file or archive member
+ */
+
+#ifndef TIMESTAMP_H_SW_2011_11_18
+#define TIMESTAMP_H_SW_2011_11_18
+
+#include "config.h"
+#include "object.h"
+
+#ifdef OS_NT
+# include <windows.h>
+#endif
+
+#include <time.h>
+
+typedef struct timestamp
+{
+ time_t secs;
+ int nsecs;
+} timestamp;
+
+void timestamp_clear( timestamp * const );
+int timestamp_cmp( timestamp const * const lhs, timestamp const * const rhs );
+void timestamp_copy( timestamp * const target, timestamp const * const source );
+void timestamp_current( timestamp * const );
+int timestamp_empty( timestamp const * const );
+void timestamp_from_path( timestamp * const, OBJECT * const path );
+void timestamp_init( timestamp * const, time_t const secs, int const nsecs );
+void timestamp_max( timestamp * const max, timestamp const * const lhs,
+ timestamp const * const rhs );
+char const * timestamp_str( timestamp const * const );
+char const * timestamp_timestr( timestamp const * const );
+
+#ifdef OS_NT
+void timestamp_from_filetime( timestamp * const, FILETIME const * const );
+#endif
+
+void timestamp_done();
+double timestamp_delta_seconds( timestamp const * const, timestamp const * const );
+
+#endif
diff --git a/src/boost/tools/build/src/engine/variable.cpp b/src/boost/tools/build/src/engine/variable.cpp
new file mode 100644
index 000000000..574c344ce
--- /dev/null
+++ b/src/boost/tools/build/src/engine/variable.cpp
@@ -0,0 +1,393 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Reece H. Dunn.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * variable.c - handle Jam multi-element variables.
+ *
+ * External routines:
+ *
+ * var_defines() - load a bunch of variable=value settings
+ * var_get() - get value of a user defined symbol
+ * var_set() - set a variable in jam's user defined symbol table.
+ * var_swap() - swap a variable's value with the given one
+ * var_done() - free variable tables
+ *
+ * Internal routines:
+ *
+ * var_enter() - make new var symbol table entry, returning var ptr
+ * var_dump() - dump a variable to stdout
+ */
+
+#include "jam.h"
+#include "variable.h"
+
+#include "filesys.h"
+#include "hash.h"
+#include "modules.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "jam_strings.h"
+#include "output.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+
+/*
+ * VARIABLE - a user defined multi-value variable
+ */
+
+typedef struct _variable VARIABLE ;
+
+struct _variable
+{
+ OBJECT * symbol;
+ LIST * value;
+};
+
+static LIST * * var_enter( struct module_t *, OBJECT * symbol );
+static void var_dump( OBJECT * symbol, LIST * value, const char * what );
+
+
+/*
+ * var_defines() - load a bunch of variable=value settings
+ *
+ * If preprocess is false, take the value verbatim.
+ *
+ * Otherwise, if the variable value is enclosed in quotes, strip the quotes.
+ * Otherwise, if variable name ends in PATH, split value at :'s.
+ * Otherwise, split the value at blanks.
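+ *
+ * Illustrative examples with preprocessing enabled (assuming a Unix-style
+ * SPLITPATH of ':'):
+ *
+ *   PATH=/usr/bin:/bin  ->  PATH   = /usr/bin /bin
+ *   CFLAGS=-O2 -g       ->  CFLAGS = -O2 -g
+ *   MSG="hello world"   ->  MSG    = hello world   (quotes stripped, one element)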
+ */
+
+void var_defines( struct module_t * module, const char * const * e, int preprocess )
+{
+ string buf[ 1 ];
+
+ string_new( buf );
+
+ for ( ; *e; ++e )
+ {
+ const char * val;
+
+ if ( ( val = strchr( *e, '=' ) )
+#if defined( OS_MAC )
+ /* On the mac (MPW), the var=val is actually var\0val */
+ /* Think different. */
+ || ( val = *e + strlen( *e ) )
+#endif
+ )
+ {
+ LIST * l = L0;
+ size_t const len = strlen( val + 1 );
+ int const quoted = ( val[ 1 ] == '"' ) && ( val[ len ] == '"' ) &&
+ ( len > 1 );
+
+ if ( quoted && preprocess )
+ {
+ string_append_range( buf, val + 2, val + len );
+ l = list_push_back( l, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ else
+ {
+ const char * p;
+ const char * pp;
+ char split =
+#if defined( OPT_NO_EXTERNAL_VARIABLE_SPLIT )
+ '\0'
+#elif defined( OS_MAC )
+ ','
+#else
+ ' '
+#endif
+ ;
+
+ /* Split *PATH at :'s, not spaces. */
+ if ( val - 4 >= *e )
+ {
+ if ( !strncmp( val - 4, "PATH", 4 ) ||
+ !strncmp( val - 4, "Path", 4 ) ||
+ !strncmp( val - 4, "path", 4 ) )
+ split = SPLITPATH;
+ }
+
+ /* Do the split. */
+ for
+ (
+ pp = val + 1;
+ preprocess && ( ( p = strchr( pp, split ) ) != 0 );
+ pp = p + 1
+ )
+ {
+ string_append_range( buf, pp, p );
+ l = list_push_back( l, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+
+ l = list_push_back( l, object_new( pp ) );
+ }
+
+ /* Get name. */
+ string_append_range( buf, *e, val );
+ {
+ OBJECT * const varname = object_new( buf->value );
+ var_set( module, varname, l, VAR_SET );
+ object_free( varname );
+ }
+ string_truncate( buf, 0 );
+ }
+ }
+ string_free( buf );
+}
+
+
+/* Last returned variable value saved so we may clear it in var_done(). */
+static LIST * saved_var = L0;
+
+
+/*
+ * var_get() - get value of a user defined symbol
+ *
+ * Returns NULL if symbol unset.
+ */
+
+LIST * var_get( struct module_t * module, OBJECT * symbol )
+{
+ LIST * result = L0;
+#ifdef OPT_AT_FILES
+ /* Some "fixed" variables... */
+ if ( object_equal( symbol, constant_TMPDIR ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( object_new( path_tmpdir()->value ) );
+ }
+ else if ( object_equal( symbol, constant_TMPNAME ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( path_tmpnam() );
+ }
+ else if ( object_equal( symbol, constant_TMPFILE ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( path_tmpfile() );
+ }
+ else if ( object_equal( symbol, constant_STDOUT ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( object_copy( constant_STDOUT ) );
+ }
+ else if ( object_equal( symbol, constant_STDERR ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( object_copy( constant_STDERR ) );
+ }
+ else
+#endif
+ {
+ VARIABLE * v;
+ int n;
+
+ if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 )
+ {
+ if ( DEBUG_VARGET )
+ var_dump( symbol, module->fixed_variables[ n ], "get" );
+ result = module->fixed_variables[ n ];
+ }
+ else if ( module->variables && ( v = (VARIABLE *)hash_find(
+ module->variables, symbol ) ) )
+ {
+ if ( DEBUG_VARGET )
+ var_dump( v->symbol, v->value, "get" );
+ result = v->value;
+ }
+
+#ifdef OS_VMS
+ else if ( ( module->name && object_equal( module->name, constant_ENVIRON ) )
+ || root_module() == module )
+ {
+ /* On VMS, when a variable from root or ENVIRON module is not found,
+ * explicitly request it from the process.
+ * By design, process variables (and logicals) are not made available
+ * to C main(), and thus will not get loaded in bulk to root/ENVIRON.
+ * So we get around it by getting any such variable on first request.
+ */
+ const char * val = getenv( object_str( symbol ) );
+
+ if ( val )
+ {
+ struct module_t * environ_module = module;
+ char * environ[ 2 ] = { 0 }; /* NULL-terminated */
+ string buf[ 1 ];
+
+ if ( root_module() == module )
+ {
+ environ_module = bindmodule( constant_ENVIRON );
+ }
+
+ string_copy( buf, object_str( symbol ) );
+ string_append( buf, "=" );
+ string_append( buf, val );
+
+ environ[ 0 ] = buf->value;
+
+ /* Load variable to global module, with splitting, for backward
+ * compatibility. Then to .ENVIRON, without splitting.
+ */
+ var_defines( root_module(), environ, 1 );
+ var_defines( environ_module, environ, 0 );
+ string_free( buf );
+
+ if ( module->variables && ( v = (VARIABLE *)hash_find(
+ module->variables, symbol ) ) )
+ {
+ if ( DEBUG_VARGET )
+ var_dump( v->symbol, v->value, "get" );
+ result = v->value;
+ }
+ }
+ }
+#endif
+ }
+ return result;
+}
+
+
+LIST * var_get_and_clear_raw( module_t * module, OBJECT * symbol )
+{
+ LIST * result = L0;
+ VARIABLE * v;
+
+ if ( module->variables && ( v = (VARIABLE *)hash_find( module->variables,
+ symbol ) ) )
+ {
+ result = v->value;
+ v->value = L0;
+ }
+
+ return result;
+}
+
+
+/*
+ * var_set() - set a variable in Jam's user defined symbol table
+ *
+ * 'flag' controls the relationship between new and old values of the variable:
+ * SET replaces the old with the new; APPEND appends the new to the old; DEFAULT
+ * only uses the new if the variable was previously unset.
+ *
+ * Copies symbol. Takes ownership of value.
+ */
+
+void var_set( struct module_t * module, OBJECT * symbol, LIST * value, int flag
+ )
+{
+ LIST * * v = var_enter( module, symbol );
+
+ if ( DEBUG_VARSET )
+ var_dump( symbol, value, "set" );
+
+ switch ( flag )
+ {
+ case VAR_SET: /* Replace value */
+ list_free( *v );
+ *v = value;
+ break;
+
+ case VAR_APPEND: /* Append value */
+ *v = list_append( *v, value );
+ break;
+
+ case VAR_DEFAULT: /* Set only if unset */
+ if ( list_empty( *v ) )
+ *v = value;
+ else
+ list_free( value );
+ break;
+ }
+}
+
+
+/*
+ * var_swap() - swap a variable's value with the given one
+ */
+
+LIST * var_swap( struct module_t * module, OBJECT * symbol, LIST * value )
+{
+ LIST * * v = var_enter( module, symbol );
+ LIST * oldvalue = *v;
+ if ( DEBUG_VARSET )
+ var_dump( symbol, value, "set" );
+ *v = value;
+ return oldvalue;
+}
+
+
+/*
+ * var_enter() - make new var symbol table entry, returning var ptr
+ */
+
+static LIST * * var_enter( struct module_t * module, OBJECT * symbol )
+{
+ int found;
+ VARIABLE * v;
+ int n;
+
+ if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 )
+ return &module->fixed_variables[ n ];
+
+ if ( !module->variables )
+ module->variables = hashinit( sizeof( VARIABLE ), "variables" );
+
+ v = (VARIABLE *)hash_insert( module->variables, symbol, &found );
+ if ( !found )
+ {
+ v->symbol = object_copy( symbol );
+ v->value = L0;
+ }
+
+ return &v->value;
+}
+
+
+/*
+ * var_dump() - dump a variable to stdout
+ */
+
+static void var_dump( OBJECT * symbol, LIST * value, const char * what )
+{
+ out_printf( "%s %s = ", what, object_str( symbol ) );
+ list_print( value );
+ out_printf( "\n" );
+}
+
+
+/*
+ * var_done() - free variable tables
+ */
+
+static void delete_var_( void * xvar, void * data )
+{
+ VARIABLE * const v = (VARIABLE *)xvar;
+ object_free( v->symbol );
+ list_free( v->value );
+}
+
+void var_done( struct module_t * module )
+{
+ list_free( saved_var );
+ saved_var = L0;
+ hashenumerate( module->variables, delete_var_, 0 );
+ hash_free( module->variables );
+}
diff --git a/src/boost/tools/build/src/engine/variable.h b/src/boost/tools/build/src/engine/variable.h
new file mode 100644
index 000000000..948ac3432
--- /dev/null
+++ b/src/boost/tools/build/src/engine/variable.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * variable.h - handle jam multi-element variables
+ */
+
+#ifndef VARIABLE_SW20111119_H
+#define VARIABLE_SW20111119_H
+
+#include "config.h"
+#include "lists.h"
+#include "object.h"
+
+
+struct module_t;
+
+void var_defines( struct module_t *, const char * const * e, int preprocess );
+LIST * var_get( struct module_t *, OBJECT * symbol );
+void var_set( struct module_t *, OBJECT * symbol, LIST * value, int flag );
+LIST * var_swap( struct module_t *, OBJECT * symbol, LIST * value );
+void var_done( struct module_t * );
+
+/*
+ * Defines for var_set().
+ */
+
+#define VAR_SET 0 /* override previous value */
+#define VAR_APPEND 1 /* append to previous value */
+#define VAR_DEFAULT 2 /* set only if no previous value */
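+
+/* Illustrative semantics, starting from an unset variable:
+ *
+ *   var_set( m, sym, l1, VAR_SET );      sym == l1
+ *   var_set( m, sym, l2, VAR_APPEND );   sym == l1 l2
+ *   var_set( m, sym, l3, VAR_DEFAULT );  sym unchanged (already set), l3 freed
+ */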
+
+#endif
diff --git a/src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd b/src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd
new file mode 100644
index 000000000..d642a578e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd
@@ -0,0 +1,59 @@
+:: Copyright 2017 - Refael Ackermann
+:: Copyright 2019 - Thomas Kent
+:: Distributed under MIT style license
+:: See accompanying file LICENSE at https://github.com/node4good/windows-autoconf
+:: Forked from version: 1.15.4
+
+@if not defined DEBUG_HELPER @ECHO OFF
+setlocal
+set "InstallerPath=%ProgramFiles(x86)%\Microsoft Visual Studio\Installer"
+if not exist "%InstallerPath%" set "InstallerPath=%ProgramFiles%\Microsoft Visual Studio\Installer"
+if not exist "%InstallerPath%" goto :no-vswhere
+:: Manipulate %Path% for easier " handling
+set Path=%Path%;%InstallerPath%
+where vswhere 2> nul > nul
+if errorlevel 1 goto :no-vswhere
+set VSWHERE_REQ=-requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64
+set VSWHERE_PRP=-property installationPath
+
+REM Visual Studio 2017 (15.X, toolset 14.1)
+set VSWHERE_LMT=-version "[15.0,16.0)"
+SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT%
+for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do (
+ endlocal
+ echo Found with vswhere %%i
+ @rem comment out setting VCINSTALLDIR for Boost.build
+ @rem set "VCINSTALLDIR=%%i\VC\"
+ set "VS150COMNTOOLS=%%i\Common7\Tools\"
+ exit /B 0
+)
+
+REM Visual Studio 2019 (16.X, toolset 14.2)
+set VSWHERE_LMT=-version "[16.0,17.0)"
+SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT%
+for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do (
+ endlocal
+ echo Found with vswhere %%i
+ @rem comment out setting VCINSTALLDIR for Boost.build
+ @rem set "VCINSTALLDIR=%%i\VC\"
+ set "VS160COMNTOOLS=%%i\Common7\Tools\"
+ exit /B 0
+)
+
+REM Visual Studio Unknown Version, Beyond 2019
+set VSWHERE_LMT=-version "[17.0)"
+set VSWHERE_PRERELEASE=-prerelease
+SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT% %VSWHERE_PRERELEASE%
+for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do (
+ endlocal
+ echo Found with vswhere %%i
+ @rem comment out setting VCINSTALLDIR for Boost.build
+ @rem set "VCINSTALLDIR=%%i\VC\"
+ set "VSUNKCOMNTOOLS=%%i\Common7\Tools\"
+ exit /B 0
+)
+
+:no-vswhere
+endlocal
+echo could not find "vswhere"
+exit /B 1
\ No newline at end of file
diff --git a/src/boost/tools/build/src/engine/w32_getreg.cpp b/src/boost/tools/build/src/engine/w32_getreg.cpp
new file mode 100644
index 000000000..1ba06274b
--- /dev/null
+++ b/src/boost/tools/build/src/engine/w32_getreg.cpp
@@ -0,0 +1,201 @@
+/*
+Copyright Paul Lin 2003. Copyright 2006 Bojan Resnik.
+Distributed under the Boost Software License, Version 1.0. (See accompanying
+file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+# include "jam.h"
+
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+
+# include "lists.h"
+# include "object.h"
+# include "parse.h"
+# include "frames.h"
+# include "jam_strings.h"
+
+# define WIN32_LEAN_AND_MEAN
+# include <windows.h>
+
+# define MAX_REGISTRY_DATA_LENGTH 4096
+# define MAX_REGISTRY_KEYNAME_LENGTH 256
+# define MAX_REGISTRY_VALUENAME_LENGTH 16384
+
+typedef struct
+{
+ LPCSTR name;
+ HKEY value;
+} KeyMap;
+
+static const KeyMap dlRootKeys[] = {
+ { "HKLM", HKEY_LOCAL_MACHINE },
+ { "HKCU", HKEY_CURRENT_USER },
+ { "HKCR", HKEY_CLASSES_ROOT },
+ { "HKEY_LOCAL_MACHINE", HKEY_LOCAL_MACHINE },
+ { "HKEY_CURRENT_USER", HKEY_CURRENT_USER },
+ { "HKEY_CLASSES_ROOT", HKEY_CLASSES_ROOT },
+ { 0, 0 }
+};
+
+static HKEY get_key(char const** path)
+{
+ const KeyMap *p;
+
+ for (p = dlRootKeys; p->name; ++p)
+ {
+ int n = strlen(p->name);
+ if (!strncmp(*path,p->name,n))
+ {
+ if ((*path)[n] == '\\' || (*path)[n] == 0)
+ {
+ *path += n + 1;
+ break;
+ }
+ }
+ }
+
+ return p->value;
+}
+
+LIST * builtin_system_registry( FRAME * frame, int flags )
+{
+ char const* path = object_str( list_front( lol_get(frame->args, 0) ) );
+ LIST* result = L0;
+ HKEY key = get_key(&path);
+
+ if (
+ key != 0
+ && ERROR_SUCCESS == RegOpenKeyExA(key, path, 0, KEY_QUERY_VALUE, &key)
+ )
+ {
+ DWORD type;
+ BYTE data[MAX_REGISTRY_DATA_LENGTH];
+ DWORD len = sizeof(data);
+ LIST * const field = lol_get(frame->args, 1);
+
+ if ( ERROR_SUCCESS ==
+ RegQueryValueExA(key, field ? object_str( list_front( field ) ) : 0, 0, &type, data, &len) )
+ {
+ switch (type)
+ {
+
+ case REG_EXPAND_SZ:
+ {
+ unsigned long len;
+ string expanded[1];
+ string_new(expanded);
+
+ while (
+ (len = ExpandEnvironmentStringsA(
+ (LPCSTR)data, expanded->value, expanded->capacity))
+ > expanded->capacity
+ )
+ string_reserve(expanded, len);
+
+ expanded->size = len - 1;
+
+ result = list_push_back( result, object_new(expanded->value) );
+ string_free( expanded );
+ }
+ break;
+
+ case REG_MULTI_SZ:
+ {
+ char* s;
+
+ for (s = (char*)data; *s; s += strlen(s) + 1)
+ result = list_push_back( result, object_new(s) );
+
+ }
+ break;
+
+ case REG_DWORD:
+ {
+ char buf[100];
+ sprintf( buf, "%lu", *(PDWORD)data );
+ result = list_push_back( result, object_new(buf) );
+ }
+ break;
+
+ case REG_SZ:
+ result = list_push_back( result, object_new( (const char *)data ) );
+ break;
+ }
+ }
+ RegCloseKey(key);
+ }
+ return result;
+}
+
+static LIST* get_subkey_names(HKEY key, char const* path)
+{
+ LIST* result = 0;
+
+ if ( ERROR_SUCCESS ==
+ RegOpenKeyExA(key, path, 0, KEY_ENUMERATE_SUB_KEYS, &key)
+ )
+ {
+ char name[MAX_REGISTRY_KEYNAME_LENGTH];
+ DWORD name_size = sizeof(name);
+ DWORD index;
+ FILETIME last_write_time;
+
+ for ( index = 0;
+ ERROR_SUCCESS == RegEnumKeyExA(
+ key, index, name, &name_size, 0, 0, 0, &last_write_time);
+ ++index,
+ name_size = sizeof(name)
+ )
+ {
+ name[name_size] = 0;
+ result = list_append(result, list_new(object_new(name)));
+ }
+
+ RegCloseKey(key);
+ }
+
+ return result;
+}
+
+static LIST* get_value_names(HKEY key, char const* path)
+{
+ LIST* result = 0;
+
+ if ( ERROR_SUCCESS == RegOpenKeyExA(key, path, 0, KEY_QUERY_VALUE, &key) )
+ {
+ char name[MAX_REGISTRY_VALUENAME_LENGTH];
+ DWORD name_size = sizeof(name);
+ DWORD index;
+
+ for ( index = 0;
+ ERROR_SUCCESS == RegEnumValueA(
+ key, index, name, &name_size, 0, 0, 0, 0);
+ ++index,
+ name_size = sizeof(name)
+ )
+ {
+ name[name_size] = 0;
+ result = list_append(result, list_new(object_new(name)));
+ }
+
+ RegCloseKey(key);
+ }
+
+ return result;
+}
+
+LIST * builtin_system_registry_names( FRAME * frame, int flags )
+{
+ char const* path = object_str( list_front( lol_get(frame->args, 0) ) );
+ char const* result_type = object_str( list_front( lol_get(frame->args, 1) ) );
+
+ HKEY key = get_key(&path);
+
+ if ( !strcmp(result_type, "subkeys") )
+ return get_subkey_names(key, path);
+ if ( !strcmp(result_type, "values") )
+ return get_value_names(key, path);
+ return 0;
+}
+
+# endif
diff --git a/src/boost/tools/build/src/engine/yyacc.cpp b/src/boost/tools/build/src/engine/yyacc.cpp
new file mode 100644
index 000000000..1ff3223f0
--- /dev/null
+++ b/src/boost/tools/build/src/engine/yyacc.cpp
@@ -0,0 +1,268 @@
+/* Copyright 2002 Rene Rivera.
+** Distributed under the Boost Software License, Version 1.0.
+** (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include <stdlib.h>
+
+/*
+# yyacc - yacc wrapper
+#
+# Allows tokens to be written as `literal` and then automatically
+# substituted with #defined tokens.
+#
+# Usage:
+# yyacc file.y filetab.h file.yy
+#
+# inputs:
+# file.yy yacc grammar with ` literals
+#
+# outputs:
+# file.y yacc grammar
+# filetab.h array of string <-> token mappings
+#
+# 3-13-93
+# Documented and p moved in sed command (for some reason,
+# s/x/y/p doesn't work).
+# 10-12-93
+# Take basename as second argument.
+# 12-31-96
+# reversed order of args to be compatible with GenFile rule
+# 11-20-2002
+# Reimplemented as a C program for portability. (Rene Rivera)
+*/
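+
+/*
+ * Illustrative example (not from the original source): given a grammar line
+ * such as
+ *
+ *     statement : `;` ;
+ *
+ * the copy written to file.y has the `;` literal replaced by the generated
+ * token name _SEMIC_t, and filetab.h receives the entry { ";", _SEMIC_t },
+ * mapping the literal string to that token.
+ */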
+
+void print_usage();
+char * copy_string(char * s, int l);
+char * tokenize_string(char * s);
+int cmp_literal(const void * a, const void * b);
+
+typedef struct
+{
+ char * string;
+ char * token;
+} literal;
+
+int main(int argc, char ** argv)
+{
+ int result = 0;
+ if (argc != 4)
+ {
+ print_usage();
+ result = 1;
+ }
+ else
+ {
+ FILE * token_output_f = 0;
+ FILE * grammar_output_f = 0;
+ FILE * grammar_source_f = 0;
+
+ grammar_source_f = fopen(argv[3],"r");
+ if (grammar_source_f == 0) { result = 1; }
+ if (result == 0)
+ {
+ literal literals[1024];
+ int t = 0;
+ char l[2048];
+ while (1)
+ {
+ if (fgets(l,2048,grammar_source_f) != 0)
+ {
+ char * c = l;
+ while (1)
+ {
+ char * c1 = strchr(c,'`');
+ if (c1 != 0)
+ {
+ char * c2 = strchr(c1+1,'`');
+ if (c2 != 0)
+ {
+ literals[t].string = copy_string(c1+1,c2-c1-1);
+ literals[t].token = tokenize_string(literals[t].string);
+ t += 1;
+ c = c2+1;
+ }
+ else
+ break;
+ }
+ else
+ break;
+ }
+ }
+ else
+ {
+ break;
+ }
+ }
+ literals[t].string = 0;
+ literals[t].token = 0;
+ qsort(literals,t,sizeof(literal),cmp_literal);
+ {
+ int p = 1;
+ int i = 1;
+ while (literals[i].string != 0)
+ {
+ if (strcmp(literals[p-1].string,literals[i].string) != 0)
+ {
+ literals[p] = literals[i];
+ p += 1;
+ }
+ i += 1;
+ }
+ literals[p].string = 0;
+ literals[p].token = 0;
+ t = p;
+ }
+ token_output_f = fopen(argv[2],"w");
+ if (token_output_f != 0)
+ {
+ int i = 0;
+ while (literals[i].string != 0)
+ {
+ fprintf(token_output_f," { \"%s\", %s },\n",literals[i].string,literals[i].token);
+ i += 1;
+ }
+ fclose(token_output_f);
+ }
+ else
+ result = 1;
+ if (result == 0)
+ {
+ grammar_output_f = fopen(argv[1],"w");
+ if (grammar_output_f != 0)
+ {
+ int i = 0;
+ while (literals[i].string != 0)
+ {
+ fprintf(grammar_output_f,"%%token %s\n",literals[i].token);
+ i += 1;
+ }
+ rewind(grammar_source_f);
+ while (1)
+ {
+ if (fgets(l,2048,grammar_source_f) != 0)
+ {
+ char * c = l;
+ while (1)
+ {
+ char * c1 = strchr(c,'`');
+ if (c1 != 0)
+ {
+ char * c2 = strchr(c1+1,'`');
+ if (c2 != 0)
+ {
+ literal key;
+ literal * replacement = 0;
+ key.string = copy_string(c1+1,c2-c1-1);
+ key.token = 0;
+ replacement = (literal*)bsearch(
+ &key,literals,t,sizeof(literal),cmp_literal);
+ *c1 = 0;
+ fprintf(grammar_output_f,"%s%s",c,replacement->token);
+ c = c2+1;
+ }
+ else
+ {
+ fprintf(grammar_output_f,"%s",c);
+ break;
+ }
+ }
+ else
+ {
+ fprintf(grammar_output_f,"%s",c);
+ break;
+ }
+ }
+ }
+ else
+ {
+ break;
+ }
+ }
+ fclose(grammar_output_f);
+ }
+ else
+ result = 1;
+ }
+ }
+ if (result != 0)
+ {
+ perror("yyacc");
+ }
+ }
+ return result;
+}
+
+static const char * usage[] = {
+ "yyacc <grammar output.y> <token table output.h> <grammar source.yy>",
+ 0 };
+
+void print_usage()
+{
+ const char ** u;
+ for (u = usage; *u != 0; ++u)
+ {
+ fputs(*u,stderr); putc('\n',stderr);
+ }
+}
+
+char * copy_string(char * s, int l)
+{
+ char * result = (char*)malloc(l+1);
+ strncpy(result,s,l);
+ result[l] = 0;
+ return result;
+}
+
+char * tokenize_string(char * s)
+{
+ char * result;
+ const char * literal = s;
+ int l;
+ int c;
+
+ if (strcmp(s,":") == 0) literal = "_colon";
+ else if (strcmp(s,"!") == 0) literal = "_bang";
+ else if (strcmp(s,"!=") == 0) literal = "_bang_equals";
+ else if (strcmp(s,"&&") == 0) literal = "_amperamper";
+ else if (strcmp(s,"&") == 0) literal = "_amper";
+ else if (strcmp(s,"+") == 0) literal = "_plus";
+ else if (strcmp(s,"+=") == 0) literal = "_plus_equals";
+ else if (strcmp(s,"||") == 0) literal = "_barbar";
+ else if (strcmp(s,"|") == 0) literal = "_bar";
+ else if (strcmp(s,";") == 0) literal = "_semic";
+ else if (strcmp(s,"-") == 0) literal = "_minus";
+ else if (strcmp(s,"<") == 0) literal = "_langle";
+ else if (strcmp(s,"<=") == 0) literal = "_langle_equals";
+ else if (strcmp(s,">") == 0) literal = "_rangle";
+ else if (strcmp(s,">=") == 0) literal = "_rangle_equals";
+ else if (strcmp(s,".") == 0) literal = "_period";
+ else if (strcmp(s,"?") == 0) literal = "_question";
+ else if (strcmp(s,"?=") == 0) literal = "_question_equals";
+ else if (strcmp(s,"=") == 0) literal = "_equals";
+ else if (strcmp(s,",") == 0) literal = "_comma";
+ else if (strcmp(s,"[") == 0) literal = "_lbracket";
+ else if (strcmp(s,"]") == 0) literal = "_rbracket";
+ else if (strcmp(s,"{") == 0) literal = "_lbrace";
+ else if (strcmp(s,"}") == 0) literal = "_rbrace";
+ else if (strcmp(s,"(") == 0) literal = "_lparen";
+ else if (strcmp(s,")") == 0) literal = "_rparen";
+ l = strlen(literal)+2;
+ result = (char*)malloc(l+1);
+ for (c = 0; literal[c] != 0; ++c)
+ {
+ result[c] = toupper(literal[c]);
+ }
+ result[l-2] = '_';
+ result[l-1] = 't';
+ result[l] = 0;
+ return result;
+}
+
+int cmp_literal(const void * a, const void * b)
+{
+ return strcmp(((const literal *)a)->string,((const literal *)b)->string);
+}
diff --git a/src/boost/tools/build/src/exceptions.py b/src/boost/tools/build/src/exceptions.py
new file mode 100644
index 000000000..70d4d9831
--- /dev/null
+++ b/src/boost/tools/build/src/exceptions.py
@@ -0,0 +1,55 @@
+# Copyright Pedro Ferreira 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+
+class BaseBoostBuildException(Exception):
+ """A base Exception class for all other Boost.Build exceptions to inherit from."""
+
+
+class UserError(BaseBoostBuildException):
+ pass
+
+
+class FeatureConflict(BaseBoostBuildException):
+ pass
+
+
+class InvalidSource(BaseBoostBuildException):
+ pass
+
+
+class InvalidFeature(BaseBoostBuildException):
+ pass
+
+
+class InvalidProperty(BaseBoostBuildException):
+ pass
+
+
+class InvalidValue(BaseBoostBuildException):
+ pass
+
+
+class InvalidAttribute(BaseBoostBuildException):
+ pass
+
+
+class AlreadyDefined(BaseBoostBuildException):
+ pass
+
+
+class IllegalOperation(BaseBoostBuildException):
+ pass
+
+
+class Recursion(BaseBoostBuildException):
+ pass
+
+
+class NoBestMatchingAlternative(BaseBoostBuildException):
+ pass
+
+
+class NoAction(BaseBoostBuildException):
+ pass
diff --git a/src/boost/tools/build/src/kernel/boost-build.jam b/src/boost/tools/build/src/kernel/boost-build.jam
new file mode 100644
index 000000000..377f6ec02
--- /dev/null
+++ b/src/boost/tools/build/src/kernel/boost-build.jam
@@ -0,0 +1,5 @@
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+boost-build . ;
diff --git a/src/boost/tools/build/src/kernel/bootstrap.jam b/src/boost/tools/build/src/kernel/bootstrap.jam
new file mode 100644
index 000000000..25cc57a3d
--- /dev/null
+++ b/src/boost/tools/build/src/kernel/bootstrap.jam
@@ -0,0 +1,265 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005, 2006 Rene Rivera
+# Copyright 2003, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# First of all, check the jam version.
+if $(JAM_VERSION:J="") < 030112
+{
+ ECHO "error: Boost.Jam version 3.1.12 or later required" ;
+ EXIT ;
+}
+
+local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
+for local r in $(required-rules)
+{
+ if ! $(r) in [ RULENAMES ]
+ {
+ ECHO "error: builtin rule '$(r)' is not present" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+local native = regex transform 2 ;
+while $(native)
+{
+ if ! [ HAS_NATIVE_RULE $(native[1]) : $(native[2]) : $(native[3]) ]
+ {
+ ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
+ ECHO "error: or interface version of that rule is too low" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+ native = $(native[4-]) ;
+}
+
+
+# Check that the builtin .ENVIRON module is present. We do not have a builtin to
+# check that a module is present, so we assume that the PATH environment
+# variable is always set and verify that the .ENVIRON module has a non-empty
+# value of that variable.
+module .ENVIRON
+{
+ local p = $(PATH) $(Path) $(path) ;
+ if ! $(p)
+ {
+ ECHO "error: no builtin module .ENVIRON is found" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Check that @() functionality is present. Similarly to modules, we do not have
+# a way to test this directly. Instead we check that $(TMPNAME) functionality is
+# present which was added at roughly the same time (more precisely, it was added
+# just before).
+{
+ if ! $(TMPNAME)
+ {
+ ECHO "error: no @() functionality found" ;
+ ECHO "error: your version of b2 is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Make sure that \n escape is available.
+if "\n" = "n"
+{
+ if $(OS) = CYGWIN
+ {
+ ECHO "warning: escape sequences are not supported" ;
+ ECHO "warning: this will cause major misbehaviour on cygwin" ;
+ ECHO "warning: your version of b2 is likely out of date" ;
+ ECHO "warning: please get a fresh version from SVN." ;
+ }
+}
+
+
+# Bootstrap the module system. Then bring the import rule into the global module.
+#
+SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
+module modules { include <module@>modules.jam ; }
+IMPORT modules : import : : import ;
+
+{
+ # Add module subdirectories to the BOOST_BUILD_PATH, which allows us to make
+ # incremental refactoring steps by moving modules to appropriate
+ # subdirectories, thereby achieving some physical separation of different
+ # layers without changing all of our code to specify subdirectories in
+ # import statements or use an extra level of qualification on imported
+ # names.
+
+ local subdirs =
+ kernel # only the most-intrinsic modules: modules, errors
+ util # low-level substrate: string/number handling, etc.
+ build # essential elements of the build system architecture
+ tools # toolsets for handling specific build jobs and targets.
+ contrib # user contributed (unreviewed) modules
+ . # build-system.jam lives here
+ ;
+ local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
+ BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
+
+ modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+
+ modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
+}
+
+# Reload the modules to clean things up. The modules module can tolerate being
+# imported twice.
+#
+import modules ;
+
+# Process option plugins first to allow them to prevent loading the rest of the
+# build system.
+#
+import option ;
+local dont-build = [ option.process ] ;
+
+# Should we skip building, i.e. loading the build system, according to the
+# options processed?
+#
+if ! $(dont-build)
+{
+ if ! --python in $(ARGV)
+ {
+ # Allow users to override the build system file from the command-line
+ # (mostly for testing).
+ local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
+ build-system ?= build-system ;
+
+ # Use last element in case of multiple command-line options.
+ import $(build-system[-1]) ;
+ }
+ else
+ {
+
+ # Define additional interface exposed to Python code. Python code will
+ # also have access to select bjam builtins in the 'bjam' module, but
+ # some things are easier to define outside C.
+ module python_interface
+ {
+ rule load ( module-name : location )
+ {
+ USER_MODULE $(module-name) ;
+ # Make all rules in the loaded module available in the global
+ # namespace, so that we do not have to bother specifying the
+ # "correct" module when calling from Python.
+ module $(module-name)
+ {
+ __name__ = $(1) ;
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+ }
+
+ rule peek ( module-name ? : variables + )
+ {
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+ }
+
+ rule set-variable ( module-name : name : value * )
+ {
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+ }
+
+ rule set-top-level-targets ( targets * )
+ {
+ DEPENDS all : $(targets) ;
+ }
+
+ rule call-in-module ( m : rulename : * )
+ {
+ module $(m)
+ {
+ return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
+ : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
+ : $(17) : $(18) : $(19) ] ;
+ }
+ }
+
+
+ rule set-update-action ( action : targets * : sources * :
+ properties * )
+ {
+ $(action) $(targets) : $(sources) : $(properties) ;
+ }
+
+ rule set-update-action-in-module ( m : action : targets * :
+ sources * : properties * )
+ {
+ module $(m)
+ {
+ $(2) $(3) : $(4) : $(5) ;
+ }
+ }
+
+ rule set-target-variable ( targets + : variable : value * : append ?
+ )
+ {
+ if $(append)
+ {
+ $(variable) on $(targets) += $(value) ;
+ }
+ else
+ {
+ $(variable) on $(targets) = $(value) ;
+ }
+ }
+
+ rule get-target-variable ( targets + : variable )
+ {
+ return [ on $(targets) return $($(variable)) ] ;
+ }
+
+ rule import-rules-from-parent ( parent-module : this-module :
+ user-rules * )
+ {
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) :
+ $(user-rules) ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+
+ rule mark-included ( targets * : includes * )
+ {
+ NOCARE $(includes) ;
+ INCLUDES $(targets) : $(includes) ;
+ ISFILE $(includes) ;
+ }
+ }
+
+ PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
+ modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
+
+ module PyBB
+ {
+ local ok = [ bootstrap $(root) ] ;
+ if ! $(ok)
+ {
+ EXIT ;
+ }
+ }
+
+
+ #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
+
+ #module PyBB
+ #{
+ # main ;
+ #}
+ }
+}
diff --git a/src/boost/tools/build/src/kernel/bootstrap.py b/src/boost/tools/build/src/kernel/bootstrap.py
new file mode 100644
index 000000000..3746b5e66
--- /dev/null
+++ b/src/boost/tools/build/src/kernel/bootstrap.py
@@ -0,0 +1,25 @@
+# Copyright 2009 Vladimir Prus
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import imp
+import sys
+
+def bootstrap(root_path):
+ """Performs python-side bootstrapping of Boost.Build/Python.
+
+ This function arranges for 'b2.whatever' package names to work, while also
+ allowing to put python files alongside corresponding jam modules.
+ """
+
+ m = imp.new_module("b2")
+ # Note that:
+ # 1. If __path__ is not list of strings, nothing will work
+ # 2. root_path is already list of strings.
+ m.__path__ = root_path
+ sys.modules["b2"] = m
+
+ import b2.build_system
+ return b2.build_system.main()
+
diff --git a/src/boost/tools/build/src/kernel/class.jam b/src/boost/tools/build/src/kernel/class.jam
new file mode 100644
index 000000000..7c3786418
--- /dev/null
+++ b/src/boost/tools/build/src/kernel/class.jam
@@ -0,0 +1,420 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Polymorphic class system built on top of core Jam facilities.
+#
+# Classes are defined by 'class' keywords:
+#
+# class myclass
+# {
+# rule __init__ ( arg1 ) # constructor
+# {
+# self.attribute = $(arg1) ;
+# }
+#
+# rule method1 ( ) # method
+# {
+# return [ method2 ] ;
+# }
+#
+# rule method2 ( ) # method
+# {
+# return $(self.attribute) ;
+# }
+# }
+#
+# The __init__ rule is the constructor, and sets member variables.
+#
+# New instances are created by invoking [ new <class> <args...> ]:
+#
+# local x = [ new myclass foo ] ; # x is a new myclass object
+# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
+#
+# Derived classes are created by mentioning base classes in the declaration::
+#
+# class derived : myclass
+# {
+# rule __init__ ( arg )
+# {
+# myclass.__init__ $(arg) ; # call base __init__
+#
+# }
+#
+# rule method2 ( ) # method override
+# {
+# return $(self.attribute)XXX ;
+# }
+# }
+#
+# All methods operate virtually, replacing behavior in the base classes. For
+# example::
+#
+# local y = [ new derived foo ] ; # y is a new derived object
+# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "fooXXX"
+#
+# Each class instance is its own core Jam module. All instance attributes and
+# methods are accessible without additional qualification from within the class
+# instance. All rules imported in the class declaration, or visible in base
+# classes, are also visible. Base methods are available in qualified form:
+# base-name.method-name. By convention, attribute names are prefixed with
+# "self.".
+
+import modules ;
+import numbers ;
+
+
+rule xinit ( instance : class )
+{
+ module $(instance)
+ {
+ __class__ = $(2) ;
+ __name__ = $(1) ;
+ }
+}
+
+
+rule new ( class args * : * )
+{
+ .next-instance ?= 1 ;
+ local id = object($(class))@$(.next-instance) ;
+
+ INSTANCE $(id) : class@$(class) ;
+ xinit $(id) : $(class) ;
+ IMPORT_MODULE $(id) ;
+ $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) :
+ $(18) : $(19) ;
+
+ # Bump the next unique object name.
+ .next-instance = [ numbers.increment $(.next-instance) ] ;
+
+ # Return the name of the new instance.
+ return $(id) ;
+}
+
+
+rule bases ( class )
+{
+ module class@$(class)
+ {
+ return $(__bases__) ;
+ }
+}
+
+
+rule is-derived ( class : bases + )
+{
+ local stack = $(class) ;
+ local visited found ;
+ while ! $(found) && $(stack)
+ {
+ local top = $(stack[1]) ;
+ stack = $(stack[2-]) ;
+ if ! ( $(top) in $(visited) )
+ {
+ visited += $(top) ;
+ stack += [ bases $(top) ] ;
+
+ if $(bases) in $(visited)
+ {
+ found = true ;
+ }
+ }
+ }
+ return $(found) ;
+}
+
+
+# Returns true if the 'value' is a class instance.
+#
+rule is-instance ( value )
+{
+ return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
+}
+
+
+# Check if the given value is of the given type.
+#
+rule is-a (
+ instance # The value to check.
+ : type # The type to test for.
+)
+{
+ if [ is-instance $(instance) ]
+ {
+ return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
+ }
+}
+
+
+local rule typecheck ( x )
+{
+ local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
+ if ! [ is-a $(x) : $(class-name) ]
+ {
+ return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+
+ # This will be the construction function for a class called 'myclass'.
+ #
+ class myclass
+ {
+ import assert ;
+
+ rule __init__ ( x_ * : y_ * )
+ {
+ # Set some instance variables.
+ x = $(x_) ;
+ y = $(y_) ;
+ foo += 10 ;
+ }
+
+ rule set-x ( newx * )
+ {
+ x = $(newx) ;
+ }
+
+ rule get-x ( )
+ {
+ return $(x) ;
+ }
+
+ rule set-y ( newy * )
+ {
+ y = $(newy) ;
+ }
+
+ rule get-y ( )
+ {
+ return $(y) ;
+ }
+
+ rule f ( )
+ {
+ return [ g $(x) ] ;
+ }
+
+ rule g ( args * )
+ {
+ if $(x) in $(y)
+ {
+ return $(x) ;
+ }
+ else if $(y) in $(x)
+ {
+ return $(y) ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ rule get-class ( )
+ {
+ return $(__class__) ;
+ }
+
+ rule get-instance ( )
+ {
+ return $(__name__) ;
+ }
+
+ rule invariant ( )
+ {
+ assert.equal 1 : 1 ;
+ }
+
+ rule get-foo ( )
+ {
+ return $(foo) ;
+ }
+ } # class myclass ;
+
+ class derived1 : myclass
+ {
+ rule __init__ ( z_ )
+ {
+ myclass.__init__ $(z_) : X ;
+ z = $(z_) ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived1.g ;
+ }
+
+ rule h ( )
+ {
+ return derived1.h ;
+ }
+
+ rule get-z ( )
+ {
+ return $(z) ;
+ }
+
+ # Check that 'assert.equal' visible in base class is visible here.
+ #
+ rule invariant2 ( )
+ {
+ assert.equal 2 : 2 ;
+ }
+
+ # Check that 'assert.variable-not-empty' visible in base class is
+ # visible here.
+ #
+ rule invariant3 ( )
+ {
+ local v = 10 ;
+ assert.variable-not-empty v ;
+ }
+ } # class derived1 : myclass ;
+
+ class derived2 : myclass
+ {
+ rule __init__ ( )
+ {
+ myclass.__init__ 1 : 2 ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived2.g ;
+ }
+
+ # Test the ability to call base class functions with qualification.
+ #
+ rule get-x ( )
+ {
+ return [ myclass.get-x ] ;
+ }
+ } # class derived2 : myclass ;
+
+ class derived2a : derived2
+ {
+ rule __init__
+ {
+ derived2.__init__ ;
+ }
+ } # class derived2a : derived2 ;
+
+ local rule expect_derived2 ( [derived2] x ) { }
+
+ local a = [ new myclass 3 4 5 : 4 5 ] ;
+ local b = [ new derived1 4 ] ;
+ local b2 = [ new derived1 4 ] ;
+ local c = [ new derived2 ] ;
+ local d = [ new derived2 ] ;
+ local e = [ new derived2a ] ;
+
+ expect_derived2 $(d) ;
+ expect_derived2 $(e) ;
+
+ # Argument checking is set up to call exit(1) directly on failure, and we
+ # cannot hijack that with try, so it is better not to run this test by
+ # default. We could fix this by having errors look up and invoke the EXIT
+ # rule instead; EXIT can be hijacked (;-)
+ if --fail-typecheck in [ modules.peek : ARGV ]
+ {
+ try ;
+ {
+ expect_derived2 $(a) ;
+ }
+ catch
+ "Expected an instance of derived2 but got" instead
+ ;
+ }
+
+ #try ;
+ #{
+ # new bad_subclass ;
+ #}
+ #catch
+ # bad_subclass.bad_subclass failed to call base class constructor
+ # myclass.__init__
+ # ;
+
+ #try ;
+ #{
+ # class bad_subclass ;
+ #}
+ #catch bad_subclass has already been declared ;
+
+ assert.result 3 4 5 : $(a).get-x ;
+ assert.result 4 5 : $(a).get-y ;
+ assert.result 4 : $(b).get-x ;
+ assert.result X : $(b).get-y ;
+ assert.result 4 : $(b).get-z ;
+ assert.result 1 : $(c).get-x ;
+ assert.result 2 : $(c).get-y ;
+ assert.result 4 5 : $(a).f ;
+ assert.result derived1.g : $(b).f ;
+ assert.result derived2.g : $(c).f ;
+ assert.result derived2.g : $(d).f ;
+
+ assert.result 10 : $(b).get-foo ;
+
+ $(a).invariant ;
+ $(b).invariant2 ;
+ $(b).invariant3 ;
+
+ # Check that the __class__ attribute is getting properly set.
+ assert.result myclass : $(a).get-class ;
+ assert.result derived1 : $(b).get-class ;
+ assert.result $(a) : $(a).get-instance ;
+
+ $(a).set-x a.x ;
+ $(b).set-x b.x ;
+ $(c).set-x c.x ;
+ $(d).set-x d.x ;
+ assert.result a.x : $(a).get-x ;
+ assert.result b.x : $(b).get-x ;
+ assert.result c.x : $(c).get-x ;
+ assert.result d.x : $(d).get-x ;
+
+ class derived3 : derived1 derived2
+ {
+ rule __init__ ( )
+ {
+ }
+ }
+
+ assert.result : bases myclass ;
+ assert.result myclass : bases derived1 ;
+ assert.result myclass : bases derived2 ;
+ assert.result derived1 derived2 : bases derived3 ;
+
+ assert.true is-derived derived1 : myclass ;
+ assert.true is-derived derived2 : myclass ;
+ assert.true is-derived derived3 : derived1 ;
+ assert.true is-derived derived3 : derived2 ;
+ assert.true is-derived derived3 : derived1 derived2 myclass ;
+ assert.true is-derived derived3 : myclass ;
+
+ assert.false is-derived myclass : derived1 ;
+
+ assert.true is-instance $(a) ;
+ assert.false is-instance bar ;
+
+ assert.true is-a $(a) : myclass ;
+ assert.true is-a $(c) : derived2 ;
+ assert.true is-a $(d) : myclass ;
+ assert.false is-a literal : myclass ;
+}
diff --git a/src/boost/tools/build/src/kernel/errors.jam b/src/boost/tools/build/src/kernel/errors.jam
new file mode 100644
index 000000000..531f37f36
--- /dev/null
+++ b/src/boost/tools/build/src/kernel/errors.jam
@@ -0,0 +1,287 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Print a stack backtrace leading to this rule's caller. Each argument
+# represents a line of output to be printed after the first line of the
+# backtrace.
+#
+rule backtrace ( skip-frames prefix messages * : * )
+{
+ local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
+ local drop-elements = $(frame-skips[$(skip-frames)]) ;
+ if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
+ {
+ ECHO "warning: backtrace doesn't support skipping $(skip-frames) "
+ "frames; using 1 instead." ;
+ drop-elements = 5 ;
+ }
+
+ local args = $(.args) ;
+ if $(.user-modules-only)
+ {
+ local bt = [ nearest-user-location ] ;
+ if $(bt)
+ {
+ ECHO $(prefix) at $(bt) ;
+ }
+ for local n in $(args)
+ {
+ if $($(n))-is-defined
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ }
+ else
+ {
+ # Get the whole backtrace, then drop the initial quadruples
+ # corresponding to the frames that must be skipped.
+ local bt = [ BACKTRACE ] ;
+ bt = $(bt[$(drop-elements)-]) ;
+
+ while $(bt)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ ECHO "$(bt[1]):$(bt[2]):" "in" $(bt[4]) "from module" $(m) ;
+
+ # The first time through, print each argument on a separate line.
+ for local n in $(args)
+ {
+ if $($(n))-is-defined
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ args = ; # Kill args so that this never happens again.
+
+ # Move on to the next quadruple.
+ bt = $(bt[5-]) ;
+ }
+ }
+}
+
+.args ?= messages 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+.disabled ?= ;
+.last-error-$(.args) ?= ;
+
+
+# try-catch --
+#
+# This is not really an exception-handling mechanism, but it does allow us to
+# perform some error-checking on our error-checking. Errors are suppressed after
+# a try, and the first one is recorded. Use catch to check that the error
+# message matched expectations.
+
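+# A typical use, mirroring the __test__ rule at the bottom of this module:
+#
+#   try ;
+#   {
+#       error an error occurred : somewhere ;
+#   }
+#   catch an error occurred : somewhere ;
+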
+# Begin looking for error messages.
+#
+rule try ( )
+{
+ .disabled += true ;
+ .last-error-$(.args) = ;
+}
+
+
+# Stop looking for error messages; generate an error if an argument of messages
+# is not found in the corresponding argument in the error call.
+#
+rule catch ( messages * : * )
+{
+ .disabled = $(.disabled[2-]) ; # Pop the stack.
+
+ import sequence ;
+
+ if ! $(.last-error-$(.args))-is-defined
+ {
+ error-skip-frames 3 expected an error, but none occurred ;
+ }
+ else
+ {
+ for local n in $(.args)
+ {
+ if ! $($(n)) in $(.last-error-$(n))
+ {
+ local v = [ sequence.join $($(n)) : " " ] ;
+ v ?= "" ;
+ local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
+
+ .last-error-$(.args) = ;
+ error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
+ : got \"$(joined)\" instead ;
+ }
+ }
+ }
+}
+
+
+rule error-skip-frames ( skip-frames messages * : * )
+{
+ if ! $(.disabled)
+ {
+ backtrace $(skip-frames) "error:" $(messages) : $(2) : $(3) : $(4) : $(5)
+ : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14)
+ : $(15) : $(16) : $(17) : $(18) : $(19) ;
+ EXIT ;
+ }
+ else if ! $(.last-error-$(.args))
+ {
+ for local n in $(.args)
+ {
+ # Add an extra empty string so that we always have something in the
+ # event of an error.
+ .last-error-$(n) = $($(n)) "" ;
+ }
+ }
+}
+
+if --no-error-backtrace in [ modules.peek : ARGV ]
+{
+ .no-error-backtrace = true ;
+}
+
+
+# Print an error message with a stack backtrace and exit.
+#
+rule error ( messages * : * )
+{
+ if $(.no-error-backtrace)
+ {
+ local first-printed ;
+ # Print each argument on a separate line.
+ for local n in $(.args)
+ {
+ if $($(n))-is-defined
+ {
+ if ! $(first-printed)
+ {
+ ECHO "error:" $($(n)) ;
+ first-printed = true ;
+ }
+ else
+ {
+ ECHO $($(n)) ;
+ }
+ }
+ }
+ EXIT ;
+ }
+ else
+ {
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) :
+ $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
+ : $(17) : $(18) : $(19) ;
+ }
+}
+
+
+# Same as 'error', but the generated backtrace will include only user files.
+#
+rule user-error ( messages * : * )
+{
+ .user-modules-only = 1 ;
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) :
+ $(18) : $(19) ;
+}
+
+
+# Print a warning message with a stack backtrace and exit.
+#
+rule warning
+{
+ backtrace 2 "warning:" $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) :
+ $(18) : $(19) ;
+}
+
+
+# Convert an arbitrary argument list into a list with ":" separators and quoted
+# elements representing the same information. This is mostly useful for
+# formatting descriptions of arguments with which a rule was called when
+# reporting an error.
+#
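+# For example (illustrative):
+#
+#   [ lol->list a b : c ]   # -> "a" "b" ":" "c" (each element quoted)
+#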
+rule lol->list ( * )
+{
+ local result ;
+ local remaining = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+ while $($(remaining))
+ {
+ local n = $(remaining[1]) ;
+ remaining = $(remaining[2-]) ;
+
+ if $(n) != 1
+ {
+ result += ":" ;
+ }
+ result += \"$($(n))\" ;
+ }
+ return $(result) ;
+}
+
+
+# Return the file:line for the nearest entry in backtrace which corresponds to a
+# user module.
+#
+rule nearest-user-location ( )
+{
+ local bt = [ BACKTRACE ] ;
+
+ local result ;
+ while $(bt) && ! $(result)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ local user-modules = "([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-config.jam|project-root.jam)" ;
+
+ if [ MATCH $(user-modules) : $(bt[1]:D=) ]
+ {
+ result = "$(bt[1]):$(bt[2])" ;
+ }
+ bt = $(bt[5-]) ;
+ }
+ return $(result) ;
+}
+
+
+# If optimized rule is available in Jam, use it.
+if NEAREST_USER_LOCATION in [ RULENAMES ]
+{
+ rule nearest-user-location ( )
+ {
+ local r = [ NEAREST_USER_LOCATION ] ;
+ return "$(r[1]):$(r[2])" ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Show that we can correctly catch an expected error.
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : somewhere ;
+
+ # Show that unexpected errors generate real errors.
+ try ;
+ {
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : nowhere ;
+ }
+ catch expected \"nowhere\" in argument 2 ;
+
+ # Show that not catching an error where one was expected is an error.
+ try ;
+ {
+ try ;
+ {
+ }
+ catch ;
+ }
+ catch expected an error, but none occurred ;
+}
diff --git a/src/boost/tools/build/src/kernel/modules.jam b/src/boost/tools/build/src/kernel/modules.jam
new file mode 100644
index 000000000..6915fa750
--- /dev/null
+++ b/src/boost/tools/build/src/kernel/modules.jam
@@ -0,0 +1,365 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Essentially an include guard; ensures that no module is loaded multiple times.
+.loaded ?= ;
+
+# A list of modules currently being loaded for error reporting of circular
+# dependencies.
+.loading ?= ;
+
+# A list of modules needing to be tested using their __test__ rule.
+.untested ?= ;
+
+# A list of modules which have been tested using their __test__ rule.
+.tested ?= ;
+
+
+# Runs internal Boost Build unit tests for the specified module. The module's
+# __test__ rule is executed in its own module to eliminate any inadvertent
+# effects of testing module dependencies (such as assert) on the module itself.
+#
+local rule run-module-test ( m )
+{
+ local tested-modules = [ modules.peek modules : .tested ] ;
+
+ if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
+ && ( ( --debug in $(argv) ) || ( "--debug-module=$(m)" in $(argv) ) )
+ {
+ modules.poke modules : .tested : $(tested-modules) $(m) ;
+
+ if ! ( __test__ in [ RULENAMES $(m) ] )
+ {
+ local argv = [ peek : ARGV ] ;
+ if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
+ {
+ ECHO "warning:" no __test__ rule defined in module $(m) ;
+ }
+ }
+ else
+ {
+ if ! ( --quiet in $(argv) )
+ {
+ ECHO testing module $(m)... ;
+ }
+
+ local test-module = __test-$(m)__ ;
+ IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m)
+ ] ;
+ IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
+ module $(test-module)
+ {
+ __test__ ;
+ }
+ }
+ }
+}
+
+
+# Return the binding of the given module.
+#
+rule binding ( module )
+{
+ return $($(module).__binding__) ;
+}
+
+
+# Sets the module-local value of a variable. This is the most reliable way to
+# set a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
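+# For example (illustrative; the module and variable names are hypothetical):
+#
+#   modules.poke my-module : MY-VAR : a b c ;
+#   local v = [ modules.peek my-module : MY-VAR ] ;  # v is now: a b c
+#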
+rule poke ( module-name ? : variables + : value * )
+{
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+}
+
+
+# Returns the module-local value of a variable. This is the most reliable way to
+# examine a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule peek ( module-name ? : variables + )
+{
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+}
+
+
+# Call the given rule locally in the given module. Use this for rules accepting
+# rule names as arguments, so that the passed rule may be invoked in the context
+# of the rule's caller (for example, if the rule accesses module globals or is a
+# local rule). Note that rules called this way may accept at most 18 parameters.
+#
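+# For example (illustrative; the module and rule names are hypothetical):
+#
+#   local r = [ modules.call-in my-module : my-rule arg1 : arg2 ] ;
+#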
+rule call-in ( module-name ? : rule-name args * : * )
+{
+ module $(module-name)
+ {
+ return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) :
+ $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) :
+ $(19) ] ;
+ }
+}
+
+
+# Given a possibly qualified rule name and arguments, remove any initial module
+# qualification from the rule and invoke it in that module. If there is no
+# module qualification, the rule is invoked in the global module. Note that
+# rules called this way may accept at most 18 parameters.
+#
+rule call-locally ( qualified-rule-name args * : * )
+{
+ local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
+ local rule-name = $(module-rule[2]) ;
+ rule-name ?= $(qualified-rule-name) ;
+ # We pass only 18 parameters here since Boost Jam allows at most 19 rule
+ # parameter positions and the call-in rule already uses up the initial
+ # position for the module name.
+ return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
+ $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13)
+ : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] ;
+}
+
+
+# Load the indicated module if it is not already loaded.
+#
+rule load (
+ module-name # Name of module to load. Rules will be defined in this
+ # module.
+ : filename ? # (partial) path to file; Defaults to $(module-name).jam.
+ : search * # Directories in which to search for filename. Defaults to
+ # $(BOOST_BUILD_PATH).
+)
+{
+ # Avoid loading modules twice.
+ if ! ( $(module-name) in $(.loaded) )
+ {
+ filename ?= $(module-name).jam ;
+
+ # Mark the module loaded so we do not try to load it recursively.
+ .loaded += $(module-name:B) ;
+
+ # Suppress tests if any module loads are already in progress.
+ local suppress-test = $(.loading[1]) ;
+
+ # Push this module on the loading stack.
+ .loading += $(module-name) ;
+
+ # Remember that it is untested.
+ .untested += $(module-name) ;
+
+ # Insert the new module's __name__ and __file__ globals.
+ poke $(module-name) : __name__ : $(module-name) ;
+ poke $(module-name) : __file__ : $(filename) ;
+
+ module $(module-name)
+ {
+ # Add some grist so that the module will have a unique target name.
+ local module-target = $(__file__:G=module@) ;
+
+ local search = $(3) ;
+ search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
+ SEARCH on $(module-target) = $(search) ;
+ BINDRULE on $(module-target) = modules.record-binding ;
+
+ include $(module-target) ;
+
+ # Allow the module to see its own names with full qualification.
+ local rules = [ RULENAMES $(__name__) ] ;
+ IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
+ }
+
+ if $(module-name) != modules && ! [ binding $(module-name) ]
+ {
+ import errors ;
+ errors.error "Could not find module" $(module-name) in $(search) ;
+ }
+
+ # Pop the loading stack. Must happen before testing or we will run into
+ # a circular loading dependency.
+ .loading = $(.loading[1--2]) ;
+
+ # Run any pending tests if this is an outer load.
+ if ! $(suppress-test)
+ {
+ local argv = [ peek : ARGV ] ;
+ for local m in $(.untested)
+ {
+ run-module-test $(m) ;
+ }
+ .untested = ;
+ }
+ }
+ else if $(module-name) in $(.loading)
+ {
+ import errors ;
+ errors.error loading \"$(module-name)\"
+ : circular module loading "dependency:"
+ : $(.loading)" ->" $(module-name) ;
+ }
+}
+
+
+# This helper is used by load (above) to record the binding (path) of each
+# loaded module.
+#
+rule record-binding ( module-target : binding )
+{
+ $(.loading[-1]).__binding__ = $(binding) ;
+}
+
+
+# Transform each path in the list, with all backslashes converted to forward
+# slashes and all detectable redundancy removed. Something like this is probably
+# needed in path.jam, but I am not sure of that; I do not understand it, and I
+# am not ready to move all of path.jam into the kernel.
+#
+local rule normalize-raw-paths ( paths * )
+{
+ local result ;
+ for p in $(paths:T)
+ {
+ result += [ NORMALIZE_PATH $(p) ] ;
+ }
+ return $(result) ;
+}
+
+
+.cwd = [ PWD ] ;
+
+
+# Load the indicated module and import rule names into the current module. Any
+# members of rules-opt will be available without qualification in the caller's
+# module. Any members of rename-opt will be taken as the names of the rules in
+# the caller's module, in place of the names they have in the imported module.
+# If rules-opt = '*', all rules from the indicated module are imported into the
+# caller's module. If rename-opt is supplied, it must have the same number of
+# elements as rules-opt.
+#
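+# For example (illustrative):
+#
+#   import sequence ;                    # qualified use: sequence.join ...
+#   import "class" : new ;               # unqualified use: new ...
+#   import errors : error : my-error ;   # renamed use: my-error ...
+#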
+rule import ( module-names + : rules-opt * : rename-opt * )
+{
+ if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
+ {
+ import errors ;
+ errors.error "Rule aliasing is only available for explicit imports." ;
+ }
+
+ if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
+ {
+ import errors ;
+ errors.error "When loading multiple modules, no specific rules or"
+ "renaming is allowed" ;
+ }
+
+ local caller = [ CALLER_MODULE ] ;
+
+ # Import each specified module
+ for local m in $(module-names)
+ {
+ local module-name = $(m:B) ;
+ if ! $(module-name) in $(.loaded)
+ {
+ # If the importing module is not already in the BOOST_BUILD_PATH,
+ # prepend it to the path. We do not want to invert the search order
+ # of modules that are already there.
+
+ local caller-location ;
+ if $(caller)
+ {
+ caller-location = [ binding $(caller) ] ;
+ caller-location = $(caller-location:D) ;
+ caller-location = [ normalize-raw-paths
+ $(caller-location:R=$(.cwd)) ] ;
+ }
+
+ local search = [ peek : BOOST_BUILD_PATH ] ;
+ search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
+
+ if $(caller-location) && ! $(caller-location) in $(search)
+ {
+ search = $(caller-location) $(search) ;
+ }
+
+ if $(m:D)
+ {
+ search = $(caller-location)/$(m:D) $(search)/$(m:D) $(search) ;
+ }
+
+ load $(module-name) : : $(search) ;
+ }
+
+ IMPORT_MODULE $(module-name) : $(caller) ;
+
+ if $(rules-opt)
+ {
+ local source-names ;
+ if $(rules-opt) = *
+ {
+ local all-rules = [ RULENAMES $(module-name) ] ;
+ source-names = $(all-rules) ;
+ }
+ else
+ {
+ source-names = $(rules-opt) ;
+ }
+ local target-names = $(rename-opt) ;
+ target-names ?= $(source-names) ;
+ IMPORT $(module-name) : $(source-names) : $(caller) : $(target-names) ;
+ }
+ }
+}
+
+
+# Define exported copies in $(target-module) of all rules exported from
+# $(source-module). Also make them available in the global module with
+# qualification, so that it is just as though the rules were defined originally
+# in $(target-module).
+#
+rule clone-rules ( source-module target-module )
+{
+ local r = [ RULENAMES $(source-module) ] ;
+ IMPORT $(source-module) : $(r) : $(target-module) : $(r) : LOCALIZE ;
+ EXPORT $(target-module) : $(r) ;
+ IMPORT $(target-module) : $(r) : : $(target-module).$(r) ;
+}
+
+
+# These rules need to be available in all modules to implement module loading
+# itself and other fundamental operations.
+local globalize = peek poke record-binding ;
+IMPORT modules : $(globalize) : : modules.$(globalize) ;
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import modules : normalize-raw-paths ;
+
+ module modules.__test__
+ {
+ foo = bar ;
+ }
+
+ assert.result bar : peek modules.__test__ : foo ;
+
+ poke modules.__test__ : foo : bar baz ;
+ assert.result bar baz : peek modules.__test__ : foo ;
+
+ assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
+ assert.result . : normalize-raw-paths . ;
+ assert.result .. : normalize-raw-paths .. ;
+ assert.result ../.. : normalize-raw-paths ../.. ;
+ assert.result .. : normalize-raw-paths ./.. ;
+ assert.result / / : normalize-raw-paths / \\ ;
+ assert.result a : normalize-raw-paths a ;
+ assert.result a : normalize-raw-paths a/ ;
+ assert.result /a : normalize-raw-paths /a/ ;
+ assert.result / : normalize-raw-paths /a/.. ;
+}
diff --git a/src/boost/tools/build/src/manager.py b/src/boost/tools/build/src/manager.py
new file mode 100644
index 000000000..9c1e05733
--- /dev/null
+++ b/src/boost/tools/build/src/manager.py
@@ -0,0 +1,110 @@
+# Copyright Pedro Ferreira 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import bjam
+
+# To simplify implementation of tools level, we'll
+# have a global variable keeping the current manager.
+the_manager = None
+def get_manager():
+ return the_manager
+
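+# Illustrative usage (not part of the original module): other modules obtain
+# the singleton and reach the registries through it, e.g.
+#
+#   m = get_manager()
+#   m.projects()   # the ProjectRegistry
+#   m.engine()     # the build engine passed to the constructor
+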
+class Manager:
+ """ This class is a facade to the Boost.Build system.
+ It serves as the root to access all data structures in use.
+ """
+
+ def __init__ (self, engine, global_build_dir):
+ """ Constructor.
+ engine: the build engine that will actually construct the targets.
+ global_build_dir: the global build output directory, handed on to the
+ ProjectRegistry.
+ """
+ from build.virtual_target import VirtualTargetRegistry
+ from build.targets import TargetRegistry
+ from build.project import ProjectRegistry
+ from build.scanner import ScannerRegistry
+ from build.errors import Errors
+ from b2.util.logger import NullLogger
+ from build import build_request, property_set, feature
+
+ self.engine_ = engine
+ self.virtual_targets_ = VirtualTargetRegistry (self)
+ self.projects_ = ProjectRegistry (self, global_build_dir)
+ self.targets_ = TargetRegistry ()
+ self.logger_ = NullLogger ()
+ self.scanners_ = ScannerRegistry (self)
+ self.argv_ = bjam.variable("ARGV")
+ self.boost_build_path_ = bjam.variable("BOOST_BUILD_PATH")
+ self.errors_ = Errors()
+ self.command_line_free_features_ = property_set.empty()
+
+ global the_manager
+ the_manager = self
+
+ def scanners (self):
+ return self.scanners_
+
+ def engine (self):
+ return self.engine_
+
+ def virtual_targets (self):
+ return self.virtual_targets_
+
+ def targets (self):
+ return self.targets_
+
+ def projects (self):
+ return self.projects_
+
+ def argv (self):
+ return self.argv_
+
+ def logger (self):
+ return self.logger_
+
+ def set_logger (self, logger):
+ self.logger_ = logger
+
+ def errors (self):
+ return self.errors_
+
+ def getenv(self, name):
+ return bjam.variable(name)
+
+ def boost_build_path(self):
+ return self.boost_build_path_
+
+ def command_line_free_features(self):
+ return self.command_line_free_features_
+
+ def set_command_line_free_features(self, v):
+ self.command_line_free_features_ = v
+
+ def construct (self, properties = [], targets = []):
+ """ Constructs the dependency graph.
+ properties: the build properties.
+ targets: the targets to consider. If none is specified, uses all.
+ """
+ if not targets:
+ for name, project in self.projects ().projects ():
+ targets.append (project.target ())
+
+ property_groups = build_request.expand_no_defaults (properties)
+
+ virtual_targets = []
+ build_prop_sets = []
+ for p in property_groups:
+ build_prop_sets.append (property_set.create (feature.split (p)))
+
+ if not build_prop_sets:
+ build_prop_sets = [property_set.empty ()]
+
+ for build_properties in build_prop_sets:
+ for target in targets:
+ result = target.generate (build_properties)
+ virtual_targets.extend (result.targets ())
+
+ actual_targets = []
+ for virtual_target in virtual_targets:
+ actual_targets.extend (virtual_target.actualize ())
+
diff --git a/src/boost/tools/build/src/options/help.jam b/src/boost/tools/build/src/options/help.jam
new file mode 100644
index 000000000..cbd303776
--- /dev/null
+++ b/src/boost/tools/build/src/options/help.jam
@@ -0,0 +1,222 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Rene Rivera
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module is the plug-in handler for the --help and --help-.*
+# command-line options
+import modules ;
+import assert ;
+import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
+import sequence ;
+import set ;
+import project ;
+import print ;
+import os ;
+import version ;
+import path ;
+
+# List of names that look like possible modules but really are not.
+#
+.not-modules =
+ boost-build bootstrap site-config test user-config
+ -tools allyourbase boost-base features python stlport testing unit-tests ;
+
+# The help system options are parsed here and handed off to the doc
+# module to translate into documentation requests and actions. The
+# understood options are:
+#
+# --help-disable-<option>
+# --help-doc-options
+# --help-enable-<option>
+# --help-internal
+# --help-options
+# --help-usage
+# --help-output <type>
+# --help-output-file <file>
+# --help [<module-or-class>]
+#
+rule process (
+ command # The option.
+ : values * # The values, starting after the "=".
+ )
+{
+ assert.result --help : MATCH ^(--help).* : $(command) ;
+ local did-help = ;
+ switch $(command)
+ {
+ case --help-internal :
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules += . ;
+ local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
+ local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
+ local modules-to-list =
+ [ sequence.insertion-sort
+ [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
+ local modules-to-scan ;
+ for local m in $(modules-to-list)
+ {
+ local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
+ modules-to-scan += $(module-files[1]) ;
+ }
+ do-scan $(modules-to-scan) : print-help-all ;
+ did-help = true ;
+
+ case --help-enable-* :
+ local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) : enabled ;
+ did-help = true ;
+
+ case --help-disable-* :
+ local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) ;
+ did-help = true ;
+
+ case --help-output :
+ set-output $(values[1]) ;
+ did-help = true ;
+
+ case --help-output-file :
+ set-output-file $(values[1]) ;
+ did-help = true ;
+
+ case --help-doc-options :
+ local doc-module-spec = [ split-symbol doc ] ;
+ do-scan $(doc-module-spec[1]) : print-help-options ;
+ did-help = true ;
+
+ case --help-options :
+ print-help-usage ;
+ local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ;
+ local plugin-dir = options ;
+ local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) : *.jam ] ;
+ if $(option-files)
+ {
+ for local file in $(option-files)
+ {
+ do-scan $(file) : print-help-options ;
+ }
+ }
+ did-help = true ;
+
+ case --help :
+ local spec = $(values[1]) ;
+ if $(spec)
+ {
+ local spec-parts = [ split-symbol $(spec) ] ;
+ if $(spec-parts)
+ {
+ if $(spec-parts[2])
+ {
+ do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
+ }
+ else
+ {
+ do-scan $(spec-parts[1]) : print-help-module ;
+ }
+ }
+ else
+ {
+ EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
+ }
+ }
+ else
+ {
+ version.print ;
+ ECHO ;
+ # First print documentation from the current Jamfile, if any.
+ # FIXME: Generally, this duplication of project.jam logic is bad.
+ local names = [ modules.peek project : JAMROOT ]
+ [ modules.peek project : JAMFILE ] ;
+ local project-file = [ path.glob . : $(names) ] ;
+ if ! $(project-file)
+ {
+ project-file = [ path.glob-in-parents . : $(names) ] ;
+ }
+
+ for local p in $(project-file)
+ {
+ do-scan $(p) : print-help-project $(p) ;
+ }
+
+ # Next any user-config help.
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local user-config = [ GLOB $(user-path) : user-config.jam ] ;
+ if $(user-config)
+ {
+ do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
+ }
+
+ # Next any site-config help.
+ local site-config = [ GLOB $(user-path) : site-config.jam ] ;
+ if $(site-config)
+ {
+ do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
+ }
+
+ # Then the overall help.
+ print-help-top ;
+ }
+ did-help = true ;
+ }
+ if $(did-help)
+ {
+ UPDATE all ;
+ NOCARE all ;
+ }
+ return $(did-help) ;
+}
+
+# Split a reference to a symbol into module and symbol parts.
+#
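+# For example (illustrative): [ split-symbol doc.do-scan ] returns the path to
+# doc.jam followed by the symbol name "do-scan", while [ split-symbol doc ]
+# returns just the module path.
+#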
+local rule split-symbol (
+ symbol # The symbol to split.
+ )
+{
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules += . ;
+ local module-name = $(symbol) ;
+ local symbol-name = ;
+ local result = ;
+ while ! $(result)
+ {
+ local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
+ if $(module-path)
+ {
+ # The 'module-name' in fact refers to a module. Return the full
+ # module path and a symbol within it. If the 'symbol' passed to this
+ # rule is already a module, 'symbol-name' will be empty. Otherwise,
+ # it's initialized on the previous loop iteration.
+ # In case there are several modules by this name,
+ # use the first one.
+ result = $(module-path[1]) $(symbol-name) ;
+ }
+ else
+ {
+ if ! $(module-name:S)
+ {
+ result = - ;
+ }
+ else
+ {
+ local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
+ if $(symbol-name)
+ {
+ symbol-name = $(next-symbol-part).$(symbol-name) ;
+ }
+ else
+ {
+ symbol-name = $(next-symbol-part) ;
+ }
+ module-name = $(module-name:B) ;
+ }
+ }
+ }
+ if $(result) != -
+ {
+ return $(result) ;
+ }
+}
diff --git a/src/boost/tools/build/src/tools/__init__.py b/src/boost/tools/build/src/tools/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/src/tools/__init__.py
diff --git a/src/boost/tools/build/src/tools/acc.jam b/src/boost/tools/build/src/tools/acc.jam
new file mode 100644
index 000000000..f7ecfe8c3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/acc.jam
@@ -0,0 +1,160 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Copyright Boris Gubenko 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.acc]]
+= HP aC++ compiler
+
+The `acc` module supports the
+http://h21007.www2.hp.com/dspp/tech/tech_TechSoftwareDetailPage_IDX/1,1703,1740,00.html[HP
+aC++ compiler] for the HP-UX operating system.
+
+The module is initialized using the following syntax:
+
+----
+using acc : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, the `aCC` binary will be searched for in
+PATH.
+
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
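+For example (an illustrative configuration; the version and path are
+hypothetical):
+
+----
+using acc : 3.37 : /opt/aCC/bin/aCC : <cflags>-g ;
+----
+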
+|# # end::doc[]
+
+#
+# B2 V2 toolset for the HP aC++ compiler.
+#
+
+import toolset : flags ;
+import feature ;
+import generators ;
+import common ;
+
+feature.extend toolset : acc ;
+toolset.inherit acc : unix ;
+generators.override builtin.lib-generator : acc.prebuilt ;
+generators.override acc.searched-lib-generator : searched-lib-generator ;
+
+# Configures the acc toolset.
+rule init ( version ? : user-provided-command * : options * )
+{
+ local condition = [ common.check-init-parameters acc
+ : version $(version) ] ;
+
+ local command = [ common.get-invocation-command acc : aCC
+ : $(user-provided-command) ] ;
+
+ common.handle-options acc : $(condition) : $(command) : $(options) ;
+}
+
+
+# Declare generators
+generators.register-c-compiler acc.compile.c : C : OBJ : <toolset>acc ;
+generators.register-c-compiler acc.compile.c++ : CPP : OBJ : <toolset>acc ;
+
+# Declare flags.
+flags acc CFLAGS <optimization>off : ;
+flags acc CFLAGS <optimization>speed : -O3 ;
+flags acc CFLAGS <optimization>space : -O2 ;
+
+flags acc CFLAGS <inlining>off : +d ;
+flags acc CFLAGS <inlining>on : ;
+flags acc CFLAGS <inlining>full : ;
+
+flags acc C++FLAGS <exception-handling>off : ;
+flags acc C++FLAGS <exception-handling>on : ;
+
+flags acc C++FLAGS <rtti>off : ;
+flags acc C++FLAGS <rtti>on : ;
+
+# We want the full path to the sources in the debug symbols because otherwise
+# the debugger won't find the sources when we use boost.build.
+flags acc CFLAGS <debug-symbols>on : -g ;
+flags acc LINKFLAGS <debug-symbols>on : -g ;
+flags acc LINKFLAGS <debug-symbols>off : -s ;
+
+# V2 does not have <shared-linkable>, not sure what this meant in V1.
+# flags acc CFLAGS <shared-linkable>true : +Z ;
+
+flags acc CFLAGS <profiling>on : -pg ;
+flags acc LINKFLAGS <profiling>on : -pg ;
+
+flags acc CFLAGS <address-model>64 : +DD64 ;
+flags acc LINKFLAGS <address-model>64 : +DD64 ;
+
+# It is unknown if there is a separate option for rpath used only
+# at link time, similar to -rpath-link in GNU. We'll use -L.
+flags acc RPATH_LINK : <xdll-path> ;
+
+flags acc CFLAGS <cflags> ;
+flags acc C++FLAGS <cxxflags> ;
+flags acc DEFINES <define> ;
+flags acc UNDEFS <undef> ;
+flags acc HDRS <include> ;
+flags acc STDHDRS <sysinclude> ;
+flags acc LINKFLAGS <linkflags> ;
+flags acc ARFLAGS <arflags> ;
+
+flags acc LIBPATH <library-path> ;
+flags acc NEEDLIBS <library-file> ;
+flags acc FINDLIBS <find-shared-library> ;
+flags acc FINDLIBS <find-static-library> ;
+
+# Pass the -mt option when the multi-threaded model is requested.
+flags acc CFLAGS <threading>multi : -mt ;
+flags acc LINKFLAGS <threading>multi : -mt ;
+
+flags acc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+actions acc.link bind NEEDLIBS
+{
+ $(CONFIG_COMMAND) -AA $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
+}
+
+SPACE = " " ;
+actions acc.link.dll bind NEEDLIBS
+{
+ $(CONFIG_COMMAND) -AA -b $(LINKFLAGS) -o "$(<[1])" -L"$(RPATH_LINK)" -Wl,+h$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS)
+}
+
+actions acc.compile.c
+{
+ cc -c -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
+}
+
+actions acc.compile.c++
+{
+ $(CONFIG_COMMAND) -AA -c -Wc,--pending_instantiations=$(TEMPLATE_DEPTH) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" $(OPTIONS)
+}
+
+actions updated together piecemeal acc.archive
+{
+ ar ru$(ARFLAGS:E="") "$(<)" "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/asciidoctor.jam b/src/boost/tools/build/src/tools/asciidoctor.jam
new file mode 100644
index 000000000..73d5c9348
--- /dev/null
+++ b/src/boost/tools/build/src/tools/asciidoctor.jam
@@ -0,0 +1,212 @@
+#|
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+#| tag::doc[]
+= Asciidoctor
+
+The asciidoctor tool converts the asciidoc documentation format to various
+backend formats for either viewing or further processing by documentation
+tools. This tool supports the baseline asciidoctor distribution (i.e. the
+Ruby-based tool).
+|# # end::doc[]
+
+import common ;
+import feature ;
+import generators ;
+import toolset ;
+import "class" : new ;
+
+feature.feature asciidoctor : : implicit propagated symmetric ;
+
+#| tag::doc[]
+
+== Feature: `asciidoctor-attribute`
+
+Defines arbitrary asciidoctor attributes. The value of the feature should be
+specified with the CLI syntax for attributes.
+For example, to use it as a target requirement:
+
+```
+html example : example.adoc :
+ <asciidoctor-attribute>idprefix=ex ;
+```
+
+This is a `free` feature and is not `propagated`. That is, it applies only to
+the target on which it is specified.
+
+|# # end::doc[]
+
+feature.subfeature asciidoctor : attribute : : free ;
+
+#| tag::doc[]
+
+== Feature: `asciidoctor-doctype`
+
+Specifies the `doctype` to use for generating the output format. Allowed
+`doctype` values are: `article`, `book`, `manpage`, and `inline`.
+
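+For example, to have the HTML output generated with the `book` doctype (the
+target and file names here are illustrative):
+
+```
+html manual : manual.adoc :
+    <asciidoctor-doctype>book ;
+```
+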
+|# # end::doc[]
+
+feature.subfeature asciidoctor : doctype
+ : article book manpage inline
+ : optional propagated ;
+
+#| tag::doc[]
+
+== Feature: `asciidoctor-backend`
+
+Specifies the `backend` to use to produce output from the source asciidoc.
+This feature is automatically applied to fit the build target type. For
+example, when specifying an `html` target for an `asciidoc` source:
+
+```
+html example : example.adoc ;
+```
+
+The target will by default acquire the `<asciidoctor-backend>html5`
+requirement. The defaults for each target type are:
+
+* `html`: `<asciidoctor-backend>html5`
+* `docbook`: `<asciidoctor-backend>docbook45`
+* `man`: `<asciidoctor-backend>manpage`
+* `pdf`: `<asciidoctor-backend>pdf`
+
+To override the default, specify the backend as a requirement on the target:
+
+```
+docbook example : example.adoc :
+ <asciidoctor-backend>docbook5 ;
+```
+
+Allowed `backend` values are: `html5`, `docbook45`, `docbook5`, `manpage`, and
+`pdf`.
+
+|# # end::doc[]
+
+feature.subfeature asciidoctor : backend
+ : html5 docbook5 docbook45 manpage pdf
+ : propagated symmetric composite ;
+
+#| tag::doc[]
+
+== Initialization
+
+To use the `asciidoctor` tool you need to declare it in a configuration file
+with the `using` rule. The initialization takes the following arguments:
+
+* `command`: The command, with any extra arguments, to execute.
+
+For example, you could insert the following in your `user-config.jam`:
+
+```
+using asciidoctor : "/usr/local/bin/asciidoctor" ;
+```
+
+If no `command` is given, it defaults to plain `asciidoctor`, with the
+assumption that `asciidoctor` is available in the search `PATH`.
+
+|# # end::doc[]
+
+rule init ( command * )
+{
+ if ! $(.initialized)
+ {
+ # Setup only if we were called via "using .. ;"
+ .initialized = true ;
+
+ # Register generators.
+ for local target-type in HTML MANPAGE PDF DOCBOOK
+ {
+ generators.register
+ [ new asciidoctor-generator asciidoctor.convert
+ : ASCIIDOC : $(target-type) ] ;
+ }
+
+ # Seriously bad kludge to prevent docbook generators from being
+ # considered when we are generating html directly.
+ # TODO: Design and implement a mechanism to resolve generator conflicts.
+ generators.override asciidoctor.convert : boostbook.docbook-to-onehtml ;
+ }
+
+    # The command. The default is bare asciidoctor.
+ command ?= asciidoctor ;
+ # We attempt to resolve each component of the command to account for
+ # script interpreter wrappers.
+ ASCIIDOCTOR = ;
+ for local c in $(command)
+ {
+ local t = [ common.find-tool $(c) ] ;
+ t ?= $(c) ;
+ ASCIIDOCTOR += $(t) ;
+ }
+}
+
+class asciidoctor-generator : generator
+{
+ import property-set ;
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ # ECHO *** asciidoctor-generator.run $(project) $(name) :: [ $(property-set).raw ] :: $(sources) ;
+
+ # We set a default backend based on the target type.
+ local backend = [ $(property-set).get <asciidoctor-backend> ] ;
+
+ # For now, we only accept a single adoc source.
+ if ( ! $(sources[2]) ) && ( [ $(sources[1]).type ] = ASCIIDOC )
+ {
+ # If no output name is specified, guess it from sources.
+ # NOTE: For some reason the "?=" conditional assign op doesn't
+ # work here. It assigns the value regardless, so work around it.
+ # TODO: Look into why that happens.
+ if ! $(name)
+ {
+ name = [ generator.determine-output-name $(sources) ] ;
+ }
+
+            # Depending on the kind of target, we set up the backend and
+            # any other options.
+ if ! $(backend)
+ {
+ switch [ $(property-set).get <main-target-type> ]
+ {
+ case HTML : backend = html5 ;
+ case DOCBOOK : backend = docbook45 ;
+ case MANPAGE : backend = manpage ;
+ case PDF : backend = pdf ;
+ }
+ }
+ }
+
+ # We build a reduced property set so that we are not toolset dependent.
+ local raw-set = <asciidoctor-backend>$(backend) ;
+ for local p in [ $(property-set).raw ]
+ {
+ if $(p:G) in <asciidoctor-attribute> <asciidoctor-doctype>
+ <flags>
+ {
+ raw-set += $(p) ;
+ }
+ }
+ raw-set = [ feature.expand-composites $(raw-set) ] ;
+ raw-set += [ $(property-set).incidental ] ;
+ property-set = [ property-set.create $(raw-set) ] ;
+ return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ;
+ }
+}
+
+_ = " " ;
+toolset.flags asciidoctor ATTRIBUTE : <asciidoctor-attribute> ;
+toolset.flags asciidoctor DOCTYPE : <asciidoctor-doctype> ;
+toolset.flags asciidoctor BACKEND : <asciidoctor-backend> ;
+toolset.flags asciidoctor FLAGS : <flags> ;
+
+feature.compose <asciidoctor-backend>pdf : <flags>"-r asciidoctor-pdf" ;
+
+actions convert
+{
+ "$(ASCIIDOCTOR)" -o$(_)"$(<:D=)" -D$(_)"$(<:D)" -b$(_)"$(BACKEND)" -a$(_)"$(ATTRIBUTE)" -d$(_)"$(DOCTYPE)" $(FLAGS) "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/auto-index.jam b/src/boost/tools/build/src/tools/auto-index.jam
new file mode 100644
index 000000000..41d04828a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/auto-index.jam
@@ -0,0 +1,204 @@
+
+import feature ;
+import generators ;
+import "class" ;
+import toolset ;
+import targets ;
+import "class" : new ;
+import project ;
+
+feature.feature auto-index : off "on" ;
+feature.feature auto-index-internal : off "on" ;
+feature.feature auto-index-verbose : off "on" ;
+feature.feature auto-index-no-duplicates : off "on" ;
+feature.feature auto-index-script : : free path ;
+feature.feature auto-index-prefix : : free path ;
+feature.feature auto-index-type : : free ;
+feature.feature auto-index-section-names : "on" off ;
+
+toolset.flags auto-index.auto-index FLAGS <auto-index-internal>on : --internal-index ;
+toolset.flags auto-index.auto-index SCRIPT <auto-index-script> ;
+toolset.flags auto-index.auto-index PREFIX <auto-index-prefix> ;
+toolset.flags auto-index.auto-index INDEX_TYPE <auto-index-type> ;
+toolset.flags auto-index.auto-index FLAGS <auto-index-verbose>on : --verbose ;
+toolset.flags auto-index.auto-index FLAGS <auto-index-no-duplicates>on : --no-duplicates ;
+toolset.flags auto-index.auto-index FLAGS <auto-index-section-names>off : --no-section-names ;
+
+# <auto-index-binary> shell command to run AutoIndex
+# <auto-index-binary-dependencies> targets to build AutoIndex from sources.
+feature.feature <auto-index-binary> : : free ;
+feature.feature <auto-index-binary-dependencies> : : free dependency ;
+
+class auto-index-generator : generator
+{
+ import common modules path targets build-system ;
+ rule run ( project name ? : property-set : sources * )
+ {
+ # AutoIndex invocation command and dependencies.
+ local auto-index-binary = [ modules.peek auto-index : .command ] ;
+ local auto-index-binary-dependencies ;
+
+ if $(auto-index-binary)
+ {
+ # Use user-supplied command.
+ auto-index-binary = [ common.get-invocation-command auto-index : auto-index : $(auto-index-binary) ] ;
+ }
+ else
+ {
+ # Search for AutoIndex sources in sensible places, like
+ # $(BOOST_ROOT)/tools/auto_index
+ # $(BOOST_BUILD_PATH)/../../auto_index
+
+ # And build auto-index executable from sources.
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ local boost-build-path = [ build-system.location ] ;
+ local boost-build-path2 = [ modules.peek : BOOST_BUILD_PATH ] ;
+
+ local auto-index-dir ;
+
+ if $(boost-root)
+ {
+ auto-index-dir += [ path.join $(boost-root) tools ] ;
+ }
+
+ if $(boost-build-path)
+ {
+ auto-index-dir += $(boost-build-path)/../.. ;
+ }
+ if $(boost-build-path2)
+ {
+ auto-index-dir += $(boost-build-path2)/.. ;
+ }
+
+ #ECHO $(auto-index-dir) ;
+ auto-index-dir = [ path.glob $(auto-index-dir) : auto_index ] ;
+ #ECHO $(auto-index-dir) ;
+
+        # If the AutoIndex source directory was found, mark its main target
+        # as a dependency for the current project. Otherwise, try to find
+        # 'auto-index' in the user's PATH.
+ if $(auto-index-dir)
+ {
+ auto-index-dir = [ path.make $(auto-index-dir[1]) ] ;
+ auto-index-dir = $(auto-index-dir)/build ;
+
+ #ECHO $(auto-index-dir) ;
+
+ # Get the main-target in AutoIndex directory.
+ local auto-index-main-target = [ targets.resolve-reference $(auto-index-dir) : $(project) ] ;
+
+ #ECHO $(auto-index-main-target) ;
+
+            # The first element holds the actual targets, the second holds the
+            # properties found in the target-id. We do not care about the
+            # latter since we have passed the id ourselves.
+ auto-index-main-target =
+ [ $(auto-index-main-target[1]).main-target auto_index ] ;
+
+ #ECHO $(auto-index-main-target) ;
+
+ auto-index-binary-dependencies =
+ [ $(auto-index-main-target).generate [ $(property-set).propagated ] ] ;
+
+ # Ignore usage-requirements returned as first element.
+ auto-index-binary-dependencies = $(auto-index-binary-dependencies[2-]) ;
+
+ # Some toolsets generate extra targets (e.g. RSP). We must mark
+ # all targets as dependencies for the project, but we will only
+ # use the EXE target for auto-index-to-boostbook translation.
+ for local target in $(auto-index-binary-dependencies)
+ {
+ if [ $(target).type ] = EXE
+ {
+ auto-index-binary =
+ [ path.native
+ [ path.join
+ [ $(target).path ]
+ [ $(target).name ]
+ ]
+ ] ;
+ }
+ }
+ }
+ else
+ {
+ ECHO "AutoIndex warning: The path to the auto-index executable was" ;
+ ECHO " not provided. Additionally, couldn't find AutoIndex" ;
+ ECHO " sources searching in" ;
+ ECHO " * BOOST_ROOT/tools/auto-index" ;
+ ECHO " * BOOST_BUILD_PATH/../../auto-index" ;
+ ECHO " Will now try to find a precompiled executable by searching" ;
+ ECHO " the PATH for 'auto-index'." ;
+ ECHO " To disable this warning in the future, or to completely" ;
+ ECHO " avoid compilation of auto-index, you can explicitly set the" ;
+            ECHO "            path to an auto-index executable command in user-config.jam" ;
+ ECHO " or site-config.jam with the call" ;
+ ECHO " using auto-index : /path/to/auto-index ;" ;
+
+ # As a last resort, search for 'auto-index' command in path. Note
+ # that even if the 'auto-index' command is not found,
+ # get-invocation-command will still return 'auto-index' and might
+ # generate an error while generating the virtual-target.
+
+ auto-index-binary = [ common.get-invocation-command auto-index : auto-index ] ;
+ }
+ }
+
+ # Add $(auto-index-binary-dependencies) as a dependency of the current
+ # project and set it as the <auto-index-binary> feature for the
+ # auto-index-to-boostbook rule, below.
+ property-set = [ $(property-set).add-raw
+ <dependency>$(auto-index-binary-dependencies)
+ <auto-index-binary>$(auto-index-binary)
+ <auto-index-binary-dependencies>$(auto-index-binary-dependencies)
+ ] ;
+
+ #ECHO "binary = " $(auto-index-binary) ;
+ #ECHO "dependencies = " $(auto-index-binary-dependencies) ;
+
+ return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ;
+ }
+}
+
+# Initialization of toolset.
+#
+# Parameters:
+# command ? -> path to AutoIndex executable.
+#
+# When the command is not supplied, the toolset will search for the AutoIndex
+# directory and compile the executable from source. If that fails, we still
+# search the PATH for 'auto-index'.
+#
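+# For example, to point the toolset at an existing binary rather than building
+# it from source (the path below is illustrative):
+#   using auto-index : /usr/local/bin/auto_index ;
+#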
+rule init (
+ command ? # path to the AutoIndex executable.
+ )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ .command = $(command) ;
+ }
+}
+
+toolset.flags auto-index.auto-index AI-COMMAND <auto-index-binary> ;
+toolset.flags auto-index.auto-index AI-DEPENDENCIES <auto-index-binary-dependencies> ;
+
+generators.register [ class.new auto-index-generator auto-index.auto-index : DOCBOOK : DOCBOOK(%.auto_index) : <auto-index>on ] ;
+generators.override auto-index.auto-index : boostbook.boostbook-to-docbook ;
+
+rule auto-index ( target : source : properties * )
+{
+ # Signal dependency of auto-index sources on <auto-index-binary-dependencies>
+ # upon invocation of auto-index-to-boostbook.
+ #ECHO "AI-COMMAND= " $(AI-COMMAND) ;
+ DEPENDS $(target) : [ on $(target) return $(AI-DEPENDENCIES) ] ;
+ #DEPENDS $(target) : [ on $(target) return $(SCRIPT) ] ;
+}
+
+actions auto-index
+{
+ $(AI-COMMAND) $(FLAGS) "--prefix="$(PREFIX) "--script="$(SCRIPT) "--index-type="$(INDEX_TYPE) "--in="$(>) "--out="$(<)
+}
+
+
diff --git a/src/boost/tools/build/src/tools/bison.jam b/src/boost/tools/build/src/tools/bison.jam
new file mode 100644
index 000000000..96b86a327
--- /dev/null
+++ b/src/boost/tools/build/src/tools/bison.jam
@@ -0,0 +1,26 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import generators ;
+import feature ;
+import toolset : flags ;
+import type ;
+
+feature.feature bison.prefix : : free ;
+type.register Y : y ;
+type.register YY : yy ;
+generators.register-standard bison.bison : Y : C H ;
+generators.register-standard bison.bison : YY : CPP HPP ;
+
+rule init ( )
+{
+}
+
+flags bison.bison PREFIX <bison.prefix> ;
+_ = " " ;
+
+actions bison
+{
+ bison -p$(_)$(PREFIX) -d -o $(<[1]) $(>)
+}
diff --git a/src/boost/tools/build/src/tools/boostbook-config.jam b/src/boost/tools/build/src/tools/boostbook-config.jam
new file mode 100644
index 000000000..6e3f3ddc1
--- /dev/null
+++ b/src/boost/tools/build/src/tools/boostbook-config.jam
@@ -0,0 +1,13 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for BoostBook tools. To use, just import this module.
+#
+# This module is deprecated.
+# using boostbook ;
+# with no arguments now suffices.
+
+import toolset : using ;
+
+using boostbook ;
diff --git a/src/boost/tools/build/src/tools/boostbook.jam b/src/boost/tools/build/src/tools/boostbook.jam
new file mode 100644
index 000000000..e78a7d951
--- /dev/null
+++ b/src/boost/tools/build/src/tools/boostbook.jam
@@ -0,0 +1,740 @@
+# Copyright 2003, 2004, 2005 Dave Abrahams
+# Copyright 2003, 2004, 2005 Douglas Gregor
+# Copyright 2005, 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of documentation from BoostBook
+# sources.
+#
+# The type of output is controlled by the <format> feature which can have the
+# following values:
+# * html: Generates html documentation. This is the default.
+# * xhtml: Generates xhtml documentation.
+# * htmlhelp: Generates html help output.
+# * onehtml: Generates a single html page.
+# * man: Generates man pages.
+# * pdf: Generates pdf documentation.
+# * ps: Generates postscript output.
+# * docbook: Generates docbook XML.
+# * fo: Generates XSL formatting objects.
+# * tests: Extracts test cases from the boostbook XML.
+#
+# <format> is an implicit feature, so for example, typing pdf on the command
+# line is a short-cut for format=pdf.
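+#
+# For example, a project could request HTML output from a BoostBook source
+# with something like the following (target and file names are illustrative):
+#
+#   using boostbook ;
+#   boostbook mydoc : mylib.xml : <format>html ;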
+
+import build-system ;
+import "class" : new ;
+import common ;
+import feature ;
+import generators ;
+import make ;
+import modules ;
+import os ;
+import param ;
+import path ;
+import print ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import scanner ;
+import sequence ;
+import targets ;
+import type ;
+import virtual-target ;
+import xsltproc ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project boostbook ;
+
+.debug-configuration = [ MATCH ^(--debug-configuration)$ : [ modules.peek : ARGV
+ ] ] ;
+
+feature.feature format
+ : html xhtml htmlhelp onehtml man pdf ps docbook fo tests none
+ : implicit composite propagated ;
+
+type.register DTDXML : dtdxml ;
+type.register BOOSTBOOK : boostbook : XML ;
+type.register FO : fo : XML ;
+type.register PS : ps ;
+type.register XSLT : xsl xslt : XML ;
+type.register HTMLDIR ;
+type.register XHTMLDIR ;
+type.register HTMLHELP ;
+type.register MANPAGES ;
+type.register TESTS : tests ;
+
+
+# Initialize BoostBook support.
+#
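+# For example, to point BoostBook at locally installed DocBook tools (the
+# paths below are illustrative):
+#   using boostbook : /usr/share/xml/docbook/stylesheet/nwalsh
+#                   : /usr/share/xml/docbook/schema/dtd/4.2 ;
+#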
+rule init (
+ docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not provided,
+ # we use DOCBOOK_XSL_DIR from the environment (if
+ # available) or look in standard locations. Otherwise,
+ # we let the XML processor load the stylesheets
+ # remotely.
+
+ : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
+                       # DOCBOOK_DTD_DIR from the environment (if available) or
+ # look in standard locations. Otherwise, we let the XML
+ # processor load the DTD remotely.
+
+ : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
+)
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ check-boostbook-dir $(boostbook-dir) ;
+ find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
+
+ # Register generators only if we were called via "using boostbook ;"
+ local reg-gen = generators.register-xslt ;
+ $(reg-gen) boostbook.dtdxml-to-boostbook : DTDXML : XML ;
+ $(reg-gen) boostbook.boostbook-to-docbook : XML : DOCBOOK ;
+ $(reg-gen) boostbook.boostbook-to-tests : XML : TESTS ;
+ $(reg-gen) boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
+ $(reg-gen) boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
+ $(reg-gen) boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
+ $(reg-gen) boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
+ $(reg-gen) boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
+ $(reg-gen) boostbook.docbook-to-fo : DOCBOOK : FO ;
+
+ # The same about Jamfile main target rules.
+ IMPORT $(__name__) : boostbook : : boostbook ;
+ }
+ else
+ {
+ if $(docbook-xsl-dir)
+ {
+ modify-config ;
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
+ check-docbook-xsl-dir ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ modify-config ;
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
+ check-docbook-dtd-dir ;
+ }
+ if $(boostbook-dir)
+ {
+ modify-config ;
+ check-boostbook-dir $(boostbook-dir) ;
+ local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
+ local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+ }
+ }
+}
+
+
+local rule lock-config ( )
+{
+ if ! $(.initialized)
+ {
+ import errors ;
+ errors.user-error BoostBook has not been configured. ;
+ }
+ if ! $(.config-locked)
+ {
+ .config-locked = true ;
+
+ if $(.error-message)
+ {
+ print-error $(.error-message) ;
+ }
+ }
+}
+
+
+local rule modify-config ( )
+{
+ if $(.config-locked)
+ {
+ import errors ;
+ errors.user-error BoostBook configuration cannot be changed after it has
+ been used. ;
+ }
+}
+
+rule print-error ( location message * )
+{
+ ECHO "error:" at $(location) ;
+ ECHO "error:" $(message) ;
+ EXIT ;
+}
+
+rule make-error ( message * )
+{
+ import errors ;
+ return [ errors.nearest-user-location ] $(message) ;
+}
+
+
+rule find-boost-in-registry ( keys * )
+{
+ local boost-root ;
+ for local R in $(keys)
+ {
+ local installed-boost = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)" : "InstallRoot" ] ;
+ if $(installed-boost)
+ {
+ boost-root += [ path.make $(installed-boost) ] ;
+ }
+ }
+ return $(boost-root) ;
+}
+
+
+rule check-docbook-xsl-dir ( )
+{
+ if $(.docbook-xsl-dir)
+ {
+ if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
+ {
+ .error-message = [ make-error "BoostBook:" could not find docbook XSL stylesheets
+ "in:" [ path.native $(.docbook-xsl-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "notice:" "BoostBook:" found docbook XSL stylesheets "in:" [
+ path.native $(.docbook-xsl-dir) ] ;
+ }
+ }
+}
+
+
+rule check-docbook-dtd-dir ( )
+{
+ if $(.docbook-dtd-dir)
+ {
+ if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
+ {
+ .error-message = [ make-error "BoostBook:" could not find docbook DTD "in:" [
+ path.native $(.docbook-dtd-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "notice:" "BoostBook:" found docbook DTD "in:" [ path.native
+ $(.docbook-dtd-dir) ] ;
+ }
+ }
+}
+
+
+rule check-boostbook-xsl-dir ( )
+{
+ if ! $(.boostbook-xsl-dir)
+ {
+ .error-message = [ make-error "BoostBook:" could not find boostbook XSL "stylesheets." ] ;
+ }
+ else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
+ {
+ .error-message = [ make-error "BoostBook:" could not find docbook XSL stylesheets "in:"
+ [ path.native $(.boostbook-xsl-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "notice:" "BoostBook:" found boostbook XSL stylesheets "in:" [
+ path.native $(.boostbook-xsl-dir) ] ;
+ }
+}
+
+
+rule check-boostbook-dtd-dir ( )
+{
+ if ! $(.boostbook-dtd-dir)
+ {
+ .error-message = [ make-error "BoostBook:" could not find boostbook DTD. ] ;
+ }
+ else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
+ {
+ .error-message = [ make-error "BoostBook:" could not find boostbook DTD "in:" [
+ path.native $(.boostbook-dtd-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "notice:" "BoostBook:" found boostbook DTD "in:" [ path.native
+ $(.boostbook-dtd-dir) ] ;
+ }
+}
+
+
+rule check-boostbook-dir ( boostbook-dir ? )
+{
+ if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
+ {
+ .error-message = [ make-error "BoostBook:" could not find boostbook "in:" [ path.native
+ $(boostbook-dir) ] ] ;
+ }
+}
+
+
+rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
+{
+ docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
+ docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
+ boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
+
+ # Look for the boostbook stylesheets relative to BOOST_ROOT and B2.
+ local boost-build-root = [ path.make [ build-system.location ] ] ;
+ local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ if $(boost-root)
+ {
+ boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ]
+ ;
+ }
+ boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
+
+ # Try to find the tools in platform specific locations.
+ if [ os.name ] = NT
+ {
+ # If installed by the Boost installer.
+ local boost-root = ;
+
+ local boost-installer-versions = snapshot cvs 1.33.0 ;
+ local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
+ local boostpro-installer-versions =
+ 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
+ 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
+
+ local old-installer-root = [ find-boost-in-registry
+ Boost.org\\$(boost-installer-versions) ] ;
+
+ # Make sure that the most recent version is searched for first.
+ boost-root += [ sequence.reverse [ find-boost-in-registry
+ Boost-Consulting.com\\$(boost-consulting-installer-versions)
+ boostpro.com\\$(boostpro-installer-versions) ] ] ;
+
+ # Plausible locations.
+ local root = [ PWD ] ;
+ while $(root) != $(root:D) { root = $(root:D) ; }
+ root = [ path.make $(root) ] ;
+ local search-dirs ;
+ local docbook-search-dirs ;
+ for local p in $(boost-root)
+ {
+ search-dirs += [ path.join $(p) tools ] ;
+ }
+ for local p in $(old-installer-root)
+ {
+ search-dirs += [ path.join $(p) share ] ;
+ docbook-search-dirs += [ path.join $(p) share ] ;
+ }
+ search-dirs += [ path.join $(root) Boost tools ] ;
+ search-dirs += [ path.join $(root) Boost share ] ;
+ docbook-search-dirs += [ path.join $(root) Boost share ] ;
+
+ docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
+ docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
+ boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
+ }
+ else
+ {
+ # Plausible locations.
+
+ local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
+ local dtd-versions = 4.2 ;
+
+ docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
+ docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ]
+ ;
+ docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
+
+ docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook :
+ xml-dtd-$(dtd-versions)* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ]
+ ;
+
+ boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
+
+ # Ubuntu Linux.
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet :
+ nwalsh ] ;
+ docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd :
+ $(dtd-versions) ] ;
+
+ # SUSE.
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet/nwalsh
+ : current ] ;
+ }
+
+ if $(docbook-xsl-dir)
+ {
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
+ }
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" "Boost.Book:" searching XSL/DTD "in" ;
+ ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ;
+ }
+ local boostbook-xsl-dir ;
+ for local dir in $(boostbook-dir)
+ {
+ boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
+ }
+ local boostbook-dtd-dir ;
+ for local dir in $(boostbook-dir)
+ {
+ boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
+ }
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+
+ check-docbook-xsl-dir ;
+ check-docbook-dtd-dir ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+}
+
+
+rule xsl-dir
+{
+ lock-config ;
+ return $(.boostbook-xsl-dir) ;
+}
+
+
+rule dtd-dir
+{
+ lock-config ;
+ return $(.boostbook-dtd-dir) ;
+}
+
+
+rule docbook-xsl-dir
+{
+ lock-config ;
+ return $(.docbook-xsl-dir) ;
+}
+
+
+rule docbook-dtd-dir
+{
+ lock-config ;
+ return $(.docbook-dtd-dir) ;
+}
+
+
+rule dtdxml-to-boostbook ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule boostbook-to-docbook ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule docbook-to-onehtml ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule docbook-to-htmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html
+ ;
+}
+
+
+rule docbook-to-xhtmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) :
+ xhtml ;
+}
+
+
+rule docbook-to-htmlhelp ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) :
+ htmlhelp ;
+}
+
+
+rule docbook-to-manpages ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man
+ ;
+}
+
+
+rule docbook-to-fo ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule format-catalog-path ( path )
+{
+ local result = $(path) ;
+ if [ xsltproc.is-cygwin ]
+ {
+ if [ os.name ] = NT
+ {
+ drive = [ MATCH "^/(.):(.*)$" : $(path) ] ;
+ result = /cygdrive/$(drive[1])$(drive[2]) ;
+ }
+ }
+ else
+ {
+ if [ os.name ] = CYGWIN
+ {
+ local native-path = [ path.native $(path) ] ;
+ result = [ path.make $(native-path:W) ] ;
+ }
+ }
+ return [ regex.replace $(result) " " "%20" ] ;
+}
+
+
+rule generate-xml-catalog ( target : sources * : properties * )
+{
+ print.output $(target) ;
+
+ # BoostBook DTD catalog entry.
+ local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
+ if $(boostbook-dtd-dir)
+ {
+ boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
+ }
+
+ print.text
+ "<?xml version=\"1.0\"?>"
+ "<!DOCTYPE catalog "
+ " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
+ " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
+ "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
+ " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
+ : true ;
+
+ local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
+ if ! $(docbook-xsl-dir)
+ {
+ ECHO "BoostBook warning: no DocBook XSL directory specified." ;
+ ECHO " If you have the DocBook XSL stylesheets installed, please " ;
+ ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
+ ECHO " are available here: http://docbook.sourceforge.net/ " ;
+ ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
+ }
+
+ local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
+ if ! $(docbook-dtd-dir)
+ {
+ ECHO "BoostBook warning: no DocBook DTD directory specified." ;
+ ECHO " If you have the DocBook DTD installed, please set " ;
+ ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
+ ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
+ ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
+ }
+
+ print.text "</catalog>" ;
+}
+
+
+# Returns information about the global XML catalog target, creating it lazily
+# if needed. To get the global catalog generated only once, we do not create it
+# in every project that requests it; instead, we create it based on the first
+# project requesting it and then reuse it from there for any later requests.
+#
+# To get 'as close as possible' to having the global catalog stored in the same
+# location independent of which folder our build was run from, we assign its
+# target to the given project's base Jamroot project. This works correctly as
+# long as we know the passed project is not standalone or one of Boost Build's
+# configuration module projects, as those do not have a Jamroot project in
+# their parent chain. Note also that we can still get our targets generated in
+# different folders in case one build project references a target from
+# another build project with its own separate Jamroot.
+#
+# FIXME: Ideally the catalog target should be created as part of the boostbook
+# project and stored in some central location for all standalone projects that
+# use it, shared between all builds made on that system. This, however, would
+# require much more thought to add the necessary changes to Boost Build's
+# internal design.
+#
+local rule xml-catalog ( project )
+{
+ if ! $(.xml-catalog)
+ {
+ local project-module = [ $(project).project-module ] ;
+ local root-module = [ project.get-jamroot-module $(project-module) ] ;
+ if ! $(root-module)
+ {
+ import errors ;
+ if [ project.is-config-module $(project-module) ]
+ {
+ errors.user-error boostbook targets can not be declared in Boost
+ Build's configuration modules. ;
+ }
+ else
+ {
+ errors.user-error boostbook targets can not be declared in
+ standalone projects. : use a Jamfile/Jamroot project
+ instead. ;
+ }
+ }
+ local root-project = [ project.target $(root-module) ] ;
+
+ .xml-catalog = [ virtual-target.register [ new file-target
+ boostbook_catalog : XML : $(root-project) : [ new action :
+ boostbook.generate-xml-catalog ] ] ] ;
+ .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
+ .xml-catalog-file = $(.xml-catalog-file:J=/) ;
+ }
+ return $(.xml-catalog) $(.xml-catalog-file) ;
+}
+
+
+class boostbook-target-class : basic-target
+{
+ import generators ;
+ import property-set ;
+ import virtual-target ;
+ import path ;
+
+ rule construct ( name : sources * : property-set )
+ {
+ # Generate the catalog, but only once.
+ IMPORT boostbook : xml-catalog : $(__name__) : boostbook.xml-catalog ;
+ local global-catalog = [ boostbook.xml-catalog [ project ] ] ;
+ local catalog = $(global-catalog[1]) ;
+ local catalog-file = $(global-catalog[2]) ;
+ local targets ;
+
+ # Add the catalog to the property set.
+ property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
+
+ local type = none ;
+ local manifest ;
+ local format = [ $(property-set).get <format> ] ;
+ switch $(format)
+ {
+ case html : type = HTMLDIR ; manifest = HTML.manifest ;
+ case xhtml : type = XHTMLDIR ; manifest = HTML.manifest ;
+ case htmlhelp : type = HTMLHELP ; manifest = HTML.manifest ;
+ case onehtml : type = HTML ;
+ case man : type = MANPAGES ; manifest = man.manifest ;
+ case docbook : type = DOCBOOK ;
+ case fo : type = FO ;
+ case pdf : type = PDF ;
+ case ps : type = PS ;
+ case tests : type = TESTS ;
+ }
+
+ local target ;
+ if $(manifest)
+ {
+ # Sources --> DOCBOOK.
+ local docbook-target = [ generators.construct [ project ] : DOCBOOK
+ : $(property-set) : $(sources) ] ;
+ docbook-target = $(docbook-target[2]) ;
+ $(docbook-target).depends $(catalog) ;
+
+ # DOCBOOK --> type.
+ target = [ generators.construct [ project ] $(name)_$(manifest) :
+ $(type) : [ $(property-set).add-raw
+ <xsl:param>manifest=$(name)_$(manifest) ] : $(docbook-target) ]
+ ;
+ target = $(target[2]) ;
+ local name = [ $(property-set).get <name> ] ;
+ name ?= $(format) ;
+ if ! [ path.is-rooted $(name) ]
+ {
+ local p = [ project ] ;
+ name = [ path.join [ $(p).location ] $(name) ] ;
+ }
+ $(target).set-path $(name) ;
+ }
+ else
+ {
+ # Sources --> type.
+ target = [ generators.construct [ project ] : $(type) :
+ $(property-set) : $(sources) ] ;
+ target = $(target[2]) ;
+ if ! $(target)
+ {
+ import errors ;
+ errors.error Cannot build documentation type '$(format)'. ;
+ }
+ }
+ $(target).depends $(catalog) ;
+
+ return [ property-set.empty ] $(target) ;
+ }
+}
+
+
+# Declare a boostbook target.
+#
+rule boostbook ( target-name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+ return [ targets.create-metatarget boostbook-target-class :
+ [ project.current ] : $(target-name) : $(sources) : $(requirements) :
+ $(default-build) : $(usage-requirements) ] ;
+}
+
+
+rule boostbook-to-tests ( target : source : properties * )
+{
+ lock-config ;
+ local boost_root = [ modules.peek : BOOST_ROOT ] ;
+ local native-path = [ path.native [ path.join $(.boostbook-xsl-dir) testing
+ Jamfile ] ] ;
+ local stylesheet = $(native-path:S=.xsl) ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties)
+ <xsl:param>boost.root=$(boost_root) ;
+}
diff --git a/src/boost/tools/build/src/tools/borland.jam b/src/boost/tools/build/src/tools/borland.jam
new file mode 100644
index 000000000..6ad52e0de
--- /dev/null
+++ b/src/boost/tools/build/src/tools/borland.jam
@@ -0,0 +1,270 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2003 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.borland]]
+= Borland C++ Compiler
+
+The `borland` module supports the command-line C++ compiler included in the
+http://www.borland.com/us/products/cbuilder/index.html[C++ Builder 2006]
+product, and earlier versions of it, running on Microsoft Windows.
+
+The supported products are listed below. The version reported by the
+command line tools is also listed for reference:
+
+* C++ Builder 2006 -- 5.8.2
+* CBuilderX -- 5.6.5, 5.6.4 (depending on release)
+* CBuilder6 -- 5.6.4
+* Free command line tools -- 5.5.1
+
+The module is initialized using the following syntax:
+
+----
+using borland : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, B2 will search for a binary
+named `bcc32` in PATH.
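+
+For example, one possible configuration (the version and path given here are
+illustrative) would be:
+
+----
+using borland : 5.5.1 : "C:/Borland/BCC55/Bin/bcc32.exe" ;
+----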
+
+The following options can be provided, using
+`<option-name>option-value` syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
+|# # end::doc[]
+
+# Support for the Borland's command line compiler
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature : get-values ;
+import type ;
+import common ;
+
+feature.extend toolset : borland ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters borland :
+ version $(version) ] ;
+
+ local command = [ common.get-invocation-command borland : bcc32.exe
+ : $(command) ] ;
+
+ common.handle-options borland : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
+ flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
+ flags borland.link RUN_PATH $(condition) : $(root)/bin ;
+ flags borland .root $(condition) : $(root)/bin/ ;
+}
+
+
+# A borland-specific target type
+type.register BORLAND.TDS : tds ;
+
+# Declare generators
+
+generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
+generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
+
+generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
+generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
+generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
+generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
+
+# Declare flags
+
+flags borland.compile OPTIONS <debug-symbols>on : -v ;
+flags borland.link OPTIONS <debug-symbols>on : -v ;
+
+flags borland.compile OPTIONS <optimization>off : -Od ;
+flags borland.compile OPTIONS <optimization>speed : -O2 ;
+flags borland.compile OPTIONS <optimization>space : -O1 ;
+
+if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
+{
+ flags borland CFLAGS <inlining>off : -vi- ;
+ flags borland CFLAGS <inlining>on : -vi -w-inl ;
+ flags borland CFLAGS <inlining>full : -vi -w-inl ;
+}
+else
+{
+ flags borland CFLAGS : -vi- ;
+}
+
+flags borland.compile OPTIONS <warnings>off : -w- ;
+flags borland.compile OPTIONS <warnings>all : -w ;
+flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
+
+
+# Deal with various runtime configs...
+
+# This should be not for DLL
+flags borland OPTIONS <user-interface>console : -tWC ;
+
+# -tWR sets -tW as well, so we turn it off here and then turn it
+# on again later if we need it:
+flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
+flags borland OPTIONS <user-interface>gui : -tW ;
+
+flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
+# Hmm.. not sure what's going on here.
+flags borland OPTIONS : -WM- ;
+flags borland OPTIONS <threading>multi : -tWM ;
+
+
+
+flags borland.compile OPTIONS <cflags> ;
+flags borland.compile.c++ OPTIONS <cxxflags> ;
+flags borland.compile DEFINES <define> ;
+flags borland.compile INCLUDES <include> ;
+
+flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
+
+#
+# for C++ compiles the following options are turned on by default:
+#
+# -j5 stops after 5 errors
+# -g255 allow an unlimited number of warnings
+# -q no banner
+# -c compile to object
+# -P C++ code regardless of file extension
+# -a8 8 byte alignment, this option is on in the IDE by default
+# and affects binary compatibility.
+#
+
+# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+# For C, we don't pass -P flag
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+
+# Declare flags and action for linking
+toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
+toolset.flags borland.link LIBRARY_PATH <library-path> ;
+toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
+toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
+toolset.flags borland.link LIBRARIES <library-file> ;
+
+flags borland.link OPTIONS <linkflags> ;
+flags borland.link OPTIONS <link>shared : -tWD ;
+
+flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
+flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
+
+
+
+# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
+# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
+# $(BCC_TOOL_PATH) to the path
+# The NEED_IMPLIB variable controls whether we need to invoke implib.
+
+flags borland.archive AROPTIONS <archiveflags> ;
+
+# Declare action for archives. We do not use a response file since it is hard
+# to get "+-" there.
+# The /P256 option increases the 'page' size -- with values that are too low,
+# tlib fails when building large applications.
+# CONSIDER: don't know what 'together' is for...
+actions updated together piecemeal archive
+{
+ $(.set-path)$(.root:W)$(.old-path)
+ tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
+}
+
+
+if [ os.name ] = CYGWIN
+{
+ .set-path = "cmd /S /C set \"PATH=" ;
+ .old-path = ";%PATH%\" \"&&\"" ;
+
+
+    # Couldn't get TLIB to stop being confused about pathnames containing
+    # dashes (it seemed to treat them as option separators when passed through
+    # from bash), so we explicitly write the "+-" prefixed input list into a
+    # response file and pass that to tlib. TLIB is also finicky about pathname
+    # style! Forward slashes, too, are treated as options.
+ actions updated together piecemeal archive
+ {
+ chdir $(<:D)
+ echo +-$(>:BS) > $(<:BS).rsp
+ $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
+ }
+}
+else if [ os.name ] = NT
+{
+ .set-path = "set \"PATH=" ;
+ .old-path = ";%PATH%\"
+ " ;
+}
+else
+{
+ .set-path = "PATH=\"" ;
+ .old-path = "\":$PATH
+ export PATH
+ " ;
+}
+
+RM = [ common.rm-command ] ;
+
+nl = "
+" ;
+
+actions link
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+
+actions link.dll bind LIBRARIES RSP
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
+}
+
+# It seems impossible to specify an output file with a directory when compiling
+# asm files using bcc32, so we use tasm32 directly.
+# /ml makes all symbol names case-sensitive.
+actions asm
+{
+ $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
+}
+
diff --git a/src/boost/tools/build/src/tools/builtin.jam b/src/boost/tools/build/src/tools/builtin.jam
new file mode 100644
index 000000000..1c1614e2c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/builtin.jam
@@ -0,0 +1,96 @@
+# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
+# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
+# Copyright 2006 Juergen Hunold
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines standard features and rules.
+
+import alias ;
+import "class" : new ;
+import errors ;
+import feature ;
+import features/__init_features__ ;
+import generators ;
+import numbers ;
+import os ;
+import path ;
+import print ;
+import project ;
+import property ;
+import regex ;
+import scanner ;
+import sequence ;
+import stage ;
+import symlink ;
+import toolset ;
+import type ;
+import targets ;
+import types/register ;
+import utility ;
+import virtual-target ;
+import message ;
+import convert ;
+
+# Generators need the target types registered first. So this import needs
+# to be after that.
+import generators/__init_generators__ ;
+
+# FIXME: the following generate module import is not needed here but removing it
+# too hastily will break using code (e.g. the main Boost library Jamroot file)
+# that forgot to import the generate module before calling the generate rule.
+import generate ;
+
+
+variant debug : <optimization>off <debug-symbols>on <inlining>off
+ <runtime-debugging>on ;
+variant release : <optimization>speed <debug-symbols>off <inlining>full
+ <runtime-debugging>off <define>NDEBUG ;
+variant profile : release : <profiling>on <debug-symbols>on ;
+
+
+class preprocessed-target-class : basic-target
+{
+ import generators ;
+ rule construct ( name : sources * : property-set )
+ {
+ local result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
+ if ! $(result)
+ {
+ result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
+ }
+ if ! $(result)
+ {
+ local s ;
+ for x in $(sources)
+ {
+ s += [ $(x).name ] ;
+ }
+ local p = [ project ] ;
+ errors.user-error
+ "In project" [ $(p).name ] :
+ "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
+ }
+ return $(result) ;
+ }
+}
+
+rule preprocessed ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+ return [ targets.main-target-alternative
+ [ new preprocessed-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
+
+IMPORT $(__name__) : preprocessed : : preprocessed ;
diff --git a/src/boost/tools/build/src/tools/builtin.py b/src/boost/tools/build/src/tools/builtin.py
new file mode 100644
index 000000000..6bd2a8720
--- /dev/null
+++ b/src/boost/tools/build/src/tools/builtin.py
@@ -0,0 +1,805 @@
+# Status: minor updates by Steven Watanabe to make gcc work
+#
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+""" Defines standard features and rules.
+"""
+
+import b2.build.targets as targets
+
+import sys
+from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
+from b2.util.utility import *
+from b2.util import path, regex, bjam_signature, is_iterable_typed
+import b2.tools.types
+from b2.manager import get_manager
+
+
+# Records explicit properties for a variant.
+# The key is the variant name.
+__variant_explicit_properties = {}
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __variant_explicit_properties
+
+ __variant_explicit_properties = {}
+
+@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"]))
+def variant (name, parents_or_properties, explicit_properties = []):
+ """ Declares a new variant.
+ First determines explicit properties for this variant, by
+ refining parents' explicit properties with the passed explicit
+ properties. The result is remembered and will be used if
+        this variant is used as a parent.
+
+ Second, determines the full property set for this variant by
+ adding to the explicit properties default values for all properties
+        which are neither present nor symmetric.
+
+        Lastly, makes the appropriate value of the 'variant' property expand
+        to the full property set.
+ name: Name of the variant
+ parents_or_properties: Specifies parent variants, if
+ 'explicit_properties' are given,
+ and explicit_properties otherwise.
+ explicit_properties: Explicit properties.
+ """
+ parents = []
+ if not explicit_properties:
+ explicit_properties = parents_or_properties
+ else:
+ parents = parents_or_properties
+
+ inherited = property_set.empty()
+ if parents:
+
+        # If we allowed multiple parents, we would have to check for conflicts
+        # between base variants, and there has been no demand, so we do not bother.
+ if len (parents) > 1:
+ raise BaseException ("Multiple base variants are not yet supported")
+
+ p = parents[0]
+ # TODO: the check may be stricter
+ if not feature.is_implicit_value (p):
+ raise BaseException ("Invalid base variant '%s'" % p)
+
+ inherited = __variant_explicit_properties[p]
+
+ explicit_properties = property_set.create_with_validation(explicit_properties)
+ explicit_properties = inherited.refine(explicit_properties)
+
+ # Record explicitly specified properties for this variant
+ # We do this after inheriting parents' properties, so that
+ # they affect other variants, derived from this one.
+ __variant_explicit_properties[name] = explicit_properties
+
+ feature.extend('variant', [name])
+ feature.compose ("<variant>" + name, explicit_properties.all())
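+
+# For illustration (the variant name and property below are purely
+# illustrative), a Jamfile-level declaration such as
+#   variant crazy : release : <define>FOO ;
+# corresponds to calling this function with name='crazy',
+# parents_or_properties=['release'] and explicit_properties=['<define>FOO'].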
+
+__os_names = """
+ amiga aix appletv bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
+ openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
+ vms windows
+""".split()
+
+# Translates from the current bjam OS name to the os tags used in host-os and
+# target-os, i.e. returns the running host-os.
+#
+def default_host_os():
+ host_os = os_name()
+ if host_os not in (x.upper() for x in __os_names):
+ if host_os == 'NT': host_os = 'windows'
+ elif host_os == 'AS400': host_os = 'unix'
+ elif host_os == 'MINGW': host_os = 'windows'
+ elif host_os == 'BSDI': host_os = 'bsd'
+ elif host_os == 'COHERENT': host_os = 'unix'
+ elif host_os == 'DRAGONFLYBSD': host_os = 'bsd'
+ elif host_os == 'IRIX': host_os = 'sgi'
+ elif host_os == 'MACOSX': host_os = 'darwin'
+ elif host_os == 'KFREEBSD': host_os = 'freebsd'
+ elif host_os == 'LINUX': host_os = 'linux'
+ elif host_os == 'HAIKU': host_os = 'haiku'
+ else: host_os = 'unix'
+ return host_os.lower()
+
+def register_globals ():
+ """ Registers all features and variants declared by this module.
+ """
+
+ # This feature is used to determine which OS we're on.
+ # In future, this may become <target-os> and <host-os>
+ # TODO: check this. Compatibility with bjam names? Subfeature for version?
+ os = sys.platform
+ feature.feature ('os', [os], ['propagated', 'link-incompatible'])
+
+
+ # The two OS features define a known set of abstract OS names. The host-os is
+ # the OS under which bjam is running. Even though this should really be a fixed
+ # property we need to list all the values to prevent unknown value errors. Both
+ # set the default value to the current OS to account for the default use case of
+ # building on the target OS.
+ feature.feature('host-os', __os_names)
+ feature.set_default('host-os', default_host_os())
+
+ feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
+ feature.set_default('target-os', default_host_os())
+
+ feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
+
+ feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
+
+ feature.feature ('link', ['shared', 'static'], ['propagated'])
+ feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
+ feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
+
+
+ feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
+ feature.feature ('profiling', ['off', 'on'], ['propagated'])
+ feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])
+
+ feature.feature ('threading', ['single', 'multi'], ['propagated'])
+ feature.feature ('rtti', ['on', 'off'], ['propagated'])
+ feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
+
+ # Whether there is support for asynchronous EH (e.g. catching SEGVs).
+ feature.feature ('asynch-exceptions', ['off', 'on'], ['propagated'])
+
+ # Whether all extern "C" functions are considered nothrow by default.
+ feature.feature ('extern-c-nothrow', ['off', 'on'], ['propagated'])
+
+ feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
+ feature.feature ('define', [], ['free'])
+ feature.feature ('undef', [], ['free'])
+ feature.feature ('include', [], ['free', 'path']) #order-sensitive
+ feature.feature ('cflags', [], ['free'])
+ feature.feature ('cxxflags', [], ['free'])
+ feature.feature ('asmflags', [], ['free'])
+ feature.feature ('linkflags', [], ['free'])
+ feature.feature ('archiveflags', [], ['free'])
+ feature.feature ('version', [], ['free'])
+
+ feature.feature ('location-prefix', [], ['free'])
+
+ feature.feature ('action', [], ['free'])
+
+
+ # The following features are incidental, since in themselves they have no
+ # effect on build products. Not making them incidental would cause problems
+ # in corner cases, for example:
+ #
+ #    unit-test a : a.cpp : <use>b ;
+ #    lib b : a.cpp b ;
+ #
+ # Here, if <use> is not incidental, we would decide we have two targets for
+ # a.obj with different properties and would complain.
+ #
+ # Note that making a feature incidental does not mean it is ignored. It may
+ # be ignored when creating the virtual target, but the rest of the build
+ # process will still use it.
+ feature.feature ('use', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])
+
+ feature.feature('warnings', [
+ 'on', # Enable default/"reasonable" warning level for the tool.
+ 'all', # Enable all possible warnings issued by the tool.
+ 'off'], # Disable all warnings issued by the tool.
+ ['incidental', 'propagated'])
+
+ feature.feature('warnings-as-errors', [
+ 'off', # Do not fail the compilation if there are warnings.
+ 'on'], # Fail the compilation if there are warnings.
+ ['incidental', 'propagated'])
+
+ feature.feature('coverage', [
+ 'off', # Disable coverage generation for the tool.
+ 'on'], # Enable coverage generation for the tool.
+ ['incidental', 'propagated'])
+
+ feature.feature('c++-template-depth',
+ [str(i) for i in range(64,1024+1,64)] +
+ [str(i) for i in range(20,1000+1,10)] +
+ # Maximum template instantiation depth guaranteed for ANSI/ISO C++
+ # conforming programs.
+ ['17'],
+ ['incidental', 'optional', 'propagated'])
+
+ feature.feature ('source', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('library', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('file', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('find-shared-library', [], ['free']) #order-sensitive ;
+ feature.feature ('find-static-library', [], ['free']) #order-sensitive ;
+ feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
+ # Internal feature.
+ feature.feature ('library-file', [], ['free', 'dependency'])
+
+ feature.feature ('name', [], ['free'])
+ feature.feature ('tag', [], ['free'])
+ feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
+ feature.feature ('location', [], ['free', 'path'])
+
+ feature.feature ('dll-path', [], ['free', 'path'])
+ feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])
+
+
+ # This is an internal feature which holds the paths of all dependency
+ # dynamic libraries. On Windows, it is needed so that we can add all
+ # those paths to PATH when running applications.
+ # On Linux, it is needed to add proper -rpath-link command line options.
+ feature.feature ('xdll-path', [], ['free', 'path'])
+
+ # Provides a means to specify a .def file for Windows DLLs.
+ feature.feature ('def-file', [], ['free', 'dependency'])
+
+ # This feature is used to allow specific generators to run.
+ # For example, Qt tools can only be invoked when the Qt library
+ # is used. In that case, <allow>qt will be in the usage requirements
+ # of the library.
+ feature.feature ('allow', [], ['free'])
+
+ # The addressing model to generate code for. Currently a limited set only
+ # specifying the bit size of pointers.
+ feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
+
+ # Type of CPU architecture to compile for.
+ feature.feature('architecture', [
+ # x86 and x86-64
+ 'x86',
+
+ # ia64
+ 'ia64',
+
+ # Sparc
+ 'sparc',
+
+ # RS/6000 & PowerPC
+ 'power',
+
+ # MIPS/SGI
+ 'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',
+
+ # HP/PA-RISC
+ 'parisc',
+
+ # Advanced RISC Machines
+ 'arm',
+
+ # z Systems (aka s390x)
+ 's390x',
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ 'combined',
+ 'combined-x86-power'],
+
+ ['propagated', 'optional'])
+
+ # The specific instruction set in an architecture to compile.
+ feature.feature('instruction-set', [
+ # x86 and x86-64
+ 'native', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
+ 'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'core2', 'corei7', 'corei7-avx', 'core-avx-i',
+ 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'merom', 'merom-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
+ 'yorksfield', 'nehalem', 'sandy-bridge', 'ivy-bridge', 'haswell', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
+ 'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'k8-sse3', 'opteron-sse3', 'athlon64-sse3', 'amdfam10', 'barcelona',
+ 'bdver1', 'bdver2', 'bdver3', 'btver1', 'btver2', 'winchip-c6', 'winchip2', 'c3', 'c3-2', 'atom',
+
+ # ia64
+ 'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',
+
+ # Sparc
+ 'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
+ 'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',
+
+ # RS/6000 & PowerPC
+ '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
+ '603', '603e', '604', '604e', '620', '630', '740', '7400',
+ '7450', '750', '801', '821', '823', '860', '970', '8540',
+ 'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
+ 'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
+ 'rios1', 'rsc', 'rios2', 'rs64a',
+
+ # MIPS
+ '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
+ 'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
+ 'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
+ 'vr4111', 'vr4120', 'vr4130', 'vr4300',
+ 'vr5000', 'vr5400', 'vr5500',
+
+ # HP/PA-RISC
+ '700', '7100', '7100lc', '7200', '7300', '8000',
+
+ # Advanced RISC Machines
+ 'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
+ 'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312',
+
+ # z Systems (aka s390x)
+ 'z196', 'zEC12', 'z13', 'z13', 'z14', 'z15'],
+
+ ['propagated', 'optional'])
+
+ feature.feature('conditional', [], ['incidental', 'free'])
+
+ # The value of 'no' prevents building of a target.
+ feature.feature('build', ['yes', 'no'], ['optional'])
+
+ # Windows-specific features
+ feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
+ feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
+
+
+ variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
+ variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
+ '<runtime-debugging>off', '<define>NDEBUG'])
+ variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
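+
+ # For illustration (hypothetical name): the same rule can declare a variant
+ # derived from an existing one, inheriting its composed properties, e.g.
+ #
+ #     variant('release-symbols', ['release'], ['<debug-symbols>on'])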
+
+
+reset ()
+register_globals ()
+
+class SearchedLibTarget (virtual_target.AbstractFileTarget):
+ def __init__ (self, name, project, shared, search, action):
+ virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
+
+ self.shared_ = shared
+ self.search_ = search
+
+ def shared (self):
+ return self.shared_
+
+ def search (self):
+ return self.search_
+
+ def actualize_location (self, target):
+ bjam.call("NOTFILE", target)
+
+ def path (self):
+ #FIXME: several functions rely on this not being None
+ return ""
+
+
+class CScanner (scanner.Scanner):
+ def __init__ (self, includes):
+ scanner.Scanner.__init__ (self)
+
+ self.includes_ = []
+
+ for i in includes:
+ self.includes_.extend(i.split("&&"))
+
+ def pattern (self):
+ return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
+
+ def process (self, target, matches, binding):
+ # This function may be called thousands to millions of times
+ # (depending on how many header files there are), so a few
+ # optimizations are used here. Anything that is slightly out of
+ # the ordinary for Python code is commented.
+ angle = []
+ quoted = []
+ for match in matches:
+ if '<' in match:
+ angle.append(match.strip('<>'))
+ elif '"' in match:
+ quoted.append(match.strip('"'))
+
+ g = id(self)
+ b = os.path.normpath(os.path.dirname(binding[0]))
+
+ # Attach the binding of the including file to the included targets.
+ # When a target is created directly from a virtual target this extra
+ # information is unnecessary. But in other cases it allows us to
+ # distinguish between two headers of the same name included from
+ # different places.
+ # We do not need this extra information for angle includes, since they
+ # should not depend on the including file (we cannot get a literal "."
+ # in the include path).
+ # Note: string interpolation is slightly faster than .format()
+ g2 = '<%s#%s>' % (g, b)
+ g = "<%s>" % g
+
+ angle = [g + x for x in angle]
+ quoted = [g2 + x for x in quoted]
+
+ all = angle + quoted
+ bjam.call("mark-included", target, all)
+
+ # each include in self.includes_ looks something like this:
+ # <include>path/to/somewhere
+ # calling get_value(include) is super slow,
+ # calling .replace('<include>', '') is much faster
+ # however, i[9:] is the fastest way of stripping off the "<include>"
+ # substring.
+ include_paths = [i[9:] for i in self.includes_]
+
+ engine = get_manager().engine()
+ engine.set_target_variable(angle, "SEARCH", include_paths)
+ engine.set_target_variable(quoted, "SEARCH", [b] + include_paths)
+
+ # Just propagate the current scanner to the includes, in the hope
+ # that the includes do not change scanners.
+ get_manager().scanners().propagate(self, all)
+
+scanner.register (CScanner, 'include')
+type.set_scanner ('CPP', CScanner)
+type.set_scanner ('C', CScanner)
+type.set_scanner('H', CScanner)
+type.set_scanner('HPP', CScanner)
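+
+# For illustration (hypothetical input): given a source line such as
+#   #include "util/config.hpp"
+# the scanner above records a quoted include and sets its SEARCH path to the
+# directory of the including file followed by the <include> paths, while an
+# angle include such as <vector> is searched on the <include> paths only.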
+
+# Ported to trunk@47077
+class LibGenerator (generators.Generator):
+ """ The generator class for libraries (target type LIB). Depending on properties it will
+ request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or
+ SHARED_LIB.
+ """
+
+ def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
+ generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ # The lib generator is composing, and can only be invoked with an
+ # explicit name. This check is present in generator.run (and so in
+ # builtin.LinkingGenerator), but we duplicate it here to avoid doing
+ # extra work.
+ if name:
+ properties = prop_set.raw()
+ # Determine the needed target type
+ actual_type = None
+ properties_grist = get_grist(properties)
+ if '<source>' not in properties_grist and \
+ ('<search>' in properties_grist or '<name>' in properties_grist):
+ actual_type = 'SEARCHED_LIB'
+ elif '<file>' in properties_grist:
+ # A library file is given directly via <file>; keep the generic LIB
+ # type so that the prebuilt generator handles it.
+ actual_type = 'LIB'
+ elif '<link>shared' in properties:
+ actual_type = 'SHARED_LIB'
+ else:
+ actual_type = 'STATIC_LIB'
+
+ prop_set = prop_set.add_raw(['<main-target-type>LIB'])
+
+ # Construct the target.
+ return generators.construct(project, name, actual_type, prop_set, sources)
+
+ def viable_source_types(self):
+ return ['*']
+
+generators.register(LibGenerator("builtin.lib-generator"))
+
+generators.override("builtin.prebuilt", "builtin.lib-generator")
+
+def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+ """The implementation of the 'lib' rule. Beyond standard syntax that rule allows
+ simplified: 'lib a b c ;'."""
+ assert is_iterable_typed(names, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
+ if len(names) > 1:
+ if any(r.startswith('<name>') for r in requirements):
+ get_manager().errors()("When several names are given to the 'lib' rule\n" +
+ "it is not allowed to specify the <name> feature.")
+
+ if sources:
+ get_manager().errors()("When several names are given to the 'lib' rule\n" +
+ "it is not allowed to specify sources.")
+
+ project = get_manager().projects().current()
+ result = []
+
+ for name in names:
+ r = requirements[:]
+
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if not sources and not any(r.startswith("<name>") for r in requirements) \
+ and not any(r.startswith("<file") for r in requirements):
+ r.append("<name>" + name)
+
+ result.append(targets.create_typed_metatarget(name, "LIB", sources,
+ r,
+ default_build,
+ usage_requirements))
+ return result
+
+get_manager().projects().add_rule("lib", lib)
+
+
+# Updated to trunk@47077
+class SearchedLibGenerator (generators.Generator):
+ def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
+ # TODO: the comment below looks strange. There are no requirements!
+ # The requirements cause the generators to be tried *only* when we are building
+ # a lib target and the 'search' feature is present. This seems ugly --- all we
+ # want is to make sure SearchedLibGenerator is not invoked deep in a
+ # transformation search.
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ # If the name is empty, we are not being called from the top level.
+ # In that case we fail immediately, because SearchedLibGenerator
+ # cannot be used to produce intermediate targets.
+ if not name:
+ return None
+
+ properties = prop_set.raw ()
+ shared = '<link>shared' in properties
+
+ a = virtual_target.NullAction (project.manager(), prop_set)
+
+ real_name = feature.get_values ('<name>', properties)
+ if real_name:
+ real_name = real_name[0]
+ else:
+ real_name = name
+ search = feature.get_values('<search>', properties)
+ usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
+ t = SearchedLibTarget(real_name, project, shared, search, a)
+
+ # We return sources for a simple reason. If there's
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to
+ # 'png' will link to 'z', too.
+ return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
+
+generators.register (SearchedLibGenerator ())
+
+class PrebuiltLibGenerator(generators.Generator):
+
+ def __init__(self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, properties, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ f = properties.get("file")
+ return f + sources
+
+generators.register(PrebuiltLibGenerator("builtin.prebuilt", False, [],
+ ["LIB"], ["<file>"]))
+
+generators.override("builtin.prebuilt", "builtin.lib-generator")
+
+
+class CompileAction (virtual_target.Action):
+ def __init__ (self, manager, sources, action_name, prop_set):
+ virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
+
+ def adjust_properties (self, prop_set):
+ """ For all virtual targets for the same dependency graph as self,
+ i.e. which belong to the same main target, add their directories
+ to include path.
+ """
+ assert isinstance(prop_set, property_set.PropertySet)
+ s = self.targets () [0].creating_subvariant ()
+
+ return prop_set.add_raw (s.implicit_includes ('include', 'H'))
+
+class CCompilingGenerator (generators.Generator):
+ """ Declare a special compiler generator.
+ The only thing it does is changing the type used to represent
+ 'action' in the constructed dependency graph to 'CompileAction'.
+ That class in turn adds additional include paths to handle a case
+ when a source file includes headers which are generated themselves.
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def action_class (self):
+ return CompileAction
+
+def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []):
+ g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties)
+ return generators.register (g)
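+
+# For illustration (hypothetical toolset id): a toolset module would typically
+# plug its compile actions into the generator search with a call such as
+#
+#   register_c_compiler('acme.compile.c++', ['CPP'], ['OBJ'], ['<toolset>acme'])
+#
+# which registers a CCompilingGenerator producing OBJ targets from CPP sources.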
+
+
+class LinkingGenerator (generators.Generator):
+ """ The generator class for handling EXE and SHARED_LIB creation.
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run (self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ # create a copy since sources is being modified
+ sources = list(sources)
+ sources.extend(prop_set.get('<library>'))
+
+ # Add <library-path> properties for all searched libraries
+ extra = []
+ for s in sources:
+ if s.type () == 'SEARCHED_LIB':
+ search = s.search()
+ extra.extend(property.Property('<library-path>', sp) for sp in search)
+
+ # It is possible that some libraries in sources did not come from a
+ # 'lib' target, for example libraries that are specified just as
+ # filenames among the sources. We do not have xdll-path properties for
+ # such targets, but still need to add proper dll-path properties.
+ extra_xdll_path = []
+ for s in sources:
+ if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
+ # Unfortunately, we don't have a good way to find the path
+ # to a file, so use this nasty approach.
+ p = s.project()
+ location = path.root(s.name(), p.get('source-location')[0])
+ extra_xdll_path.append(os.path.dirname(location))
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: no need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, Python extensions) cannot
+ # hardcode paths to dependent libraries.
+ if prop_set.get('<hardcode-dll-paths>') == ['true'] \
+ and type.is_derived(self.target_types_ [0], 'EXE'):
+ xdll_path = prop_set.get('<xdll-path>')
+ extra.extend(property.Property('<dll-path>', sp) \
+ for sp in extra_xdll_path)
+ extra.extend(property.Property('<dll-path>', sp) \
+ for sp in xdll_path)
+
+ if extra:
+ prop_set = prop_set.add_raw (extra)
+ result = generators.Generator.run(self, project, name, prop_set, sources)
+
+ if result:
+ ur = self.extra_usage_requirements(result, prop_set)
+ ur = ur.add(property_set.create(['<xdll-path>' + p for p in extra_xdll_path]))
+ else:
+ return None
+ return (ur, result)
+
+ def extra_usage_requirements (self, created_targets, prop_set):
+ assert is_iterable_typed(created_targets, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+
+ result = property_set.empty ()
+ extra = []
+
+ # Add appropriate <xdll-path> usage requirements.
+ raw = prop_set.raw ()
+ if '<link>shared' in raw:
+ paths = []
+
+ # TODO: is it safe to use the current directory? I think we should use
+ # another mechanism to allow this to be run from anywhere.
+ pwd = os.getcwd()
+
+ for t in created_targets:
+ if type.is_derived(t.type(), 'SHARED_LIB'):
+ paths.append(path.root(path.make(t.path()), pwd))
+
+ extra += replace_grist(paths, '<xdll-path>')
+
+ # We need to pass along the <xdll-path> features we got from the sources,
+ # because if a shared library is built, an exe which uses it must know the
+ # paths to the other shared libraries it depends on in order to find them
+ # all at runtime.
+
+ # Just pass all the features in the property set; it is theoretically
+ # possible that we will propagate <xdll-path> features explicitly specified
+ # by the user, but then the user is to blame for using an internal feature.
+ values = prop_set.get('<xdll-path>')
+ extra += replace_grist(values, '<xdll-path>')
+
+ if extra:
+ result = property_set.create(extra)
+
+ return result
+
+ def generated_targets (self, sources, prop_set, project, name):
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring)
+ # sources to pass to inherited rule
+ sources2 = []
+ # sources which are libraries
+ libraries = []
+
+ # Searched libraries are not passed to the linker as arguments
+ # but via an option, so we pass them to the action via a
+ # property.
+ fsa = []
+ fst = []
+ for s in sources:
+ if type.is_derived(s.type(), 'SEARCHED_LIB'):
+ n = s.name()
+ if s.shared():
+ fsa.append(n)
+
+ else:
+ fst.append(n)
+
+ else:
+ sources2.append(s)
+
+ add = []
+ if fsa:
+ add.append("<find-shared-library>" + '&&'.join(fsa))
+ if fst:
+ add.append("<find-static-library>" + '&&'.join(fst))
+
+ spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
+ return spawn
+
+
+def register_linker(id, source_types, target_types, requirements):
+ g = LinkingGenerator(id, True, source_types, target_types, requirements)
+ generators.register(g)
+
+class ArchiveGenerator (generators.Generator):
+ """ The generator class for handling STATIC_LIB creation.
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run (self, project, name, prop_set, sources):
+ assert isinstance(project, targets.ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ # create a copy since this modifies the sources list
+ sources = list(sources)
+ sources.extend(prop_set.get('<library>'))
+
+ result = generators.Generator.run (self, project, name, prop_set, sources)
+
+ usage_requirements = []
+ link = prop_set.get('<link>')
+ if 'static' in link:
+ for t in sources:
+ if type.is_derived(t.type(), 'LIB'):
+ usage_requirements.append(property.Property('<library>', t))
+
+ usage_requirements = property_set.create(usage_requirements)
+
+ return usage_requirements, result
+
+
+def register_archiver(id, source_types, target_types, requirements):
+ g = ArchiveGenerator(id, True, source_types, target_types, requirements)
+ generators.register(g)
+
+class DummyGenerator(generators.Generator):
+ """Generator that accepts everything and produces nothing. Useful as a general
+ fallback for toolset-specific actions like PCH generation.
+ """
+ def run (self, project, name, prop_set, sources):
+ return (property_set.empty(), [])
+
+
+get_manager().projects().add_rule("variant", variant)
+
+import stage
+import symlink
+import message
diff --git a/src/boost/tools/build/src/tools/bzip2.jam b/src/boost/tools/build/src/tools/bzip2.jam
new file mode 100644
index 000000000..a6f8ee460
--- /dev/null
+++ b/src/boost/tools/build/src/tools/bzip2.jam
@@ -0,0 +1,279 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the bzip2 library
+#
+# After 'using bzip2', the following targets are available:
+#
+# /bzip2//bzip2 -- The bzip2 library
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import make ;
+import os ;
+import print ;
+import property ;
+import property-set ;
+
+header = bzlib.h ;
+names = bz2 ;
+
+sources = blocksort.c bzlib.c compress.c crctable.c
+ decompress.c huffman.c randtable.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the bzip library.
+#
+# bzip can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt bzip::
+#
+# <search>
+# The directory containing the bzip binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the bzip headers.
+#
+# If none of these options is specified, then the environment
+# variables BZIP2_LIBRARY_PATH, BZIP2_NAME, and BZIP2_INCLUDE will
+# be used instead.
+#
+# Options for building bzip from source::
+#
+# <source>
+# The bzip2 source directory. Defaults to the environment variable
+# BZIP2_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find bzip in the default system location
+# using bzip2 ;
+# # Build bzip from source
+# using bzip2 : 1.0.6 : <source>/home/sergey/src/bzip2-1.0.6 ;
+# # Find bzip in /usr/local
+# using bzip2 : 1.0.6
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build bzip from source for msvc and find
+# # prebuilt binaries for gcc.
+# using bzip2 : 1.0.6 : <source>C:/Devel/src/bzip2-1.0.6 : <toolset>msvc ;
+# using bzip2 : 1.0.6 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The bzip version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the bzip target
+
+ : is-default ?
+ # Default configurations are only used when bzip
+ # has not yet been configured. This option is
+ # deprecated. A configuration will be treated
+ # as a default when none of <include>, <search>,
+ # <name>, and <source> are present.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project bzip2 ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local source-path = [ feature.get-values <source> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+ local tag = [ feature.get-values <tag> : $(options) ] ;
+ local build-name = [ feature.get-values <build-name> : $(options) ] ;
+
+ if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
+ {
+ is-default = true ;
+ }
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ local no-build-from-source ;
+ # Ignore the BZIP2_SOURCE environment variable if this initialization
+ # requests searching for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for bzip2:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ }
+ else
+ {
+ source-path ?= [ os.environ BZIP2_SOURCE ] ;
+ if $(source-path)
+ {
+ source-path = [ path.root [ path.make $(source-path) ]
+ [ path.pwd ] ] ;
+ }
+ }
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [bzip2] bzip is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "bzip is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path)
+ {
+ build-name ?= bz2 ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag)
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ def-file = [ path.glob $(source-path) : libbz2.def ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [bzip2] Building bzip from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [bzip2] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [bzip2] found bzip source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [bzip2] could not find bzip source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources)
+ {
+ if ! $(.def-file-target)
+ {
+ .def-file-target = [ targets.create-metatarget make-target-class
+ : $(.project) : libbz2.def : $(def-file)
+ : <action>@bzip2.make-bz2-def-file ]
+ ;
+ }
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<def-file>libbz2.def
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library bzip2 : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ }
+ else
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [bzip2] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [bzip2] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library bzip2 : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
+
+if [ os.name ] = NT
+{
+ local rule read-file ( file )
+ {
+ return [ SPLIT_BY_CHARACTERS [ SHELL "type \"$(file:G=)\" 2>nul" ] : "\n" ] ;
+ }
+}
+else if [ os.name ] = VMS
+{
+ local rule read-file ( file )
+ {
+ return [ SPLIT_BY_CHARACTERS [ SHELL "PIPE TYPE $(file:W) 2>NL:" ] : "\n" ] ;
+ }
+}
+else
+{
+ local rule read-file ( file )
+ {
+ return [ SPLIT_BY_CHARACTERS [ SHELL "cat \"$(file:G=)\" 2>/dev/null" ] : "\n" ] ;
+ }
+}
+
+rule make-bz2-def-file ( target : source : properties * )
+{
+ print.output $(target) ;
+ for local line in [ read-file $(source) ]
+ {
+ if ! [ MATCH "(LIBRARY[ \t]+LIBBZ2)" : $(line) ]
+ {
+ print.text $(line) : yes ;
+ }
+ }
+}
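+
+# For illustration (hypothetical project): once bzip2 has been configured with
+# 'using bzip2', other Jamfiles can link against the declared target directly:
+#
+#   exe myapp : myapp.cpp /bzip2//bzip2 ;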
diff --git a/src/boost/tools/build/src/tools/cast.jam b/src/boost/tools/build/src/tools/cast.jam
new file mode 100644
index 000000000..77a7710dd
--- /dev/null
+++ b/src/boost/tools/build/src/tools/cast.jam
@@ -0,0 +1,91 @@
+# Copyright 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target rule 'cast', used to change the type of a target. For
+# example, in a Qt library one wants two kinds of CPP files -- those that are
+# just compiled and those that are passed through the MOC tool.
+#
+# This is done with:
+#
+# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
+#
+# B2 will assign target type CPP to both main.cpp and widget.cpp. Then,
+# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
+# support will run the MOC tool as part of the build process.
+#
+# At the moment, the 'cast' rule only works for non-derived (source) targets.
+#
+# TODO: The following comment is unclear or incorrect. Clean it up.
+# > Another solution would be to add a separate main target 'moc-them' that
+# > would moc all the passed sources, no matter what their type is, but I prefer
+# > cast, as defining a new target type + generator for that type is somewhat
+# > simpler than defining a main target rule.
+
+import "class" : new ;
+import project ;
+import property-set ;
+import targets ;
+import type ;
+
+
+class cast-target-class : typed-target
+{
+ import type ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local result ;
+ for local s in $(source-targets)
+ {
+ if ! [ class.is-a $(s) : file-target ]
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error Source to the 'cast' rule is not a file! ;
+ }
+ if [ $(s).action ]
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error Only non-derived targets are allowed for
+ 'cast'. : when building [ full-name ] ;
+ }
+ local r = [ $(s).clone-with-different-type $(self.type) ] ;
+ result += [ virtual-target.register $(r) ] ;
+ }
+ return [ property-set.empty ] $(result) ;
+ }
+}
+
+
+rule cast ( name type : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ local real-type = [ type.type-from-rule-name $(type) ] ;
+ if ! $(real-type)
+ {
+ import errors ;
+ errors.user-error No type corresponds to the main target rule name
+ '$(type)' : "Hint: try a lowercase name" ;
+ }
+
+ targets.main-target-alternative [ new cast-target-class $(name) : $(project)
+ : $(real-type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ;
+}
+
+
+IMPORT $(__name__) : cast : : cast ;
diff --git a/src/boost/tools/build/src/tools/cast.py b/src/boost/tools/build/src/tools/cast.py
new file mode 100644
index 000000000..562ba7911
--- /dev/null
+++ b/src/boost/tools/build/src/tools/cast.py
@@ -0,0 +1,76 @@
+# Status: ported
+# Base revision: 64432.
+# Copyright 2005-2010 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target rule 'cast', used to change the type of a target. For
+# example, in a Qt library one wants two kinds of CPP files -- those that are
+# just compiled and those that are passed through the MOC tool.
+#
+# This is done with:
+#
+# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
+#
+# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
+# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
+# support will run the MOC tool as part of the build process.
+#
+# At the moment, the 'cast' rule only works for non-derived (source) targets.
+#
+# TODO: The following comment is unclear or incorrect. Clean it up.
+# > Another solution would be to add a separate main target 'moc-them' that
+# > would moc all the passed sources, no matter what their type is, but I prefer
+# > cast, as defining a new target type + generator for that type is somewhat
+# > simpler than defining a main target rule.
+
+from b2.build import targets, virtual_target, property_set, type as type_
+
+from b2.manager import get_manager
+from b2.util import bjam_signature, is_iterable_typed
+
+
+class CastTargetClass(targets.TypedTarget):
+
+ def construct(self, name, source_targets, ps):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, virtual_target.VirtualTarget)
+ assert isinstance(ps, property_set.PropertySet)
+
+ result = []
+ for s in source_targets:
+ if not isinstance(s, virtual_target.FileTarget):
+ get_manager().errors()("Source to the 'cast' metatager is not a file")
+
+ if s.action():
+ get_manager().errors()("Only non-derived targets allowed as sources for 'cast'.")
+
+
+ r = s.clone_with_different_type(self.type())
+ result.append(get_manager().virtual_targets().register(r))
+
+ return property_set.empty(), result
+
+
+@bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"]))
+def cast(name, type, sources, requirements, default_build, usage_requirements):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ real_type = type_.type_from_rule_name(type)
+ if not real_type:
+ real_type = type
+ return t.main_target_alternative(
+ CastTargetClass(name, project, real_type,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+
+get_manager().projects().add_rule("cast", cast)
diff --git a/src/boost/tools/build/src/tools/clang-darwin.jam b/src/boost/tools/build/src/tools/clang-darwin.jam
new file mode 100644
index 000000000..11f62d298
--- /dev/null
+++ b/src/boost/tools/build/src/tools/clang-darwin.jam
@@ -0,0 +1,189 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import clang ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset clang : platform : darwin ;
+
+toolset.inherit-generators clang-darwin
+ <toolset>clang <toolset-clang:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override clang-darwin.prebuilt : builtin.lib-generator ;
+generators.override clang-darwin.prebuilt : builtin.prebuilt ;
+generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
+
+generators.register-c-compiler clang-darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>clang <toolset-clang:platform>darwin ;
+generators.register-c-compiler clang-darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>clang <toolset-clang:platform>darwin ;
+
+toolset.inherit-rules clang-darwin : gcc ;
+toolset.inherit-flags clang-darwin : gcc
+ : <inlining>full
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ <lto>on/<lto-mode>full
+ <lto>on/<lto-mode>fat
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Initializes the clang-darwin toolset.
+# version is optional
+# name (default clang++) is used to invoke the specified clang compiler
+# compile and link options allow you to specify additional command line options for each version
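+#
+# Illustrative example of a user-config.jam entry (version and path are
+# hypothetical):
+#
+#   using clang-darwin : 12.0 : /usr/local/bin/clang++ : <cxxflags>-stdlib=libc++ ;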
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command clang-darwin : clang++
+ : $(command) : /usr/bin /usr/local/bin ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "version ([0-9]+[.][0-9]+)"
+ : [ SHELL "$(command-string) --version" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang
+ : version $(version) ] ;
+
+ common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
+ clang.init-cxxstd-flags clang-darwin : $(condition) : $(version) ;
+
+ # - Ranlib.
+ local ranlib = [ feature.get-values <ranlib> : $(options) ] ;
+ toolset.flags clang-darwin.archive .RANLIB $(condition) : $(ranlib[1]) ;
+
+ # - Archive builder.
+ local archiver = [ feature.get-values <archiver> : $(options) ] ;
+ toolset.flags clang-darwin.archive .AR $(condition) : $(archiver[1]) ;
+}
+
+SPACE = " " ;
+
+toolset.flags clang-darwin.compile.m OPTIONS <mflags> ;
+toolset.flags clang-darwin.compile.mm OPTIONS <mflags> ;
+toolset.flags clang-darwin.compile.mm OPTIONS <mmflags> ;
+
+# Declare flags and action for compilation.
+
+# For clang, 'on' and 'full' are identical
+toolset.flags clang-darwin.compile OPTIONS <inlining>full : -Wno-inline ;
+
+# SJW 12/2017: Support for <flags> is widely inconsistent.
+# shouldn't this be handled by the common gcc?
+toolset.flags clang-darwin.compile OPTIONS <flags> ;
+
+# LTO
+toolset.flags clang-darwin.compile OPTIONS <lto>on/<lto-mode>thin : -flto=thin ;
+toolset.flags clang-darwin.link OPTIONS <lto>on/<lto-mode>thin : -flto=thin ;
+
+toolset.flags clang-darwin.compile OPTIONS <lto>on/<lto-mode>full : -flto=full ;
+toolset.flags clang-darwin.link OPTIONS <lto>on/<lto-mode>full : -flto=full ;
+
+# stdlib selection
+toolset.flags clang-darwin.compile OPTIONS <stdlib>gnu <stdlib>gnu11 : -stdlib=libstdc++ ;
+toolset.flags clang-darwin.link OPTIONS <stdlib>gnu <stdlib>gnu11 : -stdlib=libstdc++ ;
+
+toolset.flags clang-darwin.compile OPTIONS <stdlib>libc++ : -stdlib=libc++ ;
+toolset.flags clang-darwin.link OPTIONS <stdlib>libc++ : -stdlib=libc++ ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.m
+{
+ "$(CONFIG_COMMAND)" -x objective-c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.mm
+{
+ "$(CONFIG_COMMAND)" -x objective-c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Default value. Mostly for the sake of clang-linux,
+# which inherits from gcc but does not have the same
+# logic to set the .AR variable. We could put the same
+# logic in clang-linux, but that is hardly worth the trouble
+# as on Linux 'ar' is always available.
+.AR = ar ;
+.RANLIB = ranlib -cs ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here's rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+}
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "@rpath/$(<[1]:D=)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS)
+}
diff --git a/src/boost/tools/build/src/tools/clang-linux.jam b/src/boost/tools/build/src/tools/clang-linux.jam
new file mode 100644
index 000000000..13b34552c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/clang-linux.jam
@@ -0,0 +1,223 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import common ;
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import clang ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset clang : platform : linux ;
+
+toolset.inherit-generators clang-linux
+ <toolset>clang <toolset-clang:platform>linux : gcc
+ : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
+generators.override clang-linux.prebuilt : builtin.lib-generator ;
+generators.override clang-linux.prebuilt : builtin.prebuilt ;
+generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH
+ : <toolset>clang <toolset-clang:platform>linux : pth ;
+
+toolset.inherit-rules clang-linux : gcc ;
+toolset.inherit-flags clang-linux : gcc
+ : <inlining>full
+ <threading>multi/<target-os>windows
+ <lto>on/<lto-mode>full
+ <lto>on/<lto-mode>fat
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
+ .debug-configuration = true ;
+}
+
+rule init ( version ? : command * : options * ) {
+ command = [ common.get-invocation-command clang-linux : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ if $(command) {
+ local command-string = \"$(command)\" ;
+ command-string = $(command-string:J=" ") ;
+ version ?= [ MATCH "version ([0-9.]+)"
+ : [ SHELL "$(command-string) --version" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-linux
+ : version $(version) ] ;
+
+ common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
+ clang.init-cxxstd-flags clang-linux : $(condition) : $(version) ;
+
+ # Support for a gcc root as the backend. This is mainly useful for clang/gcc on
+ # Windows, since on Linux gcc will already be the default compiler located on the
+ # PATH. On Windows it is possible to have multiple versions of mingw(-64)/gcc
+ # installed in different directories. The <root> option can be given so that the
+ # gcc backend can be found at runtime, while $(command) can be a script that sets
+ # the PATH for both the clang directory and the backend gcc directory before
+ # calling clang++ when compiling/linking.
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+
+ if $(root)
+ {
+ # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+ # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+ # right ones. Note that we do not provide a clean way to build a 32-bit
+ # binary using a 64-bit compiler, but the user can always pass -m32
+ # manually.
+ local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" using gcc libraries with clang"::" $(condition) "::" $(lib_path) ;
+ }
+ toolset.flags clang-linux.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+
+ # - Ranlib.
+ local ranlib = [ feature.get-values <ranlib> : $(options) ] ;
+ if ( ! $(ranlib) ) && $(root)
+ {
+ ranlib = $(root)/bin/ranlib ;
+ }
+ toolset.flags clang-linux.archive .RANLIB $(condition) : $(ranlib[1]) ;
+
+ # - Archive builder.
+ local archiver = [ feature.get-values <archiver> : $(options) ] ;
+ if ( ! $(archiver) ) && $(root)
+ {
+ archiver = $(root)/bin/ar ;
+ }
+ toolset.flags clang-linux.archive .AR $(condition) : $(archiver[1]) ;
+}
+
+###############################################################################
+# Flags
+
+# note: clang silently ignores some of these inlining options
+# For clang, 'on' and 'full' are identical.
+toolset.flags clang-linux.compile OPTIONS <inlining>full : -Wno-inline ;
+
+toolset.flags clang-linux.compile OPTIONS <threading>multi/<target-os>windows : -pthread ;
+toolset.flags clang-linux.link OPTIONS <threading>multi/<target-os>windows : -pthread ;
+
+# LTO
+toolset.flags clang-linux.compile OPTIONS <lto>on/<lto-mode>thin : -flto=thin ;
+toolset.flags clang-linux.link OPTIONS <lto>on/<lto-mode>thin : -flto=thin ;
+
+toolset.flags clang-linux.compile OPTIONS <lto>on/<lto-mode>full : -flto=full ;
+toolset.flags clang-linux.link OPTIONS <lto>on/<lto-mode>full : -flto=full ;
+
+# stdlib selection
+toolset.flags clang-linux.compile OPTIONS <stdlib>gnu <stdlib>gnu11 : -stdlib=libstdc++ ;
+toolset.flags clang-linux.link OPTIONS <stdlib>gnu <stdlib>gnu11 : -stdlib=libstdc++ ;
+
+toolset.flags clang-linux.compile OPTIONS <stdlib>libc++ : -stdlib=libc++ ;
+toolset.flags clang-linux.link OPTIONS <stdlib>libc++ : -stdlib=libc++ ;
+
+###############################################################################
+# C and C++ compilation
+
+rule compile.c++ ( targets * : sources * : properties * ) {
+ local pch-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pch-file) {
+ DEPENDS $(<) : $(pch-file) ;
+ clang-linux.compile.c++.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ clang-linux.compile.c++.without-pch $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c++.without-pch {
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+actions compile.c++.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pch -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ local pch-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pch-file) {
+ DEPENDS $(<) : $(pch-file) ;
+ clang-linux.compile.c.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ clang-linux.compile.c.without-pch $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c.without-pch
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pch -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# PCH emission
+
+RM = [ common.rm-command ] ;
+
+rule compile.c++.pch ( targets * : sources * : properties * ) {
+}
+
+actions compile.c++.pch {
+ $(RM) -f "$(<)" && "$(CONFIG_COMMAND)" -c -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pch -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * ) {
+}
+
+actions compile.c.pch
+{
+ $(RM) -f "$(<)" && "$(CONFIG_COMMAND)" -c -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pch -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# Linking
+
+SPACE = " " ;
+
+rule link ( targets * : sources * : properties * ) {
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * ) {
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
diff --git a/src/boost/tools/build/src/tools/clang-vxworks.jam b/src/boost/tools/build/src/tools/clang-vxworks.jam
new file mode 100644
index 000000000..053f4fb55
--- /dev/null
+++ b/src/boost/tools/build/src/tools/clang-vxworks.jam
@@ -0,0 +1,128 @@
+# Copyright Brian Kuhl 2017.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import clang ;
+import feature : feature get-values ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+
+feature.extend-subfeature toolset clang : platform : vxworks ;
+
+toolset.inherit-generators clang-vxworks
+ <toolset>clang <toolset-clang:platform>vxworks
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override clang-vxworks.prebuilt : builtin.lib-generator ;
+generators.override clang-vxworks.prebuilt : builtin.prebuilt ;
+generators.override clang-vxworks.searched-lib-generator : searched-lib-generator ;
+
+
+toolset.inherit-rules clang-vxworks : gcc ;
+toolset.inherit-flags clang-vxworks : gcc
+ : <inlining>full
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+# Initializes the clang-vxworks toolset.
+# version is optional
+# compile and link options allow you to specify additional command line options for each version
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command clang-vxworks : ccllvm
+ : $(command) ] ;
+
+ linker = [ get-values <linker> : $(options) ] ;
+ linker ?= ld ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-vxworks
+ : version $(version) ] ;
+
+ common.handle-options clang-vxworks : $(condition) : $(command) : $(options) ;
+
+ toolset.flags clang-vxworks.link .LD : $(linker) ;
+
+ # - Archive builder.
+ local archiver = [ feature.get-values <archiver> : $(options) ] ;
+ toolset.flags clang-vxworks.archive .AR $(condition) : $(archiver[1]) ;
+}
+
+SPACE = " " ;
+
+toolset.flags clang-vxworks.compile OPTIONS <cflags> ;
+toolset.flags clang-vxworks.compile.c++ OPTIONS <cxxflags> ;
+toolset.flags clang-vxworks.compile INCLUDES <include> ;
+
+# For clang, 'on' and 'full' are identical
+toolset.flags clang-vxworks.compile OPTIONS <inlining>full : -Wno-inline ;
+
+toolset.flags clang-vxworks.compile OPTIONS <flags> ;
+
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+
+
+# Default value. Mostly for the sake of clang-linux,
+# which inherits from gcc but does not have the same
+# logic to set the .AR variable. We could put the same
+# logic in clang-linux, but that is hardly worth the trouble
+# as on Linux 'ar' is always available.
+.AR = ar ;
+
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rcu "$(<)" "$(>)"
+}
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>clang-vxworks-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(.LD)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS)
+ }
+
+actions link.dll bind LIBRARIES
+{
+ "$(.LD)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) -fpic -shared -non-static
+}
diff --git a/src/boost/tools/build/src/tools/clang-win.jam b/src/boost/tools/build/src/tools/clang-win.jam
new file mode 100644
index 000000000..0e7a90332
--- /dev/null
+++ b/src/boost/tools/build/src/tools/clang-win.jam
@@ -0,0 +1,183 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Peter Dimov 2018
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import common ;
+import errors ;
+import feature ;
+import clang ;
+import msvc ;
+import os ;
+import toolset ;
+import generators ;
+import path ;
+import regex ;
+
+feature.extend-subfeature toolset clang : platform : win ;
+
+toolset.inherit-generators clang-win <toolset>clang <toolset-clang:platform>win : msvc : msvc.compile.c.pch msvc.compile.c++.pch ;
+toolset.inherit-flags clang-win : msvc ;
+toolset.inherit-rules clang-win : msvc ;
+
+# Override default do-nothing generators.
+generators.override clang-win.compile.rc : rc.compile.resource ;
+generators.override clang-win.compile.mc : mc.compile ;
+
+if [ MATCH (--debug-(clang-(win-)?)?configuration) : [ modules.peek : ARGV ] ]
+{
+ local rule .notice ( messages * )
+ {
+ ECHO "notice: [clang-win]" $(messages) ;
+ }
+}
+else
+{
+ local rule .notice ( messages * )
+ {
+ }
+}
+
+# [ get-option archiver : 32 : $(options) ]
+#
+# returns <archiver-32>, or <archiver>
+
+local rule get-option ( option : addr : options * )
+{
+ local r = [ feature.get-values "<$(option)-$(addr)>" : $(options) ] ;
+ r ?= [ feature.get-values "<$(option)>" : $(options) ] ;
+ return $(r) ;
+}
+
+# init
+#
+# options:
+#
+# <assembler>ml.exe (or <assembler-32>, or <assembler-64>)
+# <archiver>lib.exe
+# <manifest-tool>mt.exe
+# <resource-compiler>rc.exe
+# <mc-compiler>mc.exe
+# <idl-compiler>midl.exe
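+#
+# Example (sketch; the version and install path below are hypothetical):
+#
+#   using clang-win : 12.0 : "C:/Program Files/LLVM/bin/clang-cl.exe" ;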
+
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command-nodefault clang-win : clang-cl.exe : $(command) ] ;
+
+ if ! $(command)
+ {
+ errors.error "Cannot configure toolset clang-win: no 'clang-cl.exe' command found or given" ;
+ }
+
+ local compiler = "\"$(command)\"" ;
+ compiler = "$(compiler:J= )" ;
+
+ version ?= [ MATCH "version ([0-9.]+)" : [ SHELL "$(compiler) -v 2>&1" ] ] ;
+
+ .notice "using compiler '$(compiler)', version '$(version)'" ;
+
+ local condition = [ common.check-init-parameters clang-win : version $(version) ] ;
+
+ common.handle-options clang-win : $(condition) : $(command) : $(options) ;
+
+ for local addr in 32 64
+ {
+ local config = [ SPLIT_BY_CHARACTERS [ SHELL "$(compiler) -m$(addr) -### foo.obj /link 2>&1" ] : "\n" ] ;
+
+ local match = 1 ;
+ local items ;
+
+ while $(match)
+ {
+ match = [ MATCH "^ *(\"[^\"]*\")(.*)" : $(config) ] ;
+
+ if $(match)
+ {
+ items += $(match[1]) ;
+ config = $(match[2]) ;
+ }
+ }
+
+ local ml ;
+
+ if $(items)
+ {
+ ml = [ regex.replace $(items[1]) "x64\\\\+link\\.exe" "x64\\ml64.exe" ] ;
+ ml = [ regex.replace $(ml) "x86\\\\+link\\.exe" "x86\\ml.exe" ] ;
+
+ if ! [ MATCH "(ml\\.exe)" "(ml64\\.exe)" : $(ml) ]
+ {
+ ml = ;
+ }
+ }
+
+ local assembler = [ get-option "assembler" : $(addr) : $(options) ] ;
+ assembler ?= $(ml) ;
+ if $(addr) = 32 { assembler ?= ml.exe ; } else { assembler ?= ml64.exe ; }
+
+ local link ;
+
+ if $(items)
+ {
+ link = [ regex.replace $(items[1]) "\\\\+HostX64\\\\+x86\\\\+" "\\HostX86\\x86\\" ] ;
+ }
+
+ local archiver = [ get-option "archiver" : $(addr) : $(options) ] ;
+
+ if $(link)
+ {
+ archiver ?= "$(link) /lib" ;
+ }
+ archiver ?= lib.exe ;
+
+ .notice "$(addr):" "using assembler '$(assembler)'" ;
+ .notice "$(addr):" "using archiver '$(archiver)'" ;
+
+ local manifest-tool = [ get-option "manifest-tool" : $(addr) : $(options) ] ;
+ local resource-compiler = [ get-option "resource-compiler" : $(addr) : $(options) ] ;
+ local mc-compiler = [ get-option "mc-compiler" : $(addr) : $(options) ] ;
+ local idl-compiler = [ get-option "idl-compiler" : $(addr) : $(options) ] ;
+
+ for local item in $(items)
+ {
+ match = [ MATCH "\"-libpath:(.*)\\\\+Lib\\\\.*\\\\um\\\\+x(.*)\"" : $(item) ] ;
+
+ if $(match)
+ {
+ local sdk-path = "$(match[1])\\bin\\x$(match[2])" ;
+ .notice "$(addr):" "using SDK path '$(sdk-path)'" ;
+
+ manifest-tool ?= "\"$(sdk-path)\\mt.exe\"" ;
+ resource-compiler ?= "\"$(sdk-path)\\rc.exe\"" ;
+ mc-compiler ?= "\"$(sdk-path)\\mc.exe\"" ;
+ idl-compiler ?= "\"$(sdk-path)\\midl.exe\"" ;
+ }
+ }
+
+ manifest-tool ?= mt.exe ;
+ resource-compiler ?= rc.exe ;
+ mc-compiler ?= mc.exe ;
+ idl-compiler ?= midl.exe ;
+
+ .notice "$(addr):" "using manifest-tool '$(manifest-tool)'" ;
+ .notice "$(addr):" "using resource-compiler '$(resource-compiler)'" ;
+ .notice "$(addr):" "using mc-compiler '$(mc-compiler)'" ;
+ .notice "$(addr):" "using idl-compiler '$(idl-compiler)'" ;
+
+ local cond = "$(condition)/<architecture>/<address-model>$(addr)" "$(condition)/<architecture>x86/<address-model>$(addr)" ;
+ if $(addr) = 32 { cond += "$(condition)/<architecture>/<address-model>" ; }
+
+ toolset.flags clang-win.compile .CC $(cond) : $(compiler) -m$(addr) ;
+ toolset.flags clang-win.link .LD $(cond) : $(compiler) -m$(addr) /link "/incremental:no" "/manifest" ;
+ toolset.flags clang-win.compile .ASM $(cond) : $(assembler) -nologo ;
+ toolset.flags clang-win.archive .LD $(cond) : $(archiver) /nologo ;
+ toolset.flags clang-win.link .MT $(cond) : $(manifest-tool) -nologo ;
+ toolset.flags clang-win.compile .MC $(cond) : $(mc-compiler) ;
+ toolset.flags clang-win.compile .RC $(cond) : $(resource-compiler) /nologo ;
+ toolset.flags clang-win.compile .IDL $(cond) : $(idl-compiler) ;
+ }
+
+ toolset.flags clang-win.link LIBRARY_OPTION <toolset>clang-win : "" : unchecked ;
+}
diff --git a/src/boost/tools/build/src/tools/clang.jam b/src/boost/tools/build/src/tools/clang.jam
new file mode 100644
index 000000000..bcb383703
--- /dev/null
+++ b/src/boost/tools/build/src/tools/clang.jam
@@ -0,0 +1,65 @@
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This is a generic 'clang' toolset. Depending on the current system and the
+# initialization arguments, it forwards to the 'clang-linux', 'clang-darwin' or
+# 'clang-win' modules.
+
+import feature ;
+import os ;
+import toolset ;
+import sequence ;
+import regex ;
+import set ;
+
+feature.extend toolset : clang ;
+feature.subfeature toolset clang : platform : : propagated link-incompatible ;
+
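+# Example (sketch): typical user-config.jam entries could look like
+#
+#   using clang ;         # picks clang-linux or clang-darwin from the host OS
+#   using clang : win ;   # forwards the remaining arguments to clang-win
+#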
+rule init ( * : * )
+{
+ if $(1) = win
+ {
+ toolset.using clang-win :
+ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ toolset.using clang-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using clang-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
+
+
+local rule cxxstd-flags ( toolset : condition * : options * )
+{
+ toolset.flags $(toolset).compile.c++ OPTIONS $(condition) : $(options) : unchecked ;
+ toolset.flags $(toolset).link OPTIONS $(condition) : $(options) : unchecked ;
+}
+
+local rule version-ge ( lhs : rhs )
+{
+ lhs = [ regex.split $(lhs) "[.]" ] ;
+ rhs = [ regex.split $(rhs) "[.]" ] ;
+ return [ sequence.compare $(rhs) : $(lhs) : numbers.less ] ;
+}
+
+# Version specific flags
+rule init-cxxstd-flags ( toolset : condition * : version )
+{
+ local cxxstd = [ feature.values <cxxstd> ] ;
+ local dialects = [ feature.values <cxxstd-dialect> ] ;
+ dialects = [ set.difference $(dialects) : gnu iso ] ;
+ local std ;
+ if [ version-ge $(version) : 3.5 ] { std = 1z ; }
+ else if [ version-ge $(version) : 3.4 ] { std = 14 ; }
+ else if [ version-ge $(version) : 3.3 ] { std = 11 ; }
+ else { std = 03 ; }
+ cxxstd-flags $(toolset) : $(condition)/<cxxstd>latest/<cxxstd-dialect>iso : -std=c++$(std) ;
+ cxxstd-flags $(toolset) : $(condition)/<cxxstd>latest/<cxxstd-dialect>gnu : -std=gnu++$(std) ;
+ cxxstd-flags $(toolset) : $(condition)/<cxxstd>latest/<cxxstd-dialect>$(dialects) : -std=c++$(std) ;
+}
diff --git a/src/boost/tools/build/src/tools/common.jam b/src/boost/tools/build/src/tools/common.jam
new file mode 100644
index 000000000..005ac9c5d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/common.jam
@@ -0,0 +1,1095 @@
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides actions common to all toolsets, such as creating directories and
+# removing files.
+
+import os ;
+import modules ;
+import utility ;
+import print ;
+import type ;
+import feature ;
+import errors ;
+import path ;
+import sequence ;
+import toolset ;
+import virtual-target ;
+import numbers ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
+{
+ .show-configuration = true ;
+}
+
+# Configurations
+#
+# The following class helps to manage toolset configurations. Each configuration
+# has a unique ID and one or more parameters. A typical example of a unique ID
+# is a condition generated by 'common.check-init-parameters' rule. Other kinds
+# of IDs can be used. Parameters may include any details about the configuration
+# like 'command', 'path', etc.
+#
+# A toolset configuration may be in one of the following states:
+#
+# - registered
+# Configuration has been registered (e.g. explicitly or by auto-detection
+# code) but has not yet been marked as used, i.e. 'toolset.using' rule has
+# not yet been called for it.
+# - used
+#       Once the 'toolset.using' rule has been called for it, the configuration
+#       is marked as 'used'.
+#
+# The main difference between the states above is that while a configuration is
+# 'registered' its options can be freely changed. This is useful in particular
+# for autodetection code - all detected configurations may be safely overwritten
+# by user code.
+
+class configurations
+{
+ import errors ;
+
+ rule __init__ ( )
+ {
+ }
+
+ # Registers a configuration.
+ #
+ # Returns 'true' if the configuration has been added and an empty value if
+ # it already exists. Reports an error if the configuration is 'used'.
+ #
+ rule register ( id )
+ {
+ if $(id) in $(self.used)
+ {
+ errors.error "common: the configuration '$(id)' is in use" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.all)
+ {
+ self.all += $(id) ;
+
+ # Indicate that a new configuration has been added.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Mark a configuration as 'used'.
+ #
+ # Returns 'true' if the state of the configuration has been changed to
+    # 'used' and an empty value if the state has not been changed. Reports an
+ # error if the configuration is not known.
+ #
+ rule use ( id )
+ {
+ if ! $(id) in $(self.all)
+ {
+ errors.error "common: the configuration '$(id)' is not known" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.used)
+ {
+ self.used += $(id) ;
+
+ # Indicate that the configuration has been marked as 'used'.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Return all registered configurations.
+ #
+ rule all ( )
+ {
+ return $(self.all) ;
+ }
+
+ # Return all used configurations.
+ #
+ rule used ( )
+ {
+ return $(self.used) ;
+ }
+
+ # Returns the value of a configuration parameter.
+ #
+ rule get ( id : param )
+ {
+ return $(self.$(param).$(id)) ;
+ }
+
+ # Sets the value of a configuration parameter.
+ #
+ rule set ( id : param : value * )
+ {
+ self.$(param).$(id) = $(value) ;
+ }
+}
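+
+# Example (sketch, not from the original): auto-detection code in a toolset
+# module might manage its configurations roughly like this (the toolset name
+# and command below are hypothetical):
+#
+#   import "class" : new ;
+#   .configs = [ new configurations ] ;
+#   if [ $(.configs).register "my-toolset-1.0" ]
+#   {
+#       $(.configs).set "my-toolset-1.0" : command : /usr/bin/mycc ;
+#   }
+#   $(.configs).use "my-toolset-1.0" ;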
+
+
+# The rule for checking toolset parameters. Trailing parameters should all be
+# parameter name/value pairs. The rule will check that each parameter either has
+# a value in each invocation or has no value in each invocation. Also, the rule
+# will check that the combination of all parameter values is unique in all
+# invocations.
+#
+# Each parameter name corresponds to a subfeature. This rule will declare a
+# subfeature the first time a non-empty parameter value is passed and will
+# extend it with all the values.
+#
+# The return value from this rule is a condition to be used for flags settings.
+#
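+# For example (sketch), a versioned toolset's init rule typically calls
+#
+#   local condition = [ common.check-init-parameters my-toolset
+#       : version $(version) ] ;
+#
+# and then uses $(condition) in its subsequent toolset.flags calls.
+#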
+rule check-init-parameters ( toolset requirement * : * )
+{
+ local sig = $(toolset) ;
+ local condition = <toolset>$(toolset) ;
+ local subcondition ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local name = $($(index)[1]) ;
+ local value = $($(index)[2]) ;
+
+ if $(value)-is-not-empty
+ {
+ condition = $(condition)-$(value) ;
+ if $(.had-unspecified-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "no value was specified in earlier"
+ "initialization" : "an explicit value is specified now" ;
+ }
+            # The logic below is for the Intel compiler. It calls this rule with
+ # 'intel-linux' and 'intel-win' as toolset, so we need to get the
+ # base part of toolset name. We can not pass 'intel' as toolset
+ # because in that case it will be impossible to register versionless
+ # intel-linux and intel-win toolsets of a specific version.
+ local t = $(toolset) ;
+ local m = [ MATCH "([^-]*)-" : $(toolset) ] ;
+ if $(m)
+ {
+ t = $(m[1]) ;
+ }
+ if ! $(.had-value.$(toolset).$(name))
+ {
+ if ! $(.declared-subfeature.$(t).$(name))
+ {
+ feature.subfeature toolset $(t) : $(name) : : propagated ;
+ .declared-subfeature.$(t).$(name) = true ;
+ }
+ .had-value.$(toolset).$(name) = true ;
+ }
+ feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
+ subcondition += <toolset-$(t):$(name)>$(value) ;
+ }
+ else
+ {
+ if $(.had-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "an explicit value was specified in an"
+ "earlier initialization" : "no value is specified now" ;
+ }
+ .had-unspecified-value.$(toolset).$(name) = true ;
+ }
+ sig = $(sig)$(value:E="")- ;
+ }
+ # We also need to consider requirements on the toolset as we can
+ # configure the same toolset multiple times with different options that
+ # are selected with the requirements.
+ if $(requirement)
+ {
+ sig = $(sig)$(requirement:J=,) ;
+ }
+ if $(sig) in $(.all-signatures)
+ {
+ local message =
+ "duplicate initialization of $(toolset) with the following parameters: " ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local p = $($(index)) ;
+ if $(p)
+ {
+ message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
+ }
+ }
+ message += "previous initialization at $(.init-loc.$(sig))" ;
+ errors.user-error
+ $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
+ $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
+ }
+ .all-signatures += $(sig) ;
+ .init-loc.$(sig) = [ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if $(requirement)
+ {
+ local r = <toolset>$(toolset) $(requirement) ;
+ r = $(r:J=,) ;
+ toolset.add-requirements "$(r):$(subcondition)" ;
+ }
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition += $(requirement) ;
+
+ if $(.show-configuration)
+ {
+ ECHO "notice:" $(condition) ;
+ }
+ return $(condition:J=/) ;
+}
+
+
+# A helper rule to get the command to invoke some tool. If
+# 'user-provided-command' is not given, tries to find a binary named 'tool' in
+# PATH and in the passed 'additional-paths'. Otherwise, verifies that the first
+# element of 'user-provided-command' is an existing program.
+#
+# This rule returns the command to be used when invoking the tool. If we can not
+# find the tool, a warning is issued. If 'path-last' is specified, PATH is
+# checked after 'additional-paths' when searching for 'tool'.
+#
+rule get-invocation-command-nodefault ( toolset : tool :
+ user-provided-command * : additional-paths * : path-last ? )
+{
+ local command ;
+ if ! $(user-provided-command)
+ {
+ command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO "warning:" toolset $(toolset) "initialization:" can not find tool
+ $(tool) ;
+ ECHO "warning:" initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ else
+ {
+ command = [ check-tool $(user-provided-command) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO "warning:" toolset $(toolset) "initialization:" ;
+ ECHO "warning:" can not find user-provided command
+ '$(user-provided-command)' ;
+ ECHO "warning:" initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+
+ return $(command) ;
+}
+
+
+# Same as get-invocation-command-nodefault, except that if no tool is found,
+# returns either the user-provided-command, if present, or the 'tool' parameter.
+#
+rule get-invocation-command ( toolset : tool : user-provided-command * :
+ additional-paths * : path-last ? )
+{
+ local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
+ $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
+
+ if ! $(result)
+ {
+ if $(user-provided-command)
+ {
+ result = $(user-provided-command) ;
+ }
+ else
+ {
+ result = $(tool) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Given an invocation command return the absolute path to the command. This
+# works even if command has no path element and was found on the PATH.
+#
+rule get-absolute-tool-path ( command )
+{
+ if $(command:D)
+ {
+ return $(command:D) ;
+ }
+ else
+ {
+ local m = [ GLOB [ modules.peek : PATH Path path ] : $(command)
+ $(command).exe ] ;
+ return $(m[1]:D) ;
+ }
+}
+
+
+# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
+# If found in PATH, returns 'name' and if found in additional paths, returns
+# absolute name. If the tool is found in several directories, returns the first
+# path found. Otherwise, returns an empty string. If 'path-last' is specified,
+# PATH is searched after 'additional-paths'.
+#
+rule find-tool ( name : additional-paths * : path-last ? )
+{
+ if $(name:D)
+ {
+ return [ check-tool-aux $(name) ] ;
+ }
+ local path = [ path.programs-path ] ;
+ local match = [ path.glob $(path) : $(name) $(name).exe ] ;
+ local additional-match = [ path.glob $(additional-paths) : $(name)
+ $(name).exe ] ;
+
+ local result ;
+ if $(path-last)
+ {
+ result = $(additional-match) ;
+ if ! $(result) && $(match)
+ {
+ result = $(name) ;
+ }
+ }
+ else
+ {
+ if $(match)
+ {
+ result = $(name) ;
+ }
+ else
+ {
+ result = $(additional-match) ;
+ }
+ }
+ if $(result)
+ {
+ return [ path.native $(result[1]) ] ;
+ }
+}
+
+# Checks if 'command' can be found either in path or is a full name to an
+# existing file.
+#
+local rule check-tool-aux ( command )
+{
+ if $(command:D)
+ {
+ if [ path.exists $(command) ]
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
+ # Only NT will run .bat & .cmd files by their unqualified names.
+ || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
+ [ path.exists $(command).cmd ] ) )
+ {
+ return $(command) ;
+ }
+ }
+ else
+ {
+ if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
+ {
+ return $(command) ;
+ }
+ }
+}
+
+
+# Checks that a tool can be invoked by 'command'. If command is not an absolute
+# path, checks if it can be found in 'path'. If command is an absolute path,
+# check that it exists. Returns 'command' if ok or empty string otherwise.
+#
+local rule check-tool ( xcommand + )
+{
+ if [ check-tool-aux $(xcommand[1]) ] ||
+ [ check-tool-aux $(xcommand[-1]) ]
+ {
+ return $(xcommand) ;
+ }
+}
+
+
+# Handle common options for toolset, specifically sets the following flag
+# variables:
+# - CONFIG_COMMAND to $(command)
+# - OPTIONS for compile to the value of <compileflags> in $(options)
+# - OPTIONS for compile.c to the value of <cflags> in $(options)
+# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
+# - OPTIONS for compile.asm to the value of <asmflags> in $(options)
+# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
+# - OPTIONS for link to the value of <linkflags> in $(options)
+#
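+# Example (sketch): a toolset's init rule usually forwards its arguments here
+# right after check-init-parameters, e.g.
+#
+#   common.handle-options my-toolset : $(condition) : $(command) : $(options) ;
+#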
+rule handle-options ( toolset : condition * : command * : options * )
+{
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" will use '$(command)' for $(toolset), condition
+ $(condition:E=(empty)) ;
+ }
+
+ # The last parameter ('unchecked') says it is OK to set flags for another
+ # module.
+ toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
+ : unchecked ;
+
+ toolset.flags $(toolset).compile OPTIONS $(condition) :
+ [ feature.get-values <compileflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c OPTIONS $(condition) :
+ [ feature.get-values <cflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
+ [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.asm OPTIONS $(condition) :
+ [ feature.get-values <asmflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).link OPTIONS $(condition) :
+ [ feature.get-values <linkflags> : $(options) ] : unchecked ;
+}
+
+
+# Returns the location of the "program files" directory on a Windows platform.
+#
+rule get-program-files-dir ( )
+{
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ ProgramFiles = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ ProgramFiles = "c:\\Program Files" ;
+ }
+ return $(ProgramFiles) ;
+}
+
+
+if [ os.name ] = NT
+{
+ NULL_DEVICE = "NUL" ;
+ IGNORE = "2>$(NULL_DEVICE) >$(NULL_DEVICE) & setlocal" ;
+ RM = del /f /q ;
+ CP = copy /b ;
+ LN ?= $(CP) ;
+ # Ugly hack to convince copy to set the timestamp of the destination to the
+ # current time by concatenating the source with a nonexistent file. Note
+    # that this requires /b (binary), as the default when concatenating files is
+ # /a (ascii).
+ WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
+}
+else if [ os.name ] = VMS
+{
+ NULL_DEVICE = "NL:" ;
+ PIPE = PIPE ;
+ IGNORE = "2>$(NULL_DEVICE) >$(NULL_DEVICE)" ;
+ RM = DELETE /NOCONF ;
+ CP = COPY /OVERWRITE ;
+ LN = $(CP) ;
+}
+else
+{
+ NULL_DEVICE = "/dev/null" ;
+ IGNORE = "2>$(NULL_DEVICE) >$(NULL_DEVICE)" ;
+ RM = rm -f ;
+ CP = cp ;
+ LN = ln ;
+}
+
+NULL_OUT = ">$(NULL_DEVICE)" ;
+
+rule null-device ( )
+{
+ return $(NULL_DEVICE) ;
+}
+
+
+rule rm-command ( )
+{
+ return $(RM) ;
+}
+
+
+rule copy-command ( )
+{
+ return $(CP) ;
+}
+
+
+if "\n" = "n"
+{
+ # Escape characters not supported so use ugly hacks. Will not work on Cygwin
+ # - see below.
+ nl = "
+" ;
+ q = "" ;
+}
+else
+{
+ nl = "\n" ;
+ q = "\"" ;
+}
+
+
+rule newline-char ( )
+{
+ return $(nl) ;
+}
+
+
+# Returns the command needed to set an environment variable on the current
+# platform. The variable setting persists through all following commands and is
+# visible in the environment seen by subsequently executed commands. In other
+# words, on Unix systems, the variable is exported, which is consistent with the
+# only possible behavior on Windows systems.
+#
+rule variable-setting-command ( variable : value )
+{
+ if [ os.name ] = NT
+ {
+ return "set $(variable)=$(value)$(nl)" ;
+ }
+ else if [ os.name ] = VMS
+ {
+ return "$(variable) == $(q)$(value)$(q)$(nl)" ;
+ }
+ else
+ {
+        # If we do not have escape character support in bjam, the code below
+ # blows up on CYGWIN, since the $(nl) variable holds a Windows new-line
+ # \r\n sequence that messes up the executed export command which then
+ # reports that the passed variable name is incorrect.
+ # But we have a check for cygwin in kernel/bootstrap.jam already.
+ return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
+ }
+}
+
+
+# Returns a command that sets a named shell path variable to the given NATIVE
+# paths on the current platform.
+#
+rule path-variable-setting-command ( variable : paths * )
+{
+ local sep = [ os.path-separator ] ;
+ return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
+}
+
+
+# Returns a command that prepends the given paths to the named path variable on
+# the current platform.
+#
+rule prepend-path-variable-command ( variable : paths * )
+{
+ return [ path-variable-setting-command $(variable)
+ : $(paths) [ os.expand-variable $(variable) ] ] ;
+}
+
+
+# Return a command which can create a file. If 'r' is the result of invocation,
+# then 'r foobar' will create foobar with unspecified content. What happens if
+# the file already exists is unspecified.
+#
+rule file-creation-command ( )
+{
+ if [ os.name ] = NT
+ {
+ # A few alternative implementations on Windows:
+ #
+ # 'type NUL >> '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also not change
+ # the target's timestamp in case the file already exists.
+ #
+ # 'type NUL > '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also destroy an
+ # already existing file by overwriting it with an empty one.
+ #
+ # I guess the best solution would be to allow Boost Jam to define
+ # built-in functions such as 'create a file', 'touch a file' or 'copy a
+ # file' which could be used from inside action code. That would allow
+ # completely portable operations without this kind of kludge.
+ # (22.02.2009.) (Jurko)
+ return "echo. > " ;
+ }
+ else if [ os.name ] = VMS
+ {
+ return "APPEND /NEW NL: " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+# Returns a command that may be used for 'touching' files. It is not a real
+# 'touch' command on NT because it adds an empty line at the end of the file,
+# but it works with source files.
+#
+rule file-touch-command ( )
+{
+ if [ os.name ] = NT
+ {
+ return "echo. >> " ;
+ }
+ else if [ os.name ] = VMS
+ {
+ return "APPEND /NEW NL: " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+rule MkDir
+{
+ # If dir exists, do not update it. Do this even for $(DOT).
+ NOUPDATE $(<) ;
+
+ if $(<) != $(DOT) && ! $($(<)-mkdir)
+ {
+ # Cheesy gate to prevent multiple invocations on same dir.
+ $(<)-mkdir = true ;
+
+ # Schedule the mkdir build action.
+ common.mkdir $(<) ;
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ DEPENDS dirs : $(<) ;
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ local s = $(<:P) ;
+ if [ os.name ] = NT
+ {
+ switch $(s)
+ {
+ case "*:" : s = ;
+ case "*:\\" : s = ;
+ }
+ }
+
+ if $(s)
+ {
+ if $(s) != $(<)
+ {
+ DEPENDS $(<) : $(s) ;
+ MkDir $(s) ;
+ }
+ else
+ {
+ NOTFILE $(s) ;
+ }
+ }
+ }
+}
+
+
+#actions MkDir1
+#{
+# mkdir "$(<)"
+#}
+
+# The following quick-fix actions should be replaced using the original MkDir1
+# action once Boost Jam gets updated to correctly detect different paths leading
+# up to the same filesystem target and triggers their build action only once.
+# (todo) (04.07.2008.) (Jurko)
+
+if [ os.name ] = NT
+{
+ actions quietly mkdir
+ {
+ if not exist "$(<)\\" mkdir "$(<)"
+ }
+}
+else
+{
+ actions quietly mkdir
+ {
+ mkdir -p "$(<)"
+ }
+}
+
+
+actions piecemeal together existing Clean
+{
+ $(RM) "$(>)"
+}
+
+
+rule copy
+{
+}
+
+
+actions copy
+{
+ $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
+}
+
+
+rule RmTemps
+{
+}
+
+
+actions quietly updated piecemeal together RmTemps
+{
+ $(RM) "$(>)" $(IGNORE)
+}
+
+
+actions hard-link
+{
+ $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
+ $(LN) "$(>)" "$(<)" $(NULL_OUT)
+}
+
+
+if [ os.name ] = VMS
+{
+ actions mkdir
+ {
+ IF F$PARSE("$(<:W)") .EQS. "" THEN CREATE /DIR $(<:W)
+ }
+
+ actions piecemeal together existing Clean
+ {
+ $(RM) $(>:WJ=;*,);*
+ }
+
+ actions copy
+ {
+ $(CP) $(>:WJ=,) $(<:W)
+ }
+
+ actions quietly updated piecemeal together RmTemps
+ {
+ $(PIPE) $(RM) $(>:WJ=;*,);* $(IGNORE)
+ }
+
+ actions hard-link
+ {
+ $(PIPE) $(RM) $(>[1]:W);* $(IGNORE)
+ $(PIPE) $(LN) $(>[1]:W) $(<:W) $(NULL_OUT)
+ }
+}
+
+# Given a target, as given to a custom tag rule, returns a string formatted
+# according to the passed format. Format is a list of properties that is
+# represented in the result. For each element of format the corresponding target
+# information is obtained and added to the result string. For all but the
+# literal, the format value is taken as the string to prepend to the output
+# to join the item to the rest of the result. If not given, "-" is used as the
+# joiner.
+#
+# The format options can be:
+#
+# <base>[joiner]
+# :: The basename of the target name.
+# <toolset>[joiner]
+# :: The abbreviated toolset tag being used to build the target.
+# <threading>[joiner]
+# :: Indication of a multi-threaded build.
+# <runtime>[joiner]
+# :: Collective tag of the build runtime.
+# <version:/version-feature | X.Y[.Z]/>[joiner]
+# :: Short version tag taken from the given "version-feature" in the
+# build properties. Or if not present, the literal value as the
+# version number.
+# <property:/property-name/>[joiner]
+# :: Direct lookup of the given property-name value in the build
+# properties. /property-name/ is a regular expression. E.g.
+# <property:toolset-.*:flavor> will match every toolset.
+# /otherwise/
+# :: The literal value of the format argument.
+#
+# For example this format:
+#
+# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+#
+# Might return:
+#
+# boost_thread-vc80-mt-gd-1_33.dll, or
+# boost_regex-vc80-gd-1_33.dll
+#
+# The returned name also has the target type specific prefix and suffix which
+# puts it in a ready form to use as the value from a custom tag rule.
+#
+rule format-name ( format * : name : type ? : property-set )
+{
+ local result = "" ;
+ for local f in $(format)
+ {
+ switch $(f:G)
+ {
+ case <base> :
+ result += $(name:B) ;
+
+ case <toolset> :
+ result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <threading> :
+ result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
+ : $(property-set) ] ] ;
+
+ case <runtime> :
+ result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <qt> :
+ result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <address-model> :
+ result += [ join-tag $(f:G=) : [ address-model-tag $(name) :
+ $(type) : $(property-set) ] ] ;
+
+ case <arch-and-model> :
+ result += [ join-tag $(f:G=) : [ arch-and-model-tag $(name) :
+ $(type) : $(property-set) ] ] ;
+
+ case <version:*> :
+ local key = [ MATCH <version:(.*)> : $(f:G) ] ;
+ local version = [ $(property-set).get <$(key)> ] ;
+ version ?= $(key) ;
+ version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
+ result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
+
+ case <property:*> :
+ local key = [ MATCH <property:(.*)> : $(f:G) ] ;
+ local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
+ if $(p0)
+ {
+ local p = [ $(property-set).get <$(p0)> ] ;
+ if $(p)
+ {
+ result += [ join-tag $(f:G=) : $(p) ] ;
+ }
+ }
+
+ case * :
+ result += $(f:G=) ;
+ }
+ }
+ return [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
+ $(property-set) ] ;
+}
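+
+# Example (sketch, not from the original): a project's custom tag rule can
+# delegate to format-name, e.g.
+#
+#   rule tag ( name : type ? : property-set )
+#   {
+#       return [ common.format-name <base> <toolset> <threading> <runtime>
+#           : $(name) : $(type) : $(property-set) ] ;
+#   }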
+
+
+local rule join-tag ( joiner ? : tag ? )
+{
+ if ! $(joiner) { joiner = - ; }
+ return $(joiner)$(tag) ;
+}
+
+
+local rule toolset-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ switch [ $(property-set).get <toolset> ]
+ {
+ case borland* : tag += bcb ;
+ case clang* :
+ {
+ switch [ $(property-set).get <toolset-clang:platform> ]
+ {
+ case darwin : tag += clang-darwin ;
+ case linux : tag += clang ;
+ case win : tag += clangw ;
+ }
+ }
+ case como* : tag += como ;
+ case cw : tag += cw ;
+ case darwin* : tag += xgcc ;
+ case edg* : tag += edg ;
+ case gcc* :
+ {
+ switch [ $(property-set).get <target-os> ]
+ {
+ case *windows* : tag += mgw ;
+ case * : tag += gcc ;
+ }
+ }
+ case intel :
+ if [ $(property-set).get <toolset-intel:platform> ] = win
+ {
+ tag += iw ;
+ }
+ else
+ {
+ tag += il ;
+ }
+ case kcc* : tag += kcc ;
+ case kylix* : tag += bck ;
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ case mipspro* : tag += mp ;
+ case msvc* : tag += vc ;
+ case qcc* : tag += qcc ;
+ case sun* : tag += sw ;
+ case tru64cxx* : tag += tru ;
+ case vacpp* : tag += xlc ;
+ }
+ local version = [ MATCH "<toolset.*version>([0123456789]+)[.]?([0123456789]*)"
+ : $(properties) ] ;
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if $(tag) = vc
+ {
+ if $(version[1]) = 6
+ {
+ # Cancel minor version.
+ version = 6 ;
+ }
+ else if $(version[1]) = 7 && $(version[2]) = 0
+ {
+ version = 7 ;
+ }
+ }
+
+ # From GCC 5, versioning changes and minor becomes patch
+ if ( $(tag) = gcc || $(tag) = mgw ) && $(version[1]) && [ numbers.less 4 $(version[1]) ]
+ {
+ version = $(version[1]) ;
+ }
+
+ # Ditto, from Clang 4
+ if ( $(tag) = clang || $(tag) = clangw ) && $(version[1]) && [ numbers.less 3 $(version[1]) ]
+ {
+ version = $(version[1]) ;
+ }
+
+ # On intel, version is not added, because it does not matter and it is the
+ # version of vc used as backend that matters. Ideally, we should encode the
+ # backend version but that would break compatibility with V1.
+ if $(tag) = iw
+ {
+ version = ;
+ }
+
+ # On borland, version is not added for compatibility with V1.
+ if $(tag) = bcb
+ {
+ version = ;
+ }
+
+ tag += $(version) ;
+
+ return $(tag:J=) ;
+}
+
+
+local rule threading-tag ( name : type ? : property-set )
+{
+ if <threading>multi in [ $(property-set).raw ]
+ {
+ return mt ;
+ }
+}
+
+
+local rule runtime-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ if <runtime-link>static in $(properties) { tag += s ; }
+
+ # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect gcc
+ # toolset, the tag rules will not even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # known to care about runtime debugging.
+ if ( <toolset>msvc in $(properties) ) ||
+ ( <stdlib>stlport in $(properties) ) ||
+ ( <toolset-intel:platform>win in $(properties) )
+ {
+ if <runtime-debugging>on in $(properties) { tag += g ; }
+ }
+
+ if <python-debugging>on in $(properties) { tag += y ; }
+ if <variant>debug in $(properties) { tag += d ; }
+ if <stdlib>stlport in $(properties) { tag += p ; }
+ if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
+
+ return $(tag:J=) ;
+}
+
+
+# Create a tag for the Qt library version
+# "<qt>4.6.0" will result in tag "qt460"
+local rule qt-tag ( name : type ? : property-set )
+{
+ local v = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)" :
+ [ $(property-set).get <qt> ] ] ;
+ return qt$(v:J=) ;
+}
+
+
+# Create a tag for the address-model
+# <address-model>64 will simply generate "64"
+local rule address-model-tag ( name : type ? : property-set )
+{
+ return [ $(property-set).get <address-model> ] ;
+}
+
+# Create a tag for the architecture and model
+# <architecture>x86 <address-model>32 would generate "x32"
+# This relies on the fact that all architectures start with
+# unique letters.
+local rule arch-and-model-tag ( name : type ? : property-set )
+{
+ local architecture = [ $(property-set).get <architecture> ] ;
+ local address-model = [ $(property-set).get <address-model> ] ;
+
+ local arch = [ MATCH ^(.) : $(architecture) ] ;
+
+ return $(arch)$(address-model) ;
+}
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local save-os = [ modules.peek os : .name ] ;
+
+ modules.poke os : .name : LINUX ;
+ assert.result "PATH=\"foo:bar:baz\"\nexport PATH\n"
+ : path-variable-setting-command PATH : foo bar baz ;
+ assert.result "PATH=\"foo:bar:$PATH\"\nexport PATH\n"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : NT ;
+ assert.result "set PATH=foo;bar;baz\n"
+ : path-variable-setting-command PATH : foo bar baz ;
+ assert.result "set PATH=foo;bar;%PATH%\n"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : $(save-os) ;
+}
diff --git a/src/boost/tools/build/src/tools/common.py b/src/boost/tools/build/src/tools/common.py
new file mode 100644
index 000000000..8f6cbfff1
--- /dev/null
+++ b/src/boost/tools/build/src/tools/common.py
@@ -0,0 +1,860 @@
+# Status: being ported by Steven Watanabe
+# Base revision: 47174
+#
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+""" Provides actions common to all toolsets, such as creating directories and
+ removing files.
+"""
+
+import re
+import bjam
+import os
+import os.path
+import sys
+
+# for some reason this fails on Python 2.7(r27:82525)
+# from b2.build import virtual_target
+import b2.build.virtual_target
+from b2.build import feature, type
+from b2.util.utility import *
+from b2.util import path, is_iterable_typed
+
+__re__before_first_dash = re.compile ('([^-]*)-')
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ Note that this must be called _after_ resetting the module 'feature'.
+ """
+ global __had_unspecified_value, __had_value, __declared_subfeature
+ global __init_loc
+ global __all_signatures, __debug_configuration, __show_configuration
+
+ # Stores toolsets without specified initialization values.
+ __had_unspecified_value = {}
+
+ # Stores toolsets with specified initialization values.
+ __had_value = {}
+
+ # Stores toolsets with declared subfeatures.
+ __declared_subfeature = {}
+
+ # Stores all signatures of the toolsets.
+ __all_signatures = {}
+
+ # Stores the initialization locations of each toolset
+ __init_loc = {}
+
+ __debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
+ __show_configuration = '--show-configuration' in bjam.variable('ARGV')
+
+ global __executable_path_variable
+ OS = bjam.call("peek", [], "OS")[0]
+ if OS == "NT":
+ # On Windows the case and capitalization of PATH is not always predictable, so
+ # let's find out what variable name was really set.
+ for n in os.environ:
+ if n.lower() == "path":
+ __executable_path_variable = n
+ break
+ else:
+ __executable_path_variable = "PATH"
+
+ m = {"NT": __executable_path_variable,
+ "CYGWIN": "PATH",
+ "MACOSX": "DYLD_LIBRARY_PATH",
+ "AIX": "LIBPATH",
+ "HAIKU": "LIBRARY_PATH"}
+ global __shared_library_path_variable
+ __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH")
+
+reset()
+
+def shared_library_path_variable():
+ return __shared_library_path_variable
+
+# ported from trunk@47174
+class Configurations(object):
+ """
+ This class helps to manage toolset configurations. Each configuration
+ has a unique ID and one or more parameters. A typical example of a unique ID
+ is a condition generated by 'common.check-init-parameters' rule. Other kinds
+ of IDs can be used. Parameters may include any details about the configuration
+ like 'command', 'path', etc.
+
+ A toolset configuration may be in one of the following states:
+
+ - registered
+ Configuration has been registered (e.g. by autodetection code) but has
+ not yet been marked as used, i.e. 'toolset.using' rule has not yet been
+ called for it.
+ - used
+          Once the 'toolset.using' rule has been called for it, the
+          configuration is marked as 'used'.
+
+ The main difference between the states above is that while a configuration is
+ 'registered' its options can be freely changed. This is useful in particular
+ for autodetection code - all detected configurations may be safely overwritten
+ by user code.
+ """
+
+ def __init__(self):
+ self.used_ = set()
+ self.all_ = set()
+ self.params_ = {}
+
+ def register(self, id):
+ """
+ Registers a configuration.
+
+ Returns True if the configuration has been added and False if
+ it already exists. Reports an error if the configuration is 'used'.
+ """
+ assert isinstance(id, basestring)
+ if id in self.used_:
+ #FIXME
+ errors.error("common: the configuration '$(id)' is in use")
+
+ if id not in self.all_:
+ self.all_.add(id)
+
+ # Indicate that a new configuration has been added.
+ return True
+ else:
+ return False
+
+ def use(self, id):
+ """
+ Mark a configuration as 'used'.
+
+ Returns True if the state of the configuration has been changed to
+        'used' and False if the state was not changed. Reports an error
+ if the configuration isn't known.
+ """
+ assert isinstance(id, basestring)
+ if id not in self.all_:
+ #FIXME:
+ errors.error("common: the configuration '$(id)' is not known")
+
+ if id not in self.used_:
+ self.used_.add(id)
+
+ # indicate that the configuration has been marked as 'used'
+ return True
+ else:
+ return False
+
+ def all(self):
+ """ Return all registered configurations. """
+ return self.all_
+
+ def used(self):
+ """ Return all used configurations. """
+ return self.used_
+
+ def get(self, id, param):
+ """ Returns the value of a configuration parameter. """
+ assert isinstance(id, basestring)
+ assert isinstance(param, basestring)
+ return self.params_.get(param, {}).get(id)
+
+ def set (self, id, param, value):
+ """ Sets the value of a configuration parameter. """
+ assert isinstance(id, basestring)
+ assert isinstance(param, basestring)
+ assert is_iterable_typed(value, basestring)
+ self.params_.setdefault(param, {})[id] = value
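+
+# Example (sketch, not part of the port): toolset auto-detection code could
+# track configurations roughly like this (the id and command are hypothetical):
+#
+#   configs = Configurations()
+#   if configs.register('my-toolset-1.0'):
+#       configs.set('my-toolset-1.0', 'command', ['/usr/bin/mycc'])
+#   configs.use('my-toolset-1.0')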
+
+# Ported from trunk@47174
+def check_init_parameters(toolset, requirement, *args):
+ """ The rule for checking toolset parameters. Trailing parameters should all be
+ parameter name/value pairs. The rule will check that each parameter either has
+ a value in each invocation or has no value in each invocation. Also, the rule
+ will check that the combination of all parameter values is unique in all
+ invocations.
+
+ Each parameter name corresponds to a subfeature. This rule will declare a
+ subfeature the first time a non-empty parameter value is passed and will
+ extend it with all the values.
+
+ The return value from this rule is a condition to be used for flags settings.
+ """
+ assert isinstance(toolset, basestring)
+ assert is_iterable_typed(requirement, basestring) or requirement is None
+ from b2.build import toolset as b2_toolset
+ if requirement is None:
+ requirement = []
+ sig = toolset
+ condition = replace_grist(toolset, '<toolset>')
+ subcondition = []
+
+ for arg in args:
+ assert(isinstance(arg, tuple))
+ assert(len(arg) == 2)
+ name = arg[0]
+ value = arg[1]
+ assert(isinstance(name, str))
+ assert(isinstance(value, str) or value is None)
+
+ str_toolset_name = str((toolset, name))
+
+ # FIXME: is this the correct translation?
+ ### if $(value)-is-not-empty
+ if value is not None:
+ condition = condition + '-' + value
+ if str_toolset_name in __had_unspecified_value:
+ raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \
+ "no value was specified in earlier initialization\n" \
+ "an explicit value is specified now" % (toolset, name))
+
+            # The logic below is for the Intel compiler. It calls this rule
+            # with 'intel-linux' and 'intel-win' as toolset, so we need to
+            # get the base part of the toolset name.
+            # We can't pass 'intel' as toolset, because in that case it would
+            # be impossible to register versionless intel-linux and
+            # intel-win toolsets of a specific version.
+ t = toolset
+ m = __re__before_first_dash.match(toolset)
+ if m:
+ t = m.group(1)
+
+ if str_toolset_name not in __had_value:
+ if str((t, name)) not in __declared_subfeature:
+ feature.subfeature('toolset', t, name, [], ['propagated'])
+ __declared_subfeature[str((t, name))] = True
+
+ __had_value[str_toolset_name] = True
+
+ feature.extend_subfeature('toolset', t, name, [value])
+ subcondition += ['<toolset-' + t + ':' + name + '>' + value ]
+
+ else:
+ if str_toolset_name in __had_value:
+ raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \
+ "an explicit value was specified in an earlier initialization\n" \
+ "no value is specified now" % (toolset, name))
+
+ __had_unspecified_value[str_toolset_name] = True
+
+ if value == None: value = ''
+
+ sig = sig + value + '-'
+
+ # if a requirement is specified, the signature should be unique
+ # with that requirement
+ if requirement:
+ sig += '-' + '-'.join(requirement)
+
+ if sig in __all_signatures:
+ message = "duplicate initialization of '%s' with the following parameters: " % toolset
+
+ for arg in args:
+ name = arg[0]
+ value = arg[1]
+ if value == None: value = '<unspecified>'
+
+ message += "'%s' = '%s'\n" % (name, value)
+
+ raise BaseException(message)
+
+ __all_signatures[sig] = True
+ # FIXME
+ __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if requirement:
+ r = ['<toolset>' + toolset] + requirement
+ r = ','.join(r)
+ b2_toolset.add_requirements([r + ':' + c for c in subcondition])
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition = [condition]
+ if requirement:
+ condition += requirement
+
+ if __show_configuration:
+ print "notice:", condition
+ return ['/'.join(condition)]
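+
+# Example (sketch): a toolset module would typically call
+#   condition = check_init_parameters('my-toolset', None, ('version', version))
+# and pass the returned condition on to toolset.flags().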
+
+# Ported from trunk@47077
+def get_invocation_command_nodefault(
+ toolset, tool, user_provided_command=[], additional_paths=[], path_last=False):
+ """
+ A helper rule to get the command to invoke some tool. If
+    'user-provided-command' is not given, tries to find a binary named 'tool' in
+    PATH and in the passed 'additional-paths'. Otherwise, verifies that the first
+ element of 'user-provided-command' is an existing program.
+
+ This rule returns the command to be used when invoking the tool. If we can't
+ find the tool, a warning is issued. If 'path-last' is specified, PATH is
+ checked after 'additional-paths' when searching for 'tool'.
+ """
+ assert isinstance(toolset, basestring)
+ assert isinstance(tool, basestring)
+ assert is_iterable_typed(user_provided_command, basestring)
+ assert is_iterable_typed(additional_paths, basestring) or additional_paths is None
+ assert isinstance(path_last, (int, bool))
+
+ if not user_provided_command:
+ command = find_tool(tool, additional_paths, path_last)
+ if not command and __debug_configuration:
+ print "warning: toolset", toolset, "initialization: can't find tool, tool"
+ #FIXME
+ #print "warning: initialized from" [ errors.nearest-user-location ] ;
+ else:
+ command = check_tool(user_provided_command)
+ if not command and __debug_configuration:
+ print "warning: toolset", toolset, "initialization:"
+ print "warning: can't find user-provided command", user_provided_command
+ #FIXME
+ #ECHO "warning: initialized from" [ errors.nearest-user-location ]
+ command = []
+ if command:
+ command = ' '.join(command)
+
+ return command
+
+# ported from trunk@47174
+def get_invocation_command(toolset, tool, user_provided_command = [],
+ additional_paths = [], path_last = False):
+ """ Same as get_invocation_command_nodefault, except that if no tool is found,
+ returns either the user-provided-command, if present, or the 'tool' parameter.
+ """
+ assert isinstance(toolset, basestring)
+ assert isinstance(tool, basestring)
+ assert is_iterable_typed(user_provided_command, basestring)
+ assert is_iterable_typed(additional_paths, basestring) or additional_paths is None
+ assert isinstance(path_last, (int, bool))
+
+ result = get_invocation_command_nodefault(toolset, tool,
+ user_provided_command,
+ additional_paths,
+ path_last)
+
+ if not result:
+ if user_provided_command:
+ result = user_provided_command[0]
+ else:
+ result = tool
+
+ assert(isinstance(result, str))
+
+ return result
+
+# ported from trunk@47281
+def get_absolute_tool_path(command):
+ """
+ Given an invocation command,
+    return the absolute path to the command. This works even if the command
+    has no path element and is found on PATH.
+ """
+ assert isinstance(command, basestring)
+ if os.path.dirname(command):
+ return os.path.dirname(command)
+ else:
+ programs = path.programs_path()
+ m = path.glob(programs, [command, command + '.exe' ])
+ if not len(m):
+ if __debug_configuration:
+ print "Could not find:", command, "in", programs
+ return None
+ return os.path.dirname(m[0])
+
+# ported from trunk@47174
+def find_tool(name, additional_paths = [], path_last = False):
+ """ Attempts to find tool (binary) named 'name' in PATH and in
+ 'additional-paths'. If found in path, returns 'name'. If
+ found in additional paths, returns full name. If the tool
+ is found in several directories, returns the first path found.
+ Otherwise, returns the empty string. If 'path_last' is specified,
+ path is checked after 'additional_paths'.
+ """
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(additional_paths, basestring)
+ assert isinstance(path_last, (int, bool))
+
+ programs = path.programs_path()
+ match = path.glob(programs, [name, name + '.exe'])
+ additional_match = path.glob(additional_paths, [name, name + '.exe'])
+
+ result = []
+ if path_last:
+ result = additional_match
+ if not result and match:
+ result = match
+
+ else:
+ if match:
+ result = match
+
+ elif additional_match:
+ result = additional_match
+
+ if result:
+ return path.native(result[0])
+ else:
+ return ''
+
+#ported from trunk@47281
+def check_tool_aux(command):
+ """ Checks if 'command' can be found either in path
+ or is a full name to an existing file.
+ """
+ assert isinstance(command, basestring)
+ dirname = os.path.dirname(command)
+ if dirname:
+ if os.path.exists(command):
+ return command
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ elif on_windows() and os.path.exists(command + '.exe'):
+ return command
+ # Only NT will run .bat files by their unqualified names.
+ elif os_name() == 'NT' and os.path.exists(command + '.bat'):
+ return command
+ else:
+ paths = path.programs_path()
+ if path.glob(paths, [command]):
+ return command
+
+# ported from trunk@47281
+def check_tool(command):
+ """ Checks that a tool can be invoked by 'command'.
+ If command is not an absolute path, checks if it can be found in 'path'.
+ If command is absolute path, check that it exists. Returns 'command'
+ if ok and empty string otherwise.
+ """
+ assert is_iterable_typed(command, basestring)
+ #FIXME: why do we check the first and last elements????
+ if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
+ return command
+
+# ported from trunk@47281
+def handle_options(tool, condition, command, options):
+ """ Handle common options for toolset, specifically sets the following
+ flag variables:
+ - CONFIG_COMMAND to 'command'
+        - OPTIONS for compile to the value of <compileflags> in options
+ - OPTIONS for compile.c to the value of <cflags> in options
+ - OPTIONS for compile.c++ to the value of <cxxflags> in options
+ - OPTIONS for compile.asm to the value of <asmflags> in options
+ - OPTIONS for compile.fortran to the value of <fflags> in options
+        - OPTIONS for link to the value of <linkflags> in options
+ """
+ from b2.build import toolset
+
+ assert isinstance(tool, basestring)
+ assert is_iterable_typed(condition, basestring)
+ assert command and isinstance(command, basestring)
+ assert is_iterable_typed(options, basestring)
+ toolset.flags(tool, 'CONFIG_COMMAND', condition, [command])
+ toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options))
+ toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options))
+ toolset.flags(tool + '.compile.c++', 'OPTIONS', condition, feature.get_values('<cxxflags>', options))
+ toolset.flags(tool + '.compile.asm', 'OPTIONS', condition, feature.get_values('<asmflags>', options))
+ toolset.flags(tool + '.compile.fortran', 'OPTIONS', condition, feature.get_values('<fflags>', options))
+ toolset.flags(tool + '.link', 'OPTIONS', condition, feature.get_values('<linkflags>', options))
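+
+# Example (sketch): mirroring the Jam rule above, toolset init code would call
+#   handle_options('my-toolset', condition, command, options)
+# right after check_init_parameters().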
+
+# ported from trunk@47281
+def get_program_files_dir():
+ """ returns the location of the "program files" directory on a windows
+ platform
+ """
+ ProgramFiles = bjam.variable("ProgramFiles")
+ if ProgramFiles:
+ ProgramFiles = ' '.join(ProgramFiles)
+ else:
+ ProgramFiles = "c:\\Program Files"
+ return ProgramFiles
+
+# ported from trunk@47281
+def rm_command():
+ return __RM
+
+# ported from trunk@47281
+def copy_command():
+ return __CP
+
+# ported from trunk@47281
+def variable_setting_command(variable, value):
+ """
+ Returns the command needed to set an environment variable on the current
+ platform. The variable setting persists through all following commands and is
+ visible in the environment seen by subsequently executed commands. In other
+ words, on Unix systems, the variable is exported, which is consistent with the
+ only possible behavior on Windows systems.
+ """
+ assert isinstance(variable, basestring)
+ assert isinstance(value, basestring)
+
+ if os_name() == 'NT':
+ return "set " + variable + "=" + value + os.linesep
+ else:
+ # (todo)
+ # The following does not work on CYGWIN and needs to be fixed. On
+ # CYGWIN the $(nl) variable holds a Windows new-line \r\n sequence that
+ # messes up the executed export command which then reports that the
+ # passed variable name is incorrect. This is most likely due to the
+ # extra \r character getting interpreted as a part of the variable name.
+ #
+ # Several ideas pop to mind on how to fix this:
+ # * One way would be to separate the commands using the ; shell
+ # command separator. This seems like the quickest possible
+ # solution but I do not know whether this would break code on any
+            #       platforms I have no access to.
+ # * Another would be to not use the terminating $(nl) but that would
+ # require updating all the using code so it does not simply
+ # prepend this variable to its own commands.
+ # * I guess the cleanest solution would be to update Boost Jam to
+ # allow explicitly specifying \n & \r characters in its scripts
+ # instead of always relying only on the 'current OS native newline
+ # sequence'.
+ #
+ # Some code found to depend on this behaviour:
+ # * This Boost Build module.
+ # * __test__ rule.
+ # * path-variable-setting-command rule.
+ # * python.jam toolset.
+ # * xsltproc.jam toolset.
+ # * fop.jam toolset.
+ # (todo) (07.07.2008.) (Jurko)
+ #
+ # I think that this works correctly in python -- Steven Watanabe
+ return variable + "=" + value + os.linesep + "export " + variable + os.linesep
+
+def path_variable_setting_command(variable, paths):
+ """
+ Returns a command to sets a named shell path variable to the given NATIVE
+ paths on the current platform.
+ """
+ assert isinstance(variable, basestring)
+ assert is_iterable_typed(paths, basestring)
+ sep = os.path.pathsep
+ return variable_setting_command(variable, sep.join(paths))
+
+def prepend_path_variable_command(variable, paths):
+ """
+ Returns a command that prepends the given paths to the named path variable on
+ the current platform.
+ """
+ assert isinstance(variable, basestring)
+ assert is_iterable_typed(paths, basestring)
+ return path_variable_setting_command(
+ variable, paths + [expand_variable(variable)])
+
+
+def expand_variable(variable):
+ """Produce a string that expands the shell variable."""
+ if os.name == 'nt':
+ return '%{}%'.format(variable)
+ return '${%s}' % variable
+
+
+def file_creation_command():
+ """
+    Return a command which can create a file. If 'r' is the result of invocation,
+    then 'r foobar' will create foobar with unspecified content. What happens if
+    the file already exists is unspecified.
+ """
+ if os_name() == 'NT':
+ return "echo. > "
+ else:
+ return "touch "
+
+#FIXME: global variable
+__mkdir_set = set()
+__re_windows_drive = re.compile(r'^.*:\$')
+
+def mkdir(engine, target):
+ assert isinstance(target, basestring)
+ # If dir exists, do not update it. Do this even for $(DOT).
+ bjam.call('NOUPDATE', target)
+
+ global __mkdir_set
+
+ # FIXME: Where is DOT defined?
+ #if $(<) != $(DOT) && ! $($(<)-mkdir):
+ if target != '.' and target not in __mkdir_set:
+ # Cheesy gate to prevent multiple invocations on same dir.
+ __mkdir_set.add(target)
+
+ # Schedule the mkdir build action.
+ engine.set_update_action("common.MkDir", target, [])
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ engine.add_dependency('dirs', target)
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ s = os.path.dirname(target)
+ if os_name() == 'NT':
+            if __re_windows_drive.match(s):
+ s = ''
+
+ if s:
+ if s != target:
+ engine.add_dependency(target, s)
+ mkdir(engine, s)
+ else:
+ bjam.call('NOTFILE', s)
+
+__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)')
+
+def format_name(format, name, target_type, prop_set):
+ """ Given a target, as given to a custom tag rule, returns a string formatted
+ according to the passed format. Format is a list of properties that is
+ represented in the result. For each element of format the corresponding target
+ information is obtained and added to the result string. For all, but the
+ literal, the format value is taken as the as string to prepend to the output
+ to join the item to the rest of the result. If not given "-" is used as a
+ joiner.
+
+ The format options can be:
+
+ <base>[joiner]
+ :: The basename of the target name.
+ <toolset>[joiner]
+ :: The abbreviated toolset tag being used to build the target.
+ <threading>[joiner]
+ :: Indication of a multi-threaded build.
+ <runtime>[joiner]
+ :: Collective tag of the build runtime.
+ <version:/version-feature | X.Y[.Z]/>[joiner]
+ :: Short version tag taken from the given "version-feature"
+ in the build properties. Or if not present, the literal
+ value as the version number.
+ <property:/property-name/>[joiner]
+ :: Direct lookup of the given property-name value in the
+ build properties. /property-name/ is a regular expression.
+ e.g. <property:toolset-.*:flavor> will match every toolset.
+ /otherwise/
+ :: The literal value of the format argument.
+
+ For example this format:
+
+ boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+
+ Might return:
+
+ boost_thread-vc80-mt-gd-1_33.dll, or
+ boost_regex-vc80-gd-1_33.dll
+
+ The returned name also has the target type specific prefix and suffix which
+ puts it in a ready form to use as the value from a custom tag rule.
+ """
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert is_iterable_typed(format, basestring)
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
+ # assert(isinstance(prop_set, property_set.PropertySet))
+ if type.is_derived(target_type, 'LIB'):
+        result = ""
+ for f in format:
+ grist = get_grist(f)
+ if grist == '<base>':
+ result += os.path.basename(name)
+ elif grist == '<toolset>':
+ result += join_tag(get_value(f),
+ toolset_tag(name, target_type, prop_set))
+ elif grist == '<threading>':
+ result += join_tag(get_value(f),
+ threading_tag(name, target_type, prop_set))
+ elif grist == '<runtime>':
+ result += join_tag(get_value(f),
+ runtime_tag(name, target_type, prop_set))
+ elif grist.startswith('<version:'):
+                key = grist[len('<version:'):-1]
+                version = prop_set.get('<' + key + '>')
+                if version:
+                    # prop_set.get() returns a list of values; use the first one.
+                    version = version[0]
+                else:
+                    version = key
+                version = __re_version.match(version)
+                # Use group() rather than indexing for Python 2 compatibility.
+                result += join_tag(get_value(f), version.group(1) + '_' + version.group(2))
+ elif grist.startswith('<property:'):
+ key = grist[len('<property:'):-1]
+ property_re = re.compile('<(' + key + ')>')
+ p0 = None
+ for prop in prop_set.raw():
+ match = property_re.match(prop)
+ if match:
+                        p0 = match.group(1)
+ break
+ if p0:
+ p = prop_set.get('<' + p0 + '>')
+ if p:
+ assert(len(p) == 1)
+                        result += join_tag(ungrist(f), p[0])
+ else:
+ result += f
+
+ result = b2.build.virtual_target.add_prefix_and_suffix(
+ ''.join(result), target_type, prop_set)
+ return result
+
+def join_tag(joiner, tag):
+ assert isinstance(joiner, basestring)
+ assert isinstance(tag, basestring)
+ if tag:
+ if not joiner: joiner = '-'
+ return joiner + tag
+ return ''
+
+__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)")
+
+def toolset_tag(name, target_type, prop_set):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
+ tag = ''
+
+ properties = prop_set.raw()
+ tools = prop_set.get('<toolset>')
+ assert(len(tools) == 1)
+ tools = tools[0]
+ if tools.startswith('borland'): tag += 'bcb'
+ elif tools.startswith('como'): tag += 'como'
+ elif tools.startswith('cw'): tag += 'cw'
+ elif tools.startswith('darwin'): tag += 'xgcc'
+ elif tools.startswith('edg'): tag += 'edg'
+ elif tools.startswith('gcc'):
+ flavor = prop_set.get('<toolset-gcc:flavor>')
+ if flavor.find('mingw') != -1:
+ tag += 'mgw'
+ else:
+ tag += 'gcc'
+ elif tools == 'intel':
+ if prop_set.get('<toolset-intel:platform>') == ['win']:
+ tag += 'iw'
+ else:
+ tag += 'il'
+ elif tools.startswith('kcc'): tag += 'kcc'
+ elif tools.startswith('kylix'): tag += 'bck'
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ elif tools.startswith('mipspro'): tag += 'mp'
+ elif tools.startswith('msvc'): tag += 'vc'
+ elif tools.startswith('sun'): tag += 'sw'
+ elif tools.startswith('tru64cxx'): tag += 'tru'
+ elif tools.startswith('vacpp'): tag += 'xlc'
+
+ for prop in properties:
+ match = __re_toolset_version.match(prop)
+        if match:
+ version = match
+ break
+ version_string = None
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if tag == 'vc':
+ if version.group(1) == '6':
+ # Cancel minor version.
+ version_string = '6'
+ elif version.group(1) == '7' and version.group(2) == '0':
+ version_string = '7'
+
+ # On intel, version is not added, because it does not matter and it's the
+ # version of vc used as backend that matters. Ideally, we'd encode the
+ # backend version but that would break compatibility with V1.
+ elif tag == 'iw':
+ version_string = ''
+
+ # On borland, version is not added for compatibility with V1.
+ elif tag == 'bcb':
+ version_string = ''
+
+    if version_string is None:
+        version_string = version.group(1) + version.group(2)
+
+    tag += version_string
+
+ return tag
+
+
+def threading_tag(name, target_type, prop_set):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
+ tag = ''
+ properties = prop_set.raw()
+ if '<threading>multi' in properties: tag = 'mt'
+
+ return tag
+
+
+def runtime_tag(name, target_type, prop_set ):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert isinstance(name, basestring)
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, PropertySet)
+ tag = ''
+
+ properties = prop_set.raw()
+ if '<runtime-link>static' in properties: tag += 's'
+
+    # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect gcc
+ # toolset, the tag rules won't even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # which are known to care about runtime debug.
+ if '<toolset>msvc' in properties \
+ or '<stdlib>stlport' in properties \
+ or '<toolset-intel:platform>win' in properties:
+ if '<runtime-debugging>on' in properties: tag += 'g'
+
+ if '<python-debugging>on' in properties: tag += 'y'
+ if '<variant>debug' in properties: tag += 'd'
+ if '<stdlib>stlport' in properties: tag += 'p'
+ if '<stdlib-stlport:iostream>hostios' in properties: tag += 'n'
+
+ return tag
+
+
+def init(manager):
+ global __RM, __CP, __IGNORE, __LN
+ engine = manager.engine()
+
+ # register the make() and alias() rules globally
+ import b2.tools.make
+ import b2.build.alias
+
+ windows_hack = ''
+ # ported from trunk@47281
+ if os_name() == 'NT':
+ __RM = 'del /f /q'
+ __CP = 'copy /b'
+ windows_hack = '+ this-file-does-not-exist-A698EE7806899E69'
+ __IGNORE = '2>nul >nul & setlocal'
+ __LN = __CP
+ #if not __LN:
+ # __LN = CP
+ MKDIR = 'if not exist "$(<)\\" mkdir "$(<)"'
+ else:
+ __RM = 'rm -f'
+ __CP = 'cp'
+ __IGNORE = ''
+ __LN = 'ln'
+ MKDIR = 'mkdir -p "$(<)"'
+
+ engine.register_action("common.MkDir", MKDIR + __IGNORE)
+
+ engine.register_action(
+ "common.Clean", __RM + ' "$(>)"', flags=['piecemeal', 'together', 'existing'])
+ engine.register_action("common.copy", '{} "$(>)" {} "$(<)"'.format(__CP, windows_hack))
+ engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE,
+ flags=['quietly', 'updated', 'piecemeal', 'together'])
+
+ engine.register_action("common.hard-link",
+ __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep +
+ __LN + ' "$(>)" "$(<)" $(NULL_OUT)')
diff --git a/src/boost/tools/build/src/tools/como-linux.jam b/src/boost/tools/build/src/tools/como-linux.jam
new file mode 100644
index 000000000..5c554c8f8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/como-linux.jam
@@ -0,0 +1,103 @@
+# Copyright 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include a version
+# number.
+#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+import common ;
+import generators ;
+
+import unix ;
+import como ;
+
+feature.extend-subfeature toolset como : platform : linux ;
+
+toolset.inherit-generators como-linux
+ <toolset>como <toolset-como:platform>linux : unix ;
+generators.override como-linux.prebuilt : builtin.lib-generator ;
+generators.override como-linux.searched-lib-generator : searched-lib-generator ;
+toolset.inherit-flags como-linux : unix ;
+toolset.inherit-rules como-linux : gcc ;
+
+generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ
+ : <toolset>como <toolset-como:platform>linux ;
+generators.register-c-compiler como-linux.compile.c : C : OBJ
+ : <toolset>como <toolset-como:platform>linux ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters como-linux
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command como-linux : como
+ : $(command) ] ;
+
+ common.handle-options como-linux : $(condition) : $(command) : $(options) ;
+}
+
+
+flags como-linux C++FLAGS <exception-handling>off : --no_exceptions ;
+flags como-linux C++FLAGS <exception-handling>on : --exceptions ;
+
+flags como-linux CFLAGS <inlining>off : --no_inlining ;
+flags como-linux CFLAGS <inlining>on <inlining>full : --inlining ;
+
+flags como-linux CFLAGS <optimization>off : -O0 ;
+flags como-linux CFLAGS <optimization>speed : -O3 ;
+flags como-linux CFLAGS <optimization>space : -Os ;
+
+flags como-linux CFLAGS <debug-symbols>on : -g ;
+flags como-linux LINKFLAGS <debug-symbols>on : -g ;
+
+flags como-linux FINDLIBS : m ;
+flags como-linux FINDLIBS : rt ;
+
+flags como-linux CFLAGS <cflags> ;
+flags como-linux C++FLAGS <cxxflags> ;
+flags como-linux DEFINES <define> ;
+flags como-linux UNDEFS <undef> ;
+flags como-linux HDRS <include> ;
+flags como-linux STDHDRS <sysinclude> ;
+flags como-linux LINKFLAGS <linkflags> ;
+flags como-linux ARFLAGS <arflags> ;
+
+flags como-linux.link LIBRARIES <library-file> ;
+flags como-linux.link LINKPATH <library-path> ;
+flags como-linux.link FINDLIBS-ST <find-static-library> ;
+flags como-linux.link FINDLIBS-SA <find-shared-library> ;
+
+flags como-linux.link RPATH <dll-path> ;
+flags como-linux.link RPATH_LINK <xdll-path> ;
+
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
+}
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1
+}
+
+actions compile.c
+{
+ $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1
+}
+
+actions archive
+{
+ ar rcu $(<) $(>)
+}
diff --git a/src/boost/tools/build/src/tools/como-win.jam b/src/boost/tools/build/src/tools/como-win.jam
new file mode 100644
index 000000000..d21a70d6f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/como-win.jam
@@ -0,0 +1,117 @@
+# (C) Copyright David Abrahams 2001.
+# (C) Copyright MetaCommunications, Inc. 2004.
+
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include a version
+# number.
+#//<a href="http://www.comeaucomputing.com/">Comeau<br>C++</a>
+
+import common ;
+import como ;
+import feature ;
+import generators ;
+import toolset : flags ;
+
+feature.extend-subfeature toolset como : platform : win ;
+
+
+# Initializes the Comeau toolset for windows. The command is the command which
+# invokes the compiler. You should either set environment variable
+# COMO_XXX_INCLUDE where XXX is the used backend (as described in the
+# documentation), or pass that as part of command, e.g:
+#
+# using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters como-win
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command como-win : como.exe :
+ $(command) ] ;
+
+ common.handle-options como-win : $(condition) : $(command) : $(options) ;
+}
+
+generators.register-c-compiler como-win.compile.c++ : CPP : OBJ
+ : <toolset>como <toolset-como:platform>win ;
+generators.register-c-compiler como-win.compile.c : C : OBJ
+ : <toolset>como <toolset-como:platform>win ;
+
+
+generators.register-linker como-win.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>como <toolset-como:platform>win ;
+
+# Note that status of shared libraries support is not clear, so we do not define
+# the link.dll generator.
+generators.register-archiver como-win.archive
+ : OBJ : STATIC_LIB
+ : <toolset>como <toolset-como:platform>win ;
+
+
+flags como-win C++FLAGS <exception-handling>off : --no_exceptions ;
+flags como-win C++FLAGS <exception-handling>on : --exceptions ;
+
+flags como-win CFLAGS <inlining>off : --no_inlining ;
+flags como-win CFLAGS <inlining>on <inlining>full : --inlining ;
+
+
+# The following seem to be VC-specific options. At least, when I uncomment
+# them, Comeau with bcc as the backend reports that the bcc32 invocation failed.
+#
+#flags como-win CFLAGS <debug-symbols>on : /Zi ;
+#flags como-win CFLAGS <optimization>off : /Od ;
+
+
+flags como-win CFLAGS <cflags> ;
+flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header.
+flags como-win CFLAGS <threading>multi : -D_MT ; # Make sure that our config knows that threading is on.
+flags como-win C++FLAGS <cxxflags> ;
+flags como-win DEFINES <define> ;
+flags como-win UNDEFS <undef> ;
+flags como-win HDRS <include> ;
+flags como-win SYSHDRS <sysinclude> ;
+flags como-win LINKFLAGS <linkflags> ;
+flags como-win ARFLAGS <arflags> ;
+flags como-win NO_WARN <no-warn> ;
+
+#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ;
+#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ;
+
+flags como-win LIBPATH <library-path> ;
+flags como-win LIBRARIES <library-file> ;
+flags como-win FINDLIBS <find-shared-library> ;
+flags como-win FINDLIBS <find-static-library> ;
+
+nl = "
+" ;
+
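+# Note on the link and archive actions below: the @(file:E=contents)
+# construct is the usual bjam response-file idiom. It writes 'contents' to
+# 'file' and substitutes the file name on the command line, and the $(nl)
+# variable above holds a literal newline so every source ends up on its own
+# line in the generated .rsp file.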
+
+# For como, we repeat all libraries so that dependencies are always resolved.
+#
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)"
+}
+
+actions compile.c
+{
+ $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)"
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)"
+}
+
+actions archive
+{
+ $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
+ lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")"
+}
diff --git a/src/boost/tools/build/src/tools/como.jam b/src/boost/tools/build/src/tools/como.jam
new file mode 100644
index 000000000..3e6051c1f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/como.jam
@@ -0,0 +1,75 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.como]]
+= Comeau C/C++ Compiler
+
+The `como-linux` and `como-win` modules support the
+http://www.comeaucomputing.com/[Comeau C/C++ Compiler] on Linux and
+Windows, respectively.
+
+The module is initialized using the following syntax:
+
+----
+using como : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, B2 will search for a binary
+named `como` in PATH.
+
+The following options can be provided, using
+_`<option-name>option-value syntax`_:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
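+For example (an illustrative configuration only; the version and flag values
+below are placeholders, not recommended settings):
+
+----
+using como : 4.3.10 : : <cxxflags>--long_long <linkflags>-lm ;
+----
+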
+Before using the Windows version of the compiler, you need to set up the
+necessary environment variables per the compiler's documentation. In
+particular, the COMO_XXX_INCLUDE variable should be set, where XXX
+corresponds to the backend C compiler being used.
+
+|# # end::doc[]
+
+# This is a generic 'como' toolset. Depending on the current system, it
+# forwards to either the 'como-linux' or the 'como-win' module.
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : como ;
+feature.subfeature toolset como : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = LINUX
+ {
+ toolset.using como-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using como-win :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+
+ }
+}
diff --git a/src/boost/tools/build/src/tools/convert.jam b/src/boost/tools/build/src/tools/convert.jam
new file mode 100644
index 000000000..ac1d70101
--- /dev/null
+++ b/src/boost/tools/build/src/tools/convert.jam
@@ -0,0 +1,62 @@
+# Copyright (c) 2009 Vladimir Prus
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements the 'convert' target, which takes a bunch of sources and
+# tries to convert each one to the specified type.
+#
+# For example:
+#
+# convert objects obj : a.cpp b.cpp ;
+#
+
+import targets ;
+import generators ;
+import project ;
+import type ;
+import "class" : new ;
+
+class convert-target-class : typed-target
+{
+ rule __init__ ( name : project : type
+ : sources * : requirements * : default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type)
+ : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) : $(self.type)
+ : [ property-set.create [ $(property-set).raw ] # [ feature.expand
+ <main-target-type>$(self.type) ]
+ # ]
+ : $(source-targets) ] ;
+ if ! $(r)
+ {
+ errors.error "unable to construct" [ full-name ] ;
+ }
+
+ return $(r) ;
+ }
+
+}
+
+rule convert ( name type : sources * : requirements * : default-build *
+ : usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ # This is a circular module dependency, so it must be imported here
+ modules.import targets ;
+ targets.main-target-alternative
+ [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ]
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
+}
+IMPORT $(__name__) : convert : : convert ;
diff --git a/src/boost/tools/build/src/tools/cray.jam b/src/boost/tools/build/src/tools/cray.jam
new file mode 100644
index 000000000..107b3dbbc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/cray.jam
@@ -0,0 +1,1169 @@
+# Copyright 2001 David Abrahams
+# Copyright 2004, 2005 Markus Schoepflin
+# Copyright 2011 John Maddock
+# Copyright 2013, 2017-2018 Cray, Inc.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# README.md
+#
+# This toolset is for the Cray Compiling Environment (CCE).
+#
+# The assembler, linker, and archiver are the same as those used in the
+# `gcc` toolset. Therefore, there is some duplication of code between the
+# `gcc` toolset and this toolset.
+#
+# # CCE Introduction
+#
+# Users want to compile and run massively parallel applications on Cray
+# supercomputers. Typically, the user compiles code on a login node of the
+# supercomputer and then runs the compiled program on multiple compute
+# nodes using a batch control system. This means the user is almost always
+# cross compiling.
+#
+# But, they're not just cross compiling. In order for a program to run on
+# a Cray supercomputer it has to link to particular libraries. There are
+# three general categories of libraries that user programs must link to:
+#
+# - Network libraries: Enable communication between processes on different
+# compute nodes. Depends on the network hardware in the supercomputer.
+# - Compute node libraries: Depends on the hardware on the targeted
+# compute nodes.
+# - Language extension libraries: Depends on the language extensions used
+# by the program (e.g. OpenMP, Unified Parallel C, et cetera).
+#
+# Instead of forcing users to invoke the compiler with a bunch of
+# libraries listed on the command line, CCE decides what libraries to link
+# based on the environment. This is primarily controlled by loading and
+# unloading modules (with the `module` command) to create a cross
+# compiling and linking environment suitable for the particular hardware
+# on the targeted Cray supercomputer.
+#
+# CCE compilers come in two parts: the compiler itself, and the compiler
+# driver. Invoking a compiler directly is not supported. We must always
+# invoke the compiler through a compiler driver: either `cc` for C code,
+# `CC` for C++ code, or `ftn` for Fortran code. The compiler driver is
+# responsible for gathering information from the environment and invoking
+# the selected compiler with the appropriate command line options.
+#
+# For more information on CCE, search for Cray publication S-2529 on the
+# Cray publications website (https://pubs.cray.com).
+
+import "class" : new ;
+import common ;
+import feature ;
+import gcc ;
+import generators ;
+import os ;
+import regex ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+
+###
+### 'init'
+###
+
+rule init ( : : options * : requirements * )
+{
+
+ # User cannot specify a 'version' in their 'using' statement. Compiler
+ # version is always controlled by loading and unloading modules in the
+ # user's environment.
+
+ # User cannot specify a 'command' in their 'using' statement. Using a
+ # single 'command' argument only makes sense when a single executable can
+ # compile different types of code (e.g. gcc will compile C or C++ based on
+ # the file name extensions). In CCE, you have to invoke one of the three
+ # compiler drivers: cc for C code, CC for C++ code, or ftn for Fortran
+ # code. Each compiler driver compiles a single type of source code. It is
+ # possible to let the user pass in three 'command' arguments, one for each
+  # driver, but that seems like more effort than it is worth.
+
+ local toolset = cray ;
+
+ check-prgenv-module $(toolset) ;
+
+ local command-c = [ validate-command $(toolset) cc ] ;
+ local command-cxx = [ validate-command $(toolset) CC ] ;
+ local command-fortran = [ validate-command $(toolset) ftn ] ;
+
+ # Archive builder.
+ local command-ar = [ validate-command $(toolset) ar ] ;
+
+ # Archive indexer.
+ local command-ranlib = [ validate-command $(toolset) ranlib ] ;
+
+ # The 'command' variables always have one element, but they may contain
+ # spaces (e.g. if 'command' is an absolute path and some path components
+ # have spaces).
+
+ local version = ;
+ local developer-build = ;
+ {
+ local version-string = [ SHELL "\"$(command-cxx)\" -VV 2>&1" ] ;
+ local version-components = [ MATCH "Version ([0-9]+).([0-9]+).([a-zA-Z0-9]+)" : $(version-string) ] ;
+ if ! [ MATCH "([0-9]+)" : $(version-components[3]) ]
+ {
+
+ # The last component of the version is not a series of digits. This means
+ # we're probably using a developer build of CCE (i.e. a compiler built by
+ # a Cray employee). Developer builds report versions like '8.7.x'.
+
+ developer-build = true ;
+
+ # We want to treat developer builds as though they are the highest
+ # possible patch version of the release. Effectively, we want to turn
+ # '8.7.x' into '8.7.99'.
+
+ version-components = $(version-components[1]) $(version-components[2]) 99 ;
+
+ }
+
+ version = $(version-components:J=.) ;
+ }
+
+ local build = ;
+ if $(developer-build)
+ {
+
+ # If this is a developer build, we want to add the build subfeature to the
+ # compiler.
+
+ local version-string = [ SHELL "\"$(command-cxx)\" -VV 2>&1" ] ;
+ build = [ MATCH "[(][0-9]+_([0-9a-fA-F]+)[)]" : $(version-string) ] ;
+
+ # Truncate build hash to 7 characters
+ build = [ MATCH "(.......)................................." : $(build) ] ;
+ }
+
+ # IMPORTANT: 'set-cray-feature-defaults' causes the B2 tests to
+ # fail. I tried using an 'init' argument called 'ignore-cray-defaults' and
+ # setting up 'test-config.jam' to pass 'ignore-cray-defaults' during
+ # testing, but I couldn't get the test to read my 'test-config.jam' file
+ # when running tests individually. So, I just comment out
+ # 'set-cray-feature-defaults' during testing.
+
+ set-cray-feature-defaults ;
+
+ {
+
+ # 'check-init-parameters' ensures that each time a toolset is initialized,
+ # it is initialized with a unique configuration. The return value is a
+ # B2 property condition which uniquely identifies this
+ # configured instance of this toolset. Typically, toolsets use the
+ # returned condition as the conditional in a 'toolset.flags' call to set
+ # flags specific to this configuration of this toolset.
+
+ local identifying-condition = [ common.check-init-parameters $(toolset) $(requirements) : version $(version) : build $(build) ] ;
+
+ # 'handle-options' uses 'toolset.flags' to set 'CONFIG_COMMAND' variables
+ # on targets when this toolset is used. The 'CONFIG_COMMAND' variables
+ # specify the commands to call for compiling. This would be more relevant
+ # if our 'init' rule had arguments that might affect the command that is
+ # invoked (e.g. in many toolsets 'version' affects the name of the
+ # compiler command). For now, we'll do this because it is a common pattern
+ # in toolsets, and we may need it in the future.
+
+ handle-options
+ $(toolset)
+ : $(identifying-condition)
+ : $(command-c) $(command-cxx) $(command-fortran) $(command-ar) $(command-ranlib)
+ : $(options) ;
+
+ # Add compiler version to 'VERSION' variable on all targets. 'VERSION' is
+ # not used in any actions, but it is used in some updating rule
+ # procedures.
+
+ toolset.flags $(toolset) VERSION $(identifying-condition) : [ numeric-version $(version) ] ;
+ }
+}
+
+rule check-prgenv-module ( toolset )
+{
+
+ local compiler = [ os.environ PE_ENV ] ;
+ compiler = $(compiler:L) ;
+
+ # We could check that environment variable CRAY_PRGENV$PE_ENV is set to
+ # "loaded", but this seems unnecessary and redundant.
+
+ local default-compiler = cray ;
+
+ if ! $(compiler)
+ {
+ log-warning $(toolset) : no PrgEnv module loaded
+ : falling back to PrgEnv-$(default-compiler)
+ : please load the PrgEnv-$(default-compiler) module next time ;
+ compiler = $(default-compiler) ;
+ }
+
+ if $(compiler) != $(default-compiler)
+ {
+ log-error $(toolset) : compiler '$(compiler)' not supported
+ : toolset initialization failed
+ : please load the PrgEnv-$(default-compiler) module next time ;
+ # Do not abort, as suggested by:
+ # http://www.boost.org/build/doc/html/bbv2/extending/toolset_modules.html.
+ }
+}
+
+rule set-cray-feature-defaults ( )
+{
+
+ # CCE users expect that using the 'cray' toolset without any explicit
+ # options will give them the same result as invoking CCE without any
+ # explicit options. So, we set feature defaults to match the default CCE
+ # options.
+ #
+ # The decision to turn off <debug-symbols> by default was a tough one.
+ # When CCE produces debugging symbols, it disables all inlining. This
+ # causes a decrease in performance, which the user probably was not
+ # expecting since they thought they were compiling with default CCE
+ # options.
+
+ feature.set-default cxxstd-dialect : gnu ;
+ feature.set-default debug-symbols : off ;
+ feature.set-default optimization : default ;
+ feature.set-default inlining : default ;
+ feature.set-default vectorize : default ;
+}
+
+###
+### Command line options
+###
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+
+ # Check if '--debug-configuration' was passed on the command line. This is
+ # inspired by 'common.jam' and other modules.
+
+ # Variable names with a '.' prefix are intended to be globals.
+ #
+ # Refer to:
+ # https://github.com/boostorg/build/blob/develop/CONTRIBUTING.rst
+
+ # The Jam language uses dynamic scoping. Setting '.debug-configuration' in
+ # this module influences the behavior of methods called from this module.
+
+ .debug-configuration = true ;
+}
+
+if [ MATCH (--debug-driver) : [ modules.peek : ARGV ] ]
+{
+
+ .debug-driver = true ;
+}
+
+###
+### Features
+###
+
+feature.extend toolset : cray ;
+
+# Typically, extending '<toolset>' with the value 'cray' would cause
+# 'cray' to be the default '<toolset>' as long as it is the first value
+# added to '<toolset>'. However, we already imported the 'gcc' toolset, so
+# 'cray' is not the first value added to '<toolset>'. Therefore, we need
+# to call 'feature.set-default'.
+#
+# If the build request specifies a '<toolset>' (e.g. on the command line),
+# then the '<toolset>' feature default is ignored. However, if the 'cray'
+# toolset is selected in 'user-config.jam' (e.g. with 'using cray ;'),
+# then the build request will use the '<toolset>' feature default.
+# Therefore, we must use 'feature.set-default' so that selecting the
+# 'cray' toolset in 'user-config.jam' works correctly.
+
+feature.set-default toolset : cray ;
+
+# CCE is different from other compilers in that it optimizes, inlines, and
+# vectorizes by default. B2 assumes that 'off' is the default for
+# all compilers. However, for CCE, 'off' and 'default' have different
+# meanings. For CCE, 'off' requires an additional command line argument to
+# turn the feature off. 'default' will not include an additional command
+# line argument, but will do optimization, inlining, and vectorizing at
+# whatever default level CCE uses.
+
+feature.extend optimization : default ;
+feature.extend inlining : default ;
+feature.extend vectorize : default ;
+
+###
+### Flags
+###
+
+# Updating rules are named in a dotted hierarchy. For example:
+#
+# compile
+# \_ compile.c++
+# \_ compile.c++.preprocess
+# \_ compile.c
+# \_ compile.c.preprocess
+#
+# This naming convention allows us to apply flags to multiple children in
+# the hierarchy. For example, if we apply a flag to 'compile.c++', that
+# flag is also applied to its child 'compile.c++.preprocess'. If we apply
+# a flag to 'compile', then that flag is applied to all children under
+# 'compile'.
+
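+# For example, the '-h pic' option set on 'cray.compile' just below also
+# applies to 'cray.compile.c', 'cray.compile.c++', and their '.preprocess'
+# children.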
+toolset.flags cray.compile OPTIONS <link>shared : -h pic ;
+
+toolset.flags cray.compile OPTIONS <optimization>default ; # Blank.
+toolset.flags cray.compile OPTIONS <optimization>off : -O 0 ;
+toolset.flags cray.compile OPTIONS <optimization>speed : -O 3 ;
+toolset.flags cray.compile OPTIONS <optimization>space ; # Blank. CCE does not optimize for space.
+
+toolset.flags cray.compile OPTIONS <inlining>default ; # Blank.
+toolset.flags cray.compile OPTIONS <inlining>off : -h ipa0 ;
+toolset.flags cray.compile OPTIONS <inlining>on ; # Blank. CCE does inlining by default.
+toolset.flags cray.compile OPTIONS <inlining>full : -h ipa5 ;
+
+toolset.flags cray.compile OPTIONS <vectorize>default ; # Blank.
+toolset.flags cray.compile OPTIONS <vectorize>off : -h vector0 ;
+toolset.flags cray.compile OPTIONS <vectorize>on ; # Blank. CCE vectorizes by default.
+toolset.flags cray.compile OPTIONS <vectorize>full : -h vector3 ;
+
+toolset.flags cray.link FINDLIBS-SA <threading>multi : rt ; # Not sure if this is correct.
+
+toolset.flags cray.link OPTIONS <link>shared : -h pic ;
+
+{
+ #
+ # Link flags copied from 'gcc.jam'.
+ #
+
+ local toolset = cray ;
+ local generic-os = [ set.difference [ feature.values <target-os> ] : aix darwin vxworks solaris osf hpux ] ;
+    # Strip the binary when no debugging is needed. We use the --strip-all flag
+    # as opposed to -s since icc (Intel's compiler) is generally
+ # option-compatible with and inherits from the gcc toolset, but does not
+ # support -s.
+ toolset.flags $(toolset).link OPTIONS <target-os>$(generic-os)/<strip>on : -Wl,--strip-all ;
+ toolset.flags $(toolset).link RPATH <target-os>$(generic-os) : <dll-path> ;
+ toolset.flags $(toolset).link RPATH_OPTION <target-os>$(generic-os) : -rpath ;
+ toolset.flags $(toolset).link RPATH_LINK <target-os>$(generic-os) : <xdll-path> ;
+ toolset.flags $(toolset).link START-GROUP <target-os>$(generic-os) : -Wl,--start-group ;
+ toolset.flags $(toolset).link END-GROUP <target-os>$(generic-os) : -Wl,--end-group ;
+
+ # gnu ld has the ability to change the search behaviour for libraries
+ # referenced by the -l switch. These modifiers are -Bstatic and
+ # -Bdynamic and change search for -l switches that follow them. The
+ # following list shows the tried variants. Search stops at the first
+ # variant that has a match.
+ #
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw, cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw, cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+ # On *nix mixing shared libs with static runtime is not a good idea.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX <target-os>$(generic-os)/<runtime-link>shared : -Wl,-Bstatic ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX <target-os>$(generic-os)/<runtime-link>shared : -Wl,-Bdynamic ;
+
+ toolset.flags $(toolset).link HAVE_SONAME <target-os>$(generic-os) : "" ;
+ toolset.flags $(toolset).link SONAME_OPTION <target-os>$(generic-os) : -h ;
+
+ # See note [1]
+ toolset.flags $(toolset).link OPTIONS <target-os>$(generic-os)/<runtime-link>static : -static ;
+
+ # [1]
+ # For <runtime-link>static we made sure there are no dynamic libraries in the
+ # link. On HP-UX not all system libraries exist as archived libraries (for
+ # example, there is no libunwind.a), so, on this platform, the -static option
+ # cannot be specified.
+}
+
+# Flags for 'free' features ('free' features are features that do not have
+# a pre-defined set of values).
+
+toolset.flags cray.compile USER_OPTIONS <cflags> ;
+toolset.flags cray.compile.c++ USER_OPTIONS <cxxflags> ;
+toolset.flags cray.compile.asm USER_OPTIONS <asmflags> ;
+toolset.flags cray.compile DEFINES <define> ;
+toolset.flags cray.compile INCLUDES <include> ;
+
+toolset.flags cray.link USER_OPTIONS <linkflags> ;
+toolset.flags cray.link LINKPATH <library-path> ;
+toolset.flags cray.link FINDLIBS-ST <find-static-library> ;
+toolset.flags cray.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags cray.link LIBRARIES <library-file> ;
+
+toolset.flags cray.archive AROPTIONS <archiveflags> ;
+
+###
+### Actions
+###
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND_CXX)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -c -o "$(<)" "$(>)" $(DRIVER_OPTIONS)
+}
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND_C)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -c -o "$(<)" "$(>)" $(DRIVER_OPTIONS)
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND_CXX)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -c -o "$(<)" "$(>)" $(DRIVER_OPTIONS)
+}
+
+actions compile.c++.preprocess
+{
+ "$(CONFIG_COMMAND_CXX)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -E "$(>)" >"$(<)" $(DRIVER_OPTIONS)
+}
+
+actions compile.c.preprocess
+{
+ "$(CONFIG_COMMAND_C)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -E "$(>)" >"$(<)" $(DRIVER_OPTIONS)
+}
+
+# We don't want to invoke 'ld' (the linker) directly for 'link', since we
+# want to give the CCE compiler driver a chance to modify the command line
+# it passes to 'ld'.
+#
+# The question is: which CCE compiler driver do we use? The driver for C,
+# the driver for C++, or the driver for Fortran?
+#
+# Here are things that definitely do not work:
+#
+# - Using the driver for C doesn't work when linking C++ programs, because
+# things like 'std::cout' are not available in C, they are only
+# available in C++.
+#
+# We use the driver for C++ below since we are primarily interested in
+# compiling Boost, which is written in C++. Also, the C++ driver will
+# properly link C code as well.
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND_CXX)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) $(DRIVER_OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND_CXX)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) $(DRIVER_OPTIONS)
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+}
+
+###
+### Updating rules
+###
+
+# These are the actual updating rules that apply the associated actions
+# when called.
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ compile-c++-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ compile-c-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.asm ( targets * : sources * : properties * )
+{
+ compile-asm-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.c++.preprocess ( targets * : sources * : properties * )
+{
+ compile-c++-preprocess-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.c.preprocess ( targets * : sources * : properties * )
+{
+ compile-c-preprocess-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ link-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ link-dll-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule archive ( targets * : sources * : properties * )
+{
+ archive-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+# These are the procedure portions of the updating rules. Calling the
+# procedure portion may modify the targets, but it will not apply actions
+# to the targets. This allows us to reuse the procedure portions of the
+# updating rules without applying the same actions to targets.
+
+rule compile-c++-procedure ( targets * : sources * : properties * )
+{
+ set-cxxstd-procedure $(targets) : $(sources) : $(properties) ;
+ set-cxxstd-dialect-procedure $(targets) : $(sources) : $(properties) ;
+ set-debug-symbols-procedure $(targets) : $(sources) : $(properties) ;
+ add-space-procedure $(targets) : $(sources) : $(properties) ;
+ debug-driver-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile-c-procedure ( targets * : sources * : properties * )
+{
+ set-debug-symbols-procedure $(targets) : $(sources) : $(properties) ;
+ add-space-procedure $(targets) : $(sources) : $(properties) ;
+ debug-driver-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile-asm-procedure ( targets * : sources * : properties * )
+{
+ compile-c++-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile-c++-preprocess-procedure ( targets * : sources * : properties * )
+{
+ compile-c++-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile-c-preprocess-procedure ( targets * : sources * : properties * )
+{
+ compile-c-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+rule link-procedure ( targets * : sources * : properties * )
+{
+ set-cxxstd-procedure $(targets) : $(sources) : $(properties) ;
+ set-cxxstd-dialect-procedure $(targets) : $(sources) : $(properties) ;
+ gcc-link-procedure $(targets) : $(sources) : $(properties) ;
+ debug-driver-procedure $(targets) : $(sources) : $(properties) ;
+
+ # CCE driver command line flags for linking executables.
+
+ local link = [ feature.get-values <link> : $(properties) ] ;
+ switch $(link)
+ {
+ case shared :
+ DRIVER_OPTIONS on $(<) += -dynamic ;
+ case static :
+ DRIVER_OPTIONS on $(<) += -static ;
+ }
+
+ # The link command line from the 'gcc' toolset includes:
+ #
+ # '$(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA)'
+ #
+ # The 'FINDLIBS-ST' and 'FINDLIBS-SA' variables are the libraries
+ # specified by the '<find-static-library>' and '<find-shared-library>'
+ # features, respectively. The 'FINDLIBS-ST-PFX' is typically
+ # '-Wl,-Bstatic'. The 'FINDLIBS-SA-PFX' is typically '-Wl,-Bdynamic'.
+ #
+ # The '-Bstatic' and '-Bdynamic' flags passed to the linker tell the
+ # linker how to link all of the following libraries. The flag is in effect
+ # until it is overridden by another '-B' flag on the command line.
+ #
+ # So, it makes sense that the 'gcc' toolset includes these flags, so the
+ # '<find-static-library>' and '<find-shared-library>' libraries are linked
+ # properly.
+ #
+ # The last flag that is set ('-Bdynamic') affects the link type for any
+ # other libraries on the command line. In the 'gcc' toolset, this is okay,
+ # since there are no other libraries specified on the command line after
+ # these flags. However, when the CCE compiler driver invokes the linker,
+ # it adds additional libraries to the command line based on what modules
+ # are loaded in the environment. So, the last '-B' flag on the CCE driver
+ # command line affects the link type for all libraries that CCE
+ # automatically appends.
+ #
+ # Therefore, we have to set the final '-B' flag to the link type we want
+ # the CCE libraries to be linked with. Appending to the 'OPTIONS' variable
+ # seems reasonable.
+
+ local link = [ feature.get-values <link> : $(properties) ] ;
+ switch $(link)
+ {
+ case shared :
+ OPTIONS on $(<) += -Wl,-Bdynamic ;
+ case static :
+ OPTIONS on $(<) += -Wl,-Bstatic ;
+ }
+}
+
+rule link-dll-procedure ( targets * : sources * : properties * )
+{
+ set-cxxstd-procedure $(targets) : $(sources) : $(properties) ;
+ set-cxxstd-dialect-procedure $(targets) : $(sources) : $(properties) ;
+ gcc-link-dll-procedure $(targets) : $(sources) : $(properties) ;
+ debug-driver-procedure $(targets) : $(sources) : $(properties) ;
+
+ # CCE driver command line flags for linking shared libraries.
+
+ DRIVER_OPTIONS on $(<) += -shared ;
+}
+
+rule archive-procedure ( targets * : sources * : properties * )
+{
+ gcc-archive-procedure $(targets) : $(sources) : $(properties) ;
+ debug-driver-procedure $(targets) : $(sources) : $(properties) ;
+}
+
+# Utility procedure rules intended to be called from updating rules.
+
+rule gcc-link-procedure ( targets * : sources * : properties * )
+{
+
+ # Copied from 'gcc.jam'.
+
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only gcc links, it might be a
+ # good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ gcc.quote-rpath $(targets) ;
+}
+
+rule gcc-link-dll-procedure ( targets * : sources * : properties * )
+{
+
+ # Copied from 'gcc.jam'.
+
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ gcc.quote-rpath $(targets) ;
+}
+
+rule gcc-archive-procedure ( targets * : sources * : properties * )
+{
+
+ # Copied from 'gcc.jam'.
+
+ # Always remove archive and start again. Here is the rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+rule add-space-procedure ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+rule set-cxxstd-procedure ( targets * : sources * : properties * )
+{
+
+ # Translate '<cxxstd>' into a standard recognized by CCE.
+
+ local version = [ on $(targets[1]) return $(VERSION) ] ;
+
+ local cxxstd = [ feature.get-values cxxstd : $(properties) ] ;
+ local cray-cxxstd = ;
+
+  local unsupported-values = 2a ; # '2a' is the draft name for what became C++20.
+ if $(cxxstd) && $(cxxstd) in $(unsupported-values)
+ {
+
+ log-warning cray : ignoring unsupported property '<cxxstd>$(cxxstd)' ;
+
+ # Set to default value, or blank if default is unsupported.
+
+ local default-value = [ get-default-feature-value cxxstd ] ;
+ if $(default-value) in $(unsupported-values)
+ {
+ cxxstd = ;
+ }
+ else
+ {
+ cxxstd = $(default-value) ;
+ }
+ }
+
+ switch $(cxxstd)
+ {
+ case 98 : cray-cxxstd = 03 ;
+ case 03 : cray-cxxstd = 03 ;
+ case 0x : cray-cxxstd = 11 ;
+ case 11 : cray-cxxstd = 11 ;
+ case 1y : cray-cxxstd = 14 ;
+ case 14 : cray-cxxstd = 14 ;
+ case 1z : cray-cxxstd = 17 ;
+ case 17 : cray-cxxstd = 17 ;
+ case latest :
+ cray-cxxstd = [ latest-cray-cxxstd $(version) ] ;
+ }
+
+ # If the 'cray-cxxstd' is not supported by this compiler version, we just
+ # let the command line fail.
+
+ # If 'cxxstd' was blank, then 'cray-cxxstd' is also blank, and nothing is
+  # added to the command line. The compiler just uses its default C++
+ # standard.
+
+ # Apply final options.
+ local space = " " ;
+ OPTIONS on $(targets) += -h$(space)std=c++$(cray-cxxstd) ;
+}
+
+rule set-cxxstd-dialect-procedure ( targets * : sources * : properties * )
+{
+
+ # Translate '<cxxstd-dialect>' into '-h [no]conform' and '-h [no]gnu'
+ # options.
+
+ local version = [ on $(targets[1]) return $(VERSION) ] ;
+
+ local cxxstd-dialect = [ feature.get-values cxxstd-dialect : $(properties) ] ;
+ local cray-conform = ;
+ local cray-gnu = ;
+
+ local unsupported-values = ms ;
+ if $(cxxstd-dialect) && $(cxxstd-dialect) in $(unsupported-values)
+ {
+
+ log-warning cray : ignoring unsupported property '<cxxstd-dialect>$(cxxstd-dialect)' ;
+
+ # Set to default value, or blank if default is unsupported.
+
+ local default-value = [ get-default-feature-value cxxstd-dialect ] ;
+ if $(default-value) in $(unsupported-values)
+ {
+ cxxstd-dialect = ;
+ }
+ else
+ {
+ cxxstd-dialect = $(default-value) ;
+ }
+ }
+
+ switch $(cxxstd-dialect)
+ {
+ case gnu : cray-conform = noconform ;
+ cray-gnu = gnu ;
+ case iso : cray-conform = conform ;
+ cray-gnu = nognu ;
+ }
+
+ if [ has-conform-option $(version) ] = false
+ {
+ # The '-h [no]conform' option is ignored in recent versions of CCE.
+ cray-conform = ;
+ }
+
+ # If 'cxxstd-dialect' was blank, then 'cray-conform' and 'cray-gnu' are
+ # also blank, and nothing is added to the command line. The compiler just
+  # uses its default C++ dialect.
+
+ # Apply final options.
+ local space = " " ;
+ OPTIONS on $(targets) += -h$(space)$(cray-conform)
+ -h$(space)$(cray-gnu) ;
+}
+
+rule set-debug-symbols-procedure ( targets * : sources * : properties * )
+{
+
+ local debug-symbols = [ feature.get-values <debug-symbols> : $(properties) ] ;
+ if $(debug-symbols) = "on"
+ {
+ local optimization = [ feature.get-values <optimization> : $(properties) ] ;
+ local debug-option = ;
+ if $(optimization) = off
+ {
+ debug-option = 0 ;
+ }
+ else
+ {
+ debug-option = 3 ;
+ }
+
+ local space = " " ;
+ OPTIONS on $(targets) += -G$(space)$(debug-option) ;
+ }
+}
+
+rule debug-driver-procedure ( targets * : sources * : properties * )
+{
+ if $(.debug-driver)
+ {
+
+ # Passing '-vv' to the CCE driver causes it to output the command lines
+ # for the underlying tools that it invokes.
+
+ DRIVER_OPTIONS on $(<) += -vv ;
+ }
+}
+
+###
+### Generators
+###
+
+class cray-linking-generator : gcc-linking-generator
+{
+ rule action-class ( )
+ {
+ return action ;
+ }
+}
+
+# We reuse some generator classes from the 'unix' toolset. Specifically,
+# we are reusing generators for the following updating actions:
+#
+# - 'archive'
+# - 'searched-lib-generator'
+# - 'prebuilt'
+#
+# Inheriting these generators is like using the same generator classes as
+# the 'unix' toolset, but pointing them to the 'cray' updating rules.
+
+toolset.inherit-generators cray : unix : unix.link unix.link.dll ;
+
+# The 'C-compiling-generator' class adds source paths to the '<include>'
+# property.
+
+generators.register [ new C-compiling-generator
+ cray.compile.c++
+ : CPP
+ : OBJ
+ : <toolset>cray ] ;
+generators.register [ new C-compiling-generator
+ cray.compile.c
+ : C
+ : OBJ
+ : <toolset>cray ] ;
+generators.register [ new C-compiling-generator
+ cray.compile.asm
+ : ASM
+ : OBJ
+ : <toolset>cray ] ;
+generators.register [ new C-compiling-generator
+ cray.compile.c++.preprocess
+ : CPP
+ : PREPROCESSED_CPP
+ : <toolset>cray ] ;
+generators.register [ new C-compiling-generator
+ cray.compile.c.preprocess
+ : C
+ : PREPROCESSED_C
+ : <toolset>cray ] ;
+generators.register [ new cray-linking-generator
+ cray.link
+ : LIB OBJ
+ : EXE
+ : <toolset>cray ] ;
+generators.register [ new cray-linking-generator
+ cray.link.dll
+ : LIB OBJ
+ : SHARED_LIB
+ : <toolset>cray ] ;
+
+# Tell B2 to prefer 'cray' generators over other valid
+# generators. This is used to resolve a tie when B2 finds that
+# there is more than one viable generator for a particular build request.
+
+generators.override cray.prebuilt : builtin.prebuilt ;
+generators.override cray.searched-lib-generator : searched-lib-generator ;
+
+type.set-generated-target-suffix PREPROCESSED_CPP : <toolset>cray : i ;
+type.set-generated-target-suffix PREPROCESSED_C : <toolset>cray : i ;
+
+###
+### Utility rules
+###
+
+rule validate-command ( toolset command )
+{
+ local found-command = [ common.find-tool $(command) ] ;
+ if $(found-command) && $(.debug-configuration)
+ {
+ log-notice $(toolset) : command '$(command)' found at [ common.get-absolute-tool-path $(found-command) ] ;
+ }
+ if ! $(found-command)
+ {
+ log-warning $(toolset) : command '$(command)' not found ;
+ found-command = $(command) ;
+ }
+ return $(found-command) ;
+}
+
+local rule options-helper ( rule-or-module variable-name condition * : feature options * )
+{
+ toolset.flags $(rule-or-module) $(variable-name) $(condition) : [ feature.get-values $(feature) : $(options) ] : unchecked ;
+}
+
+rule handle-options (
+ toolset
+ : toolset-condition *
+ : command-c command-cxx command-fortran command-ar command-ranlib
+ : options *
+)
+{
+
+ # Configures some common 'toolset.flags' options. In particular, this rule
+ # sets the compiler command name to invoke. Inspired by
+ # 'common.handle-options'.
+
+ # We cannot use a single 'CONFIG_COMMAND' variable because each CCE driver
+ # can only handle a single source code language. Therefore, we have to
+ # give actions a way to specify which driver they intend to use, and we
+ # accomplish this by providing multiple 'CONFIG_COMMAND' variables to the
+ # action. We cannot set the language through a flag in the 'OPTIONS'
+ # variable the way the 'gcc' toolset does.
+
+ toolset.flags $(toolset) CONFIG_COMMAND_C $(toolset-condition) : $(command-c) : unchecked ;
+ toolset.flags $(toolset) CONFIG_COMMAND_CXX $(toolset-condition) : $(command-cxx) : unchecked ;
+ toolset.flags $(toolset) CONFIG_COMMAND_FORTRAN $(toolset-condition) : $(command-fortran) : unchecked ;
+ toolset.flags $(toolset).archive .AR $(toolset-condition) : $(command-ar) : unchecked ;
+ toolset.flags $(toolset).archive .RANLIB $(toolset-condition) : $(command-ranlib) : unchecked ;
+
+ # The following flags are applied to all targets built by this
+ # configuration of this toolset. This particular configuration of this
+ # toolset is identified by '$(toolset-condition)'. This allows the user to
+ # specify 'options' in their 'using' statement, and those options will be
+ # applied to all targets built by this configuration of this toolset.
+
+ options-helper $(toolset).compile USER_OPTIONS $(toolset-condition) : <compileflags> $(options) ;
+ options-helper $(toolset).compile USER_OPTIONS $(toolset-condition) : <cflags> $(options) ;
+ options-helper $(toolset).compile.c++ USER_OPTIONS $(toolset-condition) : <cxxflags> $(options) ;
+ options-helper $(toolset).compile.fortran USER_OPTIONS $(toolset-condition) : <fflags> $(options) ;
+ options-helper $(toolset).compile.asm USER_OPTIONS $(toolset-condition) : <asmflags> $(options) ;
+ options-helper $(toolset).compile DEFINES $(toolset-condition) : <define> $(options) ;
+ options-helper $(toolset).compile INCLUDES $(toolset-condition) : <include> $(options) ;
+
+ options-helper $(toolset).link USER_OPTIONS $(toolset-condition) : <linkflags> $(options) ;
+ options-helper $(toolset).link LINKPATH $(toolset-condition) : <library-path> $(options) ;
+ options-helper $(toolset).link FINDLIBS-ST $(toolset-condition) : <find-static-library> $(options) ;
+ options-helper $(toolset).link FINDLIBS-SA $(toolset-condition) : <find-shared-library> $(options) ;
+ options-helper $(toolset).link LIBRARIES $(toolset-condition) : <library-file> $(options) ;
+
+ options-helper $(toolset).archive AROPTIONS $(toolset-condition) : <archiveflags> $(options) ;
+}
+
+rule latest-cray-cxxstd ( compiler-version )
+{
+ # Select latest 'cray-cxxstd' based on compiler version.
+
+ local cray-cxxstd = 03 ;
+
+ if $(compiler-version) >= [ numeric-version 8.6 ]
+ {
+ cray-cxxstd = 14 ;
+ }
+
+ return $(cray-cxxstd) ;
+}
+
+rule has-conform-option ( compiler-version )
+{
+
+ # Returns 'true' or 'false'. Returns empty list if the 'compiler-version'
+ # is not supported.
+
+ local result = true ;
+
+ if $(compiler-version) >= [ numeric-version 8.6 ]
+ {
+ result = false ;
+ }
+
+ return $(result) ;
+}
+
+local rule justify-right ( pad-char elements * )
+{
+
+  # Returns a list of 'elements' where each element is exactly 2 characters
+  # long. A one-character element is left-padded with 'pad-char'; any other
+  # element (empty, or longer than two characters) is replaced by two pad
+  # characters.
+
+ local result = ;
+ local p = $(pad-char) ;
+ for local e in $(elements)
+ {
+ switch $(e)
+ {
+ case ?? : result += $(e) ;
+ case ? : result += $(p)$(e) ;
+ case * : result += $(p)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+local rule list-justify-left ( pad-elem elements * )
+{
+
+ # Add 'pad-elem' to 'elements' list until it has 4 elements. If 'elements'
+ # list already had 4 or more elements, returns the first 4 elements in
+ # 'elements' list.
+
+ local tally = x ;
+ local result = ;
+ for local e in $(elements)
+ {
+ if $(tally) != xxxxx
+ {
+ result += $(e) ;
+ tally = $(tally)x ;
+ }
+ }
+
+ while $(tally) != xxxxx
+ {
+ result += $(pad-elem) ;
+ tally = $(tally)x ;
+ }
+
+ return $(result) ;
+}
+
+local rule numeric-version ( dotted-version )
+{
+
+ # Returns a numeric representation of version that can be compared
+ # directly with comparison operators.
+
+ local result = [ regex.split $(dotted-version) "[.]" ] ;
+ result = [ list-justify-left 0 $(result) ] ;
+ result = [ justify-right 0 $(result) ] ;
+ result = $(result:J="") ;
+
+ return $(result) ;
+}
+
+local rule get-default-feature-value ( feature-name )
+{
+ local default-property = [ feature.defaults $(feature-name) ] ;
+ local default-value = [ feature.get-values $(feature-name) : $(default-property) ] ;
+ return $(default-value) ;
+}
+
+rule log ( log-level prefix ? : * )
+{
+ for local message-arg in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
+ {
+ local message = $($(message-arg)) ;
+ if $(message)
+ {
+ ECHO "$(log-level):" "$(prefix):" $(message) ;
+ }
+ }
+}
+
+rule log-error ( prefix ? : * )
+{
+ log error $(prefix) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) : $(20) : $(21) : $(22) : $(23) : $(24) ;
+}
+
+rule log-warning ( prefix ? : * )
+{
+ log warning $(prefix) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) : $(20) : $(21) : $(22) : $(23) : $(24) ;
+}
+
+rule log-notice ( prefix ? : * )
+{
+ log notice $(prefix) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) : $(20) : $(21) : $(22) : $(23) : $(24) ;
+}
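+
+# Illustrative use of the logging helpers above (prefix and message are
+# arbitrary):
+#
+#   log-warning "cray" : "compiler version could not be detected" ;
+#   # prints: warning: cray: compiler version could not be detected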
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 08060000 : numeric-version 8.6 ;
+ assert.result 08061500 : numeric-version 8.6.15 ;
+ assert.result 08061501 : numeric-version 8.6.15.1 ;
+ assert.result 08061501 : numeric-version 8.6.15.1.2 ;
+
+ local a = [ numeric-version 8.6 ] ;
+ local b = [ numeric-version 8.5.9 ] ;
+
+ # 'assert.equal x : y' forces the test to fail. It's like saying 'assert
+ # false'.
+
+ if ! ( $(a) > $(b) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(b) < $(a) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(a) >= $(b) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(a) >= $(a) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(b) <= $(a) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(b) <= $(b) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(a) = $(a) )
+ {
+ assert.equal x : y ;
+ }
+
+ if ! ( $(a) != $(b) )
+ {
+ assert.equal x : y ;
+ }
+}
diff --git a/src/boost/tools/build/src/tools/cw-config.jam b/src/boost/tools/build/src/tools/cw-config.jam
new file mode 100644
index 000000000..1211b7c04
--- /dev/null
+++ b/src/boost/tools/build/src/tools/cw-config.jam
@@ -0,0 +1,34 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for CodeWarrior toolset. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ for local R in 9 8 7
+ {
+ local cw-path = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
+ : "PATH" ] ;
+ local cw-version = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)"
+ : "VERSION" ] ;
+ cw-path ?= [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0"
+ : "PATH" ] ;
+ cw-version ?= $(R).0 ;
+
+ if $(cw-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
+ }
+ using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ;
+ }
+ }
+}
diff --git a/src/boost/tools/build/src/tools/cw.jam b/src/boost/tools/build/src/tools/cw.jam
new file mode 100644
index 000000000..531c9c9fa
--- /dev/null
+++ b/src/boost/tools/build/src/tools/cw.jam
@@ -0,0 +1,302 @@
+# Copyright (C) Reece H Dunn 2004
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.cw]]
+= Code Warrior
+
+The `cw` module supports the CodeWarrior compiler, originally produced by
+Metrowerks and presently developed by Freescale. B2 supports
+only the versions of the compiler that target x86 processors. All such
+versions were released by Metrowerks before the acquisition and are no
+longer sold. The last version known to work is 9.4.
+
+The module is initialized using the following syntax:
+
+----
+using cw : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, B2 will search for a binary
+named `mwcc` in default installation paths and in PATH.
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
+`setup`::
+ The command that sets up environment variables prior to invoking the
+ compiler. If not specified, `cwenv.bat` alongside the compiler binary
+ will be used.
+`compiler`::
+ The command that compiles C and C++ sources. If not specified, `mwcc`
+  will be used. The command will be invoked after the setup script has been
+  executed and has adjusted the PATH variable.
+`linker`::
+ The command that links executables and dynamic libraries. If not
+  specified, `mwld` will be used. The command will be invoked after the
+  setup script has been executed and has adjusted the PATH variable.
+
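+For example, the following configures version 9.4 and lets B2 locate `mwcc`
+in the default installation paths:
+
+----
+using cw : 9.4 ;
+----
+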
+|# # end::doc[]
+
+# based on the msvc.jam toolset
+
+import property ;
+import generators ;
+import os ;
+import type ;
+import toolset : flags ;
+import errors : error ;
+import feature : feature get-values ;
+import path ;
+import sequence : unique ;
+import common ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : cw ;
+
+toolset.add-requirements <toolset>cw,<runtime-link>shared:<threading>multi ;
+
+nl = "
+" ;
+
+rule init ( version ? : command * : options * )
+{
+ # TODO: fix the $(command[1]) = $(compiler) issue
+
+ setup = [ get-values <setup> : $(options) ] ;
+ setup ?= cwenv.bat ;
+ compiler = [ get-values <compiler> : $(options) ] ;
+ compiler ?= mwcc ;
+ linker = [ get-values <linker> : $(options) ] ;
+ linker ?= mwld ;
+
+ local condition = [ common.check-init-parameters cw :
+ version $(version) ] ;
+
+ command = [ common.get-invocation-command cw : mwcc.exe : $(command) :
+ [ default-paths $(version) ] ] ;
+
+ common.handle-options cw : $(condition) : $(command) : $(options) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ local tool-root = $(command) ;
+
+ setup = $(tool-root)\\$(setup) ;
+
+ # map the batch file in setup so it can be executed
+
+ other-tools = $(tool-root:D) ;
+ root ?= $(other-tools:D) ;
+
+ flags cw.link RUN_PATH $(condition) :
+ "$(root)\\Win32-x86 Support\\Libraries\\Runtime"
+ "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ;
+
+ setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ;
+
+ if [ os.name ] = NT
+ {
+ setup = $(setup)"
+" ;
+ }
+ else
+ {
+ setup = "cmd /S /C "$(setup)" \"&&\" " ;
+ }
+
+ # bind the setup command to the tool so it can be executed before the
+ # command
+
+ local prefix = $(setup) ;
+
+ flags cw.compile .CC $(condition) : $(prefix)$(compiler) ;
+ flags cw.link .LD $(condition) : $(prefix)$(linker) ;
+ flags cw.archive .LD $(condition) : $(prefix)$(linker) ;
+
+ if [ MATCH "^([89]\\.)" : $(version) ]
+ {
+ if [ os.name ] = NT
+ {
+ # The runtime libraries
+ flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>off : -runtime ss ;
+ flags cw.compile CFLAGS <runtime-link>static/<threading>single/<runtime-debugging>on : -runtime ssd ;
+
+ flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>off : -runtime sm ;
+ flags cw.compile CFLAGS <runtime-link>static/<threading>multi/<runtime-debugging>on : -runtime smd ;
+
+ flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>off : -runtime dm ;
+ flags cw.compile CFLAGS <runtime-link>shared/<runtime-debugging>on : -runtime dmd ;
+ }
+ }
+}
+
+
+local rule default-paths ( version ? ) # FIXME
+{
+ local possible-paths ;
+ local ProgramFiles = [ common.get-program-files-dir ] ;
+
+ # TODO: add support for cw8 and cw9 detection
+
+ local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ;
+ possible-paths += $(version-6-path) ;
+
+ # perform post-processing
+
+ possible-paths
+ = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ;
+
+ possible-paths += [ modules.peek : PATH Path path ] ;
+
+ return $(possible-paths) ;
+}
+
+
+
+
+## declare generators
+
+generators.register-c-compiler cw.compile.c++ : CPP : OBJ : <toolset>cw ;
+generators.register-c-compiler cw.compile.c : C : OBJ : <toolset>cw ;
+
+generators.register-linker cw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>cw
+ ;
+generators.register-linker cw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : SHARED_LIB IMPORT_LIB
+ : <toolset>cw
+ ;
+
+generators.register-archiver cw.archive
+ : OBJ
+ : STATIC_LIB
+ : <toolset>cw
+ ;
+
+## compilation phase
+
+flags cw WHATEVER <toolset-cw:version> ;
+
+flags cw.compile CFLAGS <debug-symbols>on : -g ;
+flags cw.compile CFLAGS <optimization>off : -O0 ;
+flags cw.compile CFLAGS <optimization>speed : -O4,p ;
+flags cw.compile CFLAGS <optimization>space : -O4,s ;
+flags cw.compile CFLAGS <inlining>off : -inline off ;
+flags cw.compile CFLAGS <inlining>on : -inline on ;
+flags cw.compile CFLAGS <inlining>full : -inline all ;
+flags cw.compile CFLAGS <exception-handling>off : -Cpp_exceptions off ;
+
+
+flags cw.compile CFLAGS <rtti>on : -RTTI on ;
+flags cw.compile CFLAGS <rtti>off : -RTTI off ;
+
+flags cw.compile CFLAGS <warnings>on : -w on ;
+flags cw.compile CFLAGS <warnings>off : -w off ;
+flags cw.compile CFLAGS <warnings>all : -w all ;
+flags cw.compile CFLAGS <warnings-as-errors>on : -w error ;
+
+flags cw.compile USER_CFLAGS <cflags> : ;
+flags cw.compile.c++ USER_CFLAGS <cxxflags> : ;
+
+flags cw.compile DEFINES <define> ;
+flags cw.compile UNDEFS <undef> ;
+flags cw.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
+}
+actions compile.c++
+{
+ $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")"
+}
+
+## linking phase
+
+flags cw.link DEF_FILE <def-file> ;
+
+flags cw LINKFLAGS : -search ;
+flags cw LINKFLAGS <debug-symbols>on : -g ;
+flags cw LINKFLAGS <user-interface>console : -subsystem console ;
+flags cw LINKFLAGS <user-interface>gui : -subsystem windows ;
+flags cw LINKFLAGS <user-interface>wince : -subsystem wince ;
+flags cw LINKFLAGS <user-interface>native : -subsystem native ;
+flags cw LINKFLAGS <user-interface>auto : -subsystem auto ;
+
+flags cw LINKFLAGS <main-target-type>LIB/<link>static : -library ;
+
+flags cw.link USER_LINKFLAGS <linkflags> ;
+flags cw.link LINKPATH <library-path> ;
+
+flags cw.link FINDLIBS_ST <find-static-library> ;
+flags cw.link FINDLIBS_SA <find-shared-library> ;
+flags cw.link LIBRARY_OPTION <toolset>cw : "" : unchecked ;
+flags cw.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+}
+
+if [ os.name ] in NT
+{
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+ }
+}
+else # cygwin
+{
+ actions archive
+ {
+ _bbv2_out_="$(<)"
+ if test -f "$_bbv2_out_" ; then
+ _bbv2_existing_="$(<:W)"
+ fi
+ $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+ }
+}
+
+actions link bind DEF_FILE
+{
+ $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+actions link.dll bind DEF_FILE
+{
+ $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
diff --git a/src/boost/tools/build/src/tools/cygwin.jam b/src/boost/tools/build/src/tools/cygwin.jam
new file mode 100644
index 000000000..92adf6d45
--- /dev/null
+++ b/src/boost/tools/build/src/tools/cygwin.jam
@@ -0,0 +1,12 @@
+# Copyright 2004 Vladimir Prus.
+# Copyright 2016 Steven Watanabe
+# Copyright 2017 Peter Dimov
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides utility functions for handling cygwin paths
+
+rule cygwin-to-windows-path ( path )
+{
+ return [ MATCH "(.*)[\n]+" : [ SHELL "cygpath -w $(path)" ] ] ;
+}
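+
+# Example (illustrative; the translated path depends on the Cygwin install):
+#
+#   import cygwin ;
+#   local native = [ cygwin.cygwin-to-windows-path /usr/include ] ;
+#   # e.g. "C:\cygwin\usr\include"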
diff --git a/src/boost/tools/build/src/tools/darwin.jam b/src/boost/tools/build/src/tools/darwin.jam
new file mode 100644
index 000000000..8d477410b
--- /dev/null
+++ b/src/boost/tools/build/src/tools/darwin.jam
@@ -0,0 +1,620 @@
+# Copyright 2003 Christopher Currie
+# Copyright 2006 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2005-2007 Mat Marcus
+# Copyright 2005-2007 Adobe Systems Incorporated
+# Copyright 2007-2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
+# for an explanation of why it's a separate toolset.
+
+import feature : feature ;
+import toolset : flags ;
+import type ;
+import common ;
+import generators ;
+import path : basename ;
+import version ;
+import property-set ;
+import regex ;
+import errors ;
+
+## Use a framework.
+feature framework : : free ;
+
+## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
+feature macosx-version : : propagated link-incompatible symmetric optional ;
+
+## The minimal MacOSX version to target.
+feature macosx-version-min : : propagated optional ;
+
+## A dependency, that is forced to be included in the link.
+feature force-load : : free dependency incidental ;
+
+#############################################################################
+
+_ = " " ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : darwin ;
+import gcc ;
+toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+
+generators.override darwin.prebuilt : builtin.prebuilt ;
+generators.override darwin.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
+generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>darwin : gch ;
+
+toolset.inherit-rules darwin : gcc : localize ;
+toolset.inherit-flags darwin : gcc
+ : <runtime-link>static
+ <architecture>arm/<address-model>32
+ <architecture>arm/<address-model>64
+ <architecture>arm/<instruction-set>
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ <architecture>x86/<instruction-set>
+ <architecture>power/<address-model>32
+ <architecture>power/<address-model>64
+ <architecture>power/<instruction-set>
+ <inlining>full ;
+
+# Options:
+#
+# <root>PATH
+# Platform root path. The common autodetection will set this to
+# "/Developer". And when a command is given it will be set to
+# the corresponding "*.platform/Developer" directory.
+#
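+# Example (illustrative; the command and root depend on the local install):
+#
+#   using darwin : 4.2 : /usr/bin/g++-4.2 : <root>/Developer ;
+#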
+rule init ( version ? : command * : options * : requirement * )
+{
+ # First time around, figure what is host OSX version
+ if ! $(.host-osx-version)
+ {
+ .host-osx-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: OSX version on this machine is $(.host-osx-version) ;
+ }
+ }
+
+ # - The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+
+ # - The bin directory where to find the commands to execute.
+ local bin ;
+
+ # - The configured compile driver command.
+ local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;
+
+ # The version as reported by the compiler
+ local real-version ;
+
+ # - Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
+ if $(bin) = "/usr/bin"
+ {
+ root ?= /Developer ;
+ }
+ else
+ {
+ local r = $(bin:D) ;
+ r = $(r:D) ;
+ root ?= $(r) ;
+ }
+ }
+
+ # - Autodetect the version if not given.
+ if $(command)
+ {
+ # - The 'command' variable can have multiple elements. When calling
+ # the SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ real-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ version ?= $(real-version) ;
+ }
+
+ .real-version.$(version) = $(real-version) ;
+
+ # - Define the condition for this toolset instance.
+ local condition =
+ [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;
+
+ # - Set the toolset generic common options.
+ common.handle-options darwin : $(condition) : $(command) : $(options) ;
+
+ # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates.
+ if $(real-version) < "4.0.0"
+ {
+ flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
+ }
+ # - GCC 4.2 and higher in Darwin does not have -Wno-long-double.
+ if $(real-version) < "4.2.0"
+ {
+ flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
+ }
+ # - GCC on Darwin with -pedantic, suppress unsupported long long warning
+ flags darwin.compile OPTIONS $(condition)/<warnings>all : -Wno-long-long ;
+
+ # - GCC on El Capitan (10.11) does not support -finline-functions
+ if "10.11.0" <= $(.host-osx-version)
+ {
+ flags darwin.compile OPTIONS $(condition)/<inlining>full : -Wno-inline ;
+ }
+
+ # - The symbol strip program.
+ local strip ;
+ if <striper> in $(options)
+ {
+        # We can turn off strip by specifying it as empty, in which
+        # case we switch to using the linker to do the strip.
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
+ }
+ else
+ {
+ # Otherwise we need to find a strip program to use. And hence
+ # also tell the link action that we need to use a strip
+ # post-process.
+ flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
+ strip =
+ [ common.get-invocation-command darwin
+ : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.link .STRIP $(condition) : $(strip[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: using strip for $(condition) at $(strip[1]) ;
+ }
+ }
+
+    # - The archive builder (libtool is the default, as creating
+    #   archives on Darwin is complicated).
+ local archiver =
+ [ common.get-invocation-command darwin
+ : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: using archiver for $(condition) at $(archiver[1]) ;
+ }
+
+ # - Initialize the SDKs available in the root for this tool.
+ local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;
+
+ #~ ECHO --- ;
+ #~ ECHO --- bin :: $(bin) ;
+ #~ ECHO --- root :: $(root) ;
+ #~ ECHO --- version :: $(version) ;
+ #~ ECHO --- condition :: $(condition) ;
+ #~ ECHO --- strip :: $(strip) ;
+ #~ ECHO --- archiver :: $(archiver) ;
+ #~ ECHO --- sdks :: $(sdks) ;
+ #~ ECHO --- ;
+ #~ EXIT ;
+}
+
+# Add and set options for a discovered SDK version.
+local rule init-sdk ( condition * : root ? : version + : version-feature ? )
+{
+ local rule version-to-feature ( version + )
+ {
+ switch $(version[1])
+ {
+ case appletv* :
+ {
+ return $(version[1])-$(version[2-]:J=.) ;
+ }
+ case iphone* :
+ {
+ return $(version[1])-$(version[2-]:J=.) ;
+ }
+ case mac* :
+ {
+ return $(version[2-]:J=.) ;
+ }
+ case * :
+ {
+ return $(version:J=.) ;
+ }
+ }
+ }
+
+ if $(version-feature)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice\: available sdk for $(condition)/<macosx-version>$(version-feature) at $(root) ;
+ }
+
+ # Add the version to the features for specifying them.
+ if ! $(version-feature) in [ feature.values macosx-version ]
+ {
+ feature.extend macosx-version : $(version-feature) ;
+ }
+ if ! $(version-feature) in [ feature.values macosx-version-min ]
+ {
+ feature.extend macosx-version-min : $(version-feature) ;
+ }
+
+ # Set the flags the version needs to compile with, first
+ # generic options.
+ flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(root) ;
+ flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(root) ;
+
+ # Then device variation options.
+ switch $(version[1])
+ {
+ case appletvsim* :
+ {
+ local N = $(version[2]) ;
+ if ! $(version[3]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
+ else { N += 0$(version[3]) ; }
+ if ! $(version[4]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
+ else { N += 0$(version[4]) ; }
+ N = $(N:J=) ;
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ }
+
+ case appletv* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -mtvos-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -mtvos-version-min=$(version[2-]:J=.) ;
+ }
+
+ case iphonesim* :
+ {
+ local N = $(version[2]) ;
+ if ! $(version[3]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
+ else { N += 0$(version[3]) ; }
+ if ! $(version[4]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
+ else { N += 0$(version[4]) ; }
+ N = $(N:J=) ;
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ }
+
+ case iphone* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ }
+
+ case mac* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ }
+ }
+
+ if $(version[3]) > 0
+ {
+ # We have a minor version of an SDK. We want to set up
+ # previous minor versions, plus the current minor version.
+ # So we recurse to set up the previous minor versions, up to
+ # the current version.
+ local minor-minus-1 = [ CALC $(version[3]) - 1 ] ;
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-2]) $(minor-minus-1) : [ version-to-feature $(version[1-2]) $(minor-minus-1) ] ]
+ $(version-feature) ;
+ }
+ else
+ {
+ return $(version-feature) ;
+ }
+ }
+ else if $(version[4])
+ {
+ # We have a patch version of an SDK. We want to set up
+ # both the specific patch version, and the minor version.
+ # So we recurse to set up the patch version. Plus the minor version.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+ else
+ {
+ # Yes, this is intentionally recursive.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+}
+
+# Determine the MacOSX SDK versions installed and their locations.
+local rule init-available-sdk-versions ( condition * : root ? )
+{
+ root ?= /Developer ;
+ local sdks-root = $(root)/SDKs ;
+ local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk AppleTVOS*.sdk AppleTVSimulator*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
+ local result ;
+ for local sdk in $(sdks)
+ {
+ local sdk-match = [ MATCH "([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)?" : $(sdk:D=) ] ;
+ local sdk-platform = $(sdk-match[1]:L) ;
+ local sdk-version = $(sdk-match[2-]) ;
+ if $(sdk-version)
+ {
+ switch $(sdk-platform)
+ {
+ case macosx :
+ {
+ sdk-version = mac $(sdk-version) ;
+ }
+ case appletvos :
+ {
+ sdk-version = appletv $(sdk-version) ;
+ }
+ case appletvsimulator :
+ {
+ sdk-version = appletvsim $(sdk-version) ;
+ }
+ case iphoneos :
+ {
+ sdk-version = iphone $(sdk-version) ;
+ }
+ case iphonesimulator :
+ {
+ sdk-version = iphonesim $(sdk-version) ;
+ }
+ case * :
+ {
+ sdk-version = $(sdk-version:J=-) ;
+ }
+ }
+ result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+# Generic options.
+flags darwin.compile OPTIONS <flags> ;
+
+# The following adds objective-c support to darwin.
+# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
+
+generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
+generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local ps = [ property-set.create $(properties) ] ;
+ local arch = [ $(ps).get <architecture> ] ;
+ local instruction-set = [ $(ps).get <instruction-set> ] ;
+ local address-model = [ $(ps).get <address-model> ] ;
+ local osx-version = [ $(ps).get <macosx-version> ] ;
+ local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
+ gcc-version = $(.real-version.$(gcc-version)) ;
+ local options ;
+
+ local support-ppc64 = 1 ;
+
+ osx-version ?= $(.host-osx-version) ;
+
+ switch $(osx-version)
+ {
+ case iphone* :
+ {
+ support-ppc64 = ;
+ }
+
+ case * :
+ if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
+ {
+ # When targeting 10.6:
+                # - gcc 4.2 will give a compiler error if ppc64 compilation is requested
+ # - gcc 4.0 will compile fine, somehow, but then fail at link time
+ support-ppc64 = ;
+ }
+ }
+ switch $(arch)
+ {
+ case combined :
+ {
+ if $(address-model) = 32_64 {
+ if $(support-ppc64) {
+ options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
+ } else {
+ # Build 3-way binary
+ options = -arch i386 -arch ppc -arch x86_64 ;
+ }
+ } else if $(address-model) = 64 {
+ if $(support-ppc64) {
+ options = -arch x86_64 -arch ppc64 ;
+ } else {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+ } else {
+ options = -arch i386 -arch ppc ;
+ }
+ }
+
+ case x86 :
+ {
+ if $(address-model) = 32_64 {
+ options = -arch i386 -arch x86_64 ;
+ } else if $(address-model) = 64 {
+ options = -arch x86_64 ;
+ } else {
+ options = -arch i386 ;
+ }
+ }
+
+ case power :
+ {
+ if ! $(support-ppc64)
+ && ( $(address-model) = 32_64 || $(address-model) = 64 )
+ {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+
+ if $(address-model) = 32_64 {
+ options = -arch ppc -arch ppc64 ;
+ } else if $(address-model) = 64 {
+ options = -arch ppc64 ;
+ } else {
+ options = -arch ppc ;
+ }
+ }
+
+ case arm :
+ {
+ if $(instruction-set) {
+ options = -arch$(_)$(instruction-set) ;
+ } else {
+ options = -arch arm ;
+ }
+ }
+ }
+
+ if $(options)
+ {
+ OPTIONS on $(targets) += $(options) ;
+ }
+}
+
+rule compile.m ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c" ;
+ gcc.set-fpic-options $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.m
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.mm ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c++" ;
+ gcc.set-fpic-options $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.mm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Set the max header padding to allow renaming of libs for installation.
+flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
+
+# To link the static runtime we need to link to all the core runtime libraries.
+flags darwin.link OPTIONS <runtime-link>static
+ : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
+
+# Strip as much as possible when optimizing.
+flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+
+# Dynamic/shared linking.
+flags darwin.compile OPTIONS <link>shared : -dynamic ;
+
+# Misc options.
+flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ;
+#~ flags darwin.link OPTIONS : -fexceptions ;
+
+# Add the framework names to use.
+flags darwin.link FRAMEWORK <framework> ;
+
+#
+flags darwin.link FORCE_LOAD <force-load> ;
+
+# This flag is useful for debugging the link step
+# uncomment to see what libtool is doing under the hood
+#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
+
+# set up the -F option to include the paths to any frameworks used.
+local rule prepare-framework-path ( target + )
+{
+    # The -framework option only takes the basename of the framework.
+    # The -F option specifies the directories where a framework
+    # is searched for. So, if we find a <framework> feature
+    # with some path, we need to generate the proper -F option.
+ local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ;
+
+ # Be sure to generate no -F if there's no path.
+ for local framework-path in $(framework-paths)
+ {
+ if $(framework-path) != ""
+ {
+ FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
+ }
+ }
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+# Note that using strip without any options was reported to result in broken
+# binaries, at least on OS X 10.5.5, see:
+# http://svn.boost.org/trac/boost/ticket/2347
+# So we pass -S -x.
+actions link bind LIBRARIES FORCE_LOAD
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+ $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+}
+
+# We use libtool instead of ar to support universal binary linking
+# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
+actions piecemeal archive
+{
+ "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
+}
diff --git a/src/boost/tools/build/src/tools/darwin.py b/src/boost/tools/build/src/tools/darwin.py
new file mode 100644
index 000000000..f03d63f35
--- /dev/null
+++ b/src/boost/tools/build/src/tools/darwin.py
@@ -0,0 +1,57 @@
+# Copyright (C) Christopher Currie 2003. Permission to copy, use,
+# modify, sell and distribute this software is granted provided this
+# copyright notice appears in all copies. This software is provided
+# "as is" without express or implied warranty, and with no claim as to
+# its suitability for any purpose.
+
+# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
+# for an explanation of why it's a separate toolset.
+
+import common, gcc, builtin
+from b2.build import feature, toolset, type, action, generators
+from b2.util.utility import *
+
+toolset.register ('darwin')
+
+toolset.inherit_generators ('darwin', [], 'gcc')
+toolset.inherit_flags ('darwin', 'gcc')
+toolset.inherit_rules ('darwin', 'gcc')
+
+def init (version = None, command = None, options = None):
+ options = to_seq (options)
+
+ condition = common.check_init_parameters ('darwin', None, ('version', version))
+
+ command = common.get_invocation_command ('darwin', 'g++', command)
+
+ common.handle_options ('darwin', condition, command, options)
+
+ gcc.init_link_flags ('darwin', 'darwin', condition)
+
+# Darwin has a different shared library suffix
+type.set_generated_target_suffix ('SHARED_LIB', ['<toolset>darwin'], 'dylib')
+
+# we need to be able to tell the type of .dylib files
+type.register_suffixes ('dylib', 'SHARED_LIB')
+
+feature.feature ('framework', [], ['free'])
+
+toolset.flags ('darwin.compile', 'OPTIONS', '<link>shared', ['-dynamic'])
+toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp'])
+toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates'])
+
+toolset.flags ('darwin.link', 'FRAMEWORK', '<framework>')
+
+# This flag is useful for debugging the link step
+# uncomment to see what libtool is doing under the hood
+# toolset.flags ('darwin.link.dll', 'OPTIONS', None, ['-Wl,-v'])
+
+action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)'])
+
+# TODO: how to set 'bind LIBRARIES'?
+action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)'])
+
+def darwin_archive (manager, targets, sources, properties):
+ pass
+
+action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"'])
diff --git a/src/boost/tools/build/src/tools/diab.jam b/src/boost/tools/build/src/tools/diab.jam
new file mode 100644
index 000000000..60953b677
--- /dev/null
+++ b/src/boost/tools/build/src/tools/diab.jam
@@ -0,0 +1,131 @@
+# Copyright 2015, Wind River Inc.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Diab C++ Compiler
+#
+
+import feature generators common ;
+import toolset : flags ;
+import os ;
+
+feature.extend toolset : diab ;
+
+# Inherit from Unix toolset to get library ordering magic.
+toolset.inherit diab : unix ;
+
+generators.override diab.prebuilt : builtin.lib-generator ;
+generators.override diab.prebuilt : builtin.prebuilt ;
+generators.override diab.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters diab : version $(version) ] ;
+
+ local command = [ common.get-invocation-command diab : dcc : $(command) ] ;
+
+ if $(command)
+ {
+ local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+
+ if $(root)
+ {
+ flags diab .root <host-os>linux : "\"$(root)\"/" ;
+ flags diab .root <host-os>windows : $(root:T)/ ;
+ }
+ }
+ # If we can't find 'CC' anyway, at least show 'CC' in the commands
+ command ?= CC ;
+
+ common.handle-options diab : $(condition) : $(command) : $(options) ;
+}
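+
+# Example configuration (illustrative; 'dcc' is the default command searched
+# for on PATH):
+#
+#   using diab : : dcc ;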
+
+generators.register-c-compiler diab.compile.c++ : CPP : OBJ : <toolset>diab ;
+generators.register-c-compiler diab.compile.c : C : OBJ : <toolset>diab ;
+
+
+# unlike most compilers, Diab defaults to static linking.
+# flags cxx LINKFLAGS <runtime-link>static : ;
+flags diab.compile OPTIONS <debug-symbols>on : -g ;
+flags diab.link OPTIONS <debug-symbols>on : -g ;
+
+flags diab.compile OPTIONS <optimization>off : ;
+flags diab.compile OPTIONS <optimization>speed : -speed ;
+flags diab.compile OPTIONS <optimization>space : -size ;
+
+# flags diab.compile OPTIONS <inlining>off : -Xinline=0 ;
+# flags diab.compile OPTIONS <inlining>on : -Xinline=10 ;
+# flags diab.compile OPTIONS <inlining>full : -Xinline=50 ;
+
+flags diab.compile OPTIONS <cflags> ;
+flags diab.compile.c++ OPTIONS <cxxflags> ;
+flags diab.compile DEFINES <define> ;
+
+flags diab.compile.c++ OPTIONS <exception-handling>off : -Xno-exceptions ;
+# So Dinkum STL knows when exceptions are disabled
+flags diab.compile.c++ DEFINES <exception-handling>off : _NO_EX=1 ;
+flags diab.compile.c++ DEFINES <rtti>off : _NO_RTTI ;
+flags diab.compile INCLUDES <include> ;
+flags diab.link OPTIONS <linkflags> ;
+
+flags diab.compile OPTIONS <link>shared : -Xpic ;
+#flags diab.compile OPTIONS <link>static : ;
+# get VxWorks link options from shell environment
+flags diab.link OPTIONS <link>static : [ os.environ LDFLAGS_STATIC ] ;
+flags diab.link.dll OPTIONS : [ os.environ LDFLAGS_SO ] ;
+flags diab.link OPTIONS <link>shared : [ os.environ LDFLAGS_DYNAMIC ] ;
+
+flags diab.link LOPTIONS <link>shared : -Xdynamic -Xshared -Xpic ;
+
+flags diab.link LIBPATH <library-path> ;
+flags diab.link LIBRARIES <library-file> ;
+flags diab.link FINDLIBS-ST <find-static-library> ;
+flags diab.link FINDLIBS-SA <find-shared-library> ;
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(OPTIONS) $(LOPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
+}
+
+# When creating dynamic libraries, we don't want to be warned about unresolved
+# symbols; therefore, all unresolved symbols are marked as expected with
+# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
+# chain.
+
+actions link.dll bind LIBRARIES
+{
+ $(.root:E=)dplus $(OPTIONS) $(LOPTIONS) "$(LIBRARIES)" -o "$(<[1])" -L$(LIBPATH) "$(>)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
+}
+
+#rule compile.asm ( targets * : sources * : properties * )
+#{
+# setup-fpic $(targets) : $(sources) : $(properties) ;
+# setup-address-model $(targets) : $(sources) : $(properties) ;
+#}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+
+
+actions compile.c
+{
+ $(.root:E=)dcc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ $(.root:E=)dplus -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Always create the archive from scratch. See the gcc toolset for rationale.
+RM = [ common.rm-command ] ;
+actions together piecemeal archive
+{
+ $(RM) "$(<)"
+ dar rc $(<) $(>)
+}
diff --git a/src/boost/tools/build/src/tools/dmc.jam b/src/boost/tools/build/src/tools/dmc.jam
new file mode 100644
index 000000000..cebeefa54
--- /dev/null
+++ b/src/boost/tools/build/src/tools/dmc.jam
@@ -0,0 +1,174 @@
+# Digital Mars C++
+
+# (C) Copyright Christof Meerwald 2003.
+# (C) Copyright Aleksey Gurtovoy 2004.
+# (C) Copyright Arjan Knepper 2006.
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.dmc]]
+= Digital Mars C/C++ Compiler
+
+The `dmc` module supports the http://www.digitalmars.com/[Digital Mars
+C++ compiler].
+
+The module is initialized using the following syntax:
+
+----
+using dmc : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, B2 will search for a binary
+named `dmc` in PATH.
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
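+For example, the simplest configuration lets B2 find `dmc` on the PATH:
+
+----
+using dmc ;
+----
+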
+|# # end::doc[]
+
+# The following #// line will be used by the regression test table generation
+# program as the column heading for HTML tables. Must not include version number.
+#//<a href="http://www.digitalmars.com/">Digital<br>Mars C++</a>
+
+import feature generators common ;
+import toolset : flags ;
+import sequence regex ;
+
+feature.extend toolset : dmc ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters dmc : version $(version) ] ;
+
+ local command = [ common.get-invocation-command dmc : dmc : $(command) ] ;
+ command ?= dmc ;
+
+ common.handle-options dmc : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ if $(root)
+ {
+        # The DMC linker is sensitive to the direction of slashes and
+        # will not link if forward slashes are used in the command.
+ root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ;
+ flags dmc .root $(condition) : $(root)\\bin\\ ;
+ }
+ else
+ {
+ flags dmc .root $(condition) : "" ;
+ }
+}
+
+
+# Declare generators
+generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>dmc ;
+generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>dmc ;
+
+generators.register-archiver dmc.archive : OBJ : STATIC_LIB : <toolset>dmc ;
+generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : <toolset>dmc ;
+generators.register-c-compiler dmc.compile.c : C : OBJ : <toolset>dmc ;
+
+
+# Declare flags
+# The DMC optlink linker has a limitation on the amount of debug info it can include. Therefore only line numbers are enabled in debug builds.
+# flags dmc.compile OPTIONS <debug-symbols>on : -g ;
+flags dmc.compile OPTIONS <debug-symbols>on : -gl ;
+flags dmc.link OPTIONS <debug-symbols>on : /CO /NOPACKF /DEBUGLI ;
+flags dmc.link OPTIONS <debug-symbols>off : /PACKF ;
+
+flags dmc.compile OPTIONS <optimization>off : -S -o+none ;
+flags dmc.compile OPTIONS <optimization>speed : -o+time ;
+flags dmc.compile OPTIONS <optimization>space : -o+space ;
+flags dmc.compile OPTIONS <exception-handling>on : -Ae ;
+flags dmc.compile OPTIONS <rtti>on : -Ar ;
+# FIXME:
+# When compiling sources to be linked into a shared lib (dll), the -WD cflag should be used.
+# When compiling sources to be linked into a static lib (lib) or an executable, the -WA cflag should be used.
+# But for some reason the -WD cflag is always in use.
+# flags dmc.compile OPTIONS <link>shared : -WD ;
+# flags dmc.compile OPTIONS <link>static : -WA ;
+
+# Note that these two options actually imply multithreading support on DMC
+# because there is no single-threaded dynamic runtime library. Specifying
+# <threading>multi would be a bad idea, though, because no option would be
+# matched when the build uses the default settings of <runtime-link>dynamic
+# and <threading>single.
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>shared : -ND ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>shared : -ND ;
+
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>single : ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>single : ;
+flags dmc.compile OPTIONS <runtime-debugging>off/<runtime-link>static/<threading>multi : -D_MT ;
+flags dmc.compile OPTIONS <runtime-debugging>on/<runtime-link>static/<threading>multi : -D_MT ;
+
+flags dmc.compile OPTIONS : <cflags> ;
+flags dmc.compile.c++ OPTIONS : <cxxflags> ;
+
+flags dmc.compile DEFINES : <define> ;
+flags dmc.compile INCLUDES : <include> ;
+
+flags dmc.link <linkflags> ;
+flags dmc.archive OPTIONS <arflags> ;
+
+flags dmc LIBPATH <library-path> ;
+flags dmc LIBRARIES <library-file> ;
+flags dmc FINDLIBS <find-library-sa> ;
+flags dmc FINDLIBS <find-library-st> ;
+
+actions together link bind LIBRARIES
+{
+ "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
+}
+
+actions together link.dll bind LIBRARIES
+{
+ echo LIBRARY "$(<[1])" > $(<[2]:B).def
+ echo DESCRIPTION 'A Library' >> $(<[2]:B).def
+ echo EXETYPE NT >> $(<[2]:B).def
+ echo SUBSYSTEM WINDOWS >> $(<[2]:B).def
+ echo CODE EXECUTE READ >> $(<[2]:B).def
+ echo DATA READ WRITE >> $(<[2]:B).def
+ "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def"
+}
+
+actions compile.c
+{
+ "$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)"
+}
+
+actions together piecemeal archive
+{
+ "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/docutils.jam b/src/boost/tools/build/src/tools/docutils.jam
new file mode 100644
index 000000000..d5c99b184
--- /dev/null
+++ b/src/boost/tools/build/src/tools/docutils.jam
@@ -0,0 +1,125 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for docutils ReStructuredText processing.
+
+import type ;
+import scanner ;
+import generators ;
+import os ;
+import common ;
+import toolset ;
+import path ;
+import feature : feature ;
+import property ;
+import errors ;
+
+.initialized = ;
+
+type.register ReST : rst ;
+
+class rst-scanner : common-scanner
+{
+ rule __init__ ( paths * )
+ {
+ common-scanner.__init__ . $(paths) ;
+ }
+
+ rule pattern ( )
+ {
+ return "^[ ]*\\.\\.[ ]+include::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+image::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+figure::[ ]+([^
+]+)"
+ ;
+ }
+}
+
+scanner.register rst-scanner : include ;
+type.set-scanner ReST : rst-scanner ;
+
+generators.register-standard docutils.html : ReST : HTML ;
+
+rule init ( docutils-dir ? : tools-dir ? )
+{
+ docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
+ tools-dir ?= $(docutils-dir)/tools ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ if $(docutils-dir)
+ {
+ .docutils-dir = $(docutils-dir) ;
+ .tools-dir = $(tools-dir:R="") ;
+
+ .setup = [
+ common.prepend-path-variable-command PYTHONPATH
+ : $(.docutils-dir) $(.docutils-dir)/extras ] ;
+ RST2XXX = [ common.find-tool rst2html ] ;
+ }
+ else
+ {
+ RST2XXX_PY = [ common.find-tool rst2html.py ] ;
+ }
+ }
+}
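+
+# Example configuration (illustrative path; see the error message below for
+# platform-specific hints):
+#
+#   using docutils : /usr/share/docutils ;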
+
+rule html ( target : source : properties * )
+{
+ if ! [ on $(target) return $(RST2XXX) ]
+ {
+ local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
+ if ! $(.tools-dir) && ! $(RST2XXX_PY) {
+ errors.user-error
+ "The docutils module is used, but not configured. "
+ : ""
+ : "Please modify your user-config.jam or project-config.jam to contain:"
+ : ""
+ : " using docutils : <docutils-dir> ;"
+ : ""
+            : "On Ubuntu, the 'docutils-common' package will create /usr/share/docutils."
+            : "Other Linux distributions likely provide a docutils package as well."
+ : "On Windows, you can install from http://docutils.sourceforge.net/."
+ ;
+ }
+
+ if $(RST2XXX_PY)
+ {
+ if $(RST2XXX_PY:D)
+ {
+ # If we have a path to the rst2html.py script, we need to use
+ # the python interpreter to load it up.
+ RST2XXX on $(target) = $(python-cmd:G=:E="python") $(RST2XXX_PY) ;
+ }
+ else
+ {
+                # Otherwise, with a bare rst2html.py, we can just exec it directly.
+                # This works for both *nix and standard Windows Python installs.
+ RST2XXX on $(target) = $(RST2XXX_PY) ;
+ }
+ }
+ else
+ {
+ RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
+ }
+ }
+}
+
+
+feature docutils : : free ;
+feature docutils-html : : free ;
+feature docutils-cmd : : free ;
+toolset.flags docutils COMMON-FLAGS : <docutils> ;
+toolset.flags docutils HTML-FLAGS : <docutils-html> ;
+toolset.flags docutils RST2XXX : <docutils-cmd> ;
+
+actions html
+{
+ $(.setup)
+ "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
+}
+
diff --git a/src/boost/tools/build/src/tools/doxproc.py b/src/boost/tools/build/src/tools/doxproc.py
new file mode 100644
index 000000000..d415133e1
--- /dev/null
+++ b/src/boost/tools/build/src/tools/doxproc.py
@@ -0,0 +1,859 @@
+#!/usr/bin/python
+# Copyright 2006 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+'''
+Processing of Doxygen generated XML.
+'''
+
+import os
+import os.path
+import sys
+import time
+import string
+import getopt
+import glob
+import re
+import xml.dom.minidom
+
+
+def usage():
+ print '''
+Usage:
+ %s options
+
+Options:
+ --xmldir Directory with the Doxygen xml result files.
+ --output Write the output BoostBook to the given location.
+ --id The ID of the top level BoostBook section.
+ --title The title of the top level BoostBook section.
+ --enable-index Generate additional index sections for classes and
+ types.
+''' % ( sys.argv[0] )
+
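+# Example invocation (illustrative values):
+#
+#   python doxproc.py --xmldir=xml --output=reference.xml \
+#       --id=mylib --title="My Library Reference" --enable-index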
+
+def get_args( argv = sys.argv[1:] ):
+ spec = [
+ 'xmldir=',
+ 'output=',
+ 'id=',
+ 'title=',
+ 'enable-index',
+ 'help' ]
+ options = {
+ '--xmldir' : 'xml',
+ '--output' : None,
+ '--id' : 'dox',
+ '--title' : 'Doxygen'
+ }
+ ( option_pairs, other ) = getopt.getopt( argv, '', spec )
+ map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs )
+
+ if '--help' in options:
+ usage()
+ sys.exit(1)
+
+ return {
+ 'xmldir' : options['--xmldir'],
+ 'output' : options['--output'],
+ 'id' : options['--id'],
+ 'title' : options['--title'],
+ 'index' : '--enable-index' in options
+ }
+
+def if_attribute(node, attribute, true_value, false_value=None):
+ if node.getAttribute(attribute) == 'yes':
+ return true_value
+ else:
+ return false_value
+
+class Doxygen2BoostBook:
+
+ def __init__( self, **kwargs ):
+ ##
+ self.args = kwargs
+ self.args.setdefault('id','')
+ self.args.setdefault('title','')
+ self.args.setdefault('last_revision', time.asctime())
+ self.args.setdefault('index', False)
+ self.id = '%(id)s.reference' % self.args
+ self.args['id'] = self.id
+ #~ This is our template BoostBook document we insert the generated content into.
+ self.boostbook = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
+<section id="%(id)s" name="%(title)s" last-revision="%(last_revision)s">
+ <title>%(title)s</title>
+ <library-reference id="%(id)s.headers">
+ <title>Headers</title>
+ </library-reference>
+ <index id="%(id)s.classes">
+ <title>Classes</title>
+ </index>
+ <index id="%(id)s.index">
+ <title>Index</title>
+ </index>
+</section>
+''' % self.args )
+ self.section = {
+ 'headers' : self._getChild('library-reference',id='%(id)s.headers' % self.args),
+ 'classes' : self._getChild('index',id='%(id)s.classes' % self.args),
+ 'index' : self._getChild('index',id='%(id)s.index' % self.args)
+ }
+ #~ Remove the index sections if we aren't generating it.
+ if not self.args['index']:
+ self.section['classes'].parentNode.removeChild(self.section['classes'])
+ self.section['classes'].unlink()
+ del self.section['classes']
+ self.section['index'].parentNode.removeChild(self.section['index'])
+ self.section['index'].unlink()
+ del self.section['index']
+        #~ The symbols, in the Doxygen sense, that we have translated.
+ self.symbols = {}
+ #~ Map of Doxygen IDs and BoostBook IDs, so we can translate as needed.
+ self.idmap = {}
+ #~ Marks generation, to prevent redoing it.
+ self.generated = False
+
+    #~ Add a Doxygen-generated XML document to the content we are translating.
+ def addDox( self, document ):
+ self._translateNode(document.documentElement)
+
+ #~ Turns the internal XML tree into an output UTF-8 string.
+ def tostring( self ):
+ self._generate()
+ #~ return self.boostbook.toprettyxml(' ')
+ return self.boostbook.toxml('utf-8')
+
+    #~ Does post-processing on the partially generated content to generate additional info
+ #~ now that we have the complete source documents.
+ def _generate( self ):
+ if not self.generated:
+ self.generated = True
+ symbols = self.symbols.keys()
+ symbols.sort()
+ #~ Populate the header section.
+ for symbol in symbols:
+ if self.symbols[symbol]['kind'] in ('header'):
+ self.section['headers'].appendChild(self.symbols[symbol]['dom'])
+ for symbol in symbols:
+ if self.symbols[symbol]['kind'] not in ('namespace', 'header'):
+ container = self._resolveContainer(self.symbols[symbol],
+ self.symbols[self.symbols[symbol]['header']]['dom'])
+ if container.nodeName != 'namespace':
+ ## The current BoostBook to Docbook translation doesn't
+ ## respect, nor assign, IDs to inner types of any kind.
+                        ## So nuke the ID entry so as not to create bogus links.
+ del self.idmap[self.symbols[symbol]['id']]
+ container.appendChild(self.symbols[symbol]['dom'])
+ self._rewriteIDs(self.boostbook.documentElement)
+
+ #~ Rewrite the various IDs from Doxygen references to the newly created
+ #~ BoostBook references.
+ def _rewriteIDs( self, node ):
+ if node.nodeName in ('link'):
+ if node.getAttribute('linkend') in self.idmap:
+ #~ A link, and we have someplace to repoint it at.
+ node.setAttribute('linkend',self.idmap[node.getAttribute('linkend')])
+ else:
+ #~ A link, but we don't have a generated target for it.
+ node.removeAttribute('linkend')
+ elif hasattr(node,'hasAttribute') and node.hasAttribute('id') and node.getAttribute('id') in self.idmap:
+ #~ Simple ID, and we have a translation.
+ node.setAttribute('id',self.idmap[node.getAttribute('id')])
+    #~ Recurse and iterate: a depth-first traversal, which turns out to be
+    #~ left-to-right and top-to-bottom for the document.
+ if node.firstChild:
+ self._rewriteIDs(node.firstChild)
+ if node.nextSibling:
+ self._rewriteIDs(node.nextSibling)
+
+ def _resolveContainer( self, cpp, root ):
+ container = root
+ for ns in cpp['namespace']:
+ node = self._getChild('namespace',name=ns,root=container)
+ if not node:
+ node = container.appendChild(
+ self._createNode('namespace',name=ns))
+ container = node
+ for inner in cpp['name'].split('::'):
+ node = self._getChild(name=inner,root=container)
+ if not node:
+ break
+ container = node
+ return container
+
+ def _setID( self, id, name ):
+ self.idmap[id] = name.replace('::','.').replace('/','.')
+ #~ print '--| setID:',id,'::',self.idmap[id]
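+ #~ For example, _setID('classboost_1_1thing','boost::thing') would map that
+ #~ Doxygen refid to the BoostBook id 'boost.thing' (names here are illustrative).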
+
+ #~ Translate a given node within a given context.
+ #~ The translation dispatches to a local method of the form
+ #~ "_translate[_context0,...,_contextN]", and the keyword args are
+ #~ passed along. If there is no translation handling method we
+ #~ return None.
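+ #~ For example, a call like _translateNode(node,node.getAttribute('kind')) for
+ #~ a <compounddef kind="struct"> element tries _translate_compounddef_struct
+ #~ first and then _translate_struct, using whichever method is defined.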
+ def _translateNode( self, *context, **kwargs ):
+ node = None
+ names = [ ]
+ for c in context:
+ if c:
+ if not isinstance(c,xml.dom.Node):
+ suffix = '_'+c.replace('-','_')
+ else:
+ suffix = '_'+c.nodeName.replace('-','_')
+ node = c
+ names.append('_translate')
+ names = map(lambda x: x+suffix,names)
+ if node:
+ for name in names:
+ if hasattr(self,name):
+ return getattr(self,name)(node,**kwargs)
+ return None
+
+ #~ Translates the children of the given parent node, appending the results
+ #~ to the indicated target. For nodes not translated by the translation method
+ #~ it copies the child over and recurses on that child to translate any
+ #~ possible interior nodes. Hence this will translate the entire subtree.
+ def _translateChildren( self, parent, **kwargs ):
+ target = kwargs['target']
+ for n in parent.childNodes:
+ child = self._translateNode(n,target=target)
+ if child:
+ target.appendChild(child)
+ else:
+ child = n.cloneNode(False)
+ if hasattr(child,'data'):
+ child.data = re.sub(r'\s+',' ',child.data)
+ target.appendChild(child)
+ self._translateChildren(n,target=child)
+
+ #~ Translate the given node as a description, into the description subnode
+ #~ of the target. If no description subnode is present in the target it
+ #~ is created.
+ def _translateDescription( self, node, target=None, tag='description', **kwargs ):
+ description = self._getChild(tag,root=target)
+ if not description:
+ description = target.appendChild(self._createNode(tag))
+ self._translateChildren(node,target=description)
+ return description
+
+ #~ Top level translation of: <doxygen ...>...</doxygen>,
+ #~ translates the children.
+ def _translate_doxygen( self, node ):
+ #~ print '_translate_doxygen:', node.nodeName
+ result = []
+ for n in node.childNodes:
+ newNode = self._translateNode(n)
+ if newNode:
+ result.append(newNode)
+ return result
+
+ #~ Top level translation of:
+ #~ <doxygenindex ...>
+ #~ <compound ...>
+ #~ <member ...>
+ #~ <name>...</name>
+ #~ </member>
+ #~ ...
+ #~ </compound>
+ #~ ...
+ #~ </doxygenindex>
+ #~ builds the class and symbol sections, if requested.
+ def _translate_doxygenindex( self, node ):
+ #~ print '_translate_doxygenindex:', node.nodeName
+ if self.args['index']:
+ entries = []
+ classes = []
+ #~ Accumulate all the index entries we care about.
+ for n in node.childNodes:
+ if n.nodeName == 'compound':
+ if n.getAttribute('kind') not in ('file','dir','define'):
+ cpp = self._cppName(self._getChildData('name',root=n))
+ entry = {
+ 'name' : cpp['name'],
+ 'compoundname' : cpp['compoundname'],
+ 'id' : n.getAttribute('refid')
+ }
+ if n.getAttribute('kind') in ('class','struct'):
+ classes.append(entry)
+ entries.append(entry)
+ for m in n.childNodes:
+ if m.nodeName == 'member':
+ cpp = self._cppName(self._getChildData('name',root=m))
+ entry = {
+ 'name' : cpp['name'],
+ 'compoundname' : cpp['compoundname'],
+ 'id' : n.getAttribute('refid')
+ }
+ if hasattr(m,'getAttribute') and m.getAttribute('kind') in ('class','struct'):
+ classes.append(entry)
+ entries.append(entry)
+ #~ Put them in a sensible order.
+ entries.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower()))
+ classes.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower()))
+ #~ And generate the BoostBook for them.
+ self._translate_index_(entries,target=self.section['index'])
+ self._translate_index_(classes,target=self.section['classes'])
+ return None
+
+ #~ Translate a set of index entries into the BoostBook output. The output
+ #~ is grouped by the first letter of the entry names.
+ def _translate_index_(self, entries, target=None, **kwargs ):
+ i = 0
+ targetID = target.getAttribute('id')
+ while i < len(entries):
+ dividerKey = entries[i]['name'][0].upper()
+ divider = target.appendChild(self._createNode('indexdiv',id=targetID+'.'+dividerKey))
+ divider.appendChild(self._createText('title',dividerKey))
+ while i < len(entries) and dividerKey == entries[i]['name'][0].upper():
+ iename = entries[i]['name']
+ ie = divider.appendChild(self._createNode('indexentry'))
+ ie = ie.appendChild(self._createText('primaryie',iename))
+ while i < len(entries) and entries[i]['name'] == iename:
+ ie.appendChild(self.boostbook.createTextNode(' ('))
+ ie.appendChild(self._createText(
+ 'link',entries[i]['compoundname'],linkend=entries[i]['id']))
+ ie.appendChild(self.boostbook.createTextNode(')'))
+ i += 1
+
+ #~ Translate a <compounddef ...>...</compounddef>,
+ #~ by retranslating with the "kind" of compounddef.
+ def _translate_compounddef( self, node, target=None, **kwargs ):
+ return self._translateNode(node,node.getAttribute('kind'))
+
+ #~ Translate a <compounddef kind="namespace"...>...</compounddef>. For
+ #~ namespaces we just collect the information for later use, as namespaces
+ #~ are not currently included in the BoostBook format. In the future
+ #~ it might be good to generate a namespace index.
+ def _translate_compounddef_namespace( self, node, target=None, **kwargs ):
+ namespace = {
+ 'id' : node.getAttribute('id'),
+ 'kind' : 'namespace',
+ 'name' : self._getChildData('compoundname',root=node),
+ 'brief' : self._getChildData('briefdescription',root=node),
+ 'detailed' : self._getChildData('detaileddescription',root=node),
+ 'parsed' : False
+ }
+ if namespace['name'] in self.symbols:
+ if not self.symbols[namespace['name']]['parsed']:
+ self.symbols[namespace['name']]['parsed'] = True
+ #~ for n in node.childNodes:
+ #~ if hasattr(n,'getAttribute'):
+ #~ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
+ else:
+ self.symbols[namespace['name']] = namespace
+ #~ self._setID(namespace['id'],namespace['name'])
+ return None
+
+ #~ Translate a <compounddef kind="class"...>...</compounddef>, which
+ #~ forwards to the kind=struct as they are the same.
+ def _translate_compounddef_class( self, node, target=None, **kwargs ):
+ return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs)
+
+ #~ Translate a <compounddef kind="struct"...>...</compounddef> into:
+ #~ <header id="?" name="?">
+ #~ <struct name="?">
+ #~ ...
+ #~ </struct>
+ #~ </header>
+ def _translate_compounddef_struct( self, node, tag='struct', target=None, **kwargs ):
+ result = None
+ includes = self._getChild('includes',root=node)
+ if includes:
+ ## Add the header into the output table.
+ self._translate_compounddef_includes_(includes,includes,**kwargs)
+ ## Compounds are the declared symbols, classes, types, etc.
+ ## We add them to the symbol table, along with the partial DOM for them
+ ## so that they can be organized into the output later.
+ compoundname = self._getChildData('compoundname',root=node)
+ compoundname = self._cppName(compoundname)
+ self._setID(node.getAttribute('id'),compoundname['compoundname'])
+ struct = self._createNode(tag,name=compoundname['name'].split('::')[-1])
+ self.symbols[compoundname['compoundname']] = {
+ 'header' : includes.firstChild.data,
+ 'namespace' : compoundname['namespace'],
+ 'id' : node.getAttribute('id'),
+ 'kind' : tag,
+ 'name' : compoundname['name'],
+ 'dom' : struct
+ }
+ ## Add the children which will be the members of the struct.
+ for n in node.childNodes:
+ self._translateNode(n,target=struct,scope=compoundname['compoundname'])
+ result = struct
+ return result
+
+ #~ Translate a <compounddef ...><includes ...>...</includes></compounddef>,
+ def _translate_compounddef_includes_( self, node, target=None, **kwargs ):
+ name = node.firstChild.data
+ if name not in self.symbols:
+ self._setID(node.getAttribute('refid'),name)
+ self.symbols[name] = {
+ 'kind' : 'header',
+ 'id' : node.getAttribute('refid'),
+ 'dom' : self._createNode('header',
+ id=node.getAttribute('refid'),
+ name=name)
+ }
+ return None
+
+ #~ Translate a <basecompoundref...>...</basecompoundref> into:
+ #~ <inherit access="?">
+ #~ ...
+ #~ </inherit>
+ def _translate_basecompoundref( self, ref, target=None, **kwargs ):
+ inherit = target.appendChild(self._createNode('inherit',
+ access=ref.getAttribute('prot')))
+ self._translateChildren(ref,target=inherit)
+ return
+
+ #~ Translate:
+ #~ <templateparamlist>
+ #~ <param>
+ #~ <type>...</type>
+ #~ <declname>...</declname>
+ #~ <defname>...</defname>
+ #~ <defval>...</defval>
+ #~ </param>
+ #~ ...
+ #~ </templateparamlist>
+ #~ Into:
+ #~ <template>
+ #~ <template-type-parameter name="?" />
+ #~ <template-nontype-parameter name="?">
+ #~ <type>?</type>
+ #~ <default>?</default>
+ #~ </template-nontype-parameter>
+ #~ </template>
+ def _translate_templateparamlist( self, templateparamlist, target=None, **kwargs ):
+ template = target.appendChild(self._createNode('template'))
+ for param in templateparamlist.childNodes:
+ if param.nodeName == 'param':
+ type = self._getChildData('type',root=param)
+ defval = self._getChild('defval',root=param)
+ paramKind = None
+ if type in ('class','typename'):
+ paramKind = 'template-type-parameter'
+ else:
+ paramKind = 'template-nontype-parameter'
+ templateParam = template.appendChild(
+ self._createNode(paramKind,
+ name=self._getChildData('declname',root=param)))
+ if paramKind == 'template-nontype-parameter':
+ template_type = templateParam.appendChild(self._createNode('type'))
+ self._translate_type(
+ self._getChild('type',root=param),target=template_type)
+ if defval:
+ value = self._getChildData('ref',root=defval.firstChild)
+ if not value:
+ value = self._getData(defval)
+ templateParam.appendChild(self._createText('default',value))
+ return template
+
+ #~ Translate:
+ #~ <briefdescription>...</briefdescription>
+ #~ Into:
+ #~ <purpose>...</purpose>
+ def _translate_briefdescription( self, brief, target=None, **kwargs ):
+ self._translateDescription(brief,target=target,**kwargs)
+ return self._translateDescription(brief,target=target,tag='purpose',**kwargs)
+
+ #~ Translate:
+ #~ <detaileddescription>...</detaileddescription>
+ #~ Into:
+ #~ <description>...</description>
+ def _translate_detaileddescription( self, detailed, target=None, **kwargs ):
+ return self._translateDescription(detailed,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="?">...</sectiondef>
+ #~ With kind specific translation.
+ def _translate_sectiondef( self, sectiondef, target=None, **kwargs ):
+ self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs)
+
+ #~ Translate non-function sections.
+ def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ):
+ for n in sectiondef.childNodes:
+ if hasattr(n,'getAttribute'):
+ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
+ return None
+
+ #~ Translate:
+ #~ <sectiondef kind="public-type">...</sectiondef>
+ def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ):
+ return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="public-attrib">...</sectiondef>
+ def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs):
+ return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="?-func">...</sectiondef>
+ #~ All the various function group translations end up here, where
+ #~ they are translated into:
+ #~ <method-group name="?">
+ #~ ...
+ #~ </method-group>
+ def _translate_sectiondef_func_( self, sectiondef, name='functions', target=None, **kwargs ):
+ members = target.appendChild(self._createNode('method-group',name=name))
+ for n in sectiondef.childNodes:
+ if hasattr(n,'getAttribute'):
+ self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs)
+ return members
+
+ #~ Translate:
+ #~ <sectiondef kind="public-func">...</sectiondef>
+ def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ):
+ return self._translate_sectiondef_func_(sectiondef,
+ name='public member functions',target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="public-static-func">...</sectiondef>
+ def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs):
+ return self._translate_sectiondef_func_(sectiondef,
+ name='public static functions',target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="protected-func">...</sectiondef>
+ def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ):
+ return self._translate_sectiondef_func_(sectiondef,
+ name='protected member functions',target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="private-static-func">...</sectiondef>
+ def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs):
+ return self._translate_sectiondef_func_(sectiondef,
+ name='private static functions',target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="private-func">...</sectiondef>
+ def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ):
+ return self._translate_sectiondef_func_(sectiondef,
+ name='private member functions',target=target,**kwargs)
+
+ #~ Translate:
+ #~ <sectiondef kind="user-defined"><header>...</header>...</sectiondef>
+ def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ):
+ return self._translate_sectiondef_func_(sectiondef,
+ name=self._getChildData('header', root=sectiondef),target=target,**kwargs)
+
+ #~ Translate:
+ #~ <memberdef kind="typedef" id="?">
+ #~ <name>...</name>
+ #~ </memberdef>
+ #~ To:
+ #~ <typedef id="?" name="?">
+ #~ <type>...</type>
+ #~ </typedef>
+ def _translate_memberdef_typedef( self, memberdef, target=None, scope=None, **kwargs ):
+ self._setID(memberdef.getAttribute('id'),
+ scope+'::'+self._getChildData('name',root=memberdef))
+ typedef = target.appendChild(self._createNode('typedef',
+ id=memberdef.getAttribute('id'),
+ name=self._getChildData('name',root=memberdef)))
+ typedef_type = typedef.appendChild(self._createNode('type'))
+ self._translate_type(self._getChild('type',root=memberdef),target=typedef_type)
+ return typedef
+
+ #~ Translate:
+ #~ <memberdef kind="function" id="?" const="?" static="?" explicit="?" inline="?">
+ #~ <name>...</name>
+ #~ </memberdef>
+ #~ To:
+ #~ <method name="?" cv="?" specifiers="?">
+ #~ ...
+ #~ </method>
+ def _translate_memberdef_function( self, memberdef, target=None, scope=None, **kwargs ):
+ name = self._getChildData('name',root=memberdef)
+ self._setID(memberdef.getAttribute('id'),scope+'::'+name)
+ ## Check if we have some specific kind of method.
+ if name == scope.split('::')[-1]:
+ kind = 'constructor'
+ target = target.parentNode
+ elif name == '~'+scope.split('::')[-1]:
+ kind = 'destructor'
+ target = target.parentNode
+ elif name == 'operator=':
+ kind = 'copy-assignment'
+ target = target.parentNode
+ else:
+ kind = 'method'
+ method = target.appendChild(self._createNode(kind,
+ # id=memberdef.getAttribute('id'),
+ name=name,
+ cv=' '.join([
+ if_attribute(memberdef,'const','const','').strip()
+ ]),
+ specifiers=' '.join([
+ if_attribute(memberdef,'static','static',''),
+ if_attribute(memberdef,'explicit','explicit',''),
+ if_attribute(memberdef,'inline','inline','')
+ ]).strip()
+ ))
+ ## We iterate the children to translate each part of the function.
+ for n in memberdef.childNodes:
+ self._translateNode(memberdef,'function',n,target=method)
+ return method
+
+ #~ Translate:
+ #~ <memberdef kind="function"...><templateparamlist>...</templateparamlist></memberdef>
+ def _translate_memberdef_function_templateparamlist(
+ self, templateparamlist, target=None, **kwargs ):
+ return self._translate_templateparamlist(templateparamlist,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <memberdef kind="function"...><type>...</type></memberdef>
+ #~ To:
+ #~ ...<type>?</type>
+ def _translate_memberdef_function_type( self, resultType, target=None, **kwargs ):
+ methodType = self._createNode('type')
+ self._translate_type(resultType,target=methodType)
+ if methodType.hasChildNodes():
+ target.appendChild(methodType)
+ return methodType
+
+ #~ Translate:
+ #~ <memberdef kind="function"...><briefdescription>...</briefdescription></memberdef>
+ def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ):
+ result = self._translateDescription(description,target=target,**kwargs)
+ ## For functions, if we translate the brief docs into the purpose they end up
+ ## right above the regular description. Since we just added the brief to that
+ ## description on the previous line, don't bother with the repetition.
+ # result = self._translateDescription(description,target=target,tag='purpose',**kwargs)
+ return result
+
+ #~ Translate:
+ #~ <memberdef kind="function"...><detaileddescription>...</detaileddescription></memberdef>
+ def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ):
+ return self._translateDescription(description,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <memberdef kind="function"...><inbodydescription>...</inbodydescription></memberdef>
+ def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ):
+ return self._translateDescription(description,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <memberdef kind="function"...><param>...</param></memberdef>
+ def _translate_memberdef_function_param( self, param, target=None, **kwargs ):
+ return self._translate_param(param,target=target,**kwargs)
+
+ #~ Translate:
+ #~ <memberdef kind="variable" id="?">
+ #~ <name>...</name>
+ #~ <type>...</type>
+ #~ </memberdef>
+ #~ To:
+ #~ <data-member id="?" name="?">
+ #~ <type>...</type>
+ #~ </data-member>
+ def _translate_memberdef_variable( self, memberdef, target=None, scope=None, **kwargs ):
+ self._setID(memberdef.getAttribute('id'),
+ scope+'::'+self._getChildData('name',root=memberdef))
+ data_member = target.appendChild(self._createNode('data-member',
+ id=memberdef.getAttribute('id'),
+ name=self._getChildData('name',root=memberdef)))
+ data_member_type = data_member.appendChild(self._createNode('type'))
+ self._translate_type(self._getChild('type',root=memberdef),target=data_member_type)
+
+ #~ Translate:
+ #~ <memberdef kind="enum" id="?">
+ #~ <name>...</name>
+ #~ ...
+ #~ </memberdef>
+ #~ To:
+ #~ <enum id="?" name="?">
+ #~ ...
+ #~ </enum>
+ def _translate_memberdef_enum( self, memberdef, target=None, scope=None, **kwargs ):
+ self._setID(memberdef.getAttribute('id'),
+ scope+'::'+self._getChildData('name',root=memberdef))
+ enum = target.appendChild(self._createNode('enum',
+ id=memberdef.getAttribute('id'),
+ name=self._getChildData('name',root=memberdef)))
+ for n in memberdef.childNodes:
+ self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs)
+ return enum
+
+ #~ Translate:
+ #~ <memberdef kind="enum"...>
+ #~ <enumvalue id="?">
+ #~ <name>...</name>
+ #~ <initializer>...</initializer>
+ #~ </enumvalue>
+ #~ </memberdef>
+ #~ To:
+ #~ <enumvalue id="?" name="?">
+ #~ <default>...</default>
+ #~ </enumvalue>
+ def _translate_memberdef_enum_enumvalue( self, enumvalue, target=None, scope=None, **kwargs ):
+ self._setID(enumvalue.getAttribute('id'),
+ scope+'::'+self._getChildData('name',root=enumvalue))
+ value = target.appendChild(self._createNode('enumvalue',
+ id=enumvalue.getAttribute('id'),
+ name=self._getChildData('name',root=enumvalue)))
+ initializer = self._getChild('initializer',root=enumvalue)
+ if initializer:
+ self._translateChildren(initializer,
+ target=target.appendChild(self._createNode('default')))
+ return value
+
+ #~ Translate:
+ #~ <param>
+ #~ <type>...</type>
+ #~ <declname>...</declname>
+ #~ <defval>...</defval>
+ #~ </param>
+ #~ To:
+ #~ <parameter name="?">
+ #~ <paramtype>...</paramtype>
+ #~ ...
+ #~ </parameter>
+ def _translate_param( self, param, target=None, **kwargs):
+ parameter = target.appendChild(self._createNode('parameter',
+ name=self._getChildData('declname',root=param)))
+ paramtype = parameter.appendChild(self._createNode('paramtype'))
+ self._translate_type(self._getChild('type',root=param),target=paramtype)
+ defval = self._getChild('defval',root=param)
+ if defval:
+ self._translateChildren(self._getChild('defval',root=param),target=parameter)
+ return parameter
+
+ #~ Translate:
+ #~ <ref kindref="?" ...>...</ref>
+ def _translate_ref( self, ref, **kwargs ):
+ return self._translateNode(ref,ref.getAttribute('kindref'))
+
+ #~ Translate:
+ #~ <ref refid="?" kindref="compound">...</ref>
+ #~ To:
+ #~ <link linkend="?"><classname>...</classname></link>
+ def _translate_ref_compound( self, ref, **kwargs ):
+ result = self._createNode('link',linkend=ref.getAttribute('refid'))
+ classname = result.appendChild(self._createNode('classname'))
+ self._translateChildren(ref,target=classname)
+ return result
+
+ #~ Translate:
+ #~ <ref refid="?" kindref="member">...</ref>
+ #~ To:
+ #~ <link linkend="?">...</link>
+ def _translate_ref_member( self, ref, **kwargs ):
+ result = self._createNode('link',linkend=ref.getAttribute('refid'))
+ self._translateChildren(ref,target=result)
+ return result
+
+ #~ Translate:
+ #~ <type>...</type>
+ def _translate_type( self, type, target=None, **kwargs ):
+ result = self._translateChildren(type,target=target,**kwargs)
+ #~ Filter types to clean up various readability problems, most notably
+ #~ with really long types.
+ xml = target.toxml('utf-8')
+ if (
+ xml.startswith('<type>boost::mpl::') or
+ xml.startswith('<type>BOOST_PP_') or
+ re.match('<type>boost::(lazy_)?(enable|disable)_if',xml)
+ ):
+ while target.firstChild:
+ target.removeChild(target.firstChild)
+ target.appendChild(self._createText('emphasis','unspecified'))
+ return result
+
+ def _getChild( self, tag = None, id = None, name = None, root = None ):
+ if not root:
+ root = self.boostbook.documentElement
+ for n in root.childNodes:
+ found = True
+ if tag and found:
+ found = found and tag == n.nodeName
+ if id and found:
+ if n.hasAttribute('id'):
+ found = found and n.getAttribute('id') == id
+ else:
+ found = found and n.hasAttribute('id') and n.getAttribute('id') == id
+ if name and found:
+ found = found and n.hasAttribute('name') and n.getAttribute('name') == name
+ if found:
+ #~ print '--|', n
+ return n
+ return None
+
+ def _getChildData( self, tag, **kwargs ):
+ return self._getData(self._getChild(tag,**kwargs),**kwargs)
+
+ def _getData( self, node, **kwargs ):
+ if node:
+ text = self._getChild('#text',root=node)
+ if text:
+ return text.data.strip()
+ return ''
+
+ def _cppName( self, type ):
+ parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':'))
+ result = {
+ 'compoundname' : parts.group(1),
+ 'namespace' : parts.group(1).split('::')[0:-1],
+ 'name' : parts.group(1).split('::')[-1],
+ 'specialization' : parts.group(2)
+ }
+ if result['namespace'] and len(result['namespace']) > 0:
+ namespace = '::'.join(result['namespace'])
+ while (
+ len(result['namespace']) > 0 and (
+ namespace not in self.symbols or
+ self.symbols[namespace]['kind'] != 'namespace')
+ ):
+ result['name'] = result['namespace'].pop()+'::'+result['name']
+ namespace = '::'.join(result['namespace'])
+ return result
+
+ def _createNode( self, tag, **kwargs ):
+ result = self.boostbook.createElement(tag)
+ for k in kwargs.keys():
+ if kwargs[k] != '':
+ if k == 'id':
+ result.setAttribute('id',kwargs[k])
+ else:
+ result.setAttribute(k,kwargs[k])
+ return result
+
+ def _createText( self, tag, data, **kwargs ):
+ result = self._createNode(tag,**kwargs)
+ data = data.strip()
+ if len(data) > 0:
+ result.appendChild(self.boostbook.createTextNode(data))
+ return result
+
+
+def main( xmldir=None, output=None, id=None, title=None, index=False ):
+ #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output)
+
+ input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) )
+ input.sort()
+ translator = Doxygen2BoostBook(id=id, title=title, index=index)
+ #~ Feed in the namespaces first to build up the set of namespaces
+ #~ and definitions so that lookup is unambiguous when reading in the definitions.
+ namespace_files = filter(
+ lambda x:
+ os.path.basename(x).startswith('namespace'),
+ input)
+ decl_files = filter(
+ lambda x:
+ not os.path.basename(x).startswith('namespace') and not os.path.basename(x).startswith('_'),
+ input)
+ for dox in namespace_files:
+ #~ print '--|',os.path.basename(dox)
+ translator.addDox(xml.dom.minidom.parse(dox))
+ for dox in decl_files:
+ #~ print '--|',os.path.basename(dox)
+ translator.addDox(xml.dom.minidom.parse(dox))
+
+ if output:
+ output = open(output,'w')
+ else:
+ output = sys.stdout
+ if output:
+ output.write(translator.tostring())
+
+
+main( **get_args() )
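+
+#~ A typical invocation, mirroring the doxproc action in doxygen.jam (paths and
+#~ names below are illustrative only):
+#~   python doxproc.py --xmldir=bin/doxygen/xml --output=reference.boostbook \
+#~       --id=reference --title="Reference" --enable-index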
diff --git a/src/boost/tools/build/src/tools/doxygen-config.jam b/src/boost/tools/build/src/tools/doxygen-config.jam
new file mode 100644
index 000000000..2cd2ccaeb
--- /dev/null
+++ b/src/boost/tools/build/src/tools/doxygen-config.jam
@@ -0,0 +1,11 @@
+#~ Copyright 2005, 2006 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for Doxygen tools. To use, just import this module.
+
+import toolset : using ;
+
+ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ;
+
+using doxygen ;
diff --git a/src/boost/tools/build/src/tools/doxygen.jam b/src/boost/tools/build/src/tools/doxygen.jam
new file mode 100644
index 000000000..a676b6c60
--- /dev/null
+++ b/src/boost/tools/build/src/tools/doxygen.jam
@@ -0,0 +1,782 @@
+# Copyright 2003, 2004 Douglas Gregor
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Copyright 2006 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of various outputs from source
+# files documented with doxygen comments. The supported transformations are:
+#
+# * Source -> Doxygen XML -> BoostBook XML
+# * Source -> Doxygen HTML
+#
+# The type of transformation is selected based on the target requested. For
+# BoostBook XML (the default), specifying a target with an ".xml" suffix or an
+# empty suffix will produce a <target>.xml and <target>.boostbook. For Doxygen
+# HTML, specifying a target with an ".html" suffix will produce a directory
+# <target> with the Doxygen HTML files, and a <target>.html file redirecting to
+# that directory.
+
+import alias ;
+import boostbook ;
+import "class" : new ;
+import common ;
+import feature ;
+import make ;
+import modules ;
+import generators ;
+import os ;
+import param ;
+import path ;
+import print ;
+import project ;
+import property ;
+import stage ;
+import targets ;
+import toolset ;
+import type ;
+import utility ;
+import xsltproc ;
+import virtual-target ;
+
+
+# Used to specify extra configuration parameters. These get translated into a
+# doxyfile which configures the building of the docs.
+feature.feature "doxygen:param" : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
+feature.feature prefix : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
+feature.feature reftitle : : free ;
+
+# Which processor to use for various translations from Doxygen.
+feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
+
+# To generate, or not, index sections.
+feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
+
+# The ID for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.id : : free ;
+
+# The title for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.title : : free ;
+
+# Location for images when generating XML
+feature.feature "doxygen:xml-imagedir" : : free ;
+
+# Indicates whether the entire directory should be deleted
+feature.feature doxygen.rmdir : off on : optional incidental ;
+
+# Doxygen configuration input file.
+type.register DOXYFILE : doxyfile ;
+
+# Doxygen XML multi-file output.
+type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
+
+# Doxygen XML coalesced output.
+type.register DOXYGEN_XML : doxygen : XML ;
+
+# Doxygen HTML multifile directory.
+type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
+
+# Redirection HTML file to HTML multifile directory.
+type.register DOXYGEN_HTML : : HTML ;
+
+type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
+
+
+# Initialize the Doxygen module. Parameters are:
+# name: the name of the 'doxygen' executable. If not specified, the name
+# 'doxygen' will be used
+#
+rule init ( name ? )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ .doxproc = [ modules.binding $(__name__) ] ;
+ .doxproc = $(.doxproc:D)/doxproc.py ;
+
+ generators.register-composing doxygen.headers-to-doxyfile
+ : H HPP CPP MARKDOWN : DOXYFILE ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_XML_MULTIFILE ;
+ generators.register-standard doxygen.xml-dir-to-boostbook
+ : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
+ generators.register-xslt doxygen.xml-to-boostbook
+ : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
+ generators.register-xslt doxygen.collect
+ : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
+ generators.register-standard doxygen.html-redirect
+ : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
+ generators.register-standard doxygen.copy-latex-pngs
+ : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
+
+ IMPORT $(__name__) : doxygen : : doxygen ;
+ }
+
+ if $(name)
+ {
+ modify-config ;
+ .doxygen = $(name) ;
+ check-doxygen ;
+ }
+
+ if ! $(.doxygen)
+ {
+ check-doxygen ;
+ }
+}
+
+
+local rule freeze-config ( )
+{
+ if ! $(.initialized)
+ {
+ import errors ;
+ errors.user-error doxygen must be initialized before it can be used. ;
+ }
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+ if [ .is-cygwin ]
+ {
+ .is-cygwin = true ;
+ }
+ }
+}
+
+
+local rule modify-config ( )
+{
+ if $(.config-frozen)
+ {
+ import errors ;
+ errors.user-error "Cannot change doxygen after it has been used." ;
+ }
+}
+
+
+local rule check-doxygen ( )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using doxygen ":" $(.doxygen) ;
+ }
+ local extra-paths ;
+ if [ os.name ] = NT
+ {
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ extra-paths = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ extra-paths = "C:\\Program Files" ;
+ }
+ }
+ .doxygen = [ common.get-invocation-command doxygen : doxygen : $(.doxygen) :
+ $(extra-paths) ] ;
+}
+
+
+rule name ( )
+{
+ freeze-config ;
+ return $(.doxygen) ;
+}
+
+
+local rule .is-cygwin ( )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native [ path.join [ path.parent $(file) ] doxygen ]
+ ] ;
+ local command = cd \"$(dir)\" "&&" \"$(.doxygen)\"
+ windows-paths-check.doxyfile 2>&1 ;
+ command = $(command:J=" ") ;
+ result = [ SHELL $(command) ] ;
+ if [ MATCH "(Parsing file /)" : $(result) ]
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Runs Doxygen on the given Doxygen configuration file (the source) to generate
+# the Doxygen files. The output is dumped according to the settings in the
+# Doxygen configuration file, not according to the target! Because of this, we
+# essentially "touch" the target file, in effect making it look like we have
+# really written something useful to it. Anyone that uses this action must deal
+# with this behavior.
+#
+actions doxygen-action
+{
+ $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
+}
+
+
+# Runs the Python doxproc XML processor.
+#
+actions doxproc
+{
+ python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
+}
+
+
+rule translate-path ( path )
+{
+ freeze-config ;
+ if [ os.on-windows ]
+ {
+ if [ os.name ] = CYGWIN
+ {
+ if $(.is-cygwin)
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return $(path:W) ;
+ }
+ }
+ else
+ {
+ if $(.is-cygwin)
+ {
+ match = [ MATCH "^(.):(.*)" : $(path) ] ;
+ if $(match)
+ {
+ return /cygdrive/$(match[1])$(match[2]:T) ;
+ }
+ else
+ {
+ return $(path:T) ;
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+}
+
+toolset.uses-features doxygen.headers-to-doxyfile : "<doxygen:param>" ;
+
+# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
+# and a property list that may contain <doxygen:param> features.
+#
+rule headers-to-doxyfile ( target : sources * : properties * )
+{
+ local text = "# Generated by B2 version 2" ;
+
+ local output-dir ;
+
+ # Translate <doxygen:param> into command line flags.
+ for local param in [ feature.get-values <doxygen:param> : $(properties) ]
+ {
+ local namevalue = [ MATCH "([^=]*)=(.*)" : $(param) ] ;
+ if $(namevalue[1]) = OUTPUT_DIRECTORY
+ {
+ output-dir = [ translate-path [ utility.unquote $(namevalue[2]) ] ]
+ ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+ else
+ {
+ text += "$(namevalue[1]) = $(namevalue[2])" ;
+ }
+ }
+
+ if ! $(output-dir)
+ {
+ output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+
+ local headers ;
+ for local header in $(sources:G=)
+ {
+ header = [ translate-path $(header) ] ;
+ headers += \"$(header)\" ;
+ }
+
+ # Doxygen generates LaTeX by default, so disable it unconditionally, at
+ # least until someone needs, and hence writes support for, LaTeX output.
+ text += "GENERATE_LATEX = NO" ;
+ text += "INPUT = $(headers:J= )" ;
+ print.output $(target) plain ;
+ print.text $(text) : true ;
+}
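+
+# For illustration: with <doxygen:param>EXTRACT_ALL=YES and two headers a.hpp
+# and b.hpp (illustrative names), the generated doxyfile would contain roughly:
+#   EXTRACT_ALL = YES
+#   OUTPUT_DIRECTORY = "..."
+#   GENERATE_LATEX = NO
+#   INPUT = "a.hpp" "b.hpp"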
+
+toolset.uses-features doxygen.run : <doxygen.rmdir> "<doxygen:param>" ;
+
+# Run Doxygen. See doxygen-action for a description of the strange properties of
+# this rule.
+#
+rule run ( target : source : properties * )
+{
+ freeze-config ;
+ if <doxygen.rmdir>on in $(properties)
+ {
+ local output-dir = [ path.make [ MATCH
+ "<doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*)" : $(properties) ] ] ;
+ local html-dir = [ path.make [ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
+ $(properties) ] ] ;
+ if $(output-dir) && $(html-dir) &&
+ [ path.glob $(output-dir) : $(html-dir) ]
+ {
+ HTMLDIR on $(target) = [ path.native [ path.join $(output-dir)
+ $(html-dir) ] ] ;
+ rm-htmldir $(target) ;
+ }
+ }
+ doxygen-action $(target) : $(source) ;
+ NAME on $(target) = $(.doxygen) ;
+ RM on $(target) = [ modules.peek common : RM ] ;
+ *.XML on $(target) = [ path.native [ path.join [ path.make [ on $(target)
+ return $(LOCATE) ] ] $(target:B:S=) *.xml ] ] ;
+}
+
+
+if [ os.name ] = NT
+{
+ RMDIR = rmdir /s /q ;
+}
+else
+{
+ RMDIR = rm -rf ;
+}
+
+actions quietly rm-htmldir
+{
+ $(RMDIR) $(HTMLDIR)
+}
+
+
+# The rules below require BoostBook stylesheets, so we need some code to check
+# that the boostbook module has actually been initialized.
+#
+rule check-boostbook ( )
+{
+ if ! [ modules.peek boostbook : .initialized ]
+ {
+ import errors ;
+ errors.user-error
+ : The boostbook module is not initialized. You have attempted to use
+ : the 'doxygen' toolset, which requires BoostBook, but never
+ : initialized BoostBook.
+ : "Hint:" add 'using boostbook \;' to your user-config.jam. ;
+ }
+}
+
+
+# Collect the set of Doxygen XML files into a single XML source file that can be
+# handled by an XSLT processor. The source is completely ignored (see
+# doxygen-action), because this action picks up the Doxygen XML index file xml/
+# index.xml. This is because we cannot teach Doxygen to act like a normal
+# program and take a "-o output.xml" argument. The target of the
+# collection will be a single Doxygen XML file.
+#
+rule collect ( target : source : properties * )
+{
+ check-boostbook ;
+ local collect-xsl-dir
+ = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
+ local source-path
+ = [ path.make [ on $(source) return $(LOCATE) ] ] ;
+ local collect-path
+ = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
+ local native-path
+ = [ path.native $(collect-path) ] ;
+ local real-source
+ = [ path.native [ path.join $(collect-path) index.xml ] ] ;
+ xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
+ : <xsl:param>doxygen.xml.path=$(native-path) ;
+}
+
+toolset.uses-features doxygen.xml-to-boostbook : <prefix> <reftitle> ;
+
+# Translate Doxygen XML into BoostBook.
+#
+rule xml-to-boostbook ( target : source : properties * )
+{
+ check-boostbook ;
+ local xsl-dir = [ boostbook.xsl-dir ] ;
+ local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
+ doxygen2boostbook.xsl ] ] ;
+
+ local xslt-properties = $(properties) ;
+ for local prefix in [ feature.get-values <prefix> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
+ }
+ for local title in [ feature.get-values <reftitle> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
+ }
+
+ xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
+}
+
+
+toolset.flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes :
+ --enable-index ;
+toolset.flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
+toolset.flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
+
+
+rule xml-dir-to-boostbook ( target : source : properties * )
+{
+ DOXPROC on $(target) = $(.doxproc) ;
+ LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
+ doxygen.doxproc $(target) : $(source:S=) ;
+}
+
+
+# Generate the HTML redirect to HTML dir index.html file.
+#
+rule html-redirect ( target : source : properties * )
+{
+ local uri = "$(target:B)/index.html" ;
+ print.output $(target) plain ;
+ print.text
+"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
+ \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
+<html xmlns=\"http://www.w3.org/1999/xhtml\">
+<head>
+ <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
+
+ <title></title>
+</head>
+
+<body>
+ Automatic redirection failed, please go to <a href=
+ \"$(uri)\">$(uri)</a>.
+</body>
+</html>
+"
+ : true ;
+}
+
+rule copy-latex-pngs ( target : source : requirements * )
+{
+ local directory = [ path.native [ feature.get-values <doxygen:xml-imagedir>
+ : $(requirements) ] ] ;
+ local location = [ on $(target) return $(LOCATE) ] ;
+
+ local pdf-location = [ path.native [ path.join [ path.make $(location) ]
+ [ path.make $(directory) ] ] ] ;
+ local html-location = [ path.native [ path.join . html [ path.make
+ $(directory) ] ] ] ;
+
+ common.MkDir $(pdf-location) ;
+ common.MkDir $(html-location) ;
+
+ DEPENDS $(target) : $(pdf-location) $(html-location) ;
+
+ if [ os.name ] = NT
+ {
+ CP on $(target) = copy /y ;
+ FROM on $(target) = \\*.png ;
+ TOHTML on $(target) = .\\html\\$(directory) ;
+ TOPDF on $(target) = \\$(directory) ;
+ }
+ else
+ {
+ CP on $(target) = cp ;
+ FROM on $(target) = /*.png ;
+ TOHTML on $(target) = ./html/$(directory) ;
+ TOPDF on $(target) = $(target:D)/$(directory) ;
+ }
+}
+
+actions copy-latex-pngs
+{
+ $(CP) $(>:S=)$(FROM) $(TOHTML)
+ $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
+ echo "Stamped" > "$(<)"
+}
+
+
+# Building LaTeX images for Doxygen XML depends on latex, dvips, and gs being in
+# your PATH. This is true for most Unix installs, but not on Win32, where you
+# will need to install MiKTeX and Ghostscript and add these tools to your path.
+
+actions check-latex
+{
+ latex -version >$(<)
+}
+
+actions check-dvips
+{
+ dvips -version >$(<)
+}
+
+if [ os.name ] = "NT"
+{
+ actions check-gs
+ {
+ gswin32c -version >$(<)
+ }
+}
+else
+{
+ actions check-gs
+ {
+ gs -version >$(<)
+ }
+}
+
+
+local rule check-tools-targets ( project )
+{
+ if ! $(.check-tools-targets)
+ {
+ # Find the root project.
+ #
+ # This is a best effort attempt to avoid using different locations for
+ # storing *.check files depending on which project imported the doxygen
+ # toolset first. The files are stored in a location related to the
+ # project's root project. Note that this location may change depending
+ # on the folder the build was run from in case the build uses multiple
+ # related projects with their own Jamroot separate modules.
+ local project-module = [ $(project).project-module ] ;
+ local root-module = [ project.get-jamroot-module $(project-module) ] ;
+ if ! $(root-module)
+ {
+ import errors ;
+ if [ project.is-config-module $(project-module) ]
+ {
+ errors.user-error doxygen targets can not be declared in Boost
+ Build's configuration modules. ;
+ }
+ else
+ {
+ errors.user-error doxygen targets can not be declared in
+ standalone projects. : use a Jamfile/Jamroot project
+ instead. ;
+ }
+ }
+ local root-project = [ project.target $(root-module) ] ;
+
+ local targets =
+ [ new file-target latex.check : : $(root-project) : [ new action :
+ doxygen.check-latex ] ]
+ [ new file-target dvips.check : : $(root-project) : [ new action :
+ doxygen.check-dvips ] ]
+ [ new file-target gs.check : : $(root-project) : [ new action :
+ doxygen.check-gs ] ] ;
+
+ for local target in $(targets)
+ {
+ .check-tools-targets += [ virtual-target.register $(target) ] ;
+ }
+ }
+ return $(.check-tools-targets) ;
+}
+
+
+project.initialize $(__name__) ;
+project doxygen ;
+
+class doxygen-check-tools-target-class : basic-target
+{
+ rule construct ( name : sources * : property-set )
+ {
+ IMPORT doxygen : check-tools-targets : $(__name__) :
+ doxygen.check-tools-targets ;
+ return [ property-set.empty ] [ doxygen.check-tools-targets [ project ]
+ ] ;
+ }
+}
+
+
+# Declares a metatarget for collecting version information on different external
+# tools used in this module.
+#
+rule check-tools ( target )
+{
+ freeze-config ;
+ targets.create-metatarget doxygen-check-tools-target-class :
+ [ project.current ] : $(target) ;
+}
+
+
+# User-level rule to generate HTML files or BoostBook XML from a set of headers
+# via Doxygen.
+#
+rule doxygen ( target : sources + : requirements * : default-build * :
+ usage-requirements * )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+ requirements += <format>none ;
+ freeze-config ;
+ local project = [ project.current ] ;
+
+ if $(target:S) = .html
+ {
+ # Build an HTML directory from the sources.
+ local html-location = [ feature.get-values <location> : $(requirements)
+ ] ;
+ local output-dir ;
+ if [ $(project).get build-dir ]
+ {
+ # Explicitly specified build dir. Add html at the end.
+ output-dir = [ path.join [ $(project).build-dir ]
+ $(html-location:E=html) ] ;
+ }
+ else
+ {
+ # Trim 'bin' from implicit build dir, for no other reason than
+ # backward compatibility.
+ output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
+ $(html-location:E=html) ] ;
+ }
+ output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
+ local output-dir-native = [ path.native $(output-dir) ] ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+
+ # The doxygen configuration file.
+ targets.create-typed-target DOXYFILE : $(project) : $(target:S=.tag)
+ : $(sources)
+ : $(requirements)
+ <doxygen:param>GENERATE_HTML=YES
+ <doxygen:param>GENERATE_XML=NO
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target:S=.tag) ;
+
+ # The html directory to generate by running doxygen.
+ targets.create-typed-target DOXYGEN_HTML_MULTIFILE : $(project)
+ : $(target:S=.dir) # Name.
+ : $(target:S=.tag) # Sources.
+ : $(requirements)
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target:S=.dir) ;
+
+ # The redirect html file into the generated html.
+ targets.create-typed-target DOXYGEN_HTML : $(project) : $(target)
+ : $(target:S=.dir) # Sources.
+ : $(requirements) <location>$(output-dir)
+ : $(default-build) ;
+ }
+ else
+ {
+ # Build a BoostBook XML file from the sources.
+ local location-xml = [ feature.get-values <location> : $(requirements) ]
+ ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+ local target-xml = $(target:B=$(target:B)-xml) ;
+
+ # Check whether we need to build images.
+ local images-location = [ feature.get-values <doxygen:xml-imagedir> :
+ $(requirements) ] ;
+ if $(images-location)
+ {
+ # Prepare a metatarget for collecting used external tool version
+ # information. We use only one such metatarget as they always
+ # produce the same files and we do not want to deal with multiple
+ # metatargets having matching names, causing 'ambiguous variants'
+ # errors.
+ if ! $(.check-tools)
+ {
+ # FIXME: Since we have the check-tools target object reference,
+ # see how we can use that instead of having to construct a valid
+ # target reference string for use in <dependency> property
+ # values.
+ local project-id = --doxygen.check-tools-project-- ;
+ local target-id = --doxygen.check-tools-- ;
+ local pm = [ $(project).project-module ] ;
+ project.register-id $(project-id) : $(pm) ;
+ check-tools $(target-id) ;
+ .check-tools = /$(project-id)//$(target-id) ;
+ }
+
+ doxygen $(target).doxygen-xml-images.html : $(sources) :
+ $(requirements)
+ <doxygen.rmdir>on
+ <doxygen:param>QUIET=YES
+ <doxygen:param>WARNINGS=NO
+ <doxygen:param>WARN_IF_UNDOCUMENTED=NO
+ <dependency>$(.check-tools) ;
+ $(project).mark-target-as-explicit $(target).doxygen-xml-images.html
+ ;
+
+ targets.create-typed-target DOXYGEN_XML_IMAGES : $(project)
+ : $(target).doxygen-xml-images # Name.
+ : $(target).doxygen-xml-images.html # Sources.
+ : $(requirements)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target).doxygen-xml-images ;
+
+ if ! [ MATCH (/)$ : $(images-location) ]
+ {
+ images-location = $(images-location)/ ;
+ }
+
+ requirements +=
+ <dependency>$(target).doxygen-xml-images
+ <xsl:param>boost.doxygen.formuladir=$(images-location) ;
+ }
+
+ # The doxygen configuration file.
+ targets.create-typed-target DOXYFILE : $(project) : $(target-xml:S=.tag)
+ : $(sources)
+ : $(requirements)
+ <doxygen:param>GENERATE_HTML=NO
+ <doxygen:param>GENERATE_XML=YES
+ <doxygen:param>XML_OUTPUT=$(target-xml)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target-xml:S=.tag) ;
+
+ # The Doxygen XML directory for the processed source files.
+ targets.create-typed-target DOXYGEN_XML_MULTIFILE : $(project)
+ : $(target-xml:S=.dir) # Name.
+ : $(target-xml:S=.tag) # Sources.
+ : $(requirements)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target-xml:S=.dir) ;
+
+ # The resulting BoostBook file is generated by the processor tool. The
+ # tool can be either xsltproc plus the accompanying XSL scripts, or the
+ # Python doxproc.py script.
+ targets.create-typed-target BOOSTBOOK : $(project) : $(target-xml)
+ : $(target-xml:S=.dir) # Sources.
+ : $(requirements)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target-xml) ;
+
+ stage $(target:S=.xml) # Name.
+ : $(target-xml) # Sources.
+ : $(requirements)
+ <location>$(location-xml:E=.)
+ <name>$(target:S=.xml)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target:S=.xml) ;
+
+ # TODO: See why this alias target is used here instead of simply naming
+ # the previous stage target $(target) and having it specify the alias
+ # target's usage requirements directly.
+ alias $(target) : : $(requirements) : $(default-build) :
+ $(usage-requirements) <dependency>$(target:S=.xml) ;
+ }
+}
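+
+# A minimal usage sketch (target name, headers and parameters are illustrative):
+#
+#   doxygen autodoc.xml
+#       : [ glob include/*.hpp ]
+#       : <doxygen:param>EXTRACT_ALL=YES
+#         <reftitle>"Library Reference"
+#       ;
+#
+# Naming the target with an ".html" suffix would instead produce Doxygen HTML.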
diff --git a/src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile
new file mode 100644
index 000000000..9b969df9c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile
@@ -0,0 +1,3 @@
+INPUT = windows-paths-check.hpp
+GENERATE_HTML = NO
+GENERATE_LATEX = NO
diff --git a/src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp
diff --git a/src/boost/tools/build/src/tools/emscripten.jam b/src/boost/tools/build/src/tools/emscripten.jam
new file mode 100644
index 000000000..c9a0009db
--- /dev/null
+++ b/src/boost/tools/build/src/tools/emscripten.jam
@@ -0,0 +1,113 @@
+# Copyright Rene Rivera 2016
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+import os ;
+import toolset ;
+import common ;
+import gcc ;
+import type ;
+
+feature.feature embind : off on : propagated ;
+feature.feature closure : off on full : propagated ;
+feature.feature link-optimization : off on full : propagated ;
+
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command emscripten
+ : emcc
+ : $(command) ] ;
+
+ # Determine the version
+ if $(command)
+ {
+ local command-string = \"$(command)\" ;
+ command-string = $(command-string:J=" ") ;
+ version ?= [ MATCH "([0-9.]+)"
+ : [ SHELL "$(command-string) --version" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters emscripten
+ : version $(version) ] ;
+
+ common.handle-options emscripten : $(condition) : $(command) : $(options) ;
+}
+
+feature.extend toolset : emscripten ;
+
+toolset.inherit-generators emscripten <toolset>emscripten
+ : gcc
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+toolset.inherit-rules emscripten : gcc ;
+toolset.inherit-flags emscripten : gcc
+ :
+ <optimization>off <optimization>speed <optimization>space
+ <profiling>off <profiling>on
+ <inlining>off <inlining>on <inlining>full
+ <warnings>off <warnings>all <warnings>on
+ <warnings-as-errors>off <warnings-as-errors>on
+ <debug-symbols>off <debug-symbols>on
+ <rtti>off <rtti>on
+ ;
+
+type.set-generated-target-suffix EXE : <toolset>emscripten : "js" ;
+type.set-generated-target-suffix OBJ : <toolset>emscripten : "bc" ;
+type.set-generated-target-suffix STATIC_LIB : <toolset>emscripten : "bc" ;
+
+toolset.flags emscripten.compile OPTIONS <flags> ;
+toolset.flags emscripten.compile OPTIONS <cflags> ;
+toolset.flags emscripten.compile.c++ OPTIONS <cxxflags> ;
+
+toolset.flags emscripten.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags emscripten.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags emscripten.compile OPTIONS <optimization>space : -Oz ;
+toolset.flags emscripten.link OPTIONS <optimization>off : -O0 ;
+toolset.flags emscripten.link OPTIONS <optimization>speed : -O3 ;
+toolset.flags emscripten.link OPTIONS <optimization>space : -O3 ;
+
+toolset.flags emscripten.compile OPTIONS <profiling>on : --profiling-funcs ;
+
+toolset.flags emscripten.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags emscripten.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags emscripten.compile OPTIONS <inlining>full : -Wno-inline ;
+
+toolset.flags emscripten.compile OPTIONS <warnings>off : -w ;
+toolset.flags emscripten.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags emscripten.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags emscripten.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags emscripten OPTIONS <debug-symbols>off : -g0 ;
+toolset.flags emscripten OPTIONS <debug-symbols>on : -g4 -s DEMANGLE_SUPPORT=1 ;
+toolset.flags emscripten OPTIONS <rtti>off : -fno-rtti ;
+
+toolset.flags emscripten.link OPTIONS <embind>on : --bind ;
+toolset.flags emscripten.link OPTIONS <closure>on : --closure 1 ;
+toolset.flags emscripten.link OPTIONS <closure>full : --closure 2 ;
+toolset.flags emscripten.link OPTIONS <link-optimization>off : --llvm-lto 0 ;
+toolset.flags emscripten.link OPTIONS <link-optimization>on : --llvm-lto 1 ;
+toolset.flags emscripten.link OPTIONS <link-optimization>full : --llvm-lto 3 ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions archive
+{
+ "$(CONFIG_COMMAND)" $(AROPTIONS) -o "$(<)" "$(>)"
+}
+
+toolset.flags emscripten.link USER_OPTIONS <linkflags> ;
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS)
+}
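+
+# Example configuration (the emcc path below is a placeholder for your SDK):
+#
+#   using emscripten : : /path/to/emsdk/upstream/emscripten/emcc ;
+#
+# after which targets can be built with: b2 toolset=emscripten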
diff --git a/src/boost/tools/build/src/tools/features/__init_features__.jam b/src/boost/tools/build/src/tools/features/__init_features__.jam
new file mode 100644
index 000000000..aedf3e1dc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/__init_features__.jam
@@ -0,0 +1,23 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Here we automatically load any "feature" modules in this directory.
+
+local key = feature ;
+
+import os path modules ;
+
+.this-module's-file = [ modules.binding $(__name__) ] ;
+.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ;
+.to-load-jamfiles = [ path.glob $(.this-module's-dir) : *-$(key).jam ] ;
+.to-load-modules = [ MATCH ^(.*)\.jam$ : $(.to-load-jamfiles) ] ;
+
+# A loop over all matched modules in this directory
+for local m in $(.to-load-modules)
+{
+ m = [ path.basename $(m) ] ;
+ m = $(key)s/$(m) ;
+ import $(m) ;
+}
diff --git a/src/boost/tools/build/src/tools/features/address-model-feature.jam b/src/boost/tools/build/src/tools/features/address-model-feature.jam
new file mode 100644
index 000000000..479c89298
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/address-model-feature.jam
@@ -0,0 +1,22 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.address-model]]`address-model`::
+*Allowed values:* `32`, `64`.
++
+Specifies whether 32-bit or 64-bit code should be generated by the compiler.
+Whether this feature works depends on the compiler used, its version, how the
+compiler is configured, and the values of the `architecture` and
+`instruction-set` features.
+Please see the section <<C++ Compilers>> for details.
+
+|# # end::doc[]
+
+feature.feature address-model
+ : 16 32 64 32_64
+ : propagated optional ;
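+
+# Illustrative use: request a 64-bit build for a single target with
+#   exe app : app.cpp : <address-model>64 ;
+# or for the whole build with "b2 address-model=64" on the command line.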
diff --git a/src/boost/tools/build/src/tools/features/allow-feature.jam b/src/boost/tools/build/src/tools/features/allow-feature.jam
new file mode 100644
index 000000000..bcee55796
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/allow-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.allow]]`allow`::
+This feature is used to allow specific generators to run. For example, Qt tools
+can only be invoked when the Qt library is used. In that case, `<allow>qt` will
+be in the usage requirements of the library.
+
+|# # end::doc[]
+
+feature.feature allow
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/architecture-feature.jam b/src/boost/tools/build/src/tools/features/architecture-feature.jam
new file mode 100644
index 000000000..3e3ca382f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/architecture-feature.jam
@@ -0,0 +1,55 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.architecture]]`architecture`::
+*Allowed values:* `x86`, `ia64`, `sparc`, `power`, `mips1`, `mips2`,
+`mips3`, `mips4`, `mips32`, `mips32r2`, `mips64`, `parisc`, `arm`,
+`riscv`, `s390x`, `combined`, `combined-x86-power`.
++
+Specifies the general processor family to generate code for.
+
+|# # end::doc[]
+
+feature.feature architecture
+ :
+ # x86 and x86-64
+ x86
+
+ # ia64
+ ia64
+
+ # Sparc
+ sparc
+
+ # RS/6000 & PowerPC
+ power
+
+ # MIPS/SGI
+ mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
+
+ # HP/PA-RISC
+ parisc
+
+ # Advanced RISC Machines
+ arm
+
+ # RISC-V
+ riscv
+
+ # z Systems (aka s390x)
+ s390x
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ combined
+ combined-x86-power
+ :
+ propagated optional
+ ;
diff --git a/src/boost/tools/build/src/tools/features/archiveflags-feature.jam b/src/boost/tools/build/src/tools/features/archiveflags-feature.jam
new file mode 100644
index 000000000..c645d8c7a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/archiveflags-feature.jam
@@ -0,0 +1,18 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.archiveflags]]`archiveflags`::
+The value of this feature is passed without modification to the archiver tool
+when creating static libraries.
+
+|# # end::doc[]
+
+feature.feature archiveflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/asmflags-feature.jam b/src/boost/tools/build/src/tools/features/asmflags-feature.jam
new file mode 100644
index 000000000..00626958e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/asmflags-feature.jam
@@ -0,0 +1,17 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.asmflags]]`asmflags`::
+The value of this feature is passed without modification to the assembler.
+
+|# # end::doc[]
+
+feature.feature asmflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/build-feature.jam b/src/boost/tools/build/src/tools/features/build-feature.jam
new file mode 100644
index 000000000..64c7713c8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/build-feature.jam
@@ -0,0 +1,22 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.build]]`build`::
+*Allowed values:* `no`
++
+Used to conditionally disable the build of a target. If `<build>no` is in the
+properties when building a target, the build of that target is skipped.
+Combined with conditional requirements, this allows you to skip building a
+target in configurations where the build is known to fail.
+
+|# # end::doc[]
+
+feature.feature build
+ : yes no
+ : optional ;
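+
+# Usage sketch (target name and condition are hypothetical): skip a target in
+# configurations known to fail by combining a conditional requirement with
+# <build>no.
+#
+#   exe app : app.cpp : <target-os>windows:<build>no ;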
diff --git a/src/boost/tools/build/src/tools/features/cflags-feature.jam b/src/boost/tools/build/src/tools/features/cflags-feature.jam
new file mode 100644
index 000000000..4586317f4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/cflags-feature.jam
@@ -0,0 +1,21 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.cflags]]`cflags`; `cxxflags`; `linkflags`::
+The value of these features is passed without modification to the corresponding
+tools. For `cflags` that is both the C and {CPP} compilers, for `cxxflags` that
+is the {CPP} compiler, and for `linkflags` that is the linker. The features are
+handy when you are trying to do something special that cannot be achieved by a
+higher-level feature in B2.
+
+|# # end::doc[]
+
+feature.feature cflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/conditional-feature.jam b/src/boost/tools/build/src/tools/features/conditional-feature.jam
new file mode 100644
index 000000000..368ad232f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/conditional-feature.jam
@@ -0,0 +1,31 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.conditional]]`conditional`::
+Used to introduce indirect conditional requirements. The value should have the
+form:
++
+----
+@rulename
+----
++
+where _rulename_ should be the name of a rule with the following signature:
++
+----
+rule rulename ( properties * )
+----
++
+The rule will be called for each target with its properties and should return
+any additional properties. See also section <<Requirements>> for an example.
+
+|# # end::doc[]
+
+feature.feature conditional
+ :
+ : incidental free ;
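+
+# Usage sketch (target and rule names are hypothetical): attach an indirect
+# conditional to a target; the rule receives the build properties and returns
+# extra properties.
+#
+#   exe app : app.cpp : <conditional>@app-requirements ;
+#
+#   rule app-requirements ( properties * )
+#   {
+#       local result ;
+#       if <threading>multi in $(properties)
+#       {
+#           result += <define>APP_THREADED ;
+#       }
+#       return $(result) ;
+#   }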
diff --git a/src/boost/tools/build/src/tools/features/coverage-feature.jam b/src/boost/tools/build/src/tools/features/coverage-feature.jam
new file mode 100644
index 000000000..517c6d5b9
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/coverage-feature.jam
@@ -0,0 +1,22 @@
+# Copyright 2019 Rene Rivera
+# Copyright 2019 Hans Dembinski
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.coverage]]`coverage`::
+*Allowed values:* `off`, `on`.
++
+Enables code instrumentation to generate coverage data during execution.
+
+|# # end::doc[]
+
+feature.feature coverage
+ :
+ off # Disable coverage generation for the tool (default).
+ on # Enable coverage generation for the tool.
+ : incidental propagated ;
diff --git a/src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam b/src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam
new file mode 100644
index 000000000..825b03a1c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam
@@ -0,0 +1,39 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+import numbers ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.cpp-template-depth]]`c++-template-depth`::
+*Allowed values:* Any positive integer.
++
+Allows configuring a {CPP} compiler with the maximal template instantiation
+depth parameter. Specific toolsets may or may not provide support for this
+feature depending on whether their compilers provide a corresponding
+command-line option.
++
+NOTE: Due to some internal details in the current B2 implementation it
+is not possible to have features whose valid values are all positive integers.
+As a workaround, a large set of allowed values has been defined for this
+feature and, if a different one is needed, the user can easily add it by
+calling the `feature.extend` rule.
+
+|# # end::doc[]
+
+# TODO: This should be upgraded as soon as Boost Build adds support for custom
+# validated feature values or at least features allowing any positive integral
+# value. See the related Boost Build trac ticket #194.
+
+feature.feature c++-template-depth
+ :
+ [ numbers.range 64 1024 : 64 ]
+ [ numbers.range 20 1000 : 10 ]
+ # Maximum template instantiation depth guaranteed for ANSI/ISO C++
+ # conforming programs.
+ 17
+ :
+ incidental optional propagated ;
diff --git a/src/boost/tools/build/src/tools/features/cxxabi-feature.jam b/src/boost/tools/build/src/tools/features/cxxabi-feature.jam
new file mode 100644
index 000000000..9b6cd4fd4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/cxxabi-feature.jam
@@ -0,0 +1,18 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.cxxabi]]`c++abi`::
+Selects a specific variant of the C++ ABI if the compiler supports several.
+
+
+|# # end::doc[]
+
+feature.feature c++abi
+ :
+ : propagated optional ;
diff --git a/src/boost/tools/build/src/tools/features/cxxflags-feature.jam b/src/boost/tools/build/src/tools/features/cxxflags-feature.jam
new file mode 100644
index 000000000..6db7a7ce6
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/cxxflags-feature.jam
@@ -0,0 +1,17 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.cxxflags]]`cxxflags`::
+See <<bbv2.builtin.features.cflags,`<cflags>`>>.
+
+|# # end::doc[]
+
+feature.feature cxxflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/cxxstd-feature.jam b/src/boost/tools/build/src/tools/features/cxxstd-feature.jam
new file mode 100644
index 000000000..dfddf5bb7
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/cxxstd-feature.jam
@@ -0,0 +1,50 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.cxxstd]]`cxxstd`::
+*Allowed values*: `98`, `03`, `0x`, `11`, `1y`, `14`, `1z`, `17`, `2a`,
+`latest`.
++
+Specifies the version of the C++ Standard Language to build with. All the
+official versions of the standard since "98" are included. It is also possible
+to specify the experimental, work-in-progress `latest` version. Some compilers
+provided intermediate versions for the experimental drafts leading up to a
+released standard version; those are included following the GNU nomenclature
+as `0x`, `1y`, `1z`, and `2a`. Depending on the compiler, `latest` maps to one
+of those.
+
+NOTE: This is an `optional` feature. Hence, when not specified, the compiler's
+default behaviour is used.
+
+NOTE: Please consult the toolset-specific documentation to see which `cxxstd`
+values are supported.
+
+|# # end::doc[]
+
+feature.feature cxxstd
+ : 98 03 0x 11 1y 14 1z 17 2a latest
+ : optional composite propagated ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.cxxstd-dialect]]`cxxstd-dialect`::
+*Subfeature of* `cxxstd`
++
+*Allowed values*: `iso`, `gnu`, `ms`.
++
+Indicates whether a non-standard dialect should be used. These dialects
+typically add extensions or platform-specific functionality. Not specifying
+the dialect defaults to `iso`, which attempts to use ISO C++ Standard
+conformance to the best of the compiler's ability.
+
+|# # end::doc[]
+
+feature.subfeature cxxstd : dialect
+ : iso gnu ms
+ : composite propagated ;
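+
+# Usage sketch (target name is hypothetical): request a standard version in a
+# target's requirements or on the command line.
+#
+#   exe app : app.cpp : <cxxstd>17 ;
+#
+#   b2 cxxstd=17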
diff --git a/src/boost/tools/build/src/tools/features/debug-feature.jam b/src/boost/tools/build/src/tools/features/debug-feature.jam
new file mode 100644
index 000000000..f98177e39
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/debug-feature.jam
@@ -0,0 +1,34 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.debug-symbols]]`debug-symbols`::
+*Allowed values:* `on`, `off`.
++
+Specifies if produced object files, executables, and libraries should include
+debug information. Typically, the value of this feature is implicitly set by
+the `variant` feature, but it can be explicitly specified by the user. The most
+common usage is to build the release variant with debugging information.
+
+|# # end::doc[]
+
+feature.feature debug-symbols
+ : on off
+ : propagated ;
+
+#| tag::prof-doc[]
+
+[[bbv2.builtin.features.profiling]]`profiling`::
+*Allowed values:* `off`, `on`.
++
+Enables generation of extra code to write profile information.
+|# # end::prof-doc[]
+
+feature.feature profiling
+ : off on
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/define-feature.jam b/src/boost/tools/build/src/tools/features/define-feature.jam
new file mode 100644
index 000000000..680afdd94
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/define-feature.jam
@@ -0,0 +1,30 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.define]]`define`::
+Specifies a preprocessor symbol that should be defined on the command line.
+You may either specify just the symbol, which will be defined without any
+value, or both the symbol and the value, separated by an equals sign.
+
+|# # end::doc[]
+
+feature.feature define
+ :
+ : free ;
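+
+# Usage sketch (target and symbol names are hypothetical): define a bare
+# symbol and a symbol with a value.
+#
+#   exe app : app.cpp : <define>APP_USE_THREADS <define>APP_VERSION=2 ;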
+
+#| tag::undef-doc[]
+
+[[bbv2.builtin.features.undef]]`undef`::
+Specifies a preprocessor symbol to undefine.
+
+|# # end::undef-doc[]
+
+feature.feature undef
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/dependency-feature.jam b/src/boost/tools/build/src/tools/features/dependency-feature.jam
new file mode 100644
index 000000000..d866fb21c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/dependency-feature.jam
@@ -0,0 +1,62 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# The following features are incidental since they have no effect on built
+# products. Not making them incidental will result in problems in corner cases,
+# e.g.:
+#
+# unit-test a : a.cpp : <use>b ;
+# lib b : a.cpp b ;
+#
+# Here, if <use> is not incidental, we would decide we have two targets for
+# a.obj with different properties and complain about it.
+#
+# Note that making a feature incidental does not mean it is ignored. It may be
+# ignored when creating a virtual target, but the rest of the build process will
+# still use it.
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.dependency]]`dependency`::
+Introduces a dependency on the target named by the value of this feature (so it
+will be brought up-to-date whenever the target being declared is). The
+dependency is not used in any other way.
+
+|# # end::doc[]
+
+feature.feature dependency
+ :
+ : free dependency incidental ;
+
+#| tag::impl-doc[]
+
+[[bbv2.builtin.features.implicit-dependency]]`implicit-dependency`::
+Indicates that the target named by the value of this feature may produce files
+that are included by the sources of the target being declared. See the section
+<<Generated headers>> for more information.
+
+|# # end::impl-doc[]
+
+feature.feature implicit-dependency
+ :
+ : free dependency incidental ;
+
+#| tag::use-doc[]
+
+[[bbv2.builtin.features.use]]`use`::
+Introduces a dependency on the target named by the value of this feature (so it
+will be brought up-to-date whenever the target being declared is), and adds its
+usage requirements to the build properties of the target being declared. The
+dependency is not used in any other way. The primary use case is when you want
+the usage requirements (such as `#include` paths) of some library to be
+applied, but do not want to link to it.
+
+|# # end::use-doc[]
+
+feature.feature use
+ :
+ : free dependency incidental ;
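+
+# Usage sketch (target names are hypothetical): pick up the usage requirements
+# of a library (here its include path) without linking to it.
+#
+#   lib utils : utils.cpp : : : <include>include ;
+#   exe tool : tool.cpp : <use>utils ;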
diff --git a/src/boost/tools/build/src/tools/features/dll-feature.jam b/src/boost/tools/build/src/tools/features/dll-feature.jam
new file mode 100644
index 000000000..3c7ed0d65
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/dll-feature.jam
@@ -0,0 +1,73 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.dll-path]]`dll-path`::
+Specifies an additional directory where the system should look for shared
+libraries when the executable or shared library is run. This feature only
+affects Unix compilers. Please see
+<<Why are the `dll-path` and `hardcode-dll-paths` properties useful?>>
+in <<Frequently Asked Questions>> for details.
+
+|# # end::doc[]
+
+feature.feature dll-path
+ :
+ : free path ;
+
+#| tag::hardcode-doc[]
+
+[[bbv2.builtin.features.hardcode-dll-paths]]`hardcode-dll-paths`::
+*Allowed values:* `true`, `false`.
++
+Controls automatic generation of dll-path properties.
++
+This property is specific to Unix systems. If an executable is built with
+`<hardcode-dll-paths>true`, the generated binary will contain the list of all
+the paths to the used shared libraries. As a result, the executable can be
+run without changing system paths to shared libraries or installing the
+libraries to system paths. This is very convenient during development. Please
+see the <<bbv2.faq.dll-path,FAQ entry>> for details. Note that on Mac OSX,
+the paths are unconditionally hardcoded by the linker, and it is not possible
+to disable that behavior.
+
+|# # end::hardcode-doc[]
+
+feature.feature hardcode-dll-paths
+ : true false
+ : incidental ;
+
+# An internal feature that holds the paths of all dependency shared libraries.
+# On Windows, it is needed so that we can add all those paths to PATH when
+# running applications. On Linux, it is needed to add proper -rpath-link command
+# line options.
+feature.feature xdll-path
+ :
+ : free path ;
+
+#| tag::def-doc[]
+
+[[bbv2.builtin.features.def-file]]`def-file`::
+Provides a means to specify a def-file for Windows DLLs.
+
+|# # end::def-doc[]
+
+feature.feature def-file
+ :
+ : free dependency ;
+
+#| tag::suppress-doc[]
+
+[[bbv2.builtin.features.suppress-import-lib]]`suppress-import-lib`::
+Suppresses creation of import library by the linker.
+
+|# # end::suppress-doc[]
+
+feature.feature suppress-import-lib
+ : false true
+ : incidental ;
diff --git a/src/boost/tools/build/src/tools/features/exception-feature.jam b/src/boost/tools/build/src/tools/features/exception-feature.jam
new file mode 100644
index 000000000..9db3834b2
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/exception-feature.jam
@@ -0,0 +1,47 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# TODO: Documentation.
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.exception-handling]]`exception-handling`::
+*Allowed values:* `on`, `off`.
++
+Controls whether exception handling is enabled; `off` disables exceptions.
+
+|# # end::doc[]
+
+feature.feature exception-handling
+ : on off
+ : propagated ;
+
+#| tag::asynch-doc[]
+
+[[bbv2.builtin.features.asynch-exceptions]]`asynch-exceptions`::
+*Allowed values:* `off`, `on`.
++
+Selects whether there is support for asynchronous EH (e.g. catching SEGVs).
+
+|# # end::asynch-doc[]
+
+feature.feature asynch-exceptions
+ : off on
+ : propagated ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.extern-c-nothrow]]`extern-c-nothrow`::
+*Allowed values:* `off`, `on`.
++
+Selects whether all `extern "C"` functions are considered `nothrow` by default.
+
+|# # end::doc[]
+
+feature.feature extern-c-nothrow
+ : off on
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/fflags-feature.jam b/src/boost/tools/build/src/tools/features/fflags-feature.jam
new file mode 100644
index 000000000..fe89d6e63
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/fflags-feature.jam
@@ -0,0 +1,18 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.fflags]]`fflags`::
+The value of this feature is passed without modification to the tool when
+compiling Fortran sources.
+
+|# # end::doc[]
+
+feature.feature fflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/file-feature.jam b/src/boost/tools/build/src/tools/features/file-feature.jam
new file mode 100644
index 000000000..a16d8d754
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/file-feature.jam
@@ -0,0 +1,18 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.file]]`file`::
+When used in requirements of a prebuilt library target this feature specifies
+the path to the library file. See <<Prebuilt targets>> for examples.
+
+|# # end::doc[]
+
+feature.feature file
+ :
+ : free dependency incidental ;
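+
+# Usage sketch (library name and path are hypothetical): declare a prebuilt
+# library by pointing directly at its file.
+#
+#   lib compress : : <file>/opt/libs/libcompress.a ;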
diff --git a/src/boost/tools/build/src/tools/features/find-lib-feature.jam b/src/boost/tools/build/src/tools/features/find-lib-feature.jam
new file mode 100644
index 000000000..e00f40397
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/find-lib-feature.jam
@@ -0,0 +1,42 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.find-shared-library]]`find-shared-library`::
+Adds a shared library to link to. Usually link:#bbv2.tasks.libraries[`lib`]
+targets should be preferred over using this feature.
+
+|# # end::doc[]
+
+feature.feature find-shared-library
+ :
+ : free ; #order-sensitive ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.find-static-library]]`find-static-library`::
+Adds a static library to link to. Usually link:#bbv2.tasks.libraries[`lib`]
+targets should be preferred over using this feature.
+
+|# # end::doc[]
+
+feature.feature find-static-library
+ :
+ : free ; #order-sensitive ;
+
+#| tag::path-doc[]
+
+[[bbv2.builtin.features.library-path]]`library-path`::
+Adds to the list of directories which will be used by the linker to search for
+libraries.
+
+|# # end::path-doc[]
+
+feature.feature library-path
+ :
+ : free path ; #order-sensitive ;
diff --git a/src/boost/tools/build/src/tools/features/flags-feature.jam b/src/boost/tools/build/src/tools/features/flags-feature.jam
new file mode 100644
index 000000000..a0c416609
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/flags-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.flags]]`flags`::
+This feature is used for generic, i.e. non-language specific, flags for tools.
+The value of this feature is passed without modification to the tool that will
+build the target.
+
+|# # end::doc[]
+
+feature.feature flags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/include-feature.jam b/src/boost/tools/build/src/tools/features/include-feature.jam
new file mode 100644
index 000000000..25d8ad8c4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/include-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.include]]`include`::
+Specifies an additional include path that is to be passed to C and {CPP}
+compilers.
+
+|# # end::doc[]
+
+feature.feature "include"
+ :
+ : free path #order-sensitive
+ ;
diff --git a/src/boost/tools/build/src/tools/features/instruction-set-feature.jam b/src/boost/tools/build/src/tools/features/instruction-set-feature.jam
new file mode 100644
index 000000000..d68c29afc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/instruction-set-feature.jam
@@ -0,0 +1,64 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.instruction-set]]`instruction-set`::
+*Allowed values:* depends on the used toolset.
++
+Specifies for which specific instruction set the code should be generated. The
+code in general might not run on processors with older/different instruction
+sets.
++
+While B2 allows a large set of possible values for this feature,
+whether a given value works depends on which compiler you use. Please see
+the section <<C++ Compilers>> for details.
+
+|# # end::doc[]
+
+feature.feature instruction-set
+ :
+ # x86 and x86-64
+ native i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
+ pentium3m pentium-m pentium4 pentium4m prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell broadwell skylake skylake-avx512 cannonlake
+ icelake
+ k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp athlon-mp k8 opteron athlon64 athlon-fx
+ k8-sse3 opteron-sse3 athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 bdver4 btver1
+ btver2 znver1 winchip-c6 winchip2 c3 c3-2 atom
+
+ # ia64
+ itanium itanium1 merced itanium2 mckinley
+
+ # Sparc
+ v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
+ sparclet tsc701 v9 ultrasparc ultrasparc3
+
+ # RS/6000 & PowerPC
+ 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
+ 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
+ power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
+
+ # MIPS
+ 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
+ r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
+ vr5000 vr5400 vr5500
+
+ # HP/PA-RISC
+ 700 7100 7100lc 7200 7300 8000
+
+ # Advanced RISC Machines
+ armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
+ armv7 armv7s
+
+ # z Systems (aka s390x)
+ z196 zEC12 z13 z14 z15
+
+ :
+ propagated optional
+ ;
diff --git a/src/boost/tools/build/src/tools/features/internal-feature.jam b/src/boost/tools/build/src/tools/features/internal-feature.jam
new file mode 100644
index 000000000..5c1d88e78
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/internal-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# TODO: Documentation.
+
+import feature ;
+
+# Internal feature.
+feature.feature library-file
+ :
+ : free dependency ;
+
+# Internal feature used to store the name of a bjam action to call when building
+# a target.
+feature.feature action
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/library-feature.jam b/src/boost/tools/build/src/tools/features/library-feature.jam
new file mode 100644
index 000000000..86772c0e9
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/library-feature.jam
@@ -0,0 +1,22 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.library]]`library`::
+This feature is almost equivalent to the
+<<bbv2.builtin.features.source,`<source>`>> feature, except that it takes
+effect only for linking. When you want to link all targets in a Jamfile to a
+certain library, the `<library>` feature is preferred over `<source>X` -- the
+latter will add the library to all targets, even those that have nothing to do
+with libraries.
+
+|# # end::doc[]
+
+feature.feature library
+ :
+ : free dependency incidental ;
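+
+# Usage sketch (project and target names are hypothetical): link every target
+# in the current project to one library via the project requirements.
+#
+#   project : requirements <library>/libs/utils//utils ;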
diff --git a/src/boost/tools/build/src/tools/features/link-feature.jam b/src/boost/tools/build/src/tools/features/link-feature.jam
new file mode 100644
index 000000000..f697341a8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/link-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.link]]`link`::
+*Allowed values:* `shared`, `static`
++
+Controls how libraries are built.
+
+|# # end::doc[]
+
+feature.feature link
+ : shared static
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/linkflags-feature.jam b/src/boost/tools/build/src/tools/features/linkflags-feature.jam
new file mode 100644
index 000000000..4e157f399
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/linkflags-feature.jam
@@ -0,0 +1,17 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.linkflags]]`linkflags`::
+See <<bbv2.builtin.features.cflags,`<cflags>`>>.
+
+|# # end::doc[]
+
+feature.feature linkflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/local-visibility-feature.jam b/src/boost/tools/build/src/tools/features/local-visibility-feature.jam
new file mode 100644
index 000000000..d1792fd06
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/local-visibility-feature.jam
@@ -0,0 +1,27 @@
+# Copyright 2018 Andrey Semashev
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.local-visibility]]`local-visibility`::
+*Allowed values:* `global`, `protected`, `hidden`.
++
+This feature has the same effect as the
+<<bbv2.builtin.features.visibility,`visibility`>> feature but is intended
+to be used by targets that require a particular symbol visibility. Unlike the
+`visibility` feature, `local-visibility` is not inherited by the target
+dependencies and only affects the target to which it is applied.
++
+The `local-visibility` feature supports the same values with the same meaning
+as the `visibility` feature. By default, if `local-visibility` is not specified
+for a target, the value of the `visibility` feature is used.
+
+|# # end::doc[]
+
+feature.feature local-visibility
+ : global protected hidden
+ : optional ;
diff --git a/src/boost/tools/build/src/tools/features/location-feature.jam b/src/boost/tools/build/src/tools/features/location-feature.jam
new file mode 100644
index 000000000..1581d3392
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/location-feature.jam
@@ -0,0 +1,18 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.location]]`location`::
+Specifies the build directory for a target. The feature is used primarily with
+the <<bbv2.tasks.installing,`<install>`>> rule.
+
+|# # end::doc[]
+
+feature.feature location
+ :
+ : free path ;
diff --git a/src/boost/tools/build/src/tools/features/location-prefix-feature.jam b/src/boost/tools/build/src/tools/features/location-prefix-feature.jam
new file mode 100644
index 000000000..11582fd20
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/location-prefix-feature.jam
@@ -0,0 +1,18 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.location-prefix]]`location-prefix`::
+Sets the build directory for a target as the project’s build directory prefixed
+with the value of this feature. See section <<Target Paths>> for an example.
+
+|# # end::doc[]
+
+feature.feature location-prefix
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/lto-feature.jam b/src/boost/tools/build/src/tools/features/lto-feature.jam
new file mode 100644
index 000000000..99ca7bdcc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/lto-feature.jam
@@ -0,0 +1,46 @@
+# Copyright 2019 Dmitry Arkhipov
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.lto]]`lto`::
+*Allowed values:* `on`.
++
+Enables link time optimizations (also known as interprocedural optimizations or
+whole-program optimizations). Currently supported toolsets are <<GNU {CPP}>>,
+clang and <<Microsoft Visual {CPP}>>. The feature is optional.
+
+|# # end::doc[]
+
+feature.feature lto
+ : on
+ : optional propagated ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.lto-mode]]`lto-mode`::
+*Subfeature of* `lto`
++
+*Allowed values:* `full`, `thin`, `fat`.
++
+Specifies the type of LTO to use.
++
+`full`::: Use monolithic LTO: when linking, all input is merged into a single
+ module.
+`thin`::: Use clang's ThinLTO: each compiled file contains a summary of the
+ module, and these summaries are merged into a single index. This avoids
+ merging all modules together, which greatly reduces linking time.
+`fat`::: Produce gcc's fat LTO objects: compiled files contain both the
+ intermediate language suitable for LTO and object code suitable for regular
+ linking.
+
+|# # end::doc[]
+
+feature.subfeature lto
+ : mode
+ : full thin fat
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/name-feature.jam b/src/boost/tools/build/src/tools/features/name-feature.jam
new file mode 100644
index 000000000..e134fc422
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/name-feature.jam
@@ -0,0 +1,22 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.name]]`name`::
+When used in requirements of a prebuilt library target this feature specifies
+the name of the library (the name of the library file without any
+platform-specific suffixes or prefixes). See <<Prebuilt targets>> for examples.
++
+When used in requirements of an `<install>` target it specifies the name of the
+target file.
+
+|# # end::doc[]
+
+feature.feature name
+ :
+ : free ;
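+
+# Usage sketch (library name and search path are hypothetical): declare a
+# prebuilt library found by name in a given directory.
+#
+#   lib z : : <name>z <search>/usr/local/lib ;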
diff --git a/src/boost/tools/build/src/tools/features/objcflags-feature.jam b/src/boost/tools/build/src/tools/features/objcflags-feature.jam
new file mode 100644
index 000000000..b69c6b661
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/objcflags-feature.jam
@@ -0,0 +1,32 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.mflags]]`mflags`::
+The value of this feature is passed without modification to the tool when
+compiling Objective C sources.
+
+
+|# # end::doc[]
+
+feature.feature mflags
+ :
+ : free ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.mmflags]]`mmflags`::
+The value of this feature is passed without modification to the tool when
+compiling Objective {CPP} sources.
+
+
+|# # end::doc[]
+
+feature.feature mmflags
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/optimization-feature.jam b/src/boost/tools/build/src/tools/features/optimization-feature.jam
new file mode 100644
index 000000000..f02183622
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/optimization-feature.jam
@@ -0,0 +1,46 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.optimization]]`optimization`::
+*Allowed values:* `off`, `speed`, `space`.
++
+Enables optimization. `speed` optimizes for faster code, `space` optimizes for
+a smaller binary.
+
+|# # end::doc[]
+
+feature.feature optimization
+ : off speed space
+ : propagated ;
+
+#| tag::inline-doc[]
+
+[[bbv2.builtin.features.inlining]]`inlining`::
+*Allowed values:* `off`, `on`, `full`.
++
+Enables inlining.
+
+|# # end::inline-doc[]
+
+feature.feature inlining
+ : off on full
+ : propagated ;
+
+#| tag::vector-doc[]
+
+[[bbv2.builtin.features.vectorize]]`vectorize`::
+*Allowed values:* `off`, `on`, `full`.
++
+Enables vectorization.
+
+|# # end::vector-doc[]
+
+feature.feature vectorize
+ : off on full
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/os-feature.jam b/src/boost/tools/build/src/tools/features/os-feature.jam
new file mode 100644
index 000000000..98407e16a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/os-feature.jam
@@ -0,0 +1,95 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+import modules ;
+import os ;
+
+.os-names =
+ aix android appletv bsd cygwin darwin freebsd haiku hpux iphone linux
+ netbsd openbsd osf qnx qnxnto sgi solaris unix unixware windows vms vxworks
+ freertos
+
+ # Not actually an OS -- used for targeting bare metal where object
+ # format is ELF. This catches both -elf and -eabi gcc targets as well
+ # as other compilers targeting ELF. It is not clear how often we need
+ # the 'elf' key as opposed to other bare metal targets, but let us
+ # stick with gcc naming.
+ elf
+ ;
+
+# Feature used to determine which OS we're on. New <target-os> and <host-os>
+# features should be used instead.
+local os = [ modules.peek : OS ] ;
+feature.feature os : $(os) : propagated link-incompatible ;
+
+# Translates from bjam current OS to the os tags used in host-os and
+# target-os, i.e. returns the running host-os.
+#
+local rule default-host-os ( )
+{
+ local host-os ;
+ if [ os.name ] in $(.os-names:U)
+ {
+ host-os = [ os.name ] ;
+ }
+ else
+ {
+ switch [ os.name ]
+ {
+ case NT : host-os = windows ;
+ case AS400 : host-os = unix ;
+ case MINGW : host-os = windows ;
+ case BSDI : host-os = bsd ;
+ case COHERENT : host-os = unix ;
+ case DRAGONFLYBSD : host-os = bsd ;
+ case IRIX : host-os = sgi ;
+ case HAIKU : host-os = haiku ;
+ case MACOSX : host-os = darwin ;
+ case KFREEBSD : host-os = freebsd ;
+ case LINUX : host-os = linux ;
+ case VMS : host-os = vms ;
+ case SUNOS :
+ ECHO
+ "SunOS is not a supported operating system."
+ "We believe last version of SunOS was released in 1992, "
+ "so if you get this message, something is very wrong with "
+ "configuration logic. Please report this as a bug. " ;
+ EXIT ;
+ case * : host-os = unix ;
+ }
+ }
+ return $(host-os:L) ;
+}
+
+
+# The two OS features define a known set of abstract OS names. The host-os is
+# the OS under which bjam is running. Even though this should really be a fixed
+# property we need to list all the values to prevent unknown value errors. Both
+# set the default value to the current OS to account for the default use case of
+# building on the target OS.
+feature.feature host-os : $(.os-names) ;
+feature.set-default host-os : [ default-host-os ] ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.target-os]]`target-os`::
+*Allowed values:* `aix`, `android`, `appletv`, `bsd`, `cygwin`, `darwin`,
+`freebsd`, `haiku`, `hpux`, `iphone`, `linux`, `netbsd`, `openbsd`, `osf`,
+`qnx`, `qnxnto`, `sgi`, `solaris`, `unix`, `unixware`, `windows`, `vms`,
+`vxworks`, `freertos`.
++
+Specifies the operating system for which the code is to be generated. The
+compiler you use should be the compiler for that operating system. This option
+causes B2 to use naming conventions suitable for that operating
+system and to adjust the build process accordingly. For example, with gcc, it
+controls whether import libraries are produced for shared libraries.
++
+See the section <<Cross-compilation>> for details of cross-compilation.
+
+|# # end::doc[]
+
+feature.feature target-os : $(.os-names) : propagated link-incompatible ;
+feature.set-default target-os : [ default-host-os ] ;
diff --git a/src/boost/tools/build/src/tools/features/relevant-feature.jam b/src/boost/tools/build/src/tools/features/relevant-feature.jam
new file mode 100644
index 000000000..94ba69965
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/relevant-feature.jam
@@ -0,0 +1,48 @@
+# Copyright 2017 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.relevant]]`relevant`::
+*Allowed values:* the name of any feature.
++
+Indicates which other features are relevant for a given target. It is usually
+not necessary to manage it explicitly, as B2 can deduce it in most
+cases. Features which are not relevant will not affect target paths, and will
+not cause conflicts.
++
+* A feature will be considered relevant if any of the following are true
++
+** It is referenced by `toolset.flags` or `toolset.uses-features`
+** It is used by the requirements of a generator
+** It is a sub-feature of a relevant feature
+** It has a sub-feature which is relevant
+** It is a composite feature, and any composed feature is relevant
+** It affects target alternative selection for a main target
+** It is a propagated feature and is relevant for any dependency
+** It is relevant for any dependency created by the same main target
+** It is used in the condition of a conditional property and the corresponding
+ value is relevant
+** It is explicitly named as relevant
++
+* Relevant features cannot be automatically deduced in the following cases:
++
+** Indirect conditionals. Solution: return properties of the form
+`<relevant>result-feature:<relevant>condition-feature`
++
+NOTE: This isn't really a conditional, although for most purposes it functions
+like one. In particular, it does not support multiple comma-separated elements
+in the condition, and it does work correctly even in contexts where conditional
+properties are not allowed.
+** Action rules that read properties. Solution: add toolset.uses-features to
+ tell B2 that the feature is actually used.
+** Generators and targets that manipulate property-sets directly. Solution:
+ set <relevant> manually.
+
+|# # end::doc[]
+
+feature.feature relevant : : incidental free ;
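+
+# Usage sketch (rule name and flags are hypothetical): an indirect conditional
+# that declares its returned cxxflags as depending on the toolset, using the
+# <relevant>result-feature:<relevant>condition-feature form described above.
+#
+#   rule gcc-extras ( properties * )
+#   {
+#       local result ;
+#       if <toolset>gcc in $(properties)
+#       {
+#           result += <cxxflags>-fno-strict-aliasing ;
+#       }
+#       result += <relevant>cxxflags:<relevant>toolset ;
+#       return $(result) ;
+#   }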
diff --git a/src/boost/tools/build/src/tools/features/rtti-feature.jam b/src/boost/tools/build/src/tools/features/rtti-feature.jam
new file mode 100644
index 000000000..5943412e4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/rtti-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.rtti]]`rtti`::
+*Allowed values:* `on`, `off`.
++
+Controls whether run-time type information (RTTI) is generated; `off` disables
+it.
+
+|# # end::doc[]
+
+feature.feature rtti
+ : on off
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/runtime-feature.jam b/src/boost/tools/build/src/tools/features/runtime-feature.jam
new file mode 100644
index 000000000..f25c178ea
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/runtime-feature.jam
@@ -0,0 +1,40 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.runtime-debugging]]`runtime-debugging`::
+*Allowed values:* `on`, `off`.
++
+Specifies whether produced object files, executables, and libraries should
+include behavior useful only for debugging, such as asserts. Typically, the
+value of this feature is implicitly set by the `variant` feature, but it can be
+explicitly specified by the user. The most common usage is to build the release
+variant with debugging output.
+
+|# # end::doc[]
+
+feature.feature runtime-debugging
+ : on off
+ : propagated ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.runtime-link]]`runtime-link`::
+*Allowed values:* `shared`, `static`
++
+Controls whether a static or shared C/{CPP} runtime should be used. There are
+some restrictions on how this feature can be used; for example, on some
+compilers an application using a static runtime should not use shared libraries
+at all, and on some compilers mixing static and shared runtimes requires
+extreme care. Check
+your compiler documentation for more details.
+
+|# # end::doc[]
+
+feature.feature runtime-link
+ : shared static
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/sanitizers-feature.jam b/src/boost/tools/build/src/tools/features/sanitizers-feature.jam
new file mode 100644
index 000000000..8affd57a9
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/sanitizers-feature.jam
@@ -0,0 +1,63 @@
+# Copyright 2019 Damian Jarek
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::addr-doc[]
+
+[[bbv2.builtin.features.address-sanitizer]]`address-sanitizer`::
+*Allowed values:* `on`, `norecover`.
++
+Enables address sanitizer. Value `norecover` disables recovery for the
+sanitizer. The feature is optional, thus no sanitizer is enabled by default.
+
+|# # end::addr-doc[]
+
+feature.feature address-sanitizer
+ : on norecover
+ : propagated optional ;
+
+#| tag::leak-doc[]
+
+[[bbv2.builtin.features.leak-sanitizer]]`leak-sanitizer`::
+*Allowed values:* `on`, `norecover`.
++
+Enables leak sanitizer. Value `norecover` disables recovery for the
+sanitizer. The feature is optional, thus no sanitizer is enabled by default.
+
+|# # end::leak-doc[]
+
+feature.feature leak-sanitizer
+ : on norecover
+ : propagated optional ;
+
+#| tag::thread-doc[]
+
+[[bbv2.builtin.features.thread-sanitizer]]`thread-sanitizer`::
+*Allowed values:* `on`, `norecover`.
++
+Enables thread sanitizer. Value `norecover` disables recovery for the
+sanitizer. The feature is optional, thus no sanitizer is enabled by default.
+
+|# # end::thread-doc[]
+
+feature.feature thread-sanitizer
+ : on norecover
+ : propagated optional ;
+
+#| tag::undef-doc[]
+
+[[bbv2.builtin.features.undefined-sanitizer]]`undefined-sanitizer`::
+*Allowed values:* `on`, `norecover`.
++
+Enables undefined behavior sanitizer. Value `norecover` disables recovery for
+the sanitizer. The feature is optional, thus no sanitizer is enabled by
+default.
+
+|# # end::undef-doc[]
+
+feature.feature undefined-sanitizer
+ : on norecover
+ : propagated optional ;
diff --git a/src/boost/tools/build/src/tools/features/search-feature.jam b/src/boost/tools/build/src/tools/features/search-feature.jam
new file mode 100644
index 000000000..cbcf2a176
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/search-feature.jam
@@ -0,0 +1,20 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.search]]`search`::
+When used in requirements of a prebuilt library target this feature adds to the
+list of directories to search for the library file. See <<Prebuilt targets>>
+for examples.
+
+|# # end::doc[]
+
+feature.feature search
+ :
+ : free path #order-sensitive
+ ;
diff --git a/src/boost/tools/build/src/tools/features/source-feature.jam b/src/boost/tools/build/src/tools/features/source-feature.jam
new file mode 100644
index 000000000..7cc1ab8d3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/source-feature.jam
@@ -0,0 +1,22 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.source]]`source`::
+The `<source>X` property has the same effect on building a target as putting X
+in the list of sources. It is useful when you want to add the same source to
+all targets in the project (you can put `<source>` in requirements) or to
+conditionally include a source (using conditional requirements, see
+the section <<Conditions and alternatives>>). See also the
+<<bbv2.builtin.features.library,`<library>`>> feature.
+
+|# # end::doc[]
+
+feature.feature source
+ :
+ : free dependency incidental ;
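+
+# Usage sketch (file and target names are hypothetical): add one source to all
+# targets in a project, and another source only for a specific toolset.
+#
+#   project : requirements <source>version.cpp ;
+#   exe tool : tool.cpp : <toolset>msvc:<source>win_compat.cpp ;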
diff --git a/src/boost/tools/build/src/tools/features/stdlib-feature.jam b/src/boost/tools/build/src/tools/features/stdlib-feature.jam
new file mode 100644
index 000000000..b280ac8d0
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/stdlib-feature.jam
@@ -0,0 +1,29 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.stdlib]]`stdlib`::
+*Allowed values*: `native`, `gnu`, `gnu11`, `libc++`, `sun-stlport`, `apache`.
++
+Specifies the C++ standard library to link to and, in some cases, the library
+ABI to use:
++
+`native`::: Use compiler's default.
+`gnu`::: Use GNU Standard Library (a.k.a. pass:[libstdc++]) with the old ABI.
+`gnu11`::: Use GNU Standard Library with the new ABI.
+`libc++`::: Use LLVM pass:[libc++].
+`sun-stlport`::: Use the STLport implementation of the standard library
+ provided with the Solaris Studio compiler.
+`apache`::: Use the Apache stdcxx version 4 C++ standard library provided with
+ the Solaris Studio compiler.
+
+|# # end::doc[]
+
+feature.feature stdlib
+ : native gnu gnu11 libc++ sun-stlport apache
+ : propagated composite ;
diff --git a/src/boost/tools/build/src/tools/features/strip-feature.jam b/src/boost/tools/build/src/tools/features/strip-feature.jam
new file mode 100644
index 000000000..0bb9221af
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/strip-feature.jam
@@ -0,0 +1,25 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.strip]]`strip`::
+*Allowed values:* `off`, `on`.
++
+Controls whether the binary should be stripped -- that is, have everything not
+necessary for running it removed.
++
+NOTE: This feature will show up in target paths of everything, not just
+binaries.
+
+|# # end::doc[]
+
+# TODO: Should fix that when implementing feature relevance.
+
+feature.feature strip
+ : off on
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/tag-feature.jam b/src/boost/tools/build/src/tools/features/tag-feature.jam
new file mode 100644
index 000000000..c03924edf
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/tag-feature.jam
@@ -0,0 +1,39 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.tag]]`tag`::
+Used to customize the name of the generated files. The value should have the
+form:
++
+----
+@rulename
+----
++
+where _rulename_ should be the name of a rule with the following signature:
++
+----
+rule tag ( name : type ? : property-set )
+----
++
+The rule will be called for each target with the default name computed by
+B2, the type of the target, and the property set. The rule can either
+return a string that must be used as the name of the target, or an empty
+string, in which case the default name will be used.
++
+The most typical use of the `tag` feature is to encode build properties or the
+library version in library target names. You should take care to return a
+non-empty string from the tag rule only for types you care about -- otherwise,
+you might end up modifying the names of object files, generated header files,
+and other targets for which changing names does not make sense.
+
+|# # end::doc[]
+
+feature.feature tag
+ :
+ : free ;
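+
+# Usage sketch (target, rule, and suffix names are hypothetical): append a
+# build identifier to executables only, leaving other target types untouched.
+#
+#   exe app : app.cpp : <tag>@add-build-id ;
+#
+#   rule add-build-id ( name : type ? : property-set )
+#   {
+#       if $(type) = EXE
+#       {
+#           return $(name)_v42 ;
+#       }
+#   }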
diff --git a/src/boost/tools/build/src/tools/features/threadapi-feature.jam b/src/boost/tools/build/src/tools/features/threadapi-feature.jam
new file mode 100644
index 000000000..ae320678d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/threadapi-feature.jam
@@ -0,0 +1,39 @@
+# Copyright 2017 Alexander Karzhenkov
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property-set ;
+import feature : feature ;
+import toolset ;
+import features/os-feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.threadapi]]`threadapi`::
+*Allowed values:* `pthread`, `win32`.
++
+Selects the threading implementation. The default is `win32` if `<target-os>` is
+`windows` and `pthread` otherwise.
+
+|# # end::doc[]
+
+feature threadapi : pthread win32 : symmetric propagated ;
+toolset.add-defaults <target-os>windows:<threadapi>win32 ;
+
+rule get-default ( property-set )
+{
+ local api = pthread ;
+ if [ $(property-set).get <target-os> ] = windows { api = win32 ; }
+ return $(api) ;
+}
+
+# Obsolete rule that didn't quite work. Remove this
+# after all references to it have been cleaned up.
+rule detect ( properties * )
+{
+ # local ps = [ property-set.create $(properties) ] ;
+ # local api = [ $(ps).get <threadapi> ] ;
+ # if ! $(api) { api = [ get-default $(ps) ] ; }
+ # return <threadapi>$(api) <relevant>threadapi:<relevant>target-os ;
+}
diff --git a/src/boost/tools/build/src/tools/features/threading-feature.jam b/src/boost/tools/build/src/tools/features/threading-feature.jam
new file mode 100644
index 000000000..8d7a8f7c3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/threading-feature.jam
@@ -0,0 +1,24 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.threading]]`threading`::
+*Allowed values:* `single`, `multi`
++
+Controls whether the project should be built in multi-threaded mode. This
+feature does not necessarily change code generation in the compiler, but it
+causes the
+compiler to link to additional or different runtime libraries, and define
+additional preprocessor symbols (for example, `_MT` on Windows and `_REENTRANT`
+on Linux). How those symbols affect the compiled code depends on the code
+itself.
+
+|# # end::doc[]
+
+feature.feature threading
+ : single multi
+ : propagated ;
diff --git a/src/boost/tools/build/src/tools/features/toolset-feature.jam b/src/boost/tools/build/src/tools/features/toolset-feature.jam
new file mode 100644
index 000000000..fbd0baba8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/toolset-feature.jam
@@ -0,0 +1,20 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.toolset]]`toolset`::
+*Allowed values:* any of the toolset modules.
++
+Selects the toolset that will be used to build binary targets. The full list of
+toolset modules is in the <<Builtin tools>> section.
+
+|# # end::doc[]
+
+feature.feature toolset
+ :
+ : implicit propagated symmetric ;
diff --git a/src/boost/tools/build/src/tools/features/user-interface-feature.jam b/src/boost/tools/build/src/tools/features/user-interface-feature.jam
new file mode 100644
index 000000000..d4631ead4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/user-interface-feature.jam
@@ -0,0 +1,28 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.user-interface]]`user-interface`::
+*Allowed values:* `console`, `gui`, `wince`, `native`, `auto`.
++
+Specifies the environment for the executable, which affects the entry point
+symbol (or entry point function) that the linker will select. This feature is
+Windows-specific.
++
+`console`::: console application.
+`gui`::: application does not require a console (it is supposed to create its
+ own windows).
+`wince`::: application is intended to run on a device that has a version of the
+ Windows CE kernel.
+`native`::: application runs without a subsystem environment.
+`auto`::: application runs in the POSIX subsystem in Windows.
+
+|# # end::doc[]
+
+feature.feature user-interface
+ : console gui wince native auto ;
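+
+# Illustrative example (hypothetical target): build a Windows GUI executable
+# so the linker selects the GUI entry point instead of the console one.
+#
+#   exe my_gui_app : winmain.cpp : <user-interface>gui ;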
diff --git a/src/boost/tools/build/src/tools/features/variant-feature.jam b/src/boost/tools/build/src/tools/features/variant-feature.jam
new file mode 100644
index 000000000..ced88cebe
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/variant-feature.jam
@@ -0,0 +1,114 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+import errors ;
+import property ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.variant]]`variant`::
+*Allowed values:* `debug`, `release`, `profile`.
++
+A feature combining several low-level features, making it easy to
+request common build configurations.
++
+The value `debug` expands to
++
+----
+<optimization>off <debug-symbols>on <inlining>off <runtime-debugging>on
+----
++
+The value `release` expands to
++
+----
+<optimization>speed <debug-symbols>off <inlining>full <runtime-debugging>off
+----
++
+The value `profile` expands to the same as `release`, plus:
++
+----
+<profiling>on <debug-symbols>on
+----
++
+Users can define their own build variants using the `variant` rule
+from the `common` module.
++
+NOTE: Runtime debugging is on in debug builds to suit the expectations of
+people used to various IDEs.
+
+|# # end::doc[]
+
+feature.feature variant
+ :
+ : implicit composite propagated symmetric ;
+
+# Declares a new variant.
+#
+# First determines explicit properties for this variant, by refining parents'
+# explicit properties with the passed explicit properties. The result is
+# remembered and will be used if this variant is used as parent.
+#
+# Second, determines the full property set for this variant by adding to the
+# explicit properties default values for all missing non-symmetric properties.
+#
+# Lastly, makes appropriate value of 'variant' property expand to the full
+# property set.
+#
+rule variant ( name # Name of the variant
+ : parents-or-properties * # Specifies parent variants, if
+ # 'explicit-properties' are given, and
+ # explicit-properties or parents otherwise.
+ : explicit-properties * # Explicit properties.
+ )
+{
+ local parents ;
+ if ! $(explicit-properties)
+ {
+ if $(parents-or-properties[1]:G)
+ {
+ explicit-properties = $(parents-or-properties) ;
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+
+ # The problem is that we have to check for conflicts between base variants.
+ if $(parents[2])
+ {
+ errors.error "multiple base variants are not yet supported" ;
+ }
+
+ local inherited ;
+ # Add explicitly specified properties for parents.
+ for local p in $(parents)
+ {
+ # TODO: This check may be made stricter.
+ if ! [ feature.is-implicit-value $(p) ]
+ {
+ errors.error "Invalid base variant" $(p) ;
+ }
+
+ inherited += $(.explicit-properties.$(p)) ;
+ }
+ property.validate $(explicit-properties) ;
+ explicit-properties = [ property.refine $(inherited)
+ : $(explicit-properties) ] ;
+
+ # Record explicitly specified properties for this variant. We do this after
+ # inheriting parents' properties so they affect other variants derived from
+ # this one.
+ .explicit-properties.$(name) = $(explicit-properties) ;
+
+ feature.extend variant : $(name) ;
+ feature.compose <variant>$(name) : $(explicit-properties) ;
+}
+IMPORT $(__name__) : variant : : variant ;
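+
+# Illustrative example (hypothetical names): a project can declare its own
+# variant on top of an existing one and then request it like any other
+# variant value.
+#
+#   variant release-assert : release : <define>KEEP_ASSERTS ;
+#   exe my_app : my_app.cpp : <variant>release-assert ;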
diff --git a/src/boost/tools/build/src/tools/features/version-feature.jam b/src/boost/tools/build/src/tools/features/version-feature.jam
new file mode 100644
index 000000000..10f7fa7d2
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/version-feature.jam
@@ -0,0 +1,19 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.version]]`version`::
+This feature isn't used by any of the builtin tools, but can be used, for
+example, to adjust a target's name via the
+<<bbv2.builtin.features.tag,`<tag>`>> feature.
+
+|# # end::doc[]
+
+feature.feature version
+ :
+ : free ;
diff --git a/src/boost/tools/build/src/tools/features/visibility-feature.jam b/src/boost/tools/build/src/tools/features/visibility-feature.jam
new file mode 100644
index 000000000..443dc89ef
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/visibility-feature.jam
@@ -0,0 +1,46 @@
+# Copyright 2018 Andrey Semashev
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.visibility]]`visibility`::
+*Allowed values:* `global`, `protected`, `hidden`.
++
+Specifies the default symbol visibility in compiled binaries. Not all values
+are supported on all platforms and on some platforms (for example, Windows)
+symbol visibility is not supported at all.
++
+The supported values have the following meaning:
++
+`global`::: a.k.a. "default" in gcc documentation. Global symbols are
+ considered public; they are exported from shared libraries and can be
+ redefined by another shared library or executable.
+`protected`::: a.k.a. "symbolic". Protected symbols are exported from shared
+ libraries but cannot be redefined by another shared library or executable.
+ This mode is not supported on some platforms, for example OS X.
+`hidden`::: Hidden symbols are not exported from shared libraries and cannot
+ be redefined by a different shared library or executable loaded in a process.
+ In this mode, public symbols have to be explicitly marked in the source code
+ to be exported from shared libraries. This is the recommended mode.
++
+By default, the compiler's default visibility mode is used (no compiler flags
+are added).
++
+NOTE: In the Boost super-project Jamroot file, this property is set to the
+default value of `hidden`. This means that Boost libraries are built with hidden
+visibility by default, unless the user overrides it with a different
+`visibility` or a library sets a different `local-visibility` (see below).
+
+|# # end::doc[]
+
+feature.feature visibility
+ : global protected hidden
+ : optional composite propagated ;
+
+feature.compose <visibility>global : <local-visibility>global ;
+feature.compose <visibility>protected : <local-visibility>protected ;
+feature.compose <visibility>hidden : <local-visibility>hidden ;
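+
+# Illustrative example (hypothetical target): build a shared library with
+# hidden default visibility, exporting only explicitly marked symbols.
+#
+#   lib my_lib : my_lib.cpp : <visibility>hidden ;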
diff --git a/src/boost/tools/build/src/tools/features/warnings-feature.jam b/src/boost/tools/build/src/tools/features/warnings-feature.jam
new file mode 100644
index 000000000..b64bda648
--- /dev/null
+++ b/src/boost/tools/build/src/tools/features/warnings-feature.jam
@@ -0,0 +1,41 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.warnings]]`warnings`::
+*Allowed values:* `on`, `all`, `extra`, `pedantic`, `off`.
++
+Controls the warning level of compilers.
++
+`on`::: enable default/"reasonable" warning level.
+`all`::: enable most warnings.
+`extra`::: enable extra, possibly conflicting, warnings.
+`pedantic`::: enable likely inconsequential, and conflicting, warnings.
+`off`::: disable all warnings.
++
+Default value is `all`.
+
+|# # end::doc[]
+
+feature.feature warnings
+ : on all extra pedantic off
+ : incidental propagated ;
+
+#| tag::doc[]
+
+[[bbv2.builtin.features.warnings-as-errors]]`warnings-as-errors`::
+*Allowed values:* `off`, `on`.
++
+Makes it possible to treat warnings as errors and abort compilation on a
+warning.
+
+|# # end::doc[]
+
+feature.feature warnings-as-errors
+ : off on
+ : incidental propagated ;
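+
+# Illustrative example (hypothetical target): combine a strict warning level
+# with treating warnings as errors.
+#
+#   exe strict_app : app.cpp : <warnings>pedantic <warnings-as-errors>on ;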
diff --git a/src/boost/tools/build/src/tools/flags.jam b/src/boost/tools/build/src/tools/flags.jam
new file mode 100644
index 000000000..045f9af69
--- /dev/null
+++ b/src/boost/tools/build/src/tools/flags.jam
@@ -0,0 +1,152 @@
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# defines the check-has-flag rule.
+
+import "class" ;
+import common ;
+import feature : feature ;
+import generators ;
+import make ;
+import print ;
+import project ;
+import toolset : flags ;
+
+rule init ( )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ project.push-current ;
+ project.initialize $(__name__) ;
+ project /check/flags ;
+ .project = [ project.current ] ;
+ make empty.c : : @write-main ;
+ make empty.cpp : : @write-main ;
+ obj empty.obj : empty.cpp ;
+ project : requirements <flags.check>on ;
+ project.pop-current ;
+ }
+}
+
+rule write-main ( target : : properties * )
+{
+ print.output $(target) ;
+ print.text "int main() { return 0; }\n" : yes ;
+}
+
+# Applies true-properties if the toolset recognizes a specific flag.
+# Otherwise applies false-properties.
+#
+# Option must be one of <cflags>, <cxxflags>, or <linkflags>.
+#
+# Example::
+#
+# exe foo : foo.cpp :
+# [ check-has-flag <cxxflags>-std=c++11 : <cxxflags>-std=c++11 ] ;
+#
+rule check-has-flag ( option message ? : true-properties * : false-properties * )
+{
+ init ;
+ local id = [ MD5 $(option) ] ;
+
+ if ! $(.targets.$(id))
+ {
+ project.push-current $(.project) ;
+ switch $(option:G)
+ {
+ case <cflags> : obj flags_$(id) : empty.c : $(option) ;
+ case <cxxflags> : obj flags_$(id) : empty.cpp : $(option) ;
+ case <linkflags> : exe flags_$(id) : empty.obj : $(option) ;
+ case * :
+ import errors ;
+ errors.user-error "Don't know how to check $(option:G)" ;
+ }
+ project.pop-current ;
+ .targets.$(id) = true ;
+ }
+ message ?= "has $(option:G=)" ;
+ return [ check-target-builds /check/flags//flags_$(id) $(message)
+ : $(true-properties) : $(false-properties) ] ;
+}
+
+IMPORT $(__name__) : check-has-flag : : check-has-flag ;
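+
+# Illustrative example (hypothetical target): unlike the example above, this
+# one also supplies false-properties as a fallback when the flag is rejected.
+#
+#   exe foo : foo.cpp :
+#       [ check-has-flag <cxxflags>-std=c++11
+#           : <cxxflags>-std=c++11 : <cxxflags>-std=c++0x ] ;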
+
+feature flags.check : on : optional composite ;
+feature.compose <flags.check>on : <warnings-as-errors>on ;
+
+# Some compilers don't have an easy way to cause an error
+# for unknown options. In this case, we need to check
+# their stdout/stderr. This generator will copy its
+# source, but will cause an error if the given pattern
+# matches the output from the source.
+#
+
+feature flags.pattern : : free ;
+
+class flag-check-generator : generator
+{
+ rule __init__ ( type : requirements * : pattern )
+ {
+ generator.__init__ flags.check-output : $(type) : $(type)(%_valid) :
+ $(requirements) <flags.check>on ;
+ self.pattern = $(pattern) ;
+ }
+ rule run ( project name ? : property-set : sources * )
+ {
+ property-set = [ property-set.create
+ [ property.change [ $(property-set).raw ] : <flags.check> ]
+ <flags.pattern>$(self.pattern) ] ;
+ return [ generator.run $(project) $(name)
+ : $(property-set) : $(sources) ] ;
+ }
+ rule action-class ( )
+ {
+ return non-scanning-action ;
+ }
+}
+
+# These generator definitions should probably be moved to the individual toolsets.
+
+# msvc-7.1 uses 4002. Later versions use 9002.
+generators.register
+ [ class.new flag-check-generator OBJ : <toolset>msvc : "(D[94]002)" ] ;
+generators.register
+ [ class.new flag-check-generator EXE : <toolset>msvc : "(LNK4044)" ] ;
+generators.register
+ [ class.new flag-check-generator OBJ : <toolset>intel : "(#10006)" ] ;
+generators.register
+ [ class.new flag-check-generator EXE : <toolset>intel : "(#10006)" ] ;
+generators.override flags.check-output : all ;
+
+rule check-output-callback ( targets * : source-targets * : ignored * : output ? )
+{
+ if [ MATCH [ on $(targets) return $(PATTERN) ] : $(output) ]
+ {
+ FLAG_CHECK_COMMAND on $(targets) = illegal-ad22d215a8bbd73 ;
+ }
+}
+
+IMPORT $(__name__) : check-output-callback : : flags.check-output-callback ;
+
+flags flags.check-output PATTERN : <flags.pattern> ;
+
+rule check-output ( targets * : sources * : properties * )
+{
+ local action = [ on $(sources) return $(.action) ] ;
+ local all-sources ;
+ for local t in [ $(action).targets ]
+ {
+ all-sources += [ $(t).actualize ] ;
+ }
+ REBUILDS $(targets) : $(sources) ;
+ __ACTION_RULE__ on $(all-sources) = flags.check-output-callback $(targets) ;
+ common.copy $(targets[1]) : $(sources[1]) ;
+}
+
+actions check-output
+{
+ $(FLAG_CHECK_COMMAND)
+}
diff --git a/src/boost/tools/build/src/tools/fop.jam b/src/boost/tools/build/src/tools/fop.jam
new file mode 100644
index 000000000..c24b8725f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/fop.jam
@@ -0,0 +1,69 @@
+# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed
+# under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+# This module defines rules to handle generation of PDF and
+# PostScript files from XSL Formatting Objects via Apache FOP
+
+import generators ;
+import common ;
+import boostbook ;
+
+generators.register-standard fop.render.pdf : FO : PDF ;
+generators.register-standard fop.render.ps : FO : PS ;
+
+# Initializes the fop toolset.
+#
+rule init ( fop-command ? : java-home ? : java ? )
+{
+ local has-command = $(.has-command) ;
+
+ if $(fop-command)
+ {
+ .has-command = true ;
+ }
+
+ if $(fop-command) || ! $(has-command)
+ {
+ fop-command = [ common.get-invocation-command fop : fop : $(fop-command)
+ : [ modules.peek : FOP_DIR ] ] ;
+ }
+
+ if $(fop-command)
+ {
+ .FOP_COMMAND = $(fop-command) ;
+ }
+
+ if $(java-home) || $(java)
+ {
+ .FOP_SETUP = ;
+
+
+ # JAVA_HOME is the location where java was installed.
+
+ if $(java-home)
+ {
+ .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ;
+ }
+
+ # JAVACMD is the location that of the java executable, useful for a
+ # non-standard java installation, where the executable isn't at
+ # $JAVA_HOME/bin/java.
+
+ if $(java)
+ {
+ .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ;
+ }
+ }
+}
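+
+# Illustrative example (hypothetical paths): fop is typically configured from
+# user-config.jam, optionally pointing at a specific Java installation.
+#
+#   using fop : /usr/local/bin/fop : /usr/lib/jvm/java-11 ;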
+
+actions render.pdf
+{
+ $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<)
+}
+
+actions render.ps
+{
+ $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<)
+}
diff --git a/src/boost/tools/build/src/tools/fortran.jam b/src/boost/tools/build/src/tools/fortran.jam
new file mode 100644
index 000000000..37665825e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/fortran.jam
@@ -0,0 +1,55 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# This file contains common settings for all fortran tools
+#
+
+import "class" : new ;
+import feature : feature ;
+
+import type ;
+import generators ;
+import common ;
+
+type.register FORTRAN : f F for f77 ;
+type.register FORTRAN90 : f90 F90 ;
+
+feature fortran : : free ;
+feature fortran90 : : free ;
+
+class fortran-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
+ }
+}
+
+rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
+{
+ local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
+ generators.register $(g) ;
+}
+
+class fortran90-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ;
+ }
+}
+
+rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * )
+{
+ local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ;
+ generators.register $(g) ;
+}
+
+# FIXME: this is ugly, should find a better way (we'd want client code to
+# register all generators as "generator.some-rule", not with "some-module.some-rule".)
+IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ;
+IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ;
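+
+# Illustrative example (hypothetical toolset name): a toolset module that
+# defines a matching 'compile.fortran' action could register its Fortran
+# compiler like this.
+#
+#   generators.register-fortran-compiler mytool.compile.fortran
+#       : FORTRAN : OBJ : <toolset>mytool ;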
diff --git a/src/boost/tools/build/src/tools/gcc.jam b/src/boost/tools/build/src/tools/gcc.jam
new file mode 100644
index 000000000..8910a55f2
--- /dev/null
+++ b/src/boost/tools/build/src/tools/gcc.jam
@@ -0,0 +1,1330 @@
+# Copyright 2001 David Abrahams
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2003 Vladimir Prus
+# Copyright 2005 Reece H. Dunn
+# Copyright 2006 Ilya Sokolov
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.gcc]]
+= GNU C++
+
+The `gcc` module supports the http://gcc.gnu.org[GNU C++ compiler] on
+Linux, a number of Unix-like systems including SunOS, and on Windows
+(either http://www.cygwin.com[Cygwin] or http://www.mingw.org[MinGW]).
+
+The `gcc` module is initialized using the following syntax:
+
+----
+using gcc : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the version is not explicitly specified, it will be automatically
+detected by running the compiler with the `-v` option. If the command is
+not specified, the `g++` binary will be searched for in PATH.
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
+`root`::
+Specifies the root directory of the compiler installation. This option is
+necessary only if it is not possible to detect this information from the
+compiler command--for example if the specified compiler command is a user
+script.
+
+`archiver`::
+Specifies the archiver command that is used to produce static
+libraries. Normally, it is autodetected using gcc `-print-prog-name`
+option or defaulted to `ar`, but in some cases you might want to
+override it, for example to explicitly use a system version instead of
+one included with gcc.
+
+`ranlib`::
+Specifies the ranlib command that is used to generate the symbol table
+for static libraries. Normally, it is autodetected using gcc
+`-print-prog-name` option or defaulted to `ranlib`, but in some cases
+you might want to override it, for example to explicitly use a system
+version instead of one included with gcc.
+
+`rc`::
+Specifies the resource compiler command that will be used with the
+version of gcc that is being configured. This setting makes sense only
+for Windows and only if you plan to use resource files. By default
+`windres` will be used.
+
+`rc-type`::
+Specifies the type of resource compiler. The value can be either
+`windres` for the msvc resource compiler, or `rc` for Borland's resource
+compiler.
+
+In order to compile 64-bit applications, you have to specify
+`address-model=64`, and the `instruction-set` feature should refer to a
+64-bit processor. Currently, those include `nocona`, `opteron`, `athlon64` and
+`athlon-fx`.
+
+|# # end::doc[]
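+
+# Illustrative user-config.jam entries (versions, paths and flags are
+# hypothetical): configuring two gcc versions side by side, as described above.
+#
+#   using gcc : 9 : g++-9 : <cxxflags>-fno-strict-aliasing ;
+#   using gcc : 12 : /opt/gcc-12/bin/g++ ;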
+
+import "class" : new ;
+import common ;
+import cygwin ;
+import feature ;
+import fortran ;
+import generators ;
+import os ;
+import pch ;
+import property ;
+import property-set ;
+import rc ;
+import regex ;
+import sequence ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+import virtual-target ;
+import errors ;
+
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+
+feature.extend toolset : gcc ;
+
+toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
+toolset.inherit-flags gcc : unix ;
+toolset.inherit-rules gcc : unix ;
+
+generators.override gcc.prebuilt : builtin.prebuilt ;
+generators.override gcc.searched-lib-generator : searched-lib-generator ;
+
+# Make gcc toolset object files use the "o" suffix on all platforms.
+type.set-generated-target-suffix OBJ : <toolset>gcc : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ;
+
+
+# Initializes the gcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or
+# sun and the default value will be selected based on the current OS.
+# Example:
+# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+# The compiler command to use is detected in three steps:
+# 1) If an explicit command is specified by the user, it will be used and must
+# be available.
+# 2) If only a certain version is specified, it is enforced:
+# - either the 'g++-VERSION' command must be available
+# - or the default command 'g++' must be available and match the exact
+# version.
+# 3) Without user-provided restrictions use default 'g++'.
+#
+rule init ( version ? : command * : options * : requirement * )
+{
+ #1): use user-provided command
+ local tool-command = ;
+ if $(command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ :
+ $(command) ] ;
+ if ! $(tool-command)
+ {
+ import errors ;
+ errors.error toolset gcc "initialization:"
+ : provided command '$(command)' not found
+ : initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ #2): enforce user-provided version
+ else if $(version)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc :
+ "g++-$(version[1])" ] ;
+
+ #2.1) fallback: check whether "g++" reports the requested version
+ if ! $(tool-command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ]
+ ;
+ if $(tool-command)
+ {
+ local tool-command-string = \"$(tool-command)\" ;
+ tool-command-string = $(tool-command-string:J=" ") ;
+ local tool-version = [ dump-full-version
+ $(tool-command-string) ] ;
+ # Permit a match between a two-digit version specified by the
+ # user (e.g. 4.4) and a 3-digit version reported by gcc.
+ # Since only two digits are present in the binary name
+ # anyway, insisting that user specify the 3-digit version
+ # when configuring B2, while it is not required on
+ # the command line, would be strange.
+ local versionl = [ regex.split $(version) "[.]" ] ;
+ local tool-versionl = [ regex.split $(tool-version) "[.]" ] ;
+ if ! ( $(versionl[1]) = $(tool-versionl[1]) &&
+ $(versionl[2]:E=$(tool-versionl[2])) = $(tool-versionl[2]) &&
+ $(versionl[3]:E=$(tool-versionl[3])) = $(tool-versionl[3]) )
+ {
+ import errors ;
+ errors.error toolset gcc "initialization:"
+ : version '$(version)' requested but
+ 'g++-$(version)' not found and version
+ '$(tool-version)' of default '$(tool-command)'
+ does not match
+ : initialized from [ errors.nearest-user-location ]
+ ;
+ tool-command = ;
+ }
+ }
+ else
+ {
+ import errors ;
+ errors.error toolset gcc "initialization:"
+ : version '$(version)' requested but neither
+ 'g++-$(version)' nor default 'g++' found
+ : initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ }
+ #3) default: no command and no version specified, try using "g++"
+ else
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if ! $(tool-command)
+ {
+ import errors ;
+ errors.error toolset gcc "initialization:"
+ : no command provided, default command 'g++' not found
+ : initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+
+
+ # Information about the gcc command...
+ # The command.
+ local command = $(tool-command) ;
+ # The 'command' variable can have multiple elements but when calling the
+ # SHELL builtin we need a single string, and we need to quote elements
+ # with spaces.
+ local command-string = \"$(command)\" ;
+ command-string = $(command-string:J=" ") ;
+ # The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+ # The bin directory where to find the command to execute.
+ local bin ;
+ # The compiler flavor.
+ local flavor = [ feature.get-values <flavor> : $(options) ] ;
+ # vxworks build on windows uses csh, which is neither mingw nor cygwin
+ if [ feature.get-values <target-os> : $(options) ] = vxworks
+ {
+ flavor ?= vxworks ;
+ }
+ # Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+ }
+ local target-os ;
+ # Autodetect the version and flavor if not given.
+ if $(command)
+ {
+ local machine = [ MATCH "^([^ ]+)" :
+ [ SHELL "$(command-string) -dumpmachine" ] ] ;
+ version ?= [ dump-version $(command-string) ] ;
+ switch $(machine:L)
+ {
+ case *mingw* : flavor ?= mingw ;
+ case *cygwin* : flavor ?= cygwin ;
+ }
+ switch $(machine:L)
+ {
+ case *mingw* : target-os ?= windows ;
+ case *cygwin* : target-os ?= cygwin ;
+ case *linux* : target-os ?= linux ;
+ # TODO: finish this list.
+ }
+ }
+
+ local condition ;
+ condition = [ common.check-init-parameters gcc $(requirement) : version $(version)
+ : $(condition) ] ;
+
+ common.handle-options gcc : $(condition) : $(command) : $(options) ;
+
+ # Set the default target-os for this toolset.
+ if $(target-os) && ! [ feature.get-values <target-os> : $(requirement) ]
+ {
+ local conditionx = [ regex.replace $(condition) "/" "," ] ;
+ toolset.add-defaults $(conditionx)\:<target-os>$(target-os) ;
+ }
+
+ # If gcc is installed in a non-standard location, we would need to add
+ # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+ # rules).
+ if $(command)
+ {
+ # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+ # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+ # right ones. Note that we do not provide a clean way to build a 32-bit
+ # binary using a 64-bit compiler, but user can always pass -m32
+ # manually.
+ local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" using gcc libraries "::" $(condition) "::" $(lib_path) ;
+ }
+ toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+
+ # If we are not using a system gcc installation we should adjust the various
+ # programs as needed to prefer using their installation specific versions.
+ # This is essential for correct use of MinGW and for cross-compiling.
+
+ # - Archive builder.
+ local archiver = [ common.get-invocation-command gcc
+ : [ .get-prog-name $(command-string) : ar : $(flavor) ]
+ : [ feature.get-values <archiver> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" using gcc archiver "::" $(condition) "::" $(archiver[1]) ;
+ }
+ local arflags = [ feature.get-values <arflags> : $(options) ] ;
+ toolset.flags gcc.archive .ARFLAGS $(condition) : $(arflags) ;
+
+ # - Ranlib.
+ local ranlib = [ common.get-invocation-command gcc
+ : [ .get-prog-name $(command-string) : ranlib : $(flavor) ]
+ : [ feature.get-values <ranlib> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" using gcc ranlib "::" $(condition) "::" $(ranlib[1]) ;
+ }
+
+ # - Resource compiler.
+ local rc = [ common.get-invocation-command-nodefault gcc : windres :
+ [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
+ local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
+ rc-type ?= windres ;
+ if ! $(rc)
+ {
+ # If we cannot find an RC compiler we fall back to a null one that
+ # creates empty object files. This allows the same Jamfiles to work
+ # across the board. The null RC uses assembler to create the empty
+ # objects, so configure that.
+ rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ]
+ ;
+ rc-type = null ;
+ }
+ rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
+
+ toolset.flags gcc VERSION $(condition) : [ regex.split $(version) "[.]" ] ;
+
+ init-cxxstd-flags $(condition) : $(version) ;
+}
+
+if [ os.name ] = NT
+{
+ # This causes single-line command invocation to not go through .bat files,
+ # thus avoiding command-line length limitations.
+ # TODO: Set JAMSHELL on specific targets instead of globally.
+ JAMSHELL = % ;
+}
+
+local rule dump-full-version ( command-string )
+{
+ # -dumpfullversion is only supported for gcc 7+.
+ # Passing both options works, as the first one that's
+ # recognized will be used.
+ return [ MATCH "^([0-9.]+)" :
+ [ SHELL "$(command-string) -dumpfullversion -dumpversion" ] ] ;
+}
+
+local rule dump-version ( command-string )
+{
+ return [ MATCH "^([0-9.]+)" :
+ [ SHELL "$(command-string) -dumpversion" ] ] ;
+}
+
+# Uses -print-prog-name to get the name of the tool.
+# Converts the path to native form if using cygwin.
+rule .get-prog-name ( command-string : tool : flavor ? )
+{
+ local prog-name = [ NORMALIZE_PATH [ MATCH "(.*)[\n]+" :
+ [ SHELL "$(command-string) -print-prog-name=$(tool)" ] ] ] ;
+
+ if $(flavor) = cygwin && [ os.name ] = NT
+ {
+ prog-name = [ cygwin.cygwin-to-windows-path $(prog-name) ] ;
+ }
+ return $(prog-name) ;
+}
+
+###
+### Functions that set options on the targets.
+###
+
+local all-os = [ feature.values <target-os> ] ;
+
+local rule compile-link-flags ( * )
+{
+ toolset.flags gcc.compile OPTIONS $(1) : $(2) ;
+ toolset.flags gcc.link OPTIONS $(1) : $(2) ;
+}
+
+{
+ # This logic will add -fPIC for all compilations:
+ #
+ # lib a : a.cpp b ;
+ # obj b : b.cpp ;
+ # exe c : c.cpp a d ;
+ # obj d : d.cpp ;
+ #
+ # This all is fine, except that 'd' will be compiled with -fPIC even
+ # though it is not needed, as 'd' is used only in exe. However, it is
+ # hard to detect where a target is going to be used. Alternatively, we
+ # can set -fPIC only when the main target type is LIB, but then 'b' would be
+ # compiled without -fPIC which would lead to link errors on x86-64. So,
+ # compile everything with -fPIC.
+ #
+ # Yet another alternative would be to create a propagated <sharedable>
+ # feature and set it when building shared libraries, but that would be
+ # hard to implement and would increase the target path length even more.
+
+ # On Windows, fPIC is the default, and specifying -fPIC explicitly leads
+ # to a warning.
+ local non-windows = [ set.difference $(all-os) : cygwin windows ] ;
+ compile-link-flags <link>shared/<target-os>$(non-windows) : -fPIC ;
+}
+
+{
+ # Handle address-model
+ compile-link-flags <target-os>aix/<address-model>32 : -maix32 ;
+ compile-link-flags <target-os>aix/<address-model>64 : -maix64 ;
+
+ compile-link-flags <target-os>hpux/<address-model>32 : -milp32 ;
+ compile-link-flags <target-os>hpux/<address-model>64 : -mlp64 ;
+
+ local generic-os = [ set.difference $(all-os) : aix hpux ] ;
+ local arch = power sparc x86 ;
+ compile-link-flags <target-os>$(generic-os)/<architecture>$(arch)/<address-model>32 : -m32 ;
+ compile-link-flags <target-os>$(generic-os)/<architecture>$(arch)/<address-model>64 : -m64 ;
+}
+
+{
+ # Handle threading
+ local rule threading-flags ( * )
+ {
+ compile-link-flags <threading>multi/$(1) : $(2) ;
+ if $(3)
+ {
+ toolset.flags gcc.link FINDLIBS-SA <threading>multi/$(1) : $(3) ;
+ }
+ }
+
+ threading-flags <target-os>windows : -mthreads ;
+ threading-flags <target-os>cygwin : -mthreads ;
+ threading-flags <target-os>solaris : -pthreads : rt ;
+ threading-flags <target-os>qnx : -pthread ;
+
+ local bsd = [ MATCH ^(.*bsd)$ : $(all-os) ] ;
+ threading-flags <target-os>$(bsd) : -pthread ;
+
+ local no-threading = android beos haiku sgi darwin vxworks ;
+ local threading-generic-os = [ set.difference $(all-os) : $(no-threading) $(bsd) windows cygwin solaris qnx ] ;
+ threading-flags <target-os>$(threading-generic-os) : -pthread : rt ;
+}
+
+{
+ local rule cxxstd-flags ( * )
+ {
+ toolset.flags gcc.compile.c++ OPTIONS $(1) : $(2) ;
+ toolset.flags gcc.link OPTIONS $(1) : $(2) ;
+ }
+
+ local cxxstd = [ feature.values <cxxstd> ] ;
+ local dialects = [ feature.values <cxxstd-dialect> ] ;
+ .cxxstd-dialects = [ set.difference $(dialects) : gnu iso ] ;
+ # C++ latest needs to be set up on a per-toolset basis
+ for local std in [ set.difference $(cxxstd) : latest ]
+ {
+ cxxstd-flags <cxxstd>$(std)/<cxxstd-dialect>iso : -std=c++$(std) ;
+ cxxstd-flags <cxxstd>$(std)/<cxxstd-dialect>gnu : -std=gnu++$(std) ;
+ # If we see this it's probably a mistake, but
+ # toolset.flags has no way to set up diagnostics.
+ cxxstd-flags <cxxstd>$(std)/<cxxstd-dialect>$(.cxxstd-dialects) : -std=c++$(std) ;
+ }
+
+ local rule version-ge ( lhs : rhs )
+ {
+ lhs = [ regex.split $(lhs) "[.]" ] ;
+ rhs = [ regex.split $(rhs) "[.]" ] ;
+ return [ sequence.compare $(rhs) : $(lhs) : numbers.less ] ;
+ }
+ # Version specific flags
+ local rule init-cxxstd-flags ( condition * : version )
+ {
+ local std ;
+ if [ version-ge $(version) : 8 ] { std = 2a ; }
+ else if [ version-ge $(version) : 5 ] { std = 1z ; }
+ else if [ version-ge $(version) : 4.8 ] { std = 1y ; }
+ else if [ version-ge $(version) : 4.7 ] { std = 11 ; }
+ else if [ version-ge $(version) : 3.3 ] { std = 98 ; }
+ if $(std)
+ {
+ cxxstd-flags $(condition)/<cxxstd>latest/<cxxstd-dialect>iso : -std=c++$(std) ;
+ cxxstd-flags $(condition)/<cxxstd>latest/<cxxstd-dialect>gnu : -std=gnu++$(std) ;
+ cxxstd-flags $(condition)/<cxxstd>latest/<cxxstd-dialect>$(.cxxstd-dialects) : -std=c++$(std) ;
+ }
+ }
+}
+
+generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
+
+generators.register [ new fortran-compiling-generator
+ gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ] ;
+
+rule compile.c++.preprocess ( targets * : sources * : properties * )
+{
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c.preprocess ( targets * : sources * : properties * )
+{
+ # If we use the name g++ then default file suffix -> language mapping does
+ # not work. So have to pass -x option. Maybe, we can work around this by
+ # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ # If we use the name g++ then default file suffix -> language mapping does
+ # not work. So have to pass -x option. Maybe, we can work around this by
+ # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.fortran ( targets * : sources * : properties * )
+{
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)"
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
+}
+
+actions compile.c.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.asm ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x assembler-with-cpp" ;
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+###
+### Precompiled header use and generation.
+###
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended
+# to the full header name.
+
+type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
+
+# GCC-specific pch generator.
+class gcc-pch-generator : pch-generator
+{
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ # Find the header in sources. Ignore any CPP sources.
+ local header ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ header = $(s) ;
+ }
+ }
+
+ # Error handling: base header file name should be the same as the base
+ # precompiled header name.
+ local header-name = [ $(header).name ] ;
+ local header-basename = $(header-name:B) ;
+ if $(header-basename) != $(name)
+ {
+ local location = [ $(project).project-module ] ;
+ import errors : user-error : errors.user-error ;
+ errors.user-error "in" "$(location):" pch target name '$(name)' should
+ be the same as the base name of header file '$(header-name)' ;
+ }
+
+ local pch-file = [ generator.run $(project) $(name) : $(property-set)
+ : $(header) ] ;
+
+ # Return result of base class and pch-file property as
+ # usage-requirements.
+ return
+ [ $(pch-file[1]).add-raw <pch-file>$(pch-file[2-]) <cflags>-Winvalid-pch ]
+ $(pch-file[2-])
+ ;
+ }
+
+ # Calls the base version specifying source's name as the name of the created
+ # target. As a result, the PCH will be named whatever.hpp.gch, and not
+ # whatever.gch.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ name = [ $(sources[1]).name ] ;
+ return [ generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
+# latter have HPP type, but HPP type is derived from H. The type of compilation
+# is determined entirely by the destination type.
+generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
+generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
+
+# Override default do-nothing generators.
+generators.override gcc.compile.c.pch : pch.default-c-pch-generator ;
+generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+}
+
+actions compile.c++.pch
+{
+ "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+}
+
+actions compile.c.pch
+{
+ "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+###
+### General options, like optimization.
+###
+
+# Declare flags and action for compilation.
+toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags gcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags gcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags gcc.compile OPTIONS <warnings>all : -Wall ;
+toolset.flags gcc.compile OPTIONS <warnings>extra : -Wall -Wextra ;
+toolset.flags gcc.compile OPTIONS <warnings>pedantic : -Wall -Wextra -pedantic ;
+toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.compile OPTIONS <profiling>on : -pg ;
+
+toolset.flags gcc.compile OPTIONS <local-visibility>hidden : -fvisibility=hidden ;
+toolset.flags gcc.compile.c++ OPTIONS <local-visibility>hidden : -fvisibility-inlines-hidden ;
+toolset.flags gcc.compile OPTIONS <local-visibility>protected : -fvisibility=protected ;
+toolset.flags gcc.compile OPTIONS <local-visibility>protected/<target-os>darwin : ;
+toolset.flags gcc.compile OPTIONS <local-visibility>global : -fvisibility=default ;
+
+toolset.flags gcc.compile.c++ OPTIONS <exception-handling>off : -fno-exceptions ;
+toolset.flags gcc.compile.c++ OPTIONS <rtti>off : -fno-rtti ;
+
+# sanitizers
+toolset.flags gcc.compile.c++ OPTIONS <address-sanitizer>on : -fsanitize=address -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <address-sanitizer>norecover : -fsanitize=address -fno-sanitize-recover=address -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <leak-sanitizer>on : -fsanitize=leak -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <leak-sanitizer>norecover : -fsanitize=leak -fno-sanitize-recover=leak -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <thread-sanitizer>on : -fsanitize=thread -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <thread-sanitizer>norecover : -fsanitize=thread -fno-sanitize-recover=thread -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <undefined-sanitizer>on : -fsanitize=undefined -fno-omit-frame-pointer ;
+toolset.flags gcc.compile.c++ OPTIONS <undefined-sanitizer>norecover : -fsanitize=undefined -fno-sanitize-recover=undefined -fno-omit-frame-pointer ;
+
+toolset.flags gcc.compile.c++ OPTIONS <coverage>on : --coverage ;
+
+# configure Dinkum STL to match compiler options
+toolset.flags gcc.compile.c++ DEFINES <rtti>off/<target-os>vxworks : _NO_RTTI ;
+toolset.flags gcc.compile.c++ DEFINES <exception-handling>off/<target-os>vxworks : _NO_EX=1 ;
+
+# LTO
+toolset.flags gcc.compile OPTIONS <lto>on/<lto-mode>full : -flto ;
+toolset.flags gcc.link OPTIONS <lto>on/<lto-mode>full : -flto ;
+
+toolset.flags gcc.compile OPTIONS <lto>on/<lto-mode>fat : -flto -ffat-lto-objects ;
+toolset.flags gcc.link OPTIONS <lto>on/<lto-mode>fat : -flto ;
+
+# ABI selection
+toolset.flags gcc.compile.c++ DEFINES <stdlib>gnu : _GLIBCXX_USE_CXX11_ABI=0 ;
+toolset.flags gcc.compile.c++ DEFINES <stdlib>gnu11 : _GLIBCXX_USE_CXX11_ABI=1 ;
+
+###
+### User free feature options.
+###
+
+toolset.flags gcc.compile USER_OPTIONS <cflags> ;
+toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
+toolset.flags gcc.compile.asm USER_OPTIONS <asmflags> ;
+toolset.flags gcc.compile DEFINES <define> ;
+toolset.flags gcc.compile INCLUDES <include> ;
+toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ;
+
+###
+### Linking generators and actions.
+###
+
+# Class checking that we do not try to use the <runtime-link>static property
+# while creating or using a shared library, since it is not supported by
+# gcc/libc.
+class gcc-linking-generator : unix-linking-generator
+{
+ rule run ( project name ? : property-set : sources + )
+ {
+ local target-os = [ $(property-set).get <target-os> ] ;
+ local no-static-link = true ;
+ switch $(target-os)
+ {
+ case vms : no-static-link = ;
+ case windows : no-static-link = ;
+ }
+
+ local properties = [ $(property-set).raw ] ;
+ local reason ;
+ if $(no-static-link) && <runtime-link>static in $(properties)
+ {
+ if <link>shared in $(properties)
+ {
+ reason = On gcc, DLLs can not be built with
+ '<runtime-link>static'. ;
+ }
+ else if [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ reason = On gcc, using DLLs together with the
+ '<runtime-link>static' option is not possible. ;
+ }
+ }
+ }
+ }
+ if $(reason)
+ {
+ ECHO "warning:" $(reason) ;
+ ECHO "warning:" It is suggested to use '<runtime-link>static' together
+ with '<link>static'. ;
+ }
+ else
+ {
+ return [ unix-linking-generator.run $(project) $(name) :
+ $(property-set) : $(sources) ] ;
+ }
+ }
+}
+
+# The set of permissible input types is different on mingw. So, define two sets
+# of generators, with mingw generators selected when target-os=windows.
+
+local g ;
+g = [ new gcc-linking-generator gcc.mingw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.mingw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.register
+ [ new gcc-linking-generator gcc.link
+ : LIB OBJ
+ : EXE
+ : <toolset>gcc ] ;
+generators.register
+ [ new gcc-linking-generator gcc.link.dll
+ : LIB OBJ
+ : SHARED_LIB
+ : <toolset>gcc ] ;
+
+generators.override gcc.mingw.link : gcc.link ;
+generators.override gcc.mingw.link.dll : gcc.link.dll ;
+
+# Cygwin is similar to msvc and mingw in that it uses import libraries. While in
+# simple cases it can link directly to a shared library, this is believed to be
+# slower, and not always possible. Define cygwin-specific generators here.
+
+g = [ new gcc-linking-generator gcc.cygwin.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.cygwin.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.override gcc.cygwin.link : gcc.link ;
+generators.override gcc.cygwin.link.dll : gcc.link.dll ;
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.link OPTIONS <profiling>on : -pg ;
+toolset.flags gcc.link USER_OPTIONS <linkflags> ;
+toolset.flags gcc.link LINKPATH <library-path> ;
+toolset.flags gcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags gcc.link LIBRARIES <library-file> ;
+
+# Specify compile flags for linker as well as they may be needed for LTO
+toolset.flags gcc.link OPTIONS <local-visibility>hidden : -fvisibility=hidden -fvisibility-inlines-hidden ;
+toolset.flags gcc.link OPTIONS <local-visibility>protected : -fvisibility=protected ;
+toolset.flags gcc.link OPTIONS <local-visibility>protected/<target-os>darwin : ;
+toolset.flags gcc.link OPTIONS <local-visibility>global : -fvisibility=default ;
+
+# sanitizers
+toolset.flags gcc.link OPTIONS <address-sanitizer>on : -fsanitize=address -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <address-sanitizer>norecover : -fsanitize=address -fno-sanitize-recover=address -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <leak-sanitizer>on : -fsanitize=leak -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <leak-sanitizer>norecover : -fsanitize=leak -fno-sanitize-recover=leak -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <thread-sanitizer>on : -fsanitize=thread -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <thread-sanitizer>norecover : -fsanitize=thread -fno-sanitize-recover=thread -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <undefined-sanitizer>on : -fsanitize=undefined -fno-omit-frame-pointer ;
+toolset.flags gcc.link OPTIONS <undefined-sanitizer>norecover : -fsanitize=undefined -fno-sanitize-recover=undefined -fno-omit-frame-pointer ;
+
+toolset.flags gcc.link OPTIONS <coverage>on : --coverage ;
+
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
+
+# target specific link flags
+{
+ # aix
+
+ # On AIX we *have* to use the native linker.
+ #
+ # Using -brtl, the AIX linker will look for libraries with both the .a
+ # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
+ # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
+ # file that may contain shared objects and is different from static libs
+ # as on Linux.
+ #
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent, this is as close as it gets.
+ #
+ # The -bbigtoc option instructs the linker to create a TOC bigger than 64k.
+ # This is necessary for some submodules such as math, but it does make running
+ # the tests a tad slower.
+ #
+ # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+ toolset.flags gcc.link OPTIONS <target-os>aix : -Wl,-brtl -Wl,-bnoipath -Wl,-bbigtoc ;
+
+ # See note [1]
+ toolset.flags gcc.link OPTIONS <target-os>aix/<runtime-link>static : -static ;
+
+ # darwin
+
+ # On Darwin, the -s option to ld does not work unless we pass -static,
+ # and passing -static unconditionally is a bad idea. So, do not pass -s
+ # at all and darwin.jam will use a separate 'strip' invocation.
+ toolset.flags gcc.link RPATH <target-os>darwin : <dll-path> ;
+ # This does not support -R.
+ toolset.flags gcc.link RPATH_OPTION <target-os>darwin : -rpath ;
+ # -rpath-link is not supported at all.
+
+ # See note [1]
+ toolset.flags gcc.link OPTIONS <target-os>darwin/<runtime-link>static : -static ;
+
+ # vxworks
+ # On VxWorks we want to reflect whatever special flags have been set in the
+ # environment for the CPU we are targeting in the cross build.
+ toolset.flags gcc.link OPTIONS <target-os>vxworks/<strip>on : -Wl,--strip-all ;
+ toolset.flags gcc.link OPTIONS <target-os>vxworks/<link>static : [ os.environ LDFLAGS_STATIC ] ;
+ toolset.flags gcc.link.dll OPTIONS <target-os>vxworks : [ os.environ LDFLAGS_SO ] ;
+ toolset.flags gcc.link OPTIONS <target-os>vxworks/<link>shared : [ os.environ LDFLAGS_DYNAMIC ] ;
+
+ # default
+
+ local generic-os = [ set.difference $(all-os) : aix darwin vxworks solaris osf hpux ] ;
+ # Strip the binary when no debugging is needed. We use --strip-all flag
+ # as opposed to -s since icc (intel's compiler) is generally
+ # option-compatible with and inherits from the gcc toolset, but does not
+ # support -s.
+ toolset.flags gcc.link OPTIONS <target-os>$(generic-os)/<strip>on :
+ -Wl,--strip-all ;
+ toolset.flags gcc.link RPATH <target-os>$(generic-os) : <dll-path> ;
+ toolset.flags gcc.link RPATH_OPTION <target-os>$(generic-os) : -rpath ;
+ toolset.flags gcc.link RPATH_LINK <target-os>$(generic-os) : <xdll-path> ;
+ toolset.flags gcc.link START-GROUP <target-os>$(generic-os) :
+ -Wl,--start-group ;
+ toolset.flags gcc.link END-GROUP <target-os>$(generic-os) : -Wl,--end-group ;
+
+ # gnu ld has the ability to change the search behaviour for libraries
+ # referenced by the -l switch. These modifiers are -Bstatic and
+ # -Bdynamic and change search for -l switches that follow them. The
+ # following list shows the tried variants. Search stops at the first
+ # variant that has a match.
+ #
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw, cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw, cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+ # On *nix mixing shared libs with static runtime is not a good idea.
+ toolset.flags gcc.link FINDLIBS-ST-PFX <target-os>$(generic-os)/<runtime-link>shared : -Wl,-Bstatic ;
+ toolset.flags gcc.link FINDLIBS-SA-PFX <target-os>$(generic-os)/<runtime-link>shared : -Wl,-Bdynamic ;
+
+ # On Windows, however, mixing static and dynamic libs with a static
+ # runtime is allowed.
+ toolset.flags gcc.link FINDLIBS-ST-PFX <target-os>windows/<runtime-link>static : -Wl,-Bstatic ;
+ toolset.flags gcc.link FINDLIBS-SA-PFX <target-os>windows/<runtime-link>static : -Wl,-Bdynamic ;
+ toolset.flags gcc.link OPTIONS <target-os>windows/<runtime-link>static : -Wl,-Bstatic ;
+
+ toolset.flags gcc.link HAVE_SONAME <target-os>$(generic-os) : "" ;
+ toolset.flags gcc.link SONAME_OPTION <target-os>$(generic-os) : -h ;
+
+ # See note [1]
+ toolset.flags gcc.link OPTIONS <target-os>$(generic-os)/<runtime-link>static : -static ;
+
+ # hpux
+
+ toolset.flags gcc.link OPTIONS <target-os>hpux/<strip>on : -Wl,-s ;
+
+ toolset.flags gcc.link HAVE_SONAME <target-os>hpux : "" ;
+ toolset.flags gcc.link SONAME_OPTION <target-os>hpux : +h ;
+
+ # osf
+
+ # No --strip-all, just -s.
+ toolset.flags gcc.link OPTIONS <target-os>osf/<strip>on : -Wl,-s ;
+ toolset.flags gcc.link RPATH <target-os>osf : <dll-path> ;
+ # This does not support -R.
+ toolset.flags gcc.link RPATH_OPTION <target-os>osf : -rpath ;
+ # -rpath-link is not supported at all.
+
+ # See note [1]
+ toolset.flags gcc.link OPTIONS <target-os>osf/<runtime-link>static : -static ;
+
+ # sun
+
+ toolset.flags gcc.link OPTIONS <target-os>solaris/<strip>on : -Wl,-s ;
+
+ toolset.flags gcc.link RPATH <target-os>solaris : <dll-path> ;
+ # Solaris linker does not have a separate -rpath-link, but allows using
+ # -L for the same purpose.
+ toolset.flags gcc.link LINKPATH <target-os>solaris : <xdll-path> ;
+
+ # This permits shared libraries with non-PIC code on Solaris.
+ # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the
+ # following is not needed. Whether -fPIC should be hardcoded, is a
+ # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ toolset.flags gcc.link OPTIONS <target-os>solaris : -mimpure-text ;
+
+ # See note [1]
+ toolset.flags gcc.link OPTIONS <target-os>solaris/<runtime-link>static : -static ;
+
+ # [1]
+ # For <runtime-link>static we made sure there are no dynamic libraries in the
+ # link. On HP-UX not all system libraries exist as archived libraries (for
+ # example, there is no libunwind.a), so, on this platform, the -static option
+ # cannot be specified.
+}
+
+
+# Enclose the RPATH variable on 'targets' in double quotes, unless it is already
+# enclosed in single quotes. This special casing is done because it is common to
+# pass '$ORIGIN' to the linker -- it has to be in single quotes to prevent shell
+# expansion -- and wrapping it in double quotes as well would defeat the
+# protection that the single quotes provide.
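+# For example (illustrative only): an RPATH value of /opt/foo/lib would be
+# emitted as "/opt/foo/lib", while a value already written as '$ORIGIN/../lib'
+# is passed through unchanged.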
+#
+rule quote-rpath ( targets * )
+{
+ local r = [ on $(targets[1]) return $(RPATH) ] ;
+ if ! [ MATCH ('.*') : $(r) ]
+ {
+ r = \"$(r)\" ;
+ }
+ RPATH on $(targets) = $(r) ;
+}
+
+# Declare actions for linking.
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only gcc links, it might be a
+ # good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+###
+### Archive library generation.
+###
+
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that is hardly worth the trouble as on Linux, 'ar'
+# is always available.
+.AR = ar ;
+.ARFLAGS = rc ;
+.RANLIB = ranlib ;
+
+toolset.flags gcc.archive AROPTIONS <archiveflags> ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here is the rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+# Declare action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove archive, we do not care about replacement, but there is no option "add
+# without replacement".
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reasons.
+#
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) $(.ARFLAGS) "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+}
+
+###
+### CPU architecture and instruction set options.
+###
+
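+# Registers the given compiler options for an <architecture>/<instruction-set>
+# combination. If 'default' is passed, the options are also applied when that
+# architecture is requested without an explicit instruction set.
+#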
+local rule cpu-flags ( toolset variable : architecture : instruction-set + :
+ values + : default ? )
+{
+ if $(default)
+ {
+ toolset.flags $(toolset) $(variable)
+ <architecture>$(architecture)/<instruction-set> : $(values) ;
+ }
+ toolset.flags $(toolset) $(variable)
+ <architecture>/<instruction-set>$(instruction-set)
+ <architecture>$(architecture)/<instruction-set>$(instruction-set)
+ : $(values) ;
+}
+
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+# The 'native' option appeared in gcc 4.2 so we cannot safely use it as default.
+# Use i686 instead for 32-bit.
+toolset.flags gcc OPTIONS <architecture>x86/<address-model>32/<instruction-set> : -march=i686 ;
+cpu-flags gcc OPTIONS : x86 : native : -march=native ;
+cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
+cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
+cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
+cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ;
+cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ;
+cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ;
+cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ;
+cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ;
+cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ;
+cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ;
+cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ;
+cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ;
+cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ;
+cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ;
+cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : conroe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : conroe-xe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : conroe-l : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : allendale : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : wolfdale : -march=core2 -msse4.1 ;
+cpu-flags gcc OPTIONS : x86 : merom : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : merom-xe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : kentsfield : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : kentsfield-xe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : yorksfield : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : penryn : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : corei7 : -march=corei7 ;
+cpu-flags gcc OPTIONS : x86 : nehalem : -march=corei7 ;
+cpu-flags gcc OPTIONS : x86 : corei7-avx : -march=corei7-avx ;
+cpu-flags gcc OPTIONS : x86 : sandy-bridge : -march=corei7-avx ;
+cpu-flags gcc OPTIONS : x86 : core-avx-i : -march=core-avx-i ;
+cpu-flags gcc OPTIONS : x86 : ivy-bridge : -march=core-avx-i ;
+cpu-flags gcc OPTIONS : x86 : haswell : -march=core-avx-i -mavx2 -mfma -mbmi -mbmi2 -mlzcnt ;
+cpu-flags gcc OPTIONS : x86 : broadwell : -march=broadwell ;
+cpu-flags gcc OPTIONS : x86 : skylake : -march=skylake ;
+cpu-flags gcc OPTIONS : x86 : skylake-avx512 : -march=skylake-avx512 ;
+cpu-flags gcc OPTIONS : x86 : cannonlake : -march=skylake-avx512 -mavx512vbmi -mavx512ifma -msha ;
+cpu-flags gcc OPTIONS : x86 : icelake : -march=icelake ;
+cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
+cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
+cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
+cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ;
+cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ;
+cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ;
+cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ;
+cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ;
+##
+cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ;
+cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ;
+cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ;
+cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ;
+cpu-flags gcc OPTIONS : x86 : k8-sse3 : -march=k8-sse3 ;
+cpu-flags gcc OPTIONS : x86 : opteron-sse3 : -march=opteron-sse3 ;
+cpu-flags gcc OPTIONS : x86 : athlon64-sse3 : -march=athlon64-sse3 ;
+cpu-flags gcc OPTIONS : x86 : amdfam10 : -march=amdfam10 ;
+cpu-flags gcc OPTIONS : x86 : barcelona : -march=barcelona ;
+cpu-flags gcc OPTIONS : x86 : bdver1 : -march=bdver1 ;
+cpu-flags gcc OPTIONS : x86 : bdver2 : -march=bdver2 ;
+cpu-flags gcc OPTIONS : x86 : bdver3 : -march=bdver3 ;
+cpu-flags gcc OPTIONS : x86 : bdver4 : -march=bdver4 ;
+cpu-flags gcc OPTIONS : x86 : btver1 : -march=btver1 ;
+cpu-flags gcc OPTIONS : x86 : btver2 : -march=btver2 ;
+cpu-flags gcc OPTIONS : x86 : znver1 : -march=znver1 ;
+cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
+cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
+cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
+cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ;
+##
+cpu-flags gcc OPTIONS : x86 : atom : -march=atom ;
+# Sparc
+cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 : default ;
+cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ;
+cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ;
+cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ;
+cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ;
+cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ;
+cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ;
+cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ;
+cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ;
+cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ;
+# RS/6000 & PowerPC
+cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ;
+cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ;
+cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ;
+cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ;
+cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ;
+cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ;
+cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ;
+cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ;
+cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ;
+cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ;
+cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ;
+cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ;
+cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ;
+cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ;
+cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ;
+cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ;
+cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ;
+cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ;
+cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ;
+cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ;
+cpu-flags gcc OPTIONS : power : power : -mcpu=power ;
+cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ;
+cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ;
+cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ;
+cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ;
+cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ;
+cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ;
+cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ;
+cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ;
+cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ;
+cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ;
+cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ;
+cpu-flags gcc OPTIONS : s390x : z196 : -march=z196 ;
+cpu-flags gcc OPTIONS : s390x : zEC12 : -march=zEC12 ;
+cpu-flags gcc OPTIONS : s390x : z13 : -march=z13 ;
+cpu-flags gcc OPTIONS : s390x : z14 : -march=z14 ;
+cpu-flags gcc OPTIONS : s390x : z15 : -march=z15 ;
+# AIX variant of RS/6000 & PowerPC
+toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X64" ;
diff --git a/src/boost/tools/build/src/tools/gcc.py b/src/boost/tools/build/src/tools/gcc.py
new file mode 100644
index 000000000..1db7d5758
--- /dev/null
+++ b/src/boost/tools/build/src/tools/gcc.py
@@ -0,0 +1,871 @@
+# Status: being ported by Steven Watanabe
+# Base revision: 47077
+# TODO: common.jam needs to be ported
+# TODO: generators.jam needs to have register_c_compiler.
+#
+# Copyright 2001 David Abrahams.
+# Copyright 2002-2006 Rene Rivera.
+# Copyright 2002-2003 Vladimir Prus.
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov.
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko.
+# Copyright 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import os
+import subprocess
+import re
+
+import bjam
+
+from b2.tools import unix, common, rc, pch, builtin
+from b2.build import feature, type, toolset, generators, property_set
+from b2.build.property import Property
+from b2.util.utility import os_name, on_windows
+from b2.manager import get_manager
+from b2.build.generators import Generator
+from b2.build.toolset import flags
+from b2.util.utility import to_seq
+
+
+
+__debug = None
+
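+# Returns whether --debug-configuration was passed on the command line. The
+# result is computed on first use and cached.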
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+feature.extend('toolset', ['gcc'])
+
+
+toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll'])
+toolset.inherit_flags('gcc', 'unix')
+toolset.inherit_rules('gcc', 'unix')
+
+generators.override('gcc.prebuilt', 'builtin.prebuilt')
+generators.override('gcc.searched-lib-generator', 'searched-lib-generator')
+
+# Target naming is determined by types/lib.jam and the settings below this
+# comment.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (mingw):
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (cygwin) i.e. <target-os>cygwin
+# libxxx.a static library
+# xxx.dll DLL
+# libxxx.dll.a import library
+#
+# Note: the user can always override this by using the <tag>@rule feature.
+# These settings have been chosen so that mingw is in line with msvc naming
+# conventions. For cygwin the cygwin naming convention has been chosen.
+
+# Use the "o" suffix for object files produced by the gcc toolset on all
+# platforms.
+type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o')
+type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a')
+
+type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a')
+type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib')
+
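+# Patterns used to pull the target triplet out of 'gcc -dumpmachine' output and
+# the version number out of 'gcc -dumpversion' output.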
+__machine_match = re.compile('^([^ ]+)')
+__version_match = re.compile('^([0-9.]+)')
+
+def init(version = None, command = None, options = None):
+ """
+ Initializes the gcc toolset for the given version. If necessary, command may
+ be used to specify where the compiler is located. The parameter 'options' is a
+ space-delimited list of options, each one specified as
+ <option-name>option-value. Valid option names are: cxxflags, linkflags and
+ linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
+ and the default value will be selected based on the current OS.
+ Example:
+ using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+ """
+
+ options = to_seq(options)
+ command = to_seq(command)
+
+ # Information about the gcc command...
+ # The command.
+ command = to_seq(common.get_invocation_command('gcc', 'g++', command))
+ # The root directory of the tool install.
+ root = feature.get_values('<root>', options)
+ root = root[0] if root else ''
+ # The bin directory where to find the command to execute.
+ bin = None
+ # The flavor of compiler.
+ flavor = feature.get_values('<flavor>', options)
+ flavor = flavor[0] if flavor else ''
+ # Autodetect the root and bin dir if not given.
+ if command:
+ if not bin:
+ bin = common.get_absolute_tool_path(command[-1])
+ if not root:
+ root = os.path.dirname(bin)
+ # Autodetect the version and flavor if not given.
+ if command:
+ machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0]
+ machine = __machine_match.search(machine_info).group(1)
+
+ version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0]
+ version = __version_match.search(version_info).group(1)
+ if not flavor and machine.find('mingw') != -1:
+ flavor = 'mingw'
+
+ condition = None
+ if flavor:
+ condition = common.check_init_parameters('gcc', None,
+ ('version', version),
+ ('flavor', flavor))
+ else:
+ condition = common.check_init_parameters('gcc', None,
+ ('version', version))
+
+ if command:
+ command = command[0]
+
+ common.handle_options('gcc', condition, command, options)
+
+ linker = feature.get_values('<linker-type>', options)
+ if not linker:
+ if os_name() == 'OSF':
+ linker = 'osf'
+ elif os_name() == 'HPUX':
+            linker = 'hpux'
+ else:
+ linker = 'gnu'
+
+ init_link_flags('gcc', linker, condition)
+
+    # If gcc is installed in a non-standard location, we need to add
+ # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+ # rules).
+ if command:
+ # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+ # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+        # right ones. Note that we don't provide a clean way to build a 32-bit
+        # binary with a 64-bit compiler, but the user can always pass -m32 manually.
+ lib_path = [os.path.join(root, 'bin'),
+ os.path.join(root, 'lib'),
+ os.path.join(root, 'lib32'),
+ os.path.join(root, 'lib64')]
+ if debug():
+ print 'notice: using gcc libraries ::', condition, '::', lib_path
+ toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path)
+
+    # If it's not a system gcc install, we should adjust the various programs as
+    # needed to prefer using the install-specific versions. This is essential
+ # for correct use of MinGW and for cross-compiling.
+
+ # - The archive builder.
+ archiver = common.get_invocation_command('gcc',
+ 'ar', feature.get_values('<archiver>', options), [bin], path_last=True)
+ toolset.flags('gcc.archive', '.AR', condition, [archiver])
+ if debug():
+ print 'notice: using gcc archiver ::', condition, '::', archiver
+
+ # - Ranlib
+ ranlib = common.get_invocation_command('gcc',
+ 'ranlib', feature.get_values('<ranlib>', options), [bin], path_last=True)
+ toolset.flags('gcc.archive', '.RANLIB', condition, [ranlib])
+ if debug():
+        print 'notice: using gcc ranlib ::', condition, '::', ranlib
+
+ # - The resource compiler.
+ rc_command = common.get_invocation_command_nodefault('gcc',
+ 'windres', feature.get_values('<rc>', options), [bin], path_last=True)
+ rc_type = feature.get_values('<rc-type>', options)
+
+ if not rc_type:
+ rc_type = 'windres'
+
+ if not rc_command:
+        # If we can't find an RC compiler we fall back to a null RC compiler that
+ # creates empty object files. This allows the same Jamfiles to work
+ # across the board. The null RC uses the assembler to create the empty
+ # objects, so configure that.
+ rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True)
+ rc_type = 'null'
+ rc.configure([rc_command], condition, ['<rc-type>' + rc_type])
+
+###if [ os.name ] = NT
+###{
+### # This causes single-line command invocation to not go through .bat files,
+### # thus avoiding command-line length limitations.
+### JAMSHELL = % ;
+###}
+
+#FIXME: when register_c_compiler is moved to
+# generators, these should be updated
+builtin.register_c_compiler('gcc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc'])
+
+# pch support
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with the ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended
+# to the full name of the header.
+
+type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch')
+
+# GCC-specific pch generator.
+class GccPchGenerator(pch.PchGenerator):
+
+ # Inherit the __init__ method
+
+ def run_pch(self, project, name, prop_set, sources):
+ # Find the header in sources. Ignore any CPP sources.
+ header = None
+ for s in sources:
+ if type.is_derived(s.type(), 'H'):
+ header = s
+
+ # Error handling: Base header file name should be the same as the base
+ # precompiled header name.
+ header_name = header.name()
+ header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
+ if header_basename != name:
+ location = project.project_module
+ ###FIXME:
+ raise Exception()
+ ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
+
+ pch_file = Generator.run(self, project, name, prop_set, [header])
+
+ # return result of base class and pch-file property as usage-requirements
+ # FIXME: what about multiple results from generator.run?
+ return (property_set.create([Property('pch-file', pch_file[0]),
+ Property('cflags', '-Winvalid-pch')]),
+ pch_file)
+
+    # Calls the base version specifying the source's name as the name of the
+    # created target. As a result, the PCH will be named whatever.hpp.gch, and
+    # not whatever.gch.
+ def generated_targets(self, sources, prop_set, project, name = None):
+ name = sources[0].name()
+ return Generator.generated_targets(self, sources,
+ prop_set, project, name)
+
+# Note: the 'H' source type will catch both '.h' and '.hpp' headers. The latter
+# have the HPP type, but HPP is derived from H. The type of compilation is
+# determined entirely by the destination type.
+generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ]))
+generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ]))
+
+# Override default do-nothing generators.
+generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator')
+generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator')
+
+flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>'])
+
+# Declare flags and action for compilation
+flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
+flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
+flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
+
+flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
+flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
+flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
+
+flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w'])
+flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall'])
+flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic'])
+flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror'])
+
+flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
+
+flags('gcc.compile.c++', 'OPTIONS', ['<rtti>off'], ['-fno-rtti'])
+flags('gcc.compile.c++', 'OPTIONS', ['<exception-handling>off'], ['-fno-exceptions'])
+
+# On cygwin and mingw, gcc generates position independent code by default, and
+# warns if -fPIC is specified. This might not be the right way of checking if
+# we're using cygwin. For example, it's possible to run cygwin gcc from an NT
+# shell, or to use it for cross-compiling. But we'll solve that problem when
+# it's time. In that case we'll just add another parameter to 'init' and move
+# this logic inside 'init'.
+if not os_name () in ['CYGWIN', 'NT']:
+ # This logic will add -fPIC for all compilations:
+ #
+ # lib a : a.cpp b ;
+ # obj b : b.cpp ;
+ # exe c : c.cpp a d ;
+ # obj d : d.cpp ;
+ #
+    # This all is fine, except that 'd' will be compiled with -fPIC even though
+    # it's not needed, as 'd' is used only in an exe. However, it's hard to
+    # detect where a target is going to be used. Alternatively, we could set
+    # -fPIC only when the main target type is LIB, but then 'b' would be
+    # compiled without -fPIC. On x86-64 that will lead to link errors. So,
+    # compile everything with -fPIC.
+    #
+    # Yet another alternative would be to create a propagated <sharedable>
+    # feature, and set it when building shared libraries, but that's hard to
+    # implement and will increase target path length even more.
+ flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC'])
+
+if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX':
+ # OSF does have an option called -soname but it doesn't seem to work as
+ # expected, therefore it has been disabled.
+ HAVE_SONAME = ''
+ SONAME_OPTION = '-h'
+
+
+flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>'])
+flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>'])
+flags('gcc.compile', 'DEFINES', [], ['<define>'])
+flags('gcc.compile', 'INCLUDES', [], ['<include>'])
+
+engine = get_manager().engine()
+
+engine.register_action('gcc.compile.c++.pch',
+ '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
+
+engine.register_action('gcc.compile.c.pch',
+ '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
+
+
+def gcc_compile_cpp(targets, sources, properties):
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ extension = os.path.splitext (sources [0]) [1]
+ lang = ''
+ if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
+ lang = '-x c++'
+ get_manager().engine().set_target_variable (targets, 'LANG', lang)
+ engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
+
+def gcc_compile_c(targets, sources, properties):
+ engine = get_manager().engine()
+ # If we use the name g++ then default file suffix -> language mapping does
+ # not work. So have to pass -x option. Maybe, we can work around this by
+ # allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ engine.set_target_variable (targets, 'LANG', '-x c')
+ #}
+ engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
+
+engine.register_action(
+ 'gcc.compile.c++',
+ '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
+ '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' +
+ '-c -o "$(<:W)" "$(>:W)"',
+ function=gcc_compile_cpp,
+ bound_list=['PCH_FILE'])
+
+engine.register_action(
+ 'gcc.compile.c',
+ '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' +
+ '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+ function=gcc_compile_c,
+ bound_list=['PCH_FILE'])
+
+engine.register_action(
+ 'gcc.compile.c++.preprocess',
+ function=gcc_compile_cpp,
+ bound_list=['PCH_FILE'],
+ command="""
+ $(CONFIG_COMMAND) $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
+ """
+)
+
+engine.register_action(
+ 'gcc.compile.c.preprocess',
+ function=gcc_compile_c,
+ bound_list=['PCH_FILE'],
+ command="""
+ $(CONFIG_COMMAND) $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
+ """
+)
+
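+# Assembler sources are run through the C preprocessor first, hence the
+# '-x assembler-with-cpp' language selection.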
+def gcc_compile_asm(targets, sources, properties):
+ get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp')
+
+engine.register_action(
+ 'gcc.compile.asm',
+ '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+ function=gcc_compile_asm)
+
+
+class GccLinkingGenerator(unix.UnixLinkingGenerator):
+ """
+    The class which checks that we don't try to use the <runtime-link>static
+    property while creating or using a shared library, since it's not supported
+    by gcc/libc.
+ """
+ def run(self, project, name, ps, sources):
+ # TODO: Replace this with the use of a target-os property.
+
+ no_static_link = False
+ if bjam.variable('UNIX'):
+            no_static_link = True
+ ##FIXME: what does this mean?
+## {
+## switch [ modules.peek : JAMUNAME ]
+## {
+## case * : no-static-link = true ;
+## }
+## }
+
+ reason = None
+ if no_static_link and ps.get('runtime-link') == 'static':
+ if ps.get('link') == 'shared':
+                reason = "On gcc, a DLL can't be built with '<runtime-link>static'."
+ elif type.is_derived(self.target_types[0], 'EXE'):
+ for s in sources:
+ source_type = s.type()
+ if source_type and type.is_derived(source_type, 'SHARED_LIB'):
+                        reason = "On gcc, using DLLs together with the " +\
+                                 "<runtime-link>static option is not possible "
+ if reason:
+ print 'warning:', reason
+ print 'warning:',\
+ "It is suggested to use '<runtime-link>static' together",\
+ "with '<link>static'." ;
+ return
+ else:
+ generated_targets = unix.UnixLinkingGenerator.run(self, project,
+ name, ps, sources)
+ return generated_targets
+
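+# Register the linking generators. On Windows the linkers also consume import
+# libraries and, for DLLs, produce one; on other platforms plain LIB and OBJ
+# sources are enough.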
+if on_windows():
+ flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,'])
+ generators.register(
+ GccLinkingGenerator('gcc.link', True,
+ ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
+ [ 'EXE' ],
+ [ '<toolset>gcc' ]))
+ generators.register(
+ GccLinkingGenerator('gcc.link.dll', True,
+ ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
+ ['IMPORT_LIB', 'SHARED_LIB'],
+ ['<toolset>gcc']))
+else:
+ generators.register(
+ GccLinkingGenerator('gcc.link', True,
+ ['LIB', 'OBJ'],
+ ['EXE'],
+ ['<toolset>gcc']))
+ generators.register(
+ GccLinkingGenerator('gcc.link.dll', True,
+ ['LIB', 'OBJ'],
+ ['SHARED_LIB'],
+ ['<toolset>gcc']))
+
+# Declare flags for linking.
+# First, the common flags.
+flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
+flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>'])
+flags('gcc.link', 'LINKPATH', [], ['<library-path>'])
+flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
+flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
+flags('gcc.link', 'LIBRARIES', [], ['<library-file>'])
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
+if os_name() != 'HPUX':
+ flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static'])
+
+# Now, the vendor specific flags.
+# The parameter linker can be either gnu, darwin, osf, hpux or sun.
+def init_link_flags(toolset, linker, condition):
+ """
+ Now, the vendor specific flags.
+ The parameter linker can be either gnu, darwin, osf, hpux or sun.
+ """
+ toolset_link = toolset + '.link'
+ if linker == 'gnu':
+ # Strip the binary when no debugging is needed. We use --strip-all flag
+ # as opposed to -s since icc (intel's compiler) is generally
+ # option-compatible with and inherits from the gcc toolset, but does not
+ # support -s.
+
+ # FIXME: what does unchecked translate to?
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all']) # : unchecked ;
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+ flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
+ flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ;
+ flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ;
+
+ # gnu ld has the ability to change the search behaviour for libraries
+ # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
+ # and change search for -l switches that follow them. The following list
+ # shows the tried variants.
+ # The search stops at the first variant that has a match.
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw,cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw,cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+ # On *nix mixing shared libs with static runtime is not a good idea.
+ flags(toolset_link, 'FINDLIBS-ST-PFX',
+ map(lambda x: x + '/<runtime-link>shared', condition),
+ ['-Wl,-Bstatic']) # : unchecked ;
+ flags(toolset_link, 'FINDLIBS-SA-PFX',
+ map(lambda x: x + '/<runtime-link>shared', condition),
+ ['-Wl,-Bdynamic']) # : unchecked ;
+
+ # On windows allow mixing of static and dynamic libs with static
+ # runtime.
+ flags(toolset_link, 'FINDLIBS-ST-PFX',
+ map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+ ['-Wl,-Bstatic']) # : unchecked ;
+ flags(toolset_link, 'FINDLIBS-SA-PFX',
+ map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+ ['-Wl,-Bdynamic']) # : unchecked ;
+ flags(toolset_link, 'OPTIONS',
+ map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+ ['-Wl,-Bstatic']) # : unchecked ;
+
+ elif linker == 'darwin':
+        # On Darwin, the -s option to ld does not work unless we pass -static,
+        # and passing -static unconditionally is a bad idea. So, do not pass -s
+        # at all; darwin.jam will use a separate 'strip' invocation.
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+ flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
+
+ elif linker == 'osf':
+ # No --strip-all, just -s.
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
+ # : unchecked ;
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        # This does not support -R.
+ flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ;
+ # -rpath-link is not supported at all.
+
+ elif linker == 'sun':
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
+ # : unchecked ;
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        # Solaris linker does not have a separate -rpath-link, but allows using
+        # -L for the same purpose.
+ flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ;
+
+ # This permits shared libraries with non-PIC code on Solaris.
+        # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
+        # following is not needed. Whether -fPIC should be hardcoded is a
+        # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text'])
+ # : unchecked ;
+
+ elif linker == 'hpux':
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition),
+ ['-Wl,-s']) # : unchecked ;
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition),
+ ['-fPIC']) # : unchecked ;
+
+ else:
+ # FIXME:
+ errors.user_error(
+ "$(toolset) initialization: invalid linker '$(linker)' " +
+ "The value '$(linker)' specified for <linker> is not recognized. " +
+ "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'")
+
+# Declare actions for linking.
+def gcc_link(targets, sources, properties):
+ engine = get_manager().engine()
+ engine.set_target_variable(targets, 'SPACE', ' ')
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only gcc links, it might be a
+ # good idea to serialize all links.
+ engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+
+engine.register_action(
+ 'gcc.link',
+ '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
+ '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
+ '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' +
+ '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
+ '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
+ '$(OPTIONS) $(USER_OPTIONS)',
+ function=gcc_link,
+ bound_list=['LIBRARIES'])
+
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
+# always available.
+__AR = 'ar'
+
+flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>'])
+
+def gcc_archive(targets, sources, properties):
+ # Always remove archive and start again. Here's rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ clean = targets[0] + '(clean)'
+ bjam.call('TEMPORARY', clean)
+ bjam.call('NOCARE', clean)
+ engine = get_manager().engine()
+ engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE'))
+ engine.add_dependency(clean, sources)
+ engine.add_dependency(targets, clean)
+ engine.set_update_action('common.RmTemps', clean, targets)
+
+# Declare action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove archive, we don't care about replacement, but there's no option "add
+# without replacement".
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reasons.
+engine.register_action('gcc.archive',
+ '''"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+ ''',
+ function=gcc_archive,
+ flags=['piecemeal'])
+
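+# Same set-up as gcc_link, plus the SONAME-related variables detected at module
+# load time.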
+def gcc_link_dll(targets, sources, properties):
+ engine = get_manager().engine()
+ engine.set_target_variable(targets, 'SPACE', ' ')
+ engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+ engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME)
+ engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION)
+
+engine.register_action(
+ 'gcc.link.dll',
+    # Differs from 'link' above only by -shared.
+ '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
+ '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
+ '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' +
+ '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' +
+ '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
+ '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
+ '$(OPTIONS) $(USER_OPTIONS)',
+ function = gcc_link_dll,
+ bound_list=['LIBRARIES'])
+
+# Set up threading support. It's somewhat contrived, so perform it at the end,
+# to avoid cluttering other code.
+
+if on_windows():
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
+elif bjam.variable('UNIX'):
+ jamuname = bjam.variable('JAMUNAME')
+ host_os_name = jamuname[0]
+ if host_os_name.startswith('SunOS'):
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
+ flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+ elif host_os_name == 'BeOS':
+ # BeOS has no threading options, don't set anything here.
+ pass
+ elif host_os_name == 'Haiku':
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-lroot'])
+ # there is no -lrt on Haiku, and -pthread is implicit
+ elif host_os_name.endswith('BSD'):
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+ # there is no -lrt on BSD
+ elif host_os_name == 'DragonFly':
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+ # there is no -lrt on BSD - DragonFly is a FreeBSD variant,
+        # which annoyingly doesn't say it's a *BSD.
+ elif host_os_name == 'IRIX':
+ # gcc on IRIX does not support multi-threading, don't set anything here.
+ pass
+ elif host_os_name == 'Darwin':
+ # Darwin has no threading options, don't set anything here.
+ pass
+ else:
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+ flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+
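+# Maps an architecture/instruction-set pair to the corresponding compiler
+# options; this is the Python counterpart of the cpu-flags rule in the jam
+# version of this toolset.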
+def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
+ #FIXME: for some reason this fails. Probably out of date feature code
+## if default:
+## flags(toolset, variable,
+## ['<architecture>' + architecture + '/<instruction-set>'],
+## values)
+ flags(toolset, variable,
+ #FIXME: same as above
+ [##'<architecture>/<instruction-set>' + instruction_set,
+ '<architecture>' + architecture + '/<instruction-set>' + instruction_set],
+ values)
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'native', ['-march=native'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'core2', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe-l', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'allendale', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'wolfdale', ['-march=core2', '-msse4.1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'merom', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'merom-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'kentsfield', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'kentsfield-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'yorksfield', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'penryn', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'corei7', ['-march=corei7'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'nehalem', ['-march=corei7'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'corei7-avx', ['-march=corei7-avx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'sandy-bridge', ['-march=corei7-avx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'core-avx-i', ['-march=core-avx-i'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'ivy-bridge', ['-march=core-avx-i'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'haswell', ['-march=core-avx-i', '-mavx2', '-mfma', '-mbmi', '-mbmi2', '-mlzcnt'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp'])
+##
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k8-sse3', ['-march=k8-sse3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron-sse3', ['-march=opteron-sse3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64-sse3', ['-march=athlon64-sse3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'amdfam10', ['-march=amdfam10'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'barcelona', ['-march=barcelona'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver1', ['-march=bdver1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver2', ['-march=bdver2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver3', ['-march=bdver3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'btver1', ['-march=btver1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'btver2', ['-march=btver2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2'])
+##
+cpu_flags('gcc', 'OPTIONS', 'x86', 'atom', ['-march=atom'])
+# Sparc
+flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3'])
+# RS/6000 & PowerPC
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403'])
+cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505'])
+cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601'])
+cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602'])
+cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603'])
+cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e'])
+cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604'])
+cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e'])
+cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620'])
+cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630'])
+cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740'])
+cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400'])
+cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450'])
+cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750'])
+cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801'])
+cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821'])
+cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823'])
+cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860'])
+cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970'])
+cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64'])
+cpu_flags('gcc', 'OPTIONS', 's390x', 'z196', ['-march=z196'])
+cpu_flags('gcc', 'OPTIONS', 's390x', 'zEC12', ['-march=zEC12'])
+cpu_flags('gcc', 'OPTIONS', 's390x', 'z13', ['-march=z13'])
+cpu_flags('gcc', 'OPTIONS', 's390x', 'z14', ['-march=z14'])
+cpu_flags('gcc', 'OPTIONS', 's390x', 'z15', ['-march=z15'])
+# AIX variant of RS/6000 & PowerPC
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32'])
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64'])
+flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X64'])
diff --git a/src/boost/tools/build/src/tools/generate.jam b/src/boost/tools/build/src/tools/generate.jam
new file mode 100644
index 000000000..7eaecc889
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generate.jam
@@ -0,0 +1,111 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Declares main target 'generate' used to produce targets by calling a
+# user-provided rule that takes and produces virtual targets.
+
+import "class" : new ;
+import errors ;
+import feature ;
+import param ;
+import project ;
+import property ;
+import property-set ;
+import targets ;
+import regex ;
+
+
+feature.feature generating-rule : : free ;
+
+
+class generated-target-class : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources)
+ : $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ if ! [ $(self.requirements).get <generating-rule> ]
+ {
+ errors.user-error "The generate rule requires the <generating-rule>"
+ "property to be set" ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ local result ;
+ local gr = [ $(property-set).get <generating-rule> ] ;
+
+ # FIXME: this is a copy-paste from virtual-target.jam. We should add a
+ # utility rule to call a rule like this.
+ local rule-name = [ MATCH ^@(.*) : $(gr) ] ;
+ if $(rule-name)
+ {
+ if $(gr[2])
+ {
+ local target-name = [ full-name ] ;
+ errors.user-error "Multiple <generating-rule> properties"
+ "encountered for target $(target-name)." ;
+ }
+
+ result = [ indirect.call $(rule-name) $(self.project) $(name)
+ : $(property-set) : $(sources) ] ;
+
+ if ! $(result)
+ {
+ ECHO "warning: Unable to construct" [ full-name ] ;
+ }
+ }
+
+ local ur ;
+ local targets ;
+
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ ur = $(result[1]) ;
+ targets = $(result[2-]) ;
+ }
+ else
+ {
+ ur = [ property-set.empty ] ;
+ targets = $(result) ;
+ }
+ }
+ # FIXME: the following loop should be doable using sequence.transform or
+ # some similar utility rule.
+ local rt ;
+ for local t in $(targets)
+ {
+ rt += [ virtual-target.register $(t) ] ;
+ }
+ return $(ur) $(rt) ;
+ }
+}
+
+
+rule generate ( name : sources * : requirements * : default-build *
+ : usage-requirements * )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new generated-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ;
+}
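+
+# A Jamfile would typically use this rule along the following lines (purely
+# illustrative; the rule and target names are made up):
+#
+#   import generate ;
+#   generate out : in : <generating-rule>@my-rule ;
+#
+# where 'my-rule' is a user-supplied rule taking ( project name : property-set
+# : sources * ) and returning virtual targets, optionally preceded by a
+# property-set carrying usage requirements, as handled by 'construct' above.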
+
+IMPORT $(__name__) : generate : : generate ;
diff --git a/src/boost/tools/build/src/tools/generators/__init_generators__.jam b/src/boost/tools/build/src/tools/generators/__init_generators__.jam
new file mode 100644
index 000000000..5f2483305
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/__init_generators__.jam
@@ -0,0 +1,23 @@
+# Copyright 2017 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Here we automatically load any "generator" modules in this directory.
+
+local key = generator ;
+
+import os path modules ;
+
+.this-module's-file = [ modules.binding $(__name__) ] ;
+.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ;
+.to-load-jamfiles = [ path.glob $(.this-module's-dir) : *-$(key).jam ] ;
+.to-load-modules = [ MATCH ^(.*)\.jam$ : $(.to-load-jamfiles) ] ;
+
+# A loop over all matched modules in this directory
+for local m in $(.to-load-modules)
+{
+ m = [ path.basename $(m) ] ;
+ m = $(key)s/$(m) ;
+ import $(m) ;
+}
diff --git a/src/boost/tools/build/src/tools/generators/archive-generator.jam b/src/boost/tools/build/src/tools/generators/archive-generator.jam
new file mode 100644
index 000000000..6afc8e7aa
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/archive-generator.jam
@@ -0,0 +1,74 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+
+# The generator class for handling STATIC_LIB creation.
+#
+class archive-generator : generator
+{
+ import generators ;
+ import property-set ;
+
+ rule __init__ ( id composing ? : source-types + : target-types +
+ : requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ property-set = [ $(property-set).add-raw <relevant>link ] ;
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+        # For static linking, if we get a library in sources, we cannot directly
+        # link to it, so we need to cause our dependents to link to that
+        # library. There are two approaches:
+ # - adding the library to the list of returned targets.
+ # - using the <library> usage requirements.
+ # The problem with the first is:
+ #
+ # lib a1 : : <file>liba1.a ;
+ # lib a2 : a2.cpp a1 : <link>static ;
+ # install dist : a2 ;
+ #
+ # here we will try to install 'a1', even though it is not necessary in
+ # the general case. With the second approach, even indirect dependants
+ # will link to the library, but it should not cause any harm. So, return
+ # all LIB sources together with created targets, so that dependants link
+ # to them.
+ local usage-requirements = <relevant>link ;
+ if [ $(property-set).get <link> ] = static
+ {
+ for local t in $(sources)
+ {
+ if [ $(t).type ] && [ type.is-derived [ $(t).type ] LIB ]
+ {
+ usage-requirements += <library>$(t) ;
+ }
+ }
+ }
+
+ return [ generators.add-usage-requirements $(result) : $(usage-requirements) ] ;
+ }
+}
+
+
+rule register-archiver ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new archive-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+IMPORT $(__name__) : register-archiver : : generators.register-archiver ;
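+
+# Toolsets typically invoke this through the imported name, along these lines
+# (illustrative only):
+#
+#   generators.register-archiver gcc.archive : OBJ : STATIC_LIB : <toolset>gcc ;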
diff --git a/src/boost/tools/build/src/tools/generators/c-compiling-generator.jam b/src/boost/tools/build/src/tools/generators/c-compiling-generator.jam
new file mode 100644
index 000000000..00cd42a4d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/c-compiling-generator.jam
@@ -0,0 +1,70 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+import virtual-target ;
+
+# Declare a special compiler generator. The only thing it does is changing the
+# type used to represent 'action' in the constructed dependency graph to
+# 'compile-action'. That class in turn adds additional include paths to handle
+# cases when a source file includes headers which are generated themselves.
+#
+class C-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) :
+ $(requirements) : $(optional-properties) ;
+ }
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+}
+
+
+rule register-c-compiler ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+{
+ generators.register [ new C-compiling-generator $(id) : $(source-types) :
+ $(target-types) : $(requirements) : $(optional-properties) ] ;
+}
+
+# FIXME: this is ugly, should find a better way (we would like client code to
+# register all generators as "generators.some-rule" instead of
+# "some-module.some-rule".)
+#
+IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
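+
+# For example, the hp_cxx toolset registers its compile actions through the
+# alias imported above:
+#
+#    generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ;
+#    generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ;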
+
+class compile-action : action
+{
+ import sequence ;
+
+ rule __init__ ( targets * : sources * : action-name : properties * )
+ {
+ action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
+ }
+
+ # For all virtual targets for the same dependency graph as self, i.e. which
+ # belong to the same main target, add their directories to the include path.
+ #
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ if $(s)
+ {
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes "include" : H ] ] ;
+ }
+ else
+ {
+ return $(property-set) ;
+ }
+ }
+}
diff --git a/src/boost/tools/build/src/tools/generators/dummy-generator.jam b/src/boost/tools/build/src/tools/generators/dummy-generator.jam
new file mode 100644
index 000000000..ee93cfe88
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/dummy-generator.jam
@@ -0,0 +1,20 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import generators ;
+
+# Generator that accepts everything and produces nothing. Useful as a general
+# fallback for toolset-specific actions like PCH generation.
+#
+class dummy-generator : generator
+{
+ import property-set ;
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ return [ property-set.empty ] ;
+ }
+}
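+
+# Illustrative sketch: the pch module registers this class as a do-nothing
+# fallback which concrete toolsets then override (the generator ids match the
+# overrides used by the intel toolsets; the C_PCH/CPP_PCH target types are an
+# assumption of this sketch).
+#
+#    generators.register [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
+#    generators.register [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;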
diff --git a/src/boost/tools/build/src/tools/generators/lib-generator.jam b/src/boost/tools/build/src/tools/generators/lib-generator.jam
new file mode 100644
index 000000000..2d9ce4aee
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/lib-generator.jam
@@ -0,0 +1,121 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+import param ;
+import project ;
+import targets ;
+
+# The generator class for libraries (target type LIB). Depending on properties
+# it will request building of the appropriate specific library type --
+# SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
+#
+class lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8)
+ : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) :
+ $(17) : $(18) : $(19) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ # The lib generator is composing, and can be only invoked with an
+ # explicit name. This check is present in generator.run (and so in
+ # builtin.linking-generator) but duplicated here to avoid doing extra
+ # work.
+ if $(name)
+ {
+ local properties = [ $(property-set).raw ] ;
+ # Determine the needed target type.
+ local actual-type ;
+ # <source>files can be generated by <conditional>@rule feature
+ # in which case we do not consider it a SEARCHED_LIB type.
+ if ! <source> in $(properties:G) &&
+ ( <search> in $(properties:G) || <name> in $(properties:G) )
+ {
+ actual-type = SEARCHED_LIB ;
+ }
+ else if <file> in $(properties:G)
+ {
+ actual-type = LIB ;
+ }
+ else if <link>shared in $(properties)
+ {
+ actual-type = SHARED_LIB ;
+ }
+ else
+ {
+ actual-type = STATIC_LIB ;
+ }
+ property-set = [ $(property-set).add-raw <main-target-type>LIB <relevant>link ] ;
+ # Construct the target.
+ local result = [ generators.construct $(project) $(name) : $(actual-type)
+ : $(property-set) : $(sources) ] ;
+ return [ $(result[1]).add-raw <relevant>link ] $(result[2-]) ;
+ }
+ }
+
+ rule viable-source-types ( )
+ {
+ return * ;
+ }
+}
+
+generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
+
+# The implementation of the 'lib' rule. Beyond the standard syntax, that rule
+# also allows a simplified form: "lib a b c ;".
+#
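+# For example (target and library names are illustrative only):
+#
+#    lib z : : <name>z <search>/usr/lib ;   # pre-installed library, searched by name
+#    lib helpers : helpers.cpp ;            # library built from sources
+#    lib a b c ;                            # shorthand for three searched libraries
+#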
+rule lib ( names + : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+ if $(names[2])
+ {
+ if <name> in $(requirements:G)
+ {
+ import errors ;
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify the <name> feature." ;
+ }
+ if $(sources)
+ {
+ import errors ;
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify sources." ;
+ }
+ }
+
+ # This is a circular module dependency so it must be imported here.
+ import targets ;
+
+ local project = [ project.current ] ;
+ local result ;
+
+ for local name in $(names)
+ {
+ local r = $(requirements) ;
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if ! $(sources) && ! <name> in $(requirements:G)
+ && ! <file> in $(requirements:G)
+ {
+ r += <name>$(name) ;
+ }
+ result += [ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : LIB
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(r) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+ }
+ return $(result) ;
+}
+IMPORT $(__name__) : lib : : lib ;
diff --git a/src/boost/tools/build/src/tools/generators/linking-generator.jam b/src/boost/tools/build/src/tools/generators/linking-generator.jam
new file mode 100644
index 000000000..37277a0b2
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/linking-generator.jam
@@ -0,0 +1,179 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+
+# The generator class for handling EXE and SHARED_LIB creation.
+#
+class linking-generator : generator
+{
+ import path ;
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule __init__ ( id
+ composing ? : # The generator will be composing if a non-empty
+ # string is passed or the parameter is not given. To
+ # make the generator non-composing, pass an empty
+ # string ("").
+ source-types + :
+ target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ # Add <library-path> properties for all searched libraries.
+ local extra = <relevant>link ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = SEARCHED_LIB
+ {
+ local search = [ $(s).search ] ;
+ extra += <library-path>$(search) ;
+ }
+ }
+
+        # It is possible that sources include shared libraries that did not come
+        # from 'lib' targets, e.g. .so files specified as sources. In this case
+        # we have to add extra dll-path properties and propagate extra xdll-path
+        # properties so that applications linking to us will get xdll-path to
+ # those libraries.
+ local extra-xdll-paths ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] && [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
+ {
+ local location = [ path.root [ $(s).name ]
+ [ $(s).path ] ] ;
+ extra-xdll-paths += [ path.parent $(location) ] ;
+ }
+ }
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: do not need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, python extensions) can not
+ # hardcode paths to dependent libraries.
+ if [ $(property-set).get <hardcode-dll-paths> ] = true
+ && [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ local xdll-path = [ $(property-set).get <xdll-path> ] ;
+ extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
+ }
+
+ if $(extra)
+ {
+ property-set = [ $(property-set).add-raw $(extra) ] ;
+ }
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ local ur ;
+ if $(result)
+ {
+ ur = [ extra-usage-requirements $(result[2-]) : $(property-set) ] ;
+ ur = [ $(ur).add-raw
+ <relevant>link <xdll-path>$(extra-xdll-paths) ] ;
+ ur = [ $(ur).add $(result[1]) ] ;
+ }
+ return $(ur) $(result[2-]) ;
+ }
+
+ rule extra-usage-requirements ( created-targets * : property-set )
+ {
+ local result = [ property-set.empty ] ;
+ local extra ;
+
+ # Add appropriate <xdll-path> usage requirements.
+ local raw = [ $(property-set).raw ] ;
+ if <link>shared in $(raw)
+ {
+ local paths ;
+ local pwd = [ path.pwd ] ;
+ for local t in $(created-targets)
+ {
+ if [ type.is-derived [ $(t).type ] SHARED_LIB ]
+ {
+ paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
+ }
+ }
+ extra += $(paths:G=<xdll-path>) ;
+ }
+
+ # We need to pass <xdll-path> features that we've got from sources,
+    # because if a shared library is built, an exe using it needs to know the
+    # paths to the other shared libraries this one depends on in order to
+    # find them all at runtime.
+
+ # Just pass all features in property-set, it is theoretically possible
+ # that we will propagate <xdll-path> features explicitly specified by
+ # the user, but then the user is to blame for using an internal feature.
+ local values = [ $(property-set).get <xdll-path> ] ;
+ extra += $(values:G=<xdll-path>) ;
+
+ if $(extra)
+ {
+ result = [ property-set.create $(extra) ] ;
+ }
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ; # Sources to pass to inherited rule.
+ local properties2 ; # Properties to pass to inherited rule.
+ local libraries ; # Library sources.
+
+ # Searched libraries are not passed as arguments to the linker but via
+ # some option. So, we pass them to the action using a property.
+ properties2 = [ $(property-set).raw ] ;
+ local fsa ;
+ local fst ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] && [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
+ {
+ local name = [ $(s).name ] ;
+ if [ $(s).shared ]
+ {
+ fsa += $(name) ;
+ }
+ else
+ {
+ fst += $(name) ;
+ }
+ }
+ else
+ {
+ sources2 += $(s) ;
+ }
+ }
+ properties2 += <find-shared-library>$(fsa:J=&&)
+ <find-static-library>$(fst:J=&&) ;
+
+ return [ generator.generated-targets $(sources2)
+ : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
+ }
+}
+
+
+rule register-linker ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new linking-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+IMPORT $(__name__) : register-linker : : generators.register-linker ;
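+
+# A minimal illustrative sketch (hypothetical toolset name; the source type
+# list is an assumption): a toolset would register its link action via the
+# alias imported above.
+#
+#    generators.register-linker mytool.link : OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB
+#        : EXE : <toolset>mytool ;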
diff --git a/src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam b/src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam
new file mode 100644
index 000000000..00c8e6bdc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam
@@ -0,0 +1,30 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+
+class prebuilt-lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8)
+ : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) :
+ $(17) : $(18) : $(19) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
+
+generators.override builtin.prebuilt : builtin.lib-generator ;
+ \ No newline at end of file
diff --git a/src/boost/tools/build/src/tools/generators/searched-lib-generator.jam b/src/boost/tools/build/src/tools/generators/searched-lib-generator.jam
new file mode 100644
index 000000000..b3435daa3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/generators/searched-lib-generator.jam
@@ -0,0 +1,97 @@
+# Copyright 2002-2017 Rene Rivera
+# Copyright 2002-2017 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+
+class searched-lib-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( )
+ {
+ # The requirements cause the generators to be tried *only* when we are
+ # building a lib target with a 'search' feature. This seems ugly --- all
+ # we want is to make sure searched-lib-generator is not invoked deep
+ # inside transformation search to produce intermediate targets.
+ generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if $(name)
+ {
+ # If 'name' is empty, it means we have not been called to build a
+ # top-level target. In this case, we just fail immediately, because
+ # searched-lib-generator cannot be used to produce intermediate
+ # targets.
+
+ local properties = [ $(property-set).raw ] ;
+ local shared ;
+ if <link>shared in $(properties)
+ {
+ shared = true ;
+ }
+
+ local search = [ feature.get-values <search> : $(properties) ] ;
+
+ local a = [ new null-action [ $(property-set).add-raw <relevant>link ] ] ;
+ local lib-name = [ feature.get-values <name> : $(properties) ] ;
+ lib-name ?= $(name) ;
+ local t = [ new searched-lib-target $(lib-name) : $(project)
+ : $(shared) : $(search) : $(a) ] ;
+ # We return sources for a simple reason. If there is
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to 'png'
+ # will link to 'z', too.
+ return [ property-set.create <xdll-path>$(search) <relevant>link ]
+ [ virtual-target.register $(t) ] $(sources) ;
+ }
+ }
+}
+
+generators.register [ new searched-lib-generator ] ;
+
+class searched-lib-target : abstract-file-target
+{
+ rule __init__ ( name
+ : project
+ : shared ?
+ : search *
+ : action
+ )
+ {
+ abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
+ : $(action) : ;
+
+ self.shared = $(shared) ;
+ self.search = $(search) ;
+ }
+
+ rule shared ( )
+ {
+ return $(self.shared) ;
+ }
+
+ rule search ( )
+ {
+ return $(self.search) ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ }
+
+ rule relevant ( )
+ {
+ return [ property-set.create <relevant>link ] ;
+ }
+
+ rule path ( )
+ {
+ }
+}
diff --git a/src/boost/tools/build/src/tools/gettext.jam b/src/boost/tools/build/src/tools/gettext.jam
new file mode 100644
index 000000000..35827d750
--- /dev/null
+++ b/src/boost/tools/build/src/tools/gettext.jam
@@ -0,0 +1,230 @@
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module supports GNU gettext internationalization utilities.
+#
+# It provides two main target rules: 'gettext.catalog', used for
+# creating machine-readable catalogs from translation files, and
+# 'gettext.update', used to update translation files from modified
+# sources.
+#
+# To add i18n support to your application you should follow these
+# steps.
+#
+# - Decide on a file name which will contain translations and
+# what main target name will be used to update it. For example::
+#
+# gettext.update update-russian : russian.po a.cpp my_app ;
+#
+# - Create the initial translation file by running::
+#
+# bjam update-russian
+#
+# - Edit russian.po. For example, you might change fields like LastTranslator.
+#
+# - Create a main target for final message catalog::
+#
+# gettext.catalog russian : russian.po ;
+#
+# The machine-readable catalog will be updated whenever you update
+# "russian.po". The "russian.po" file will be updated only on explicit
+# request. When you're ready to update translations, you should
+#
+# - Run::
+#
+# bjam update-russian
+#
+# - Edit "russian.po" in appropriate editor.
+#
+# The next bjam run will convert "russian.po" into machine-readable form.
+#
+# By default, translations are marked by 'i18n' call. The 'gettext.keyword'
+# feature can be used to alter this.
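+#
+# For example, to scan for a custom marker function 'translate' instead of
+# 'i18n' (the target names are illustrative; the feature is declared further
+# down in this module)::
+#
+#   gettext.update update-russian : russian.po my_app
+#       : <gettext.keyword>translate ;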
+
+
+import targets ;
+import property-set ;
+import virtual-target ;
+import "class" : new ;
+import project ;
+import type ;
+import generators ;
+import errors ;
+import feature : feature ;
+import toolset : flags ;
+import regex ;
+
+.path = "" ;
+
+# Initializes the gettext module.
+rule init ( path ? # Path where all tools are located. If not specified,
+ # they should be in PATH.
+ )
+{
+ if $(.initialized) && $(.path) != $(path)
+ {
+ errors.error "Attempt to reconfigure with different path" ;
+ }
+ .initialized = true ;
+ if $(path)
+ {
+ .path = $(path)/ ;
+ }
+}
+
+# Creates a main target 'name', which, when updated, will cause
+# file 'existing-translation' to be updated with translations
+# extracted from 'sources'. It's possible to specify a main target
+# in sources --- in which case all targets from the dependency graph
+# of those main targets will be scanned, provided they are of an
+# appropriate type. The 'gettext.types' feature can be used to
+# control the types.
+#
+# The target will be updated only if explicitly requested on the
+# command line.
+rule update ( name : existing-translation sources + : requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : gettext.UPDATE :
+ $(existing-translation) $(sources)
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ ] ;
+ $(project).mark-target-as-explicit $(name) ;
+}
+
+
+# The human editable source, containing translation.
+type.register gettext.PO : po ;
+# The machine readable message catalog.
+type.register gettext.catalog : mo ;
+# Intermediate type produce by extracting translations from
+# sources.
+type.register gettext.POT : pot ;
+# Pseudo type used to invoke update-translations generator
+type.register gettext.UPDATE ;
+
+# Identifies the keyword that should be used when scanning sources.
+# Default: i18n
+feature gettext.keyword : : free ;
+# Contains a space-separated list of source types which should be scanned.
+# Default: "C CPP"
+feature gettext.types : : free ;
+
+generators.register-standard gettext.compile : gettext.PO : gettext.catalog ;
+
+class update-translations-generator : generator
+{
+ import regex : split ;
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ # The rule should be called with at least two sources. The first source
+ # is the translation (.po) file to update. The remaining sources are targets
+    # which should be scanned for new messages. All source files for those targets
+ # will be found and passed to the 'xgettext' utility, which extracts the
+ # messages for localization. Those messages will be merged to the .po file.
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local types = [ $(property-set).get <gettext.types> ] ;
+ types ?= "C CPP" ;
+ types = [ regex.split $(types) " " ] ;
+
+ local keywords = [ $(property-set).get <gettext.keyword> ] ;
+ property-set = [ property-set.create $(keywords:G=<gettext.keyword>) ] ;
+
+        # First determine the list of sources that must be scanned for
+ # messages.
+ local all-sources ;
+ # CONSIDER: I'm not sure if the logic should be the same as for 'stage':
+ # i.e. following dependency properties as well.
+ for local s in $(sources[2-])
+ {
+ all-sources += [ virtual-target.traverse $(s) : : include-sources ] ;
+ }
+ local right-sources ;
+ for local s in $(all-sources)
+ {
+ if [ $(s).type ] in $(types)
+ {
+ right-sources += $(s) ;
+ }
+ }
+
+ local .constructed ;
+ if $(right-sources)
+ {
+ # Create the POT file, which will contain list of messages extracted
+ # from the sources.
+ local extract =
+ [ new action $(right-sources) : gettext.extract : $(property-set) ] ;
+ local new-messages = [ new file-target $(name) : gettext.POT
+ : $(project) : $(extract) ] ;
+
+ # Create a notfile target which will update the existing translation file
+ # with new messages.
+ local a = [ new action $(sources[1]) $(new-messages)
+ : gettext.update-po-dispatch ] ;
+ local r = [ new notfile-target $(name) : $(project) : $(a) ] ;
+ .constructed = [ virtual-target.register $(r) ] ;
+ }
+ else
+ {
+ errors.error "No source could be scanned by gettext tools" ;
+ }
+ return $(.constructed) ;
+ }
+}
+generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ;
+
+flags gettext.extract KEYWORD <gettext.keyword> ;
+actions extract
+{
+ $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>)
+}
+
+# Really updates the po file. The tricky part is that
+# we're actually updating one of the sources:
+# $(<) is the NOTFILE target we're updating
+# $(>[1]) is the PO file to be really updated.
+# $(>[2]) is the PO file created from sources.
+#
+# When the file to be updated does not exist (during the
+# first run), we need to copy the file created from sources.
+# In all other cases, we need to update the file.
+rule update-po-dispatch
+{
+ NOCARE $(>[1]) ;
+ gettext.create-po $(<) : $(>) ;
+ gettext.update-po $(<) : $(>) ;
+ _ on $(<) = " " ;
+ ok on $(<) = "" ;
+ EXISTING_PO on $(<) = $(>[1]) ;
+}
+
+# Due to fancy interaction of existing and updated, this rule can be called with
+# one source, in which case we copy the lonely source into EXISTING_PO, or with
+# two sources, in which case the action body expands to nothing. I'd really like
+# to have a "missing" action modifier.
+actions quietly existing updated create-po bind EXISTING_PO
+{
+ cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok))
+}
+
+actions updated update-po bind EXISTING_PO
+{
+ $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])"
+}
+
+actions gettext.compile
+{
+ $(.path)msgfmt -o $(<) $(>)
+}
+
+IMPORT $(__name__) : update : : gettext.update ;
diff --git a/src/boost/tools/build/src/tools/gfortran.jam b/src/boost/tools/build/src/tools/gfortran.jam
new file mode 100644
index 000000000..0aa69b85c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/gfortran.jam
@@ -0,0 +1,39 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags gfortran OPTIONS <fflags> ;
+
+flags gfortran OPTIONS <optimization>off : -O0 ;
+flags gfortran OPTIONS <optimization>speed : -O3 ;
+flags gfortran OPTIONS <optimization>space : -Os ;
+
+flags gfortran OPTIONS <debug-symbols>on : -g ;
+flags gfortran OPTIONS <profiling>on : -pg ;
+
+flags gfortran OPTIONS <link>shared/<main-target-type>LIB : -fPIC ;
+
+flags gfortran DEFINES <define> ;
+flags gfortran INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ;
diff --git a/src/boost/tools/build/src/tools/hp_cxx.jam b/src/boost/tools/build/src/tools/hp_cxx.jam
new file mode 100644
index 000000000..82ef8080d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/hp_cxx.jam
@@ -0,0 +1,222 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2004, 2005 Markus Schoepflin.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.hp_cxx]]
+= HP C++ Compiler for Tru64 Unix
+
+The `hp_cxx` module supports the
+http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN[HP C++ Compiler]
+for Tru64 Unix.
+
+The module is initialized using the following syntax:
+
+----
+using hp_cxx : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, B2 will search for a binary
+named `cxx` in PATH.
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
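+
+For example, a minimal hypothetical configuration (version and command are
+illustrative only):
+
+----
+using hp_cxx : 6.5 : cxx ;
+----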
+|# # end::doc[]
+
+#
+# HP CXX compiler
+# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN
+#
+#
+# Notes on this toolset:
+#
+# - Because of very subtle issues with the default ansi mode, strict_ansi mode
+# is used for compilation. One example of things that don't work correctly in
+# the default ansi mode is overload resolution of function templates when
+# mixed with non-template functions.
+#
+# - For template instantiation "-timplicit_local" is used. Previously,
+# "-tlocal" has been tried to avoid the need for a template repository
+# but this doesn't work with manually instantiated templates. "-tweak"
+# has not been used to avoid the stream of warning messages issued by
+# ar or ld when creating a library or linking an application.
+#
+# - Debug symbols are generated with "-g3", as this works both in debug and
+# release mode. When compiling C++ code without optimization, we additionally
+# use "-gall", which generates full symbol table information for all classes,
+# structs, and unions. As this turns off optimization, it can't be used when
+# optimization is needed.
+#
+
+import feature generators common ;
+import toolset : flags ;
+
+feature.extend toolset : hp_cxx ;
+feature.extend c++abi : cxxarm ;
+
+# Inherit from Unix toolset to get library ordering magic.
+toolset.inherit hp_cxx : unix ;
+
+generators.override hp_cxx.prebuilt : builtin.lib-generator ;
+generators.override hp_cxx.prebuilt : builtin.prebuilt ;
+generators.override hp_cxx.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ;
+
+ local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ;
+
+ if $(command)
+ {
+ local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+
+ if $(root)
+ {
+ flags hp_cxx .root $(condition) : "\"$(root)\"/" ;
+ }
+ }
+ # If we can't find 'cxx' anyway, at least show 'cxx' in the commands
+ command ?= cxx ;
+
+ common.handle-options hp_cxx : $(condition) : $(command) : $(options) ;
+}
+
+generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : <toolset>hp_cxx ;
+generators.register-c-compiler hp_cxx.compile.c : C : OBJ : <toolset>hp_cxx ;
+
+
+
+# No static linking as far as I can tell.
+# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
+flags hp_cxx.compile OPTIONS <debug-symbols>on : -g3 ;
+flags hp_cxx.compile OPTIONS <optimization>off/<debug-symbols>on : -gall ;
+flags hp_cxx.link OPTIONS <debug-symbols>on : -g ;
+flags hp_cxx.link OPTIONS <debug-symbols>off : -s ;
+
+flags hp_cxx.compile OPTIONS <optimization>off : -O0 ;
+flags hp_cxx.compile OPTIONS <optimization>speed/<inlining>on : -O2 ;
+flags hp_cxx.compile OPTIONS <optimization>speed : -O2 ;
+
+# This (undocumented) macro needs to be defined to get all C function
+# overloads required by the C++ standard.
+flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ;
+
+# Added for threading support
+flags hp_cxx.compile OPTIONS <threading>multi : -pthread ;
+flags hp_cxx.link OPTIONS <threading>multi : -pthread ;
+
+flags hp_cxx.compile OPTIONS <optimization>space/<inlining>on : <inlining>size ;
+flags hp_cxx.compile OPTIONS <optimization>space : -O1 ;
+flags hp_cxx.compile OPTIONS <inlining>off : -inline none ;
+
+# The compiler versions tried (up to V6.5-040) hang when compiling Boost code
+# with full inlining enabled. So leave it at the default level for now.
+#
+# flags hp_cxx.compile OPTIONS <inlining>full : -inline all ;
+
+flags hp_cxx.compile OPTIONS <profiling>on : -pg ;
+flags hp_cxx.link OPTIONS <profiling>on : -pg ;
+
+# Selection of the object model. This flag is needed on both the C++ compiler
+# and linker command line.
+
+# An unspecified ABI translates to '-model ansi', as that is the most
+# standard-conforming choice.
+flags hp_cxx.compile.c++ OPTIONS <c++abi> : -model ansi : : hack-hack ;
+flags hp_cxx.compile.c++ OPTIONS <c++abi>cxxarm : -model arm ;
+flags hp_cxx.link OPTIONS <c++abi> : -model ansi : : hack-hack ;
+flags hp_cxx.link OPTIONS <c++abi>cxxarm : -model arm ;
+
+# Display a descriptive tag together with each compiler message. This tag can
+# be used by the user to explicitly suppress the compiler message.
+flags hp_cxx.compile OPTIONS : -msg_display_tag ;
+
+flags hp_cxx.compile OPTIONS <cflags> ;
+flags hp_cxx.compile.c++ OPTIONS <cxxflags> ;
+flags hp_cxx.compile DEFINES <define> ;
+flags hp_cxx.compile INCLUDES <include> ;
+flags hp_cxx.link OPTIONS <linkflags> ;
+
+flags hp_cxx.link LIBPATH <library-path> ;
+flags hp_cxx.link LIBRARIES <library-file> ;
+flags hp_cxx.link FINDLIBS-ST <find-static-library> ;
+flags hp_cxx.link FINDLIBS-SA <find-shared-library> ;
+
+flags hp_cxx.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm
+}
+
+# When creating dynamic libraries, we don't want to be warned about unresolved
+# symbols, therefore all unresolved symbols are marked as expected by
+# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
+# chain.
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm
+}
+
+
+# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
+# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
+# is the default, no special flag is needed.
+actions compile.c
+{
+ $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Note: The compiler is forced to compile the files as C++ (-x cxx) because
+# otherwise it will silently ignore files with no file extension.
+#
+# Note: We deliberately don't suppress any warnings on the compiler command
+# line, the user can always do this in a customized toolset later on.
+
+rule compile.c++
+{
+ # We preprocess the TEMPLATE_DEPTH command line option here because we found
+ # no way to do it correctly in the actual action code. There we either get
+ # the -pending_instantiations parameter when no c++-template-depth property
+ # has been specified or we get additional quotes around
+ # "-pending_instantiations ".
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ;
+}
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Always create the archive from scratch. See the gcc toolset for the rationale.
+RM = [ common.rm-command ] ;
+actions together piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc $(<) $(>)
+}
diff --git a/src/boost/tools/build/src/tools/hpfortran.jam b/src/boost/tools/build/src/tools/hpfortran.jam
new file mode 100644
index 000000000..96e8d18b5
--- /dev/null
+++ b/src/boost/tools/build/src/tools/hpfortran.jam
@@ -0,0 +1,35 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags hpfortran OPTIONS <optimization>off : -O0 ;
+flags hpfortran OPTIONS <optimization>speed : -O3 ;
+flags hpfortran OPTIONS <optimization>space : -O1 ;
+
+flags hpfortran OPTIONS <debug-symbols>on : -g ;
+flags hpfortran OPTIONS <profiling>on : -pg ;
+
+flags hpfortran DEFINES <define> ;
+flags hpfortran INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ;
diff --git a/src/boost/tools/build/src/tools/ifort.jam b/src/boost/tools/build/src/tools/ifort.jam
new file mode 100644
index 000000000..958485928
--- /dev/null
+++ b/src/boost/tools/build/src/tools/ifort.jam
@@ -0,0 +1,44 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags ifort OPTIONS <fflags> ;
+
+flags ifort OPTIONS <optimization>off : /Od ;
+flags ifort OPTIONS <optimization>speed : /O3 ;
+flags ifort OPTIONS <optimization>space : /O1 ;
+
+flags ifort OPTIONS <debug-symbols>on : "/debug:full" ;
+flags ifort OPTIONS <profiling>on : /Qprof_gen ;
+
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+flags ifort.compile FFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+flags ifort.compile FFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+flags ifort DEFINES <define> ;
+flags ifort INCLUDES <include> ;
+
+rule compile.fortran
+{
+}
+
+actions compile.fortran
+{
+ ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ;
diff --git a/src/boost/tools/build/src/tools/intel-darwin.jam b/src/boost/tools/build/src/tools/intel-darwin.jam
new file mode 100644
index 000000000..03a75df61
--- /dev/null
+++ b/src/boost/tools/build/src/tools/intel-darwin.jam
@@ -0,0 +1,233 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import intel ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset intel : platform : darwin ;
+
+toolset.inherit-generators intel-darwin
+ <toolset>intel <toolset-intel:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override intel-darwin.prebuilt : builtin.lib-generator ;
+generators.override intel-darwin.prebuilt : builtin.prebuilt ;
+generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules intel-darwin : gcc ;
+toolset.inherit-flags intel-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Initializes the intel-darwin toolset
+# version is mandatory
+# name (default icc) is used to invoke the specified intel compiler
+# compile and link options allow you to specify additional command line options for each version
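+#
+# For example (illustrative values; <root> points at the compiler installation
+# so that its runtime libraries can be located):
+#
+#    using intel-darwin : 9.0 : icc : <root>/opt/intel_cc_90 ;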
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-darwin
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command intel-darwin : icc
+ : $(command) : /opt/intel_cc_80/bin ] ;
+
+ common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
+
+ # handle <library-path>
+ # local library-path = [ feature.get-values <library-path> : $(options) ] ;
+ # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+            # $(root)/lib/architecture-name (11.0 and later)
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: using intel libraries "::" $(condition) "::" $(lib_path) ;
+ }
+ flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+
+ local m = [ MATCH (..).* : $(version) ] ;
+ local n = [ MATCH (.)\\. : $(m) ] ;
+ if $(n) {
+ m = $(n) ;
+ }
+
+ local major = $(m) ;
+
+ if $(major) = "9" {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
+ }
+ else {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
+ }
+
+ local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
+
+ # wchar_t char_traits workaround for compilers older than 10.2
+ if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
+ flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
+ }
+
+ # - Ranlib.
+ local ranlib = [ feature.get-values <ranlib> : $(options) ] ;
+ toolset.flags intel-darwin.archive .RANLIB $(condition) : $(ranlib[1]) ;
+
+ # - Archive builder.
+ local archiver = [ feature.get-values <archiver> : $(options) ] ;
+ toolset.flags intel-darwin.archive .AR $(condition) : $(archiver[1]) ;
+}
+
+SPACE = " " ;
+
+flags intel-darwin.compile OPTIONS <cflags> ;
+flags intel-darwin.compile.c++ OPTIONS <cxxflags> ;
+# flags intel-darwin.compile INCLUDES <include> ;
+
+flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
+
+#
+.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom
+ merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell
+ broadwell skylake skylake-avx512 cannonlake icelake ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3
+ athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3
+ bdver4 btver1 btver2 znver1 ;
+.cpu-type-x86-64 = $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+
+flags intel-darwin.compile OPTIONS <instruction-set>$(.cpu-type-x86-64)/<address-model>32 : -m32 ; # -mcmodel=small ;
+flags intel-darwin.compile OPTIONS <instruction-set>$(.cpu-type-x86-64)/<address-model>64 : -m64 ; # -mcmodel=large ;
+
+flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
+
+flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags intel-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of intel-linux
+# that inherits from gcc, but does not has the same
+# logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble
+# as on Linux, 'ar' is always available.
+.AR = ar ;
+.RANLIB = ranlib ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here's rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" -cs "$(<)"
+}
+
+flags intel-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/src/boost/tools/build/src/tools/intel-linux.jam b/src/boost/tools/build/src/tools/intel-linux.jam
new file mode 100644
index 000000000..d5edf2e1d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/intel-linux.jam
@@ -0,0 +1,232 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2011 Bryce Lelbach
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import intel ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset intel : platform : linux ;
+
+toolset.inherit-generators intel-linux
+ <toolset>intel <toolset-intel:platform>linux : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+generators.override intel-linux.prebuilt : builtin.lib-generator ;
+generators.override intel-linux.prebuilt : builtin.prebuilt ;
+generators.override intel-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>intel <toolset-intel:platform>linux : pchi ;
+
+toolset.inherit-rules intel-linux : gcc ;
+toolset.inherit-flags intel-linux : gcc
+ : <inlining>off <inlining>on <inlining>full
+ <optimization>space <optimization>speed
+ <warnings>off <warnings>all <warnings>on
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Initializes the intel-linux toolset
+# version is mandatory
+# name (default icpc) is used to invoke the specified intel-linux compiler
+# compile and link options allow you to specify additional command line options for each version
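+#
+# For example (versions and default search paths follow the mapping in 'init'
+# below; values are illustrative):
+#
+#    using intel-linux : 11.1 ;
+#    using intel-linux : 12.0 : /opt/intel/bin/icpc ;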
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-linux
+ : version $(version) ] ;
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice: intel-linux version is" $(version) ;
+ }
+
+ local default_path ;
+
+ # Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0,
+ # aka intel-linux-12.0. In this version, Intel thankfully decides to install
+ # to a sane 'intel' folder in /opt.
+ if [ MATCH "(12[.]0|12)" : $(version) ]
+ { default_path = /opt/intel/bin ; }
+ # Intel C++ Compiler 11.1.
+ else if [ MATCH "(11[.]1)" : $(version) ]
+ { default_path = /opt/intel_cce_11.1.064.x86_64/bin ; }
+ # Intel C++ Compiler 11.0.
+ else if [ MATCH "(11[.]0|11)" : $(version) ]
+ { default_path = /opt/intel_cce_11.0.074.x86_64/bin ; }
+ # Intel C++ Compiler 10.1.
+ else if [ MATCH "(10[.]1)" : $(version) ]
+ { default_path = /opt/intel_cce_10.1.013_x64/bin ; }
+ # Intel C++ Compiler 9.1.
+ else if [ MATCH "(9[.]1)" : $(version) ]
+ { default_path = /opt/intel_cc_91/bin ; }
+ # Intel C++ Compiler 9.0.
+ else if [ MATCH "(9[.]0|9)" : $(version) ]
+ { default_path = /opt/intel_cc_90/bin ; }
+ # Intel C++ Compiler 8.1.
+ else if [ MATCH "(8[.]1)" : $(version) ]
+ { default_path = /opt/intel_cc_81/bin ; }
+ # Intel C++ Compiler 8.0 - this used to be the default, so now it's the
+ # fallback.
+ else
+ { default_path = /opt/intel_cc_80/bin ; }
+
+ if $(.debug-configuration)
+ {
+ ECHO "notice: default search path for intel-linux is" $(default_path) ;
+ }
+
+ command = [ common.get-invocation-command intel-linux : icpc
+ : $(command) : $(default_path) ] ;
+
+ common.handle-options intel-linux : $(condition) : $(command) : $(options) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ local command-string = $(command:J=" ") ;
+ local version-output = [ SHELL "$(command-string) --version" ] ;
+ local real-version = [ MATCH "([0-9.]+)" : $(version-output) ] ;
+ local major = [ MATCH "([0-9]+).*" : $(real-version) ] ;
+
+ # If we failed to determine major version, use the behaviour for
+ # the current compiler.
+ if $(major) && [ numbers.less $(major) 10 ]
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-Ob0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-Ob1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-Ob2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+ else if $(major) && [ numbers.less $(major) 11 ]
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-O1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+        else # newer versions of intel do have -Os (at least 11+, don't know about 10)
+ {
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>off : "-inline-level=0" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>on : "-inline-level=1" ;
+ flags intel-linux.compile OPTIONS $(condition)/<inlining>full : "-inline-level=2" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>space : "-Os" ;
+ flags intel-linux.compile OPTIONS $(condition)/<optimization>speed : "-O3 -ip" ;
+ }
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+            # $(root)/lib/architecture-name (11.0 and later)
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: using intel libraries "::" $(condition) "::" $(lib_path) ;
+ }
+ flags intel-linux.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+}
+
+SPACE = " " ;
+
+flags intel-linux.compile OPTIONS <warnings>off : -w0 ;
+flags intel-linux.compile OPTIONS <warnings>on : -w1 ;
+flags intel-linux.compile OPTIONS <warnings>all : -w2 ;
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+}
+#
+# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler
+# won't over-write an existing pch: instead it creates filename$1.pchi, filename$2.pchi
+# etc - which appear not to do anything except take up disk space :-(
+#
+actions compile.c++.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+}
+
+actions compile.c.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>intel-linux-link-semaphore ;
+}
+
+# Differ from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
+}
+
+
+
diff --git a/src/boost/tools/build/src/tools/intel-vxworks.jam b/src/boost/tools/build/src/tools/intel-vxworks.jam
new file mode 100644
index 000000000..2a457c334
--- /dev/null
+++ b/src/boost/tools/build/src/tools/intel-vxworks.jam
@@ -0,0 +1,183 @@
+# Copyright Wind River 2017.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import intel ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset intel : platform : vxworks ;
+
+toolset.inherit-generators intel-vxworks
+ <toolset>intel <toolset-intel:platform>vxworks
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override intel-vxworks.prebuilt : builtin.lib-generator ;
+generators.override intel-vxworks.prebuilt : builtin.prebuilt ;
+generators.override intel-vxworks.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules intel-vxworks : gcc ;
+toolset.inherit-flags intel-vxworks : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Initializes the intel-vxworks toolset
+# version is mandatory
+# name (default icc) is used to invoke the specified intel compiler
+# compile and link options allow you to specify additional command line options for each version
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-vxworks
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command intel-vxworks : icc
+ : $(command) : /opt/intel_cc_80/bin ] ;
+
+ common.handle-options intel-vxworks : $(condition) : $(command) : $(options) ;
+
+ # handle <library-path>
+ # local library-path = [ feature.get-values <library-path> : $(options) ] ;
+ # flags intel-vxworks.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+            # $(root)/lib/architecture-name (11.0 and later)
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: using intel libraries "::" $(condition) "::" $(lib_path) ;
+ }
+ flags intel-vxworks.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+
+ local m = [ MATCH (..).* : $(version) ] ;
+ local n = [ MATCH (.)\\. : $(m) ] ;
+ if $(n) {
+ m = $(n) ;
+ }
+
+ local major = $(m) ;
+
+
+ flags intel-vxworks.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
+ flags intel-vxworks.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
+ flags intel-vxworks.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
+
+ flags intel-vxworks.link OPTIONS $(condition)/<runtime-link>static : [ os.environ LDFLAGS_STATIC ] ;
+ flags intel-vxworks.link OPTIONS $(condition)/<runtime-link>shared : [ os.environ LDFLAGS_DYNAMIC ] ;
+ flags intel-vxworks.compile OPTIONS $(condition)/<link>shared : -fPIC ;
+
+ local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
+
+
+}
+
+SPACE = " " ;
+
+flags intel-vxworks.compile OPTIONS <cflags> ;
+flags intel-vxworks.compile.c++ OPTIONS <cxxflags> ;
+flags intel-vxworks.compile INCLUDES <include> ;
+
+
+.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom
+ merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3
+ athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 btver1 btver2 ;
+.cpu-type-x86-64 = $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+
+#flags intel-vxworks.compile OPTIONS <instruction-set>$(.cpu-type-x86-64)/<address-model>32 : -m32 ; # -mcmodel=small ;
+#flags intel-vxworks.compile OPTIONS <instruction-set>$(.cpu-type-x86-64)/<address-model>64 : -m64 ; # -mcmodel=large ;
+
+flags intel-vxworks.compile.c OPTIONS <warnings>off : -w0 ;
+flags intel-vxworks.compile.c OPTIONS <warnings>on : -w1 ;
+flags intel-vxworks.compile.c OPTIONS <warnings>all : -w2 ;
+
+flags intel-vxworks.compile.c++ OPTIONS <warnings>off : -w0 ;
+flags intel-vxworks.compile.c++ OPTIONS <warnings>on : -w1 ;
+flags intel-vxworks.compile.c++ OPTIONS <warnings>all : -w2 ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags intel-vxworks ARFLAGS <archiveflags> ;
+
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove the archive and start again, rather than updating
+ # the existing contents of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+}
+
+flags intel-vxworks.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>intel-vxworks-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/src/boost/tools/build/src/tools/intel-win.jam b/src/boost/tools/build/src/tools/intel-win.jam
new file mode 100644
index 000000000..f79fd530a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/intel-win.jam
@@ -0,0 +1,514 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Importing common is needed because the rules we inherit here depend on it.
+# That is nasty.
+import common ;
+import errors ;
+import feature ;
+import intel ;
+import msvc ;
+import os ;
+import set ;
+import toolset ;
+import generators ;
+import type ;
+import path ;
+
+feature.extend-subfeature toolset intel : platform : win ;
+
+toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
+toolset.inherit-flags intel-win : msvc : : YLOPTION ;
+toolset.inherit-rules intel-win : msvc ;
+
+# Override default do-nothing generators.
+generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
+generators.override intel-win.compile.rc : rc.compile.resource ;
+generators.override intel-win.compile.mc : mc.compile ;
+
+toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
+
+toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
+
+# Initializes the intel toolset for Windows.
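+#
+# A hypothetical invocation, selecting the msvc backend via the
+# <compatibility> option described below:
+#
+#   using intel-win : 9.1 : : <compatibility>vc7.1 ;
+#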
+rule init ( version ? : # the compiler version
+ command * : # the command to invoke the compiler itself
+ options * # Additional option: <compatibility>
+ # either 'vc6', 'vc7', 'vc7.1'
+ # or 'native' (default).
+ )
+{
+ if $(version)
+ {
+ configure $(version) : $(command) : $(options) ;
+ }
+ else
+ {
+ if $(command)
+ {
+ errors.error "Autodetect of version from command not implemented!" ;
+ }
+ local intel_versions = [ get-autodetect-versions () ] ;
+ if ! $(intel_versions)
+ {
+ errors.error "No intel compiler version found!" ;
+ }
+ else
+ {
+ local msvc-version = [ feature.get-values <compatibility> : $(options) ] ; # In auto-config mode the user can still request an msvc backend. If an intel compiler does not support it, do not try to configure it!
+ msvc-version = [ get-msvc-version-from-vc-string $(msvc-version) ] ;
+ for local v in $(intel_versions)
+ {
+ if [ is-msvc-supported $(v) : $(msvc-version) ]
+ {
+ configure $(v) : : $(options) ;
+ }
+ }
+ }
+ }
+}
+
+local rule configure ( version ? : command * : options * )
+{
+ local compatibility =
+ [ feature.get-values <compatibility> : $(options) ] ;
+ # Allow specifying the toolset and Visual Studio backend from the command line, e.g. --toolset=intel-14.0-vc10
+ local vc_in_version = [ MATCH "(vc[0-9]+(\\.[0-9]+)?)$" : $(version) ] ;
+ vc_in_version = $(vc_in_version[1]) ;
+ if $(compatibility) && $(vc_in_version)
+ {
+ if $(compatibility) != $(vc_in_version)
+ {
+ errors.error "feature compatibility and vc version in toolset present!" ;
+ }
+ }
+
+ if $(vc_in_version) && ! $(compatibility)
+ {
+ # vc Version must be stripped before check-init-parameters is called!
+ version = [ MATCH (.+)-vc.+$ : $(version) ] ;
+
+ compatibility = $(vc_in_version) ;
+ options += <compatibility>$(vc_in_version) ;
+ }
+ if $(compatibility)
+ {
+ configure-really $(version) : $(command) : $(options) : $(compatibility) ;
+ }
+ else
+ {
+ local msvc_versions = [ feature.values <toolset-msvc:version> ] ;
+ if ! $(msvc_versions)
+ {
+ ECHO notice\: no msvc versions detected. trying auto detect ;
+ toolset.using msvc : all ;
+ msvc_versions = [ feature.values <toolset-msvc:version> ] ;
+ }
+ if ! $(.iclvars-$(version)-supported-vcs)
+ {
+ errors.error "Supported msvc versions not known for intel $(version)" ;
+ }
+
+ for local v in $(msvc_versions)
+ {
+ if [ MATCH "($(v))" : $(.iclvars-$(version)-supported-vcs) ]
+ {
+ # Strip the trailing .0 from the msvc version, as the intel compiler currently uses only the major version for Qvc
+ local m = [ MATCH "([0-9]+).0$" : $(v) ] ;
+ if $(m)
+ {
+ v = $(m) ;
+ }
+ v = "vc$(v)" ;
+ local options_really = $(options) ;
+ options_really += <compatibility>$(v) ;
+ if $(.debug-configuration)
+ {
+ ECHO "configure: intel version: $(version) msvc version: $(v)" ;
+ }
+ configure-really $(version) : $(command) : $(options) : $(v) ;
+ }
+ }
+ if ! [ feature.values <toolset-intel:version> ]
+ {
+ errors.error "Failed to register an intel toolset!" ;
+ }
+ }
+}
+
+local rule configure-really ( version ? : command * : options * : compatibility )
+{
+ local rewrite-setupscript = [ feature.get-values <rewrite-setup-scripts> : $(options) ] ;
+ local condition = [ common.check-init-parameters intel-win
+ : version $(version) : compatibility $(compatibility) ] ;
+
+ local m = [ MATCH "([0-9]+).*" : $(version) ] ;
+ local major = $(m[1]) ;
+ if ! $(major)
+ {
+ errors.error "Major version not found: $(version)" ;
+ }
+
+ local msvc-version = [ get-msvc-version-from-vc-string $(compatibility) ] ;
+ if ! $(msvc-version)
+ {
+ errors.user-error "Invalid value for compatibility option:"
+ $(compatibility) ;
+ }
+
+ command = [ get-compiler-invocation-cmd $(major) : $(command) ] ;
+
+ common.handle-options intel-win : $(condition) : $(command) : $(options) ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ if $(command) || $(root)
+ {
+ local bin = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ if $(major) >= 12
+ {
+ bin = [ path.make $(bin) ] ;
+ bin = [ path.parent $(bin) ] ;
+ }
+ root ?= $(bin) ;
+ root = $(root)/ ;
+ }
+
+ local setup ;
+ setup = [ path.glob $(root) : iclvars_*.bat ] ;
+ if ! $(setup)
+ {
+ setup = [ path.join $(root) "iclvars.bat" ] ;
+ setup = [ path.native $(setup) ] ;
+ }
+
+ local target_types ;
+ local iclvars_vs_arg ;
+ if $(major) >= 12
+ {
+ # If we have a known intel toolset, check for Visual Studio compatibility;
+ # if not, trust the parameters.
+ if ! [ is-msvc-supported $(version) : $(msvc-version) ]
+ {
+ errors.error "msvc $(msvc-version) not supported for intel toolset version $(version)" ;
+ }
+ if $(.iclvars-version-alias-$(compatibility))
+ {
+ iclvars_vs_arg = $(.iclvars-version-alias-$(compatibility)) ;
+ }
+ else
+ {
+ errors.error "Don't know what parameter to pass for vc version ( $(compatibility) )" ;
+ }
+ # There are two possible paths for the 64-bit intel compiler,
+ # one for the IA32-Intel64 cross compiler, and one for the native
+ # 64 bit compiler. We prefer the latter one if it's installed,
+ # and don't rely on whether the OS reports whether we're 64 or 32 bit
+ # as that really only tells us which subsystem bjam is running in:
+ #
+ local intel64_path = [ path.join $(root) intel64 ] ;
+ if [ path.glob $(intel64_path) : icl.exe ]
+ {
+ target_types = ia32 intel64 ;
+ }
+ else
+ {
+ target_types = ia32 ia32_intel64 ;
+ }
+ }
+ else
+ {
+ target_types = default ;
+ iclvars_vs_arg = $(compatibility) ;
+ }
+
+ local default-assembler-intel64 = ml64 ;
+ local default-assembler-ia32_intel64 = ml64 ;
+ local default-assembler-ia32 = "ml -coff" ;
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
+
+ for local c in $(target_types)
+ {
+ local cpu-conditions ;
+ local setup-call ;
+ if $(major) >= 12
+ {
+ cpu-conditions = $(condition)/$(.cpu-arch-$(c)) ;
+
+ if ! $(setup)
+ {
+ # No setup script
+ }
+ else if $(rewrite-setupscript) = off || [ os.name ] != NT
+ {
+ setup-call = "call \"$(setup)\" $(c) $(iclvars_vs_arg) > nul " ;
+ }
+ else
+ {
+ if $(rewrite-setupscript) = always
+ {
+ toolset.flags intel-win .REWRITE-SETUP $(cpu-conditions) : true ;
+ }
+ toolset.flags intel-win .SETUP-SCRIPT $(cpu-conditions) : $(setup) ;
+ toolset.flags intel-win .SETUP-OPTIONS $(cpu-conditions) : "$(c) $(iclvars_vs_arg)" ;
+ }
+ }
+ else
+ {
+ setup-call = "call \""$(setup)"\" $(compatibility) > nul " ;
+ cpu-conditions = $(condition) ;
+ }
+
+ if $(setup-call)
+ {
+ if [ os.name ] = NT
+ {
+ setup-call = $(setup-call)"\n " ;
+ }
+ else
+ {
+ setup-call = "cmd /S /C "$(setup-call)" \"&&\" " ;
+ }
+ toolset.flags intel-win .SETUP $(cpu-conditions) : $(setup-call) ;
+ }
+
+ if $(.debug-configuration)
+ {
+ for local cond in $(cpu-conditions)
+ {
+ ECHO "notice: [intel-cfg] condition: '$(cond)', setup: '$(setup-call)'" ;
+ }
+ }
+
+ local cpu-assembler = $(assembler) ;
+ cpu-assembler ?= $(default-assembler-$(c)) ;
+
+ toolset.flags intel-win.compile .CC $(cpu-conditions) : icl ;
+ toolset.flags intel-win.link .LD $(cpu-conditions) : xilink /nologo ;
+ toolset.flags intel-win.archive .LD $(cpu-conditions) : xilink /lib /nologo ;
+ toolset.flags intel-win.link .MT $(cpu-conditions) : mt -nologo ;
+ toolset.flags intel-win.compile .ASM $(cpu-conditions) : $(cpu-assembler) -nologo ;
+ toolset.flags intel-win.compile .MC $(cpu-conditions) : mc ;
+ toolset.flags intel-win.compile .RC $(cpu-conditions) : rc ;
+ }
+
+ # Depending on the settings, running tests may require some runtime DLLs.
+ toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
+
+
+ local C++FLAGS ;
+
+ C++FLAGS += /nologo ;
+
+ # Reduce the number of spurious error messages
+ C++FLAGS += /Qwn5 /Qwd985 ;
+
+ # Enable ADL
+ C++FLAGS += -Qoption,c,--arg_dep_lookup ; # "c" works for C++, too
+
+ # Disable Microsoft "secure" overloads in Dinkumware libraries since they
+ # cause compile errors with Intel versions 9 and 10.
+ if $(major) < 12
+ {
+ C++FLAGS += -D_SECURE_SCL=0 ;
+ }
+
+ if $(major) > 5
+ {
+ C++FLAGS += "/Zc:forScope" ; # Add support for correct for loop scoping.
+ }
+
+ # Add options recognized only by intel7 and above.
+ if $(major) >= 7
+ {
+ C++FLAGS += /Qansi_alias ;
+ }
+
+ if $(compatibility) = vc6
+ {
+ C++FLAGS +=
+ # Emulate VC6
+ /Qvc6
+
+ # No wchar_t support in vc6 dinkum library. Furthermore, in vc6
+ # compatibility-mode, wchar_t is not a distinct type from unsigned
+ # short.
+ -DBOOST_NO_INTRINSIC_WCHAR_T
+ ;
+ }
+ else
+ {
+ if $(major) > 5
+ {
+ # Add support for wchar_t
+ C++FLAGS += "/Zc:wchar_t"
+ # Tell the dinkumware library about it.
+ -D_NATIVE_WCHAR_T_DEFINED
+ ;
+ }
+ }
+
+ if $(compatibility) && $(compatibility) != native
+ {
+ C++FLAGS += /Q$(compatibility) ;
+ }
+ else
+ {
+ C++FLAGS +=
+ -Qoption,cpp,--arg_dep_lookup
+ # The following options were intended to disable the Intel compiler's
+ # 'bug-emulation' mode, but were later reported to be causing ICE with
+ # Intel-Win 9.0. It is not yet clear which options can be safely used.
+ # -Qoption,cpp,--const_string_literals
+ # -Qoption,cpp,--new_for_init
+ # -Qoption,cpp,--no_implicit_typename
+ # -Qoption,cpp,--no_friend_injection
+ # -Qoption,cpp,--no_microsoft_bugs
+ ;
+ }
+
+ toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
+ # By default, when creating a PCH, intel adds 'i' to the explicitly
+ # specified name of the PCH file. Of course, B2 is not happy when the
+ # compiler does not produce the file it was asked for. The option below
+ # stops this behaviour.
+ toolset.flags intel-win CFLAGS $(condition) : -Qpchi- ;
+
+ if ! $(compatibility)
+ {
+ # If there's no backend version, assume 7.1.
+ compatibility = vc7.1 ;
+ }
+
+ msvc-version = [ msvc.resolve-possible-msvc-version-alias $(msvc-version) ] ;
+ msvc.configure-version-specific intel-win : $(msvc-version) : $(condition) ;
+}
+
+local rule get-autodetect-versions
+{
+ local result ;
+ for local v in $(.intel-autodetect-versions)
+ {
+ local major = [ MATCH "([0-9]+).*" : $(v) ] ; # Use only major version
+ if [ get-icl-path-from-environment $(major) ]
+ {
+ result += $(v) ;
+ }
+ }
+ return $(result) ;
+}
+
+local rule get-icl-path-from-environment ( major_version )
+{
+ local path = [ os.environ ICPP_COMPILER$(major_version) ] ;
+ if $(path)
+ {
+ path = [ path.make $(path) ] ;
+ local cmdpath ;
+ local subdirs = $(.icl-target-subdirectories) ;
+ while $(subdirs)
+ {
+ cmdpath = [ path.join $(path) "bin/$(subdirs[0])/icl.exe" ] ;
+ cmdpath = [ path.native $(cmdpath) ] ;
+ if [ path.exists $(cmdpath) ]
+ {
+ subdirs = ;
+ } else {
+ cmdpath = ;
+ subdirs = $(subdirs[2-]) ;
+ }
+ }
+ path = $(cmdpath) ;
+ }
+ return $(path) ;
+}
+
+local rule get-compiler-invocation-cmd ( major_version : command * )
+{
+ if $(command)
+ {
+ return [ common.get-invocation-command intel-win : icl.exe : $(command) ] ;
+ }
+ else
+ {
+ local path = [ get-icl-path-from-environment $(major_version) ] ;
+ return [ common.get-invocation-command intel-win : icl.exe : $(path) ] ;
+ }
+}
+
+local rule is-msvc-supported ( intel-version : msvc-version )
+{
+ if ! $(msvc-version)
+ {
+ return true ;
+ }
+ else
+ {
+ if $(.iclvars-$(intel-version)-supported-vcs)
+ {
+ if [ MATCH "($(msvc-version))" : $(.iclvars-$(intel-version)-supported-vcs) ]
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ return true ;
+ }
+ }
+}
+
+local rule get-msvc-version-from-vc-string ( vc-string )
+{
+ local r = [ MATCH "^vc([0-9]+(\\.[0-9]+)?)$" : $(vc-string) ] ;
+ return $(r[1]) ;
+}
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Copied from msvc.jam
+# Supported CPU architectures.
+.cpu-arch-ia32 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-intel64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-ia32_intel64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.intel-autodetect-versions = 14.0 13.0 12.0 ;
+.iclvars-12.0-supported-vcs = "10.0 9.0 8.0" ;
+.iclvars-12.1-supported-vcs = "10.0 9.0 8.0" ;
+.iclvars-13.0-supported-vcs = "11.0 10.0 9.0" ;
+.iclvars-14.0-supported-vcs = "12.0 11.0 10.0 9.0" ;
+.iclvars-15.0-supported-vcs = "12.0 11.0 10.0 9.0" ;
+.iclvars-16.0-supported-vcs = "14.0 12.0 11.0 10.0 9.0" ;
+.iclvars-17.0-supported-vcs = "14.1 14.0 12.0 11.0 10.0" ;
+.iclvars-18.0-supported-vcs = "14.1 14.0 12.0 11.0 10.0" ;
+.iclvars-19.0-supported-vcs = "14.2 14.1 14.0 12.0" ;
+.iclvars-19.1-supported-vcs = "14.2 14.1 14.0 12.0" ;
+.iclvars-version-alias-vc14.2 = vs2019 ;
+.iclvars-version-alias-vc14.1 = vs2017 ;
+.iclvars-version-alias-vc14 = vs2015 ;
+.iclvars-version-alias-vc12 = vs2013 ;
+.iclvars-version-alias-vc11 = vs2012 ;
+.iclvars-version-alias-vc10 = vs2010 ;
+.iclvars-version-alias-vc9 = vs2008 ;
+.iclvars-version-alias-vc8 = vs2005 ;
+.icl-target-subdirectories = ia32 ia32_intel64 intel64 ;
+
+toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
+
+toolset.flags intel-win YLOPTION ;
+
diff --git a/src/boost/tools/build/src/tools/intel.jam b/src/boost/tools/build/src/tools/intel.jam
new file mode 100644
index 000000000..0c602a8c0
--- /dev/null
+++ b/src/boost/tools/build/src/tools/intel.jam
@@ -0,0 +1,84 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.intel]]
+= Intel C++
+
+The `intel-*` modules support the Intel C++ command-line compiler.
+
+The module is initialized using the following syntax:
+
+----
+using intel : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times if you want to configure
+several versions of the compiler.
+
+If the compiler command is not specified, B2 will look in PATH for an
+executable named `icpc` (on Linux) or `icl.exe` (on Windows).
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
+`root`::
+For the Linux version, specifies the root directory of the compiler installation.
+This option is necessary only if it is not possible to detect this information
+from the compiler command -- for example if the specified compiler command is
+a user script. For the Windows version, specifies the directory where the
+`iclvars.bat` file for configuring the compiler exists. Specifying the `root`
+option without specifying the compiler command means that the end user does not
+have to worry about whether they are compiling 32-bit or 64-bit code, as the toolset will
+automatically configure the compiler for the appropriate address model and compiler
+command using the `iclvars.bat` batch file.
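+
+For example, the following hypothetical configuration points the toolset at
+a specific installation root (the path shown is illustrative):
+
+----
+using intel : 11.0 : : <root>/opt/intel/cc/11.0 ;
+----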
+
+|# # end::doc[]
+
+# This is a generic 'intel' toolset. Depending on the current
+# system, it forwards either to 'intel-linux' or 'intel-win'
+# modules.
+
+import feature ;
+import os ;
+import toolset ;
+
+feature.extend toolset : intel ;
+feature.subfeature toolset intel : platform : : propagated link-incompatible ;
+
+rule init ( * : * )
+{
+ if [ os.name ] = LINUX
+ {
+ toolset.using intel-linux :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ toolset.using intel-darwin :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+ else
+ {
+ toolset.using intel-win :
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+}
diff --git a/src/boost/tools/build/src/tools/lex.jam b/src/boost/tools/build/src/tools/lex.jam
new file mode 100644
index 000000000..e85d1d1ad
--- /dev/null
+++ b/src/boost/tools/build/src/tools/lex.jam
@@ -0,0 +1,25 @@
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+import generators ;
+import feature ;
+import toolset : flags ;
+
+feature.feature flex.prefix : : free ;
+type.register LEX : l ;
+type.register LEX++ : ll ;
+generators.register-standard lex.lex : LEX : C ;
+generators.register-standard lex.lex : LEX++ : CPP ;
+
+rule init ( )
+{
+}
+
+flags lex.lex PREFIX <flex.prefix> ;
+
+actions lex
+{
+ flex -P$(PREFIX) -o$(<) $(>)
+}
diff --git a/src/boost/tools/build/src/tools/libjpeg.jam b/src/boost/tools/build/src/tools/libjpeg.jam
new file mode 100644
index 000000000..f267ecb73
--- /dev/null
+++ b/src/boost/tools/build/src/tools/libjpeg.jam
@@ -0,0 +1,234 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the libjpeg library
+#
+# After 'using libjpeg', the following targets are available:
+#
+# /libjpeg//libjpeg -- The libjpeg library
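+#
+# For example, a Jamfile might reference it as (target name is illustrative):
+#
+#   exe viewer : viewer.cpp /libjpeg//libjpeg ;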
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = jpeglib.h ;
+
+# jpeglib.h requires stdio.h to be included first.
+header-test = "#include <stdio.h>\n#include <jpeglib.h>\n" ;
+
+names = jpeg ;
+
+sources = jaricom.c jcapimin.c jcapistd.c jcarith.c jccoefct.c jccolor.c
+ jcdctmgr.c jchuff.c jcinit.c jcmainct.c jcmarker.c jcmaster.c
+ jcomapi.c jcparam.c jcprepct.c jcsample.c jctrans.c jdapimin.c
+ jdapistd.c jdarith.c jdatadst.c jdatasrc.c jdcoefct.c jdcolor.c
+ jddctmgr.c jdhuff.c jdinput.c jdmainct.c jdmarker.c jdmaster.c
+ jdmerge.c jdpostct.c jdsample.c jdtrans.c jerror.c jfdctflt.c
+ jfdctfst.c jfdctint.c jidctflt.c jidctfst.c jidctint.c jquant1.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the libjpeg library.
+#
+# libjpeg can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt libjpeg::
+#
+# <search>
+# The directory containing the libjpeg binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the libjpeg headers.
+#
+# If none of these options is specified, then the environment
+# variables LIBJPEG_LIBRARY_PATH, LIBJPEG_NAME, and LIBJPEG_INCLUDE will
+# be used instead.
+#
+# Options for building libjpeg from source::
+#
+# <source>
+# The libjpeg source directory. Defaults to the environment variable
+# LIBJPEG_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find libjpeg in the default system location
+# using libjpeg ;
+# # Build libjpeg from source
+# using libjpeg : 8c : <source>/home/steven/libjpeg-8c ;
+# # Find libjpeg in /usr/local
+# using libjpeg : 8c
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build libjpeg from source for msvc and find
+# # prebuilt binaries for gcc.
+# using libjpeg : 8c : <source>C:/Devel/src/libjpeg-8c : <toolset>msvc ;
+# using libjpeg : 8c : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The libjpeg version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the libjpeg target
+
+ : is-default ?
+ # Default configurations are only used when libjpeg
+ # has not yet been configured. This option is
+ # deprecated. A configuration will be treated
+ # as a default when none of <include>, <search>,
+ # <name>, and <source> are present.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project libjpeg ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local source-path = [ feature.get-values <source> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+ local tag = [ feature.get-values <tag> : $(options) ] ;
+ local build-name = [ feature.get-values <build-name> : $(options) ] ;
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
+ {
+ is-default = true ;
+ }
+
+ # Ignore the LIBJPEG_SOURCE environment variable if this initialization
+ # requests a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for libjpeg:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ }
+ else
+ {
+ source-path ?= [ modules.peek : LIBJPEG_SOURCE ] ;
+ }
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [libjpeg] libjpeg is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "libjpeg is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path)
+ {
+ build-name ?= jpeg ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag)
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [libjpeg] Building libjpeg from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [libjpeg] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [libjpeg] found libjpeg source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [libjpeg] could not find libjpeg source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library libjpeg : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [libjpeg] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [libjpeg] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library libjpeg : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-header-test $(header-test) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/tools/libpng.jam b/src/boost/tools/build/src/tools/libpng.jam
new file mode 100644
index 000000000..dc49b6dbd
--- /dev/null
+++ b/src/boost/tools/build/src/tools/libpng.jam
@@ -0,0 +1,229 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the libpng library
+#
+# After 'using libpng', the following targets are available:
+#
+# /libpng//libpng -- The libpng library
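+#
+# For example, a Jamfile might reference it as (target name is illustrative):
+#
+#   exe viewer : viewer.cpp /libpng//libpng ;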
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = png.h ;
+
+# On Windows, binary distributions of libpng and package managers
+# name the library differently (e.g. vcpkg installs libpng16.lib).
+# Listing popular names increases the chances of a successful look-up.
+names = libpng libpng16 png png16 ;
+
+sources = png.c pngerror.c pngget.c pngmem.c pngpread.c pngread.c pngrio.c pngrtran.c pngrutil.c
+ pngset.c pngtrans.c pngwio.c pngwrite.c pngwtran.c pngwutil.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the libpng library.
+#
+# libpng can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt libpng::
+#
+# <search>
+# The directory containing the libpng binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the libpng headers.
+#
+# If none of these options is specified, then the environment
+# variables LIBPNG_LIBRARY_PATH, LIBPNG_NAME, and LIBPNG_INCLUDE will
+# be used instead.
+#
+# Options for building libpng from source::
+#
+# <source>
+# The libpng source directory. Defaults to the environment variable
+# LIBPNG_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find libpng in the default system location
+# using libpng ;
+# # Build libpng from source
+# using libpng : 1.5.4 : <source>/home/steven/libpng-1.5.4 ;
+# # Find libpng in /usr/local
+# using libpng : 1.5.4
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build libpng from source for msvc and find
+# # prebuilt binaries for gcc.
+# using libpng : 1.5.4 : <source>C:/Devel/src/libpng-1.5.4 : <toolset>msvc ;
+# using libpng : 1.5.4 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The libpng version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the libpng target
+
+ : is-default ?
+ # Default configurations are only used when libpng
+ # has not yet been configured. This option is
+ # deprecated. A configuration will be treated
+ # as a default when none of <include>, <search>,
+ # <name>, and <source> are present.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project libpng ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local source-path = [ feature.get-values <source> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+ local tag = [ feature.get-values <tag> : $(options) ] ;
+ local build-name = [ feature.get-values <build-name> : $(options) ] ;
+
+ if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
+ {
+ is-default = true ;
+ }
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ # Ignore the LIBPNG_SOURCE environment variable if this initialization
+ # requests a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for libpng:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ }
+ else
+ {
+ source-path ?= [ modules.peek : LIBPNG_SOURCE ] ;
+ }
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [libpng] libpng is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "libpng is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path)
+ {
+ build-name ?= png ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag)
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [libpng] Building libpng from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [libpng] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [libpng] found libpng source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [libpng] could not find libpng source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<define>LIBPNG_DLL
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library libpng : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [libpng] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [libpng] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library libpng : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/tools/libtiff.jam b/src/boost/tools/build/src/tools/libtiff.jam
new file mode 100644
index 000000000..f31561491
--- /dev/null
+++ b/src/boost/tools/build/src/tools/libtiff.jam
@@ -0,0 +1,227 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the libtiff library
+#
+# After 'using libtiff', the following targets are available:
+#
+# /libtiff//libtiff -- The libtiff library
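+#
+# For example, a Jamfile might reference it as (target name is illustrative):
+#
+#   exe viewer : viewer.cpp /libtiff//libtiff ;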
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = tiff.h ;
+names = tiff ;
+
+sources = tif_aux.c tif_close.c tif_codec.c tif_color.c tif_compress.c tif_dir.c tif_dirinfo.c
+ tif_dirread.c tif_dirwrite.c tif_dumpmode.c tif_error.c tif_extension.c tif_fax3.c tif_fax3sm.c
+ tif_getimage.c tif_jbig.c tif_jpeg.c tif_jpeg_12.c tif_ojpeg.c tif_flush.c tif_luv.c tif_lzw.c
+ tif_next.c tif_open.c tif_packbits.c tif_pixarlog.c tif_predict.c tif_print.c tif_read.c tif_stream.cxx
+ tif_swab.c tif_strip.c tif_thunder.c tif_tile.c tif_version.c tif_warning.c tif_write.c tif_zip.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the libtiff library.
+#
+# libtiff can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt libtiff::
+#
+# <search>
+# The directory containing the libtiff binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the libtiff headers.
+#
+# If none of these options is specified, then the environment
+# variables LIBTIFF_LIBRARY_PATH, LIBTIFF_NAME, and LIBTIFF_INCLUDE will
+# be used instead.
+#
+# Options for building libtiff from source::
+#
+# <source>
+# The libtiff source directory. Defaults to the environment variable
+# LIBTIFF_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find libtiff in the default system location
+# using libtiff ;
+# # Build libtiff from source
+# using libtiff : 4.0.1 : <source>/home/steven/libtiff-4.0.1 ;
+# # Find libtiff in /usr/local
+# using libtiff : 4.0.1
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build libtiff from source for msvc and find
+# # prebuilt binaries for gcc.
+# using libtiff : 4.0.1 : <source>C:/Devel/src/libtiff-4.0.1 : <toolset>msvc ;
+# using libtiff : 4.0.1 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The libtiff version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the libtiff target
+
+ : is-default ?
+ # Default configurations are only used when libtiff
+ # has not yet been configured. This option is
+ # deprecated. A configuration will be treated
+ # as a default when none of <include>, <search>,
+ # <name>, and <source> are present.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project libtiff ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local source-path = [ feature.get-values <source> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+ local tag = [ feature.get-values <tag> : $(options) ] ;
+ local build-name = [ feature.get-values <build-name> : $(options) ] ;
+
+ if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
+ {
+ is-default = true ;
+ }
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ # Ignore the LIBTIFF_SOURCE environment variable if this initialization
+ # requests a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for libtiff:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ }
+ else
+ {
+ source-path ?= [ modules.peek : LIBTIFF_SOURCE ] ;
+ }
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [libtiff] libtiff is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "libtiff is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path)
+ {
+ build-name ?= tiff ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag)
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [libtiff] Building libtiff from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [libtiff] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [libtiff] found libtiff source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [libtiff] could not find libtiff source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library libtiff : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [libtiff] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [libtiff] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library libtiff : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/tools/link.jam b/src/boost/tools/build/src/tools/link.jam
new file mode 100644
index 000000000..3cf6e46f7
--- /dev/null
+++ b/src/boost/tools/build/src/tools/link.jam
@@ -0,0 +1,547 @@
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import os ;
+import targets ;
+import project ;
+import "class" : new ;
+import virtual-target ;
+import configure ;
+import path ;
+import property ;
+import property-set ;
+import common ;
+
+rule get-root-project ( project )
+{
+ # Find the root project.
+ local root-project = $(project) ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config &&
+ [ project.attribute $(root-project) parent-module ] != project-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+ return $(root-project) ;
+}
+
+TOUCH = [ common.file-touch-command ] ;
+
+actions touch {
+ $(TOUCH) "$(<)"
+}
+
+rule can-symlink ( project )
+{
+ if ! $(.can-symlink)
+ {
+ local root-project = [ get-root-project $(project) ] ;
+
+ local source-target = [ new file-target test-symlink-source : :
+ $(project) : [ new action : link.touch ] ] ;
+ local target = [ new file-target test-symlink : :
+ $(project) : [ new action $(source-target) : link.mklink ] ] ;
+
+ if [ configure.try-build $(target) : [ property-set.empty ] : "symlinks supported" ]
+ {
+ .can-symlink = true ;
+ }
+ else
+ {
+ .can-symlink = false ;
+ }
+ }
+ if $(.can-symlink) = true
+ {
+ return true ;
+ }
+}
+
+if [ os.name ] = NT
+{
+
+# Test for Windows junctions (mklink /J)
+rule can-junction ( project )
+{
+ if ! $(.can-junction)
+ {
+ local root-project = [ get-root-project $(project) ] ;
+
+ local source-target = [ new file-target test-junction-source : :
+ $(project) : [ new action : common.mkdir ] ] ;
+ local target = [ new file-target test-junction : :
+ $(project) : [ new action $(source-target) : link.junction ] ] ;
+
+ if [ configure.try-build $(target) : [ property-set.empty ] : "junctions supported" ]
+ {
+ .can-junction = true ;
+ }
+ else
+ {
+ .can-junction = false ;
+ }
+ }
+ if $(.can-junction) = true
+ {
+ return true ;
+ }
+}
+
+}
+else
+{
+
+.can-junction = false ;
+
+rule can-junction ( project )
+{
+}
+
+}
+
+rule can-hardlink ( project )
+{
+ if ! $(.can-hardlink)
+ {
+ local root-project = [ get-root-project $(project) ] ;
+
+ local source-target = [ new file-target test-hardlink-source : :
+ $(project) : [ new action : link.touch ] ] ;
+ # Use <location-prefix> so that the destination link is created
+ # in a different directory. AFS refuses to make hard links
+ # between files in different directories, so we want to check
+ # for that.
+ local target = [ new file-target test-hardlink : :
+ $(project) : [ new action $(source-target) : link.hardlink
+ : [ new property-set <location-prefix>symlink ]
+ ] ] ;
+
+ if [ configure.try-build $(target) : [ property-set.empty ] : "hardlinks supported" ]
+ {
+ .can-hardlink = true ;
+ }
+ else
+ {
+ .can-hardlink = false ;
+ }
+ }
+ if $(.can-hardlink) = true
+ {
+ return true ;
+ }
+}
+
+class file-or-directory-reference : basic-target
+{
+ import virtual-target ;
+ import property-set ;
+ import path ;
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
+ [ location ] : $(self.project) ] ;
+ }
+
+ # Returns true if the referred file really exists.
+ rule exists ( )
+ {
+ location ;
+ return $(self.file-path) ;
+ }
+
+ # Returns the location of target. Needed by 'testing.jam'.
+ rule location ( )
+ {
+ if ! $(self.file-location)
+ {
+ local source-location = [ $(self.project).get source-location ] ;
+ for local src-dir in $(source-location)
+ {
+ if ! $(self.file-location)
+ {
+ local location = [ path.root $(self.name) $(src-dir) ] ;
+ if [ path.exists [ path.native $(location) ] ]
+ {
+ self.file-location = $(src-dir) ;
+ self.file-path = $(location) ;
+ }
+ }
+ }
+ }
+ return $(self.file-location) ;
+ }
+}
+
+class symlink-target-class : basic-target
+{
+ import path ;
+ import virtual-target ;
+ import link ;
+ import os ;
+ import type ;
+ rule construct ( name : source-target : property-set )
+ {
+ local location = [ path.join
+ [ $(source-target).path ] [ $(source-target).name ] ] ;
+ local files = [ path.glob-tree $(location) : * ] ;
+ local targets ;
+
+ # If we have symlinks, don't bother checking
+ # for hardlinks and junctions.
+ if ! [ link.can-symlink $(self.project) ]
+ {
+ link.can-junction $(self.project) ;
+ link.can-hardlink $(self.project) ;
+ }
+
+ if [ $(property-set).get <location> ]
+ {
+ property-set = [ property-set.create
+ [ property.select <location> : [ $(property-set).raw ] ] ] ;
+ }
+ else
+ {
+ local path,relative-to-build-dir = [ $(property-set).target-path ] ;
+ local path = $(path,relative-to-build-dir[1]) ;
+ local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
+
+ if $(relative-to-build-dir)
+ {
+ path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
+ }
+
+ property-set = [ property-set.create <location>$(path) ] ;
+ }
+
+ local a = [ new non-scanning-action $(source-target) :
+ link.do-link-recursively : $(property-set) ] ;
+
+ local t = [ new notfile-target $(name)
+ : $(self.project) : $(a) ] ;
+
+ return [ property-set.empty ] [ virtual-target.register $(t) ] ;
+ }
+}
+
+rule do-file-link
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ local source = [ path.native [ path.relative-to [ path.pwd ] $(>) ] ] ;
+ local old-source = [ on $(target) return $(LINK-SOURCE) ] ;
+ if $(old-source)
+ {
+ import errors ;
+ errors.user-error
+ Cannot create link $(target) to $(source). :
+ Link previously defined to another file, $(old-source[1]). ;
+ }
+ LINK-SOURCE on $(target) = $(source) $(.current-target) ;
+ LOCATE on $(target) = . ;
+ DEPENDS $(.current-target) : $(target) ;
+ if $(.can-symlink) = true
+ {
+ DEPENDS $(target) : $(source) ;
+ link.mklink $(target) : $(source) ;
+ }
+ else if $(.can-hardlink) = true
+ {
+ DEPENDS $(target) : $(source) ;
+ link.hardlink $(target) : $(source) ;
+ }
+ else
+ {
+ DEPENDS $(target) : $(source) ;
+ common.copy $(target) : $(source) ;
+ }
+}
+
+rule do-link
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ local source = [ path.native [ path.relative-to [ path.pwd ] $(>) ] ] ;
+ local relative = [ path.native [ path.relative-to [ path.parent $(<) ] $(>) ] ] ;
+ if ! [ on $(target) return $(MKLINK_OR_DIR) ]
+ {
+ LOCATE on $(target) = . ;
+ DEPENDS $(.current-target) : $(target) ;
+ mklink-or-dir $(target) : $(source) ;
+ }
+ if [ os.name ] = NT
+ {
+ if $(.can-symlink) = true
+ {
+ MKLINK_OR_DIR on $(target) = mklink /D \"$(target)\" \"$(relative)\" ;
+ }
+ else
+ {
+ # This function should only be called
+ # if either symlinks or junctions are supported.
+ # To get here $(.can-junction) must be true.
+ mklink-opt = /J ;
+ MKLINK_OR_DIR on $(target) = mklink /J \"$(target)\" \"$(source)\" ;
+ }
+ }
+ else
+ {
+ MKLINK_OR_DIR on $(target) = ln -s $(relative) $(target) ;
+ }
+}
+
+rule force-update
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ ALWAYS $(target) ;
+}
+
+rule do-split
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ if ! [ on $(target) return $(MKLINK_OR_DIR) ]
+ {
+ LOCATE on $(target) = . ;
+ DEPENDS $(.current-target) : $(target) ;
+ common.mkdir $(target) ;
+ }
+ MKLINK_OR_DIR on $(target) = mkdir \"$(target)\" ;
+}
+
+rule do-rm
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ ALWAYS $(target) ;
+ RM on $(target) = rmdir ;
+ link.rm $(target) ;
+}
+
+rule mklink-or-dir
+{
+ NOUPDATE $(<) ;
+}
+
+actions mklink-or-dir
+{
+ $(MKLINK_OR_DIR)
+}
+
+rule link-entries ( target : files * : split ? : deleted ? )
+{
+ for local s in $(files)
+ {
+ local t = [ path.join $(target) [ path.basename $(s) ] ] ;
+ if ! $(.known-dirs.$(t))
+ {
+ local t = [ path.native [ path.relative-to [ path.pwd ] $(t) ] ] ;
+ local s = [ path.native [ path.relative-to [ path.pwd ] $(target) ] ] ;
+ LOCATE on $(t) = . ;
+ DEPENDS $(t) : $(s) ;
+ NOUPDATE $(s) ;
+ }
+ if $(split)
+ {
+ link-recursively $(t) : $(s) : : $(deleted) ;
+ }
+ else
+ {
+ link-entries $(t) : [ path.glob $(s) : * ] ;
+ }
+ }
+ if ! $(.known-dirs.$(target))
+ {
+ .known-dirs.$(target) += $(files) ;
+ .known-dirs.base.$(target) = $(.current-target) ;
+ }
+}
+
+rule link-recursively ( target : source : no-recurse ? : deleted ? )
+{
+ if $(deleted) {
+ force-update $(target) ;
+ }
+
+ local split ;
+ if [ CHECK_IF_FILE [ path.native $(source) ] ]
+ {
+ do-file-link $(target) : $(source) ;
+ }
+ else if $(.known-dirs.$(target)) && ! $(no-recurse)
+ {
+ split = true ;
+ if ! $(.split-dirs.$(target))
+ {
+ if [ READLINK [ path.native $(target) ] ]
+ {
+ if ! $(deleted) {
+ do-rm $(target) ;
+ deleted = true ;
+ .deleted-dirs.$(target) = true ;
+ }
+ }
+ local .current-target = $(.known-dirs.base.$(target)) ;
+ for local s in $(.known-dirs.$(target))
+ {
+ local t = [ path.join $(target) [ path.basename $(s) ] ] ;
+ link-recursively $(t) : $(s) : flat : $(deleted) ;
+ }
+ do-split $(target) ;
+ }
+ else if $(.deleted-dirs.$(target))
+ {
+ deleted = true ;
+ }
+ }
+ else if [ path.exists [ path.native $(target) ] ] && ! $(deleted)
+ {
+ local link-target = [ READLINK [ path.native $(target) ] ] ;
+ if $(link-target)
+ {
+ local full-path =
+ [ path.root [ path.make $(link-target) ] [ path.parent $(target) ] ] ;
+ # HACK: Take advantage of the fact that path.glob
+ # normalizes its arguments. If full-path and
+ # source are different, but both are empty, they
+ # will compare equal, but that's okay because
+ # for the purposes of this module, empty directories
+ # are equivalent.
+ if [ path.glob $(full-path) : * ] != [ path.glob $(source) : * ]
+ {
+ if ! $(deleted) {
+ do-rm $(target) ;
+ deleted = true ;
+ .deleted-dirs.$(target) = true ;
+ }
+ do-split $(target) ;
+ split = true ;
+ }
+ }
+ else
+ {
+ do-split $(target) ;
+ split = true ;
+ }
+ }
+ else if $(.can-symlink) = false && $(.can-junction) = false
+ {
+ if [ READLINK [ path.native $(target) ] ]
+ {
+ if ! $(deleted) {
+ do-rm $(target) ;
+ deleted = true ;
+ .deleted-dirs.$(target) = true ;
+ }
+ }
+ do-split $(target) ;
+ split = true ;
+ }
+ else
+ {
+ do-link $(target) : $(source) ;
+ }
+
+ if $(split)
+ {
+ .split-dirs.$(target) = true ;
+ }
+
+ if ! $(no-recurse)
+ {
+ link-entries $(target) : [ path.glob $(source) : * ] : $(split) : $(deleted) ;
+ }
+}
+
+rule do-link-recursively ( target : source : properties * )
+{
+ local target-path = [ property.select <location> : $(properties) ] ;
+ local source-path = [ on $(source) return $(LOCATE) ] [ on $(source) return $(SEARCH) ] ;
+
+ local absolute-target = [ path.root
+ [ path.join [ path.make $(target-path[1]:G=) ]
+ [ path.basename [ path.make $(source:G=) ] ] ]
+ [ path.pwd ] ] ;
+
+ local absolute-source = [ path.root
+ [ path.root [ path.make $(source:G=) ]
+ [ path.make $(source-path[1]) ] ]
+ [ path.pwd ] ] ;
+
+ local .current-target = $(target) ;
+
+ link-recursively $(absolute-target) : $(absolute-source) ;
+}
+
+rule mklink
+{
+ local target-path = [ on $(<) return $(LOCATE) ] [ on $(<) return $(SEARCH) ] . ;
+ local source-path = [ on $(>) return $(LOCATE) ] [ on $(>) return $(SEARCH) ] . ;
+ local relative-path = [ path.relative-to
+ [ path.parent [ path.join [ path.root [ path.make $(target-path[1]) ] [ path.pwd ] ] [ path.make $(<:G=) ] ] ]
+ [ path.join [ path.root [ path.make $(source-path[1]) ] [ path.pwd ] ] [ path.make $(>:G=) ] ] ] ;
+
+ PATH_TO_SOURCE on $(<) = [ path.native $(relative-path) ] ;
+}
+
+if [ os.name ] = NT
+{
+
+actions junction
+{
+ if exist "$(<)" rmdir "$(<)"
+ mklink /J "$(<)" "$(>)"
+}
+
+actions mklink
+{
+ if exist "$(<)" del "$(<)"
+ mklink "$(<)" "$(PATH_TO_SOURCE)"
+}
+
+actions hardlink
+{
+ if exist "$(<)" del "$(<)"
+ mklink /H "$(<)" "$(>)"
+}
+
+actions rm
+{
+ rmdir "$(<)"
+}
+
+}
+else
+{
+
+actions mklink
+{
+ ln -f -s "$(PATH_TO_SOURCE)" "$(<)"
+}
+
+actions hardlink
+{
+ ln -f "$(>)" "$(<)"
+}
+
+actions rm
+{
+ rm "$(<)"
+}
+
+}
+
+rule link-directory ( name : sources : requirements * : default-build * : usage-requirements * )
+{
+ local project = [ project.current ] ;
+ sources = [ new file-or-directory-reference $(sources) : $(project) ] ;
+ targets.main-target-alternative $(sources) ;
+ return [ targets.main-target-alternative
+ [ new symlink-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ] ;
+}
+
+IMPORT $(__name__) : link-directory : : link-directory ;
diff --git a/src/boost/tools/build/src/tools/lzma.jam b/src/boost/tools/build/src/tools/lzma.jam
new file mode 100644
index 000000000..465d1d693
--- /dev/null
+++ b/src/boost/tools/build/src/tools/lzma.jam
@@ -0,0 +1,134 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the lzma library
+#
+# After 'using lzma', the following targets are available:
+#
+# /lzma//lzma -- The lzma library
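+#
+# For example, a Jamfile might reference it as (target name is illustrative):
+#
+#   exe archiver : archiver.cpp /lzma//lzma ;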
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = lzma.h ;
+# liblzma is only needed for Visual C++ builds
+names = lzma liblzma ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the lzma library.
+#
+# Options for configuring lzma::
+#
+# <search>
+# The directory containing the lzma binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the lzma headers.
+#
+# If none of these options is specified, then the environment
+# variables LZMA_LIBRARY_PATH, LZMA_NAME, and LZMA_INCLUDE will
+# be used instead.
+#
+# Examples::
+#
+# # Find lzma in the default system location
+# using lzma ;
+# # Find lzma in /usr/local
+# using lzma : 1.2.7
+# : <include>/usr/local/include <search>/usr/local/lib ;
+#
+rule init (
+ version ?
+ # (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the target
+
+ : is-default ?
+ # Default configurations are only used when
+ # not yet configured. This option is
+ # deprecated. A configuration will be treated
+ # as a default when none of <include>, <search>,
+ # <name>, and <source> are present.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project lzma ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+
+ if ! $(options)
+ {
+ is-default = true ;
+ }
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [lzma] lzma is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "lzma is already configured" ;
+ }
+ return ;
+ }
+ else
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [lzma] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [lzma] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library lzma : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/tools/make.jam b/src/boost/tools/build/src/tools/make.jam
new file mode 100644
index 000000000..b0784b620
--- /dev/null
+++ b/src/boost/tools/build/src/tools/make.jam
@@ -0,0 +1,69 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'make' main target rule.
+
+import "class" : new ;
+import param ;
+import project ;
+import property-set ;
+import targets ;
+
+
+class make-target-class : basic-target
+{
+ import "class" : new ;
+ import indirect ;
+ import toolset ;
+ import type ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local action-name = [ $(property-set).get <action> ] ;
+ # 'm' will always be set -- we add '@' ourselves in the 'make' rule
+ # below.
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+
+ local relevant = [ toolset.relevant [ indirect.get-rule $(m[1]) ] ] ;
+ local a = [ new action $(source-targets) : $(m[1]) : [ $(property-set).add $(relevant) ] ] ;
+ local t = [ new file-target $(self.name) exact : [ type.type
+ $(self.name) ] : $(self.project) : $(a) ] ;
+ return $(relevant) [ virtual-target.register $(t) ] ;
+ }
+}
+
+
+# Declares the 'make' main target.
+#
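+# As a rough illustration (the action and file names are invented), a Jamfile
+# could pair it with an action like this:
+#
+#   actions create-greeting
+#   {
+#       echo hello > "$(<)"
+#   }
+#
+#   make greeting.txt : : @create-greeting ;
+#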
+rule make ( target-name : sources * : generating-rule + : requirements * :
+ usage-requirements * )
+{
+ param.handle-named-params
+ sources generating-rule requirements default-build usage-requirements ;
+    # The '@' sign causes the feature.jam module to qualify the rule name with
+    # the module name of the current project, if needed.
+ local m = [ MATCH ^(@).* : $(generating-rule) ] ;
+ if ! $(m)
+ {
+ generating-rule = @$(generating-rule) ;
+ }
+ targets.create-metatarget make-target-class : [ project.current ] :
+ $(target-name) : $(sources) : $(requirements) <action>$(generating-rule)
+ : : $(usage-requirements) ;
+}
+
+
+IMPORT $(__name__) : make : : make ;
diff --git a/src/boost/tools/build/src/tools/make.py b/src/boost/tools/build/src/tools/make.py
new file mode 100644
index 000000000..716a56119
--- /dev/null
+++ b/src/boost/tools/build/src/tools/make.py
@@ -0,0 +1,59 @@
+# Status: ported.
+# Base revision: 64068
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'make' main target rule.
+
+from b2.build.targets import BasicTarget
+from b2.build.virtual_target import Action, FileTarget
+from b2.build import type
+from b2.manager import get_manager
+import b2.build.property_set
+
+
+class MakeTarget(BasicTarget):
+
+ def construct(self, name, source_targets, property_set):
+
+ action_name = property_set.get("<action>")[0]
+ action = Action(get_manager(), source_targets, action_name[1:], property_set)
+ target = FileTarget(self.name(), type.type(self.name()),
+ self.project(), action, exact=True)
+ return [ b2.build.property_set.empty(),
+ [self.project().manager().virtual_targets().register(target)]]
+
+def make (target_name, sources, generating_rule,
+ requirements=None, usage_requirements=None):
+
+ target_name = target_name[0]
+ generating_rule = generating_rule[0]
+ if generating_rule[0] != '@':
+ generating_rule = '@' + generating_rule
+
+ if not requirements:
+ requirements = []
+
+
+ requirements.append("<action>%s" % generating_rule)
+
+ m = get_manager()
+ targets = m.targets()
+ project = m.projects().current()
+ engine = m.engine()
+ engine.register_bjam_action(generating_rule)
+
+ targets.main_target_alternative(MakeTarget(
+ target_name, project,
+ targets.main_target_sources(sources, target_name),
+ targets.main_target_requirements(requirements, project),
+ targets.main_target_default_build([], project),
+ targets.main_target_usage_requirements(usage_requirements or [], project)))
+
+get_manager().projects().add_rule("make", make)
+
diff --git a/src/boost/tools/build/src/tools/mc.jam b/src/boost/tools/build/src/tools/mc.jam
new file mode 100644
index 000000000..578377735
--- /dev/null
+++ b/src/boost/tools/build/src/tools/mc.jam
@@ -0,0 +1,44 @@
+#~ Copyright 2005 Alexey Pakhunov.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for the Microsoft message compiler tool.
+# Notes:
+# - there is just the message compiler tool; there is no tool for
+#   extracting message strings from sources
+# - This file allows using the Microsoft message compiler
+#   with any toolset. In msvc.jam, there is a more specific
+#   message compiling action.
+
+import common ;
+import generators ;
+import feature : feature get-values ;
+import toolset : flags ;
+import type ;
+import rc ;
+
+rule init ( )
+{
+}
+
+type.register MC : mc ;
+
+
+# Command line options
+feature mc-input-encoding : ansi unicode : free ;
+feature mc-output-encoding : unicode ansi : free ;
+feature mc-set-customer-bit : no yes : free ;
+
+flags mc.compile MCFLAGS <mc-input-encoding>ansi : -a ;
+flags mc.compile MCFLAGS <mc-input-encoding>unicode : -u ;
+flags mc.compile MCFLAGS <mc-output-encoding>ansi : -A ;
+flags mc.compile MCFLAGS <mc-output-encoding>unicode : -U ;
+flags mc.compile MCFLAGS <mc-set-customer-bit>no : ;
+flags mc.compile MCFLAGS <mc-set-customer-bit>yes : -c ;
+
+generators.register-standard mc.compile : MC : H RC ;
+
+actions compile
+{
+ mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
diff --git a/src/boost/tools/build/src/tools/mc.py b/src/boost/tools/build/src/tools/mc.py
new file mode 100644
index 000000000..d8b970194
--- /dev/null
+++ b/src/boost/tools/build/src/tools/mc.py
@@ -0,0 +1,46 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for the Microsoft message compiler tool.
+# Notes:
+# - there is just the message compiler tool; there is no tool for
+#   extracting message strings from sources
+# - This file allows using the Microsoft message compiler
+#   with any toolset. In msvc.jam, there is a more specific
+#   message compiling action.
+
+import bjam
+
+from b2.tools import common, rc
+from b2.build import generators, type
+from b2.build.toolset import flags
+from b2.build.feature import feature
+from b2.manager import get_manager
+
+def init():
+ pass
+
+type.register('MC', ['mc'])
+
+
+# Command line options
+feature('mc-input-encoding', ['ansi', 'unicode'], ['free'])
+feature('mc-output-encoding', ['unicode', 'ansi'], ['free'])
+feature('mc-set-customer-bit', ['no', 'yes'], ['free'])
+
+flags('mc.compile', 'MCFLAGS', ['<mc-input-encoding>ansi'], ['-a'])
+flags('mc.compile', 'MCFLAGS', ['<mc-input-encoding>unicode'], ['-u'])
+flags('mc.compile', 'MCFLAGS', ['<mc-output-encoding>ansi'], ['-A'])
+flags('mc.compile', 'MCFLAGS', ['<mc-output-encoding>unicode'], ['-U'])
+flags('mc.compile', 'MCFLAGS', ['<mc-set-customer-bit>no'], [])
+flags('mc.compile', 'MCFLAGS', ['<mc-set-customer-bit>yes'], ['-c'])
+
+generators.register_standard('mc.compile', ['MC'], ['H', 'RC'])
+
+get_manager().engine().register_action(
+ 'mc.compile',
+ 'mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"')
diff --git a/src/boost/tools/build/src/tools/message.jam b/src/boost/tools/build/src/tools/message.jam
new file mode 100644
index 000000000..672b6e0bc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/message.jam
@@ -0,0 +1,62 @@
+# Copyright 2008 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target type 'message', which prints a message when built
+# for the first time.
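+#
+# A minimal, purely illustrative use from a Jamfile (the target name and the
+# text are invented):
+#
+#   message build-notice : "Note: feature X is disabled in this build" ;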
+
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+
+class message-target-class : basic-target
+{
+ rule set-message ( * )
+ {
+ self.1 = $(1) ;
+ self.2 = $(2) ;
+ self.3 = $(3) ;
+ self.4 = $(4) ;
+ self.5 = $(5) ;
+ self.6 = $(6) ;
+ self.7 = $(7) ;
+ self.8 = $(8) ;
+ self.9 = $(9) ;
+ self.built = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ if ! $(self.built)
+ {
+ for i in 1 2 3 4 5 6 7 8 9
+ {
+ if $(self.$(i))
+ {
+ ECHO $(self.$(i)) ;
+ }
+ }
+ self.built = 1 ;
+ }
+
+ return [ property-set.empty ] ;
+ }
+}
+
+
+rule message ( name : * )
+{
+ local project = [ project.current ] ;
+
+ local result = [ targets.main-target-alternative
+ [ new message-target-class $(name) : $(project)
+ : [ targets.main-target-sources : $(name) ]
+ : [ targets.main-target-requirements : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements : $(project) ]
+ ] ] ;
+ $(result).set-message $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ return $(result) ;
+}
+IMPORT $(__name__) : message : : message ;
diff --git a/src/boost/tools/build/src/tools/message.py b/src/boost/tools/build/src/tools/message.py
new file mode 100644
index 000000000..3f276f93e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/message.py
@@ -0,0 +1,54 @@
+# Status: ported.
+# Base revision: 64488.
+#
+# Copyright 2008, 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target type 'message', which prints a message when built
+# for the first time.
+
+import b2.build.targets as targets
+import b2.build.property_set as property_set
+
+from b2.manager import get_manager
+
+class MessageTargetClass(targets.BasicTarget):
+
+ def __init__(self, name, project, sources, requirements, default_build,
+ usage_requirements, *args):
+ targets.BasicTarget.__init__(
+ self, name, project, sources, requirements, default_build, usage_requirements)
+ self.args = args
+ self.built = False
+
+ def construct(self, name, sources, ps):
+
+ if not self.built:
+ for arg in self.args:
+ if type(arg) == type([]):
+ arg = " ".join(arg)
+                print(arg)
+ self.built = True
+
+ return (property_set.empty(), [])
+
+def message(name, *args):
+
+ if type(name) == type([]):
+ name = name[0]
+
+ t = get_manager().targets()
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ MessageTargetClass(
+ name, project,
+ t.main_target_sources([], name),
+ t.main_target_requirements([], project),
+ t.main_target_default_build([], project),
+ t.main_target_usage_requirements([], project),
+ *args
+ ))
+
+get_manager().projects().add_rule("message", message)
diff --git a/src/boost/tools/build/src/tools/midl.jam b/src/boost/tools/build/src/tools/midl.jam
new file mode 100644
index 000000000..0aa5dda31
--- /dev/null
+++ b/src/boost/tools/build/src/tools/midl.jam
@@ -0,0 +1,142 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Microsoft Interface Definition Language (MIDL) related routines
+
+import common ;
+import generators ;
+import feature : feature get-values ;
+import os ;
+import scanner ;
+import toolset : flags ;
+import type ;
+
+rule init ( )
+{
+}
+
+type.register IDL : idl ;
+
+# A type library (.tlb) is generated by the MIDL compiler and can be included
+# in the resources of an application (.rc). In order to be found by the
+# resource compiler, its target type should be derived from 'H'; otherwise
+# the property '<implicit-dependency>' will be ignored.
+type.register MSTYPELIB : tlb : H ;
+
+
+# Register scanner for MIDL files
+class midl-scanner : scanner
+{
+ import path property-set regex scanner type virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ self.includes = $(includes) ;
+
+ # List of quoted strings
+ self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
+
+ # 'import' and 'importlib' directives
+ self.re-import = "import"$(self.re-strings)"[ \t]*;" ;
+ self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ;
+
+ # C preprocessor 'include' directive
+ self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ;
+ self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
+ }
+
+ rule pattern ( )
+ {
+ # Match '#include', 'import' and 'importlib' directives
+ return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ;
+ local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ;
+ local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ;
+ local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ;
+
+      # CONSIDER: the new scoping rule seems to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+      # Attach the binding of the including file to the included targets.
+      # When a target is created directly from a virtual target this extra
+      # information is unnecessary. But in other cases, it allows
+      # distinguishing between two headers of the same name included from
+      # different places.
+ local g2 = $(g)"#"$(b) ;
+
+ included-angle = $(included-angle:G=$(g)) ;
+ included-quoted = $(included-quoted:G=$(g2)) ;
+ imported = $(imported:G=$(g2)) ;
+ imported_tlbs = $(imported_tlbs:G=$(g2)) ;
+
+ local all = $(included-angle) $(included-quoted) $(imported) ;
+
+ INCLUDES $(target) : $(all) ;
+ DEPENDS $(target) : $(imported_tlbs) ;
+ NOCARE $(all) $(imported_tlbs) ;
+ SEARCH on $(included-angle) = $(self.includes:G=) ;
+ SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ;
+ SEARCH on $(imported) = $(b) $(self.includes:G=) ;
+ SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ;
+
+ scanner.propagate
+ [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] :
+ $(included-angle) $(included-quoted) : $(target) ;
+
+ scanner.propagate $(__name__) : $(imported) : $(target) ;
+ }
+}
+
+scanner.register midl-scanner : include ;
+type.set-scanner IDL : midl-scanner ;
+
+
+# Command line options
+feature midl-stubless-proxy : yes no : propagated ;
+feature midl-robust : yes no : propagated ;
+
+flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>yes : /Oicf ;
+flags midl.compile.idl MIDLFLAGS <midl-stubless-proxy>no : /Oic ;
+flags midl.compile.idl MIDLFLAGS <midl-robust>yes : /robust ;
+flags midl.compile.idl MIDLFLAGS <midl-robust>no : /no_robust ;
+
+# Architecture-specific options
+architecture-x86 = <architecture> <architecture>x86 ;
+address-model-32 = <address-model> <address-model>32 ;
+address-model-64 = <address-model> <address-model>64 ;
+
+flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ;
+flags midl.compile.idl MIDLFLAGS $(architecture-x86)/<address-model>64 : /x64 ;
+flags midl.compile.idl MIDLFLAGS <architecture>ia64/$(address-model-64) : /ia64 ;
+
+
+flags midl.compile.idl DEFINES <define> ;
+flags midl.compile.idl UNDEFS <undef> ;
+flags midl.compile.idl INCLUDES <include> ;
+
+
+generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ;
+
+
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# depends on the contents of the source IDL file. Calling TOUCH_FILE below
+# ensures that both files will be created so that bjam will not try to
+# recreate them constantly.
+TOUCH_FILE = [ common.file-touch-command ] ;
+
+actions compile.idl
+{
+ midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")"
+ $(TOUCH_FILE) "$(<[4]:W)"
+ $(TOUCH_FILE) "$(<[5]:W)"
+}
diff --git a/src/boost/tools/build/src/tools/midl.py b/src/boost/tools/build/src/tools/midl.py
new file mode 100644
index 000000000..51bc51feb
--- /dev/null
+++ b/src/boost/tools/build/src/tools/midl.py
@@ -0,0 +1,134 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Microsoft Interface Definition Language (MIDL) related routines
+import os
+import bjam
+
+# property_set is used below in place of the undefined 'PropertySet' name.
+from b2.build import scanner, type, property_set
+from b2.build.toolset import flags
+from b2.build.feature import feature
+from b2.manager import get_manager
+from b2.tools import builtin, common
+from b2.util import regex, utility
+
+def init():
+ pass
+
+type.register('IDL', ['idl'])
+
+# A type library (.tlb) is generated by the MIDL compiler and can be included
+# in the resources of an application (.rc). In order to be found by the
+# resource compiler, its target type should be derived from 'H'; otherwise
+# the property '<implicit-dependency>' will be ignored.
+type.register('MSTYPELIB', ['tlb'], 'H')
+
+# Register scanner for MIDL files
+class MidlScanner(scanner.Scanner):
+ def __init__ (self, includes=[]):
+ scanner.Scanner.__init__(self)
+ self.includes = includes
+
+        # List of quoted strings
+        re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*"
+
+        # 'import' and 'importlib' directives
+        self.re_import = "import" + re_strings + "[ \t]*;"
+        self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;"
+
+        # C preprocessor 'include' directive
+        self.re_include_angle = "#[ \t]*include[ \t]*<(.*)>"
+        self.re_include_quoted = "#[ \t]*include[ \t]*\"(.*)\""
+
+    def pattern(self):
+ # Match '#include', 'import' and 'importlib' directives
+ return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)"
+
+ def process(self, target, matches, binding):
+ included_angle = regex.transform(matches, self.re_include_angle)
+ included_quoted = regex.transform(matches, self.re_include_quoted)
+ imported = regex.transform(matches, self.re_import, [1, 3])
+ imported_tlbs = regex.transform(matches, self.re_importlib, [1, 3])
+
+        # CONSIDER: the new scoping rule seems to defeat "on target" variables.
+ g = bjam.call('get-target-variable', target, 'HDRGRIST')[0]
+ b = os.path.normpath(os.path.dirname(binding))
+
+        # Attach the binding of the including file to the included targets.
+        # When a target is created directly from a virtual target this extra
+        # information is unnecessary. But in other cases, it allows
+        # distinguishing between two headers of the same name included from
+        # different places.
+ g2 = g + "#" + b
+
+ g = "<" + g + ">"
+ g2 = "<" + g2 + ">"
+
+ included_angle = [ g + x for x in included_angle ]
+ included_quoted = [ g + x for x in included_quoted ]
+ imported = [ g + x for x in imported ]
+ imported_tlbs = [ g + x for x in imported_tlbs ]
+
+ all = included_angle + included_quoted + imported
+
+ bjam.call('INCLUDES', [target], all)
+ bjam.call('DEPENDS', [target], imported_tlbs)
+ bjam.call('NOCARE', all + imported_tlbs)
+        # The original port referred to undefined 'engine' and 'PropertySet'
+        # names here; assuming the manager's engine and property_set.create()
+        # are the intended equivalents of what the Jam version of this scanner
+        # does.
+        engine = get_manager().engine()
+        engine.set_target_variable(included_angle , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+        engine.set_target_variable(included_quoted, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+        engine.set_target_variable(imported       , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+        engine.set_target_variable(imported_tlbs  , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+
+        get_manager().scanners().propagate(
+            type.get_scanner('CPP', property_set.create(self.includes)),
+            included_angle + included_quoted)
+        get_manager().scanners().propagate(self, imported)
+
+scanner.register(MidlScanner, 'include')
+type.set_scanner('IDL', MidlScanner)
+
+
+# Command line options
+feature('midl-stubless-proxy', ['yes', 'no'], ['propagated'] )
+feature('midl-robust', ['yes', 'no'], ['propagated'] )
+
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>yes'], ['/Oicf' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>no' ], ['/Oic' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>yes' ], ['/robust' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>no' ], ['/no_robust'])
+
+# Architecture-specific options
+architecture_x86 = ['<architecture>' , '<architecture>x86']
+address_model_32 = ['<address-model>', '<address-model>32']
+address_model_64 = ['<address-model>', '<address-model>64']
+
+flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/' + m for ar in architecture_x86 for m in address_model_32 ], ['/win32'])
+flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/<address-model>64' for ar in architecture_x86], ['/x64'])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<architecture>ia64/' + m for m in address_model_64], ['/ia64'])
+
+flags('midl.compile.idl', 'DEFINES', [], ['<define>'])
+flags('midl.compile.idl', 'UNDEFS', [], ['<undef>'])
+flags('midl.compile.idl', 'INCLUDES', [], ['<include>'])
+
+
+builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], [])
+
+
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# depends on the contents of the source IDL file. The touch commands appended
+# to the action below ensure that both files will be created so that bjam will
+# not try to recreate them constantly.
+get_manager().engine().register_action(
+ 'midl.compile.idl',
+ '''midl /nologo @"@($(<[1]:W).rsp:E=
+"$(>:W)"
+-D$(DEFINES)
+"-I$(INCLUDES)"
+-U$(UNDEFS)
+$(MIDLFLAGS)
+/tlb "$(<[1]:W)"
+/h "$(<[2]:W)"
+/iid "$(<[3]:W)"
+/proxy "$(<[4]:W)"
+/dlldata "$(<[5]:W)")"
+{touch} "$(<[4]:W)"
+{touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
diff --git a/src/boost/tools/build/src/tools/mipspro.jam b/src/boost/tools/build/src/tools/mipspro.jam
new file mode 100644
index 000000000..9e8c39693
--- /dev/null
+++ b/src/boost/tools/build/src/tools/mipspro.jam
@@ -0,0 +1,145 @@
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+
+feature.extend toolset : mipspro ;
+toolset.inherit mipspro : unix ;
+generators.override mipspro.prebuilt : builtin.lib-generator ;
+generators.override mipspro.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.sgi.com/products/software/irix/tools/
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters mipspro : version $(version) ] ;
+
+ command = [ common.get-invocation-command mipspro : CC : $(command) ] ;
+
+ common.handle-options mipspro : $(condition) : $(command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=cc) ;
+
+ toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local command = [
+ common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ;
+
+ command_f = $(command_f[1--2]) $(command[-1]:B=f77) ;
+ toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ;
+
+ # set link flags
+ flags mipspro.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+
+ flags mipspro.link FINDLIBS-SA : [
+ feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+}
+
+# Declare generators
+generators.register-c-compiler mipspro.compile.c : C : OBJ : <toolset>mipspro ;
+generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : <toolset>mipspro ;
+generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : <toolset>mipspro ;
+
+cpu-arch-32 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32 ;
+
+cpu-arch-64 =
+ <architecture>/<address-model>64 ;
+
+flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ;
+flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ;
+
+# Declare flags and actions for compilation
+flags mipspro.compile OPTIONS <debug-symbols>on : -g ;
+# flags mipspro.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags mipspro.compile OPTIONS <warnings>off : -w ;
+flags mipspro.compile OPTIONS <warnings>on : -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning
+flags mipspro.compile OPTIONS <warnings>all : -fullwarn ;
+flags mipspro.compile OPTIONS <optimization>speed : -Ofast ;
+flags mipspro.compile OPTIONS <optimization>space : -O2 ;
+flags mipspro.compile OPTIONS <cflags> : "-LANG:std" ;
+flags mipspro.compile.c++ OPTIONS <inlining>off : "-INLINE:none" ;
+flags mipspro.compile.c++ OPTIONS <cxxflags> ;
+flags mipspro.compile DEFINES <define> ;
+flags mipspro.compile INCLUDES <include> ;
+
+
+flags mipspro.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags mipspro.link OPTIONS <debug-symbols>on : -g ;
+# Strip the binary when no debugging is needed
+# flags mipspro.link OPTIONS <debug-symbols>off : -s ;
+# flags mipspro.link OPTIONS <profiling>on : -xprofile=tcov ;
+# flags mipspro.link OPTIONS <threading>multi : -mt ;
+
+flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ;
+flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ;
+
+flags mipspro.link OPTIONS <optimization>speed : -Ofast ;
+flags mipspro.link OPTIONS <optimization>space : -O2 ;
+flags mipspro.link OPTIONS <linkflags> ;
+flags mipspro.link LINKPATH <library-path> ;
+flags mipspro.link FINDLIBS-ST <find-static-library> ;
+flags mipspro.link FINDLIBS-SA <find-shared-library> ;
+flags mipspro.link FINDLIBS-SA <threading>multi : pthread ;
+flags mipspro.link LIBRARIES <library-file> ;
+flags mipspro.link LINK-RUNTIME <runtime-link>static : static ;
+flags mipspro.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags mipspro.link RPATH <dll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ ar -cr "$(<)" "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/mpi.jam b/src/boost/tools/build/src/tools/mpi.jam
new file mode 100644
index 000000000..1f1658c92
--- /dev/null
+++ b/src/boost/tools/build/src/tools/mpi.jam
@@ -0,0 +1,638 @@
+# Support for the Message Passing Interface (MPI)
+#
+# (C) Copyright 2005, 2006 Trustees of Indiana University
+# (C) Copyright 2005 Douglas Gregor
+#
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
+#
+# Authors: Douglas Gregor
+# Andrew Lumsdaine
+#
+# ==== MPI Configuration ====
+#
+# For many users, MPI support can be enabled simply by adding the following
+# line to your user-config.jam file:
+#
+# using mpi ;
+#
+# This should auto-detect MPI settings based on the MPI wrapper compiler in
+# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
+# has a different name, you can pass the name of the wrapper compiler as the
+# first argument to the mpi module:
+#
+# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
+#
+# If your MPI implementation does not have a wrapper compiler, or the MPI
+# auto-detection code does not work with your MPI's wrapper compiler,
+# you can pass MPI-related options explicitly via the second parameter to the
+# mpi module:
+#
+# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
+# <find-shared-library>mpi <find-shared-library>lam
+# <find-shared-library>dl ;
+#
+# To see the results of MPI auto-detection, pass "--debug-configuration" on
+# the bjam command line.
+#
+# The (optional) fourth argument configures Boost.MPI for running
+# regression tests. These parameters specify the executable used to
+# launch jobs (default: "mpirun") followed by any arguments needed to
+# run tests and the option that tells the launcher how many processes
+# to use (default: "-np"). With the default parameters, the test
+# harness will execute, for instance,
+#
+# mpirun -np 4 all_gather_test
+#
+# ==== Linking Against the MPI Libraries ====
+#
+# To link against the MPI libraries, import the "mpi" module and add the
+# following requirement to your target:
+#
+# <library>/mpi//mpi
+#
+# Since MPI support is not always available, you should check
+# "mpi.configured" before trying to link against the MPI libraries.
+
+import "class" : new ;
+import common ;
+import feature : feature ;
+import generators ;
+import os ;
+import project ;
+import property ;
+import testing ;
+import toolset ;
+import type ;
+import path ;
+
+# Make this module a project
+project.initialize $(__name__) ;
+project mpi ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Assuming the first part of the command line is the given prefix
+# followed by some non-empty value, remove the first argument. Returns
+# either nothing (if there was no prefix or no value) or a pair
+#
+# <name>value rest-of-cmdline
+#
+# This is a subroutine of cmdline_to_features
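+#
+# For illustration (the paths are made up), a call such as
+#
+#   add_feature -I include "-I/usr/include -DFOO"
+#
+# would return "<include>/usr/include" followed by the remaining command
+# line "-DFOO".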
+rule add_feature ( prefix name cmdline )
+{
+ local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+
+ # If there was no value associated with the prefix, abort
+ if ! $(match) {
+ return ;
+ }
+
+ local value = $(match[1]) ;
+
+ if [ MATCH " +" : $(value) ] {
+ value = "\"$(value)\"" ;
+ }
+
+ return "<$(name)>$(value)" $(match[2]) ;
+}
+
+# Strip any end-of-line characters off the given string and return the
+# result.
+rule strip-eol ( string )
+{
+ local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
+
+ if $(match)
+ {
+ return $(match[1]) ;
+ }
+ else
+ {
+ return $(string) ;
+ }
+}
+
+# Split a command-line into a set of features. Certain kinds of
+# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
+# with their B2 equivalents (e.g., <include>, <define>,
+# <library-path>, <find-library>). All other arguments are introduced
+# using the features in the unknown-features parameter, because we do
+# not know how to deal with them. The incoming command line should be a
+# string starting with an executable (e.g., "g++ -I/include/path") and
+# may contain any number of command-line arguments thereafter. The
+# result is a list of features corresponding to the given command line,
+# ignoring the executable.
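+#
+# As an illustration only (a made-up command line), passing
+#
+#   "mpic++ -I/usr/include/mpi -L/usr/lib -lmpi -O2"
+#
+# would yield roughly <include>/usr/include/mpi <library-path>/usr/lib
+# <find-shared-library>mpi, with "-O2" duplicated into <cxxflags> and
+# <linkflags> because it does not match any recognized prefix.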
+rule cmdline_to_features ( cmdline : unknown-features ? )
+{
+ local executable ;
+ local features ;
+ local otherflags ;
+ local result ;
+
+ unknown-features ?= <cxxflags> <linkflags> ;
+
+ # Pull the executable out of the command line. At this point, the
+ # executable is just thrown away.
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ executable = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # List the prefix/feature pairs that we will be able to transform.
+ # Every kind of parameter not mentioned here will be placed in both
+ # cxxflags and linkflags, because we don't know where they should go.
+ local feature_kinds-D = "define" ;
+ local feature_kinds-I = "include" ;
+ local feature_kinds-L = "library-path" ;
+ local feature_kinds-l = "find-shared-library" ;
+
+ while $(cmdline) {
+
+ # Check for one of the feature prefixes we know about. If we
+ # find one (and the associated value is nonempty), convert it
+ # into a feature.
+ local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
+ local matched ;
+ if $(match) && $(match[2]) {
+ local prefix = $(match[1]) ;
+ if $(feature_kinds$(prefix)) {
+ local name = $(feature_kinds$(prefix)) ;
+ local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
+
+ if $(add) {
+
+ if $(add[1]) = <find-shared-library>pthread
+ {
+ # Uhm. It's not really nice that this MPI implementation
+ # uses -lpthread as opposed to -pthread. We do want to
+ # set <threading>multi, instead of -lpthread.
+ result += "<threading>multi" ;
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else
+ {
+ result += $(add[1]) ;
+ }
+
+ cmdline = $(add[2]) ;
+ matched = yes ;
+ }
+ }
+ }
+
+    # If we haven't matched a feature prefix, just grab the command-line
+    # argument itself. If we can map this argument to a feature
+    # (e.g., -pthread -> <threading>multi), then do so; otherwise, add it
+    # to the list of "other" flags that we do not understand.
+ if ! $(matched) {
+ match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ local value = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # Check for multithreading support
+ if $(value) = "-pthread" || $(value) = "-pthreads"
+ {
+ result += "<threading>multi" ;
+
+ # DPG: This is a hack intended to work around a BBv2 bug where
+ # requirements propagated from libraries are not checked for
+ # conflicts when BBv2 determines which "common" properties to
+ # apply to a target. In our case, the <threading>single property
+ # gets propagated from the common properties to Boost.MPI
+ # targets, even though <threading>multi is in the usage
+ # requirements of <library>/mpi//mpi.
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
+ otherflags += $(value) ;
+ }
+ }
+ }
+
+ # If there are other flags that we don't understand, add them to the
+ # result as both <cxxflags> and <linkflags>
+ if $(otherflags) {
+ for unknown in $(unknown-features)
+ {
+ result += "$(unknown)$(otherflags:J= )" ;
+ }
+ }
+
+ return $(result) ;
+}
+
+# Determine if it is safe to execute the given shell command by trying
+# to execute it and determining whether the exit code is zero or
+# not. Returns true for an exit code of zero, false otherwise.
+local rule safe-shell-command ( cmdline )
+{
+ local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
+ return [ MATCH ".*(SSCOK).*" : $(result) ] ;
+}
+
+# Initialize the MPI module.
+rule init ( mpicxx ? : options * : mpirun-with-options * )
+{
+ if ! $(options) && $(.debug-configuration)
+ {
+ ECHO "===============MPI Auto-configuration===============" ;
+ }
+
+ if ! $(mpicxx) && [ os.on-windows ]
+ {
+ # Paths for Microsoft MPI
+ local ms_mpi_path_native = "C:\\Program Files\\Microsoft MPI" ;
+ local ms_mpi_sdk_path_native = "C:\\Program Files (x86)\\Microsoft SDKs\\MPI" ;
+
+ # Path for Microsoft Compute Cluster Pack
+ local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
+
+ # Try to auto-configure Microsoft MPI
+ if [ GLOB $(ms_mpi_path_native)\\Bin : mpiexec.exe ] &&
+ [ GLOB $(ms_mpi_sdk_path_native)\\Include : mpi.h ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found Microsoft MPI: $(ms_mpi_path_native)" ;
+ ECHO "Found Microsoft MPI SDK: $(ms_mpi_sdk_path_native)" ;
+ }
+
+ local ms_mpi_sdk_path = [ path.make $(ms_mpi_sdk_path_native) ] ;
+
+ # Pick up either the 32-bit or 64-bit library, depending on which address
+ # model the user has selected. Default to 32-bit.
+ options = <include>$(ms_mpi_sdk_path)/Include
+ <address-model>64:<library-path>$(ms_mpi_sdk_path)/Lib/x64
+ <library-path>$(ms_mpi_sdk_path)/Lib/x86
+ <find-static-library>msmpi
+ <toolset>msvc:<define>_SECURE_SCL=0
+ ;
+
+ # Setup the "mpirun" equivalent (mpiexec)
+ .mpirun = "\"$(ms_mpi_path_native)\\Bin\\mpiexec.exe"\" ;
+ .mpirun_flags = -n ;
+ }
+ # Try to auto-configure to the Microsoft Compute Cluster Pack
+ else if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
+ }
+
+ local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
+
+ # Pick up either the 32-bit or 64-bit library, depending on which address
+ # model the user has selected. Default to 32-bit.
+ options = <include>$(cluster_pack_path)/Include
+ <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
+ <library-path>$(cluster_pack_path)/Lib/i386
+ <find-static-library>msmpi
+ <toolset>msvc:<define>_SECURE_SCL=0
+ ;
+
+ # Setup the "mpirun" equivalent (mpiexec)
+ .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
+ .mpirun_flags = -n ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "Did not find Microsoft MPI in $(ms_mpi_path_native)" ;
+ ECHO " and/or Microsoft MPI SDK in $(ms_mpi_sdk_path_native)." ;
+ ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
+ }
+ }
+
+ if ! $(options)
+ {
+ # Try to auto-detect options based on the wrapper compiler
+ local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpiCC", which is used by MPICH
+ command = [ common.get-invocation-command mpi : mpiCC ] ;
+ }
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpicxx", which is used by OpenMPI and MPICH2
+ command = [ common.get-invocation-command mpi : mpicxx ] ;
+ }
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "CC", which is used by Cray
+ command = [ common.get-invocation-command mpi : CC ] ;
+ }
+
+ local result ;
+ local compile_flags ;
+ local link_flags ;
+
+ if ! $(command)
+ {
+ # Do nothing: we'll complain later
+ }
+ # OpenMPI and newer versions of LAM-MPI have -showme:compile and
+ # -showme:link.
+ else if [ safe-shell-command "$(command) -showme:compile" ] &&
+ [ safe-shell-command "$(command) -showme:link" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ SHELL "$(command) -showme:compile" ] ;
+ link_flags = [ SHELL "$(command) -showme:link" ] ;
+
+ # Prepend COMPILER as the executable name, to match the format of
+ # other compilation commands.
+ compile_flags = "COMPILER $(compile_flags) -DOMPI_SKIP_MPICXX " ;
+ link_flags = "COMPILER $(link_flags)" ;
+ }
+ # Look for LAM-MPI's -showme
+ else if [ safe-shell-command "$(command) -showme" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
+ }
+
+ result = [ SHELL "$(command) -showme" ] ;
+ }
+ # Look for MPICH
+ else if [ safe-shell-command "$(command) -show" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found MPICH wrapper compiler: $(command)" ;
+ }
+ compile_flags = [ SHELL "$(command) -compile_info" ] ;
+ link_flags = [ SHELL "$(command) -link_info" ] ;
+ }
+ # Sun HPC and Ibm POE
+ else if [ SHELL "$(command) -v 2>/dev/null" ]
+ {
+ compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
+
+ local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
+ if $(back)
+ {
+ # Sun HPC
+ if $(.debug-configuration)
+ {
+ ECHO "Found Sun MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
+ compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
+ link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
+ link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
+
+ # strip out -v from compile options
+ local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
+ back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ }
+ else
+ {
+ # Ibm POE
+ if $(.debug-configuration)
+ {
+ ECHO "Found IBM MPI wrapper compiler: $(command)" ;
+ }
+
+ #
+ compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
+ compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
+ local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
+ back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ link_flags = $(compile_flags) ;
+
+ # get location of mpif.h from mpxlf
+ local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
+ f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
+ front = [ MATCH "(.*)-v" : $(f_flags) ] ;
+ back = [ MATCH "-v(.*)" : $(f_flags) ] ;
+ f_flags = "$(front) $(back)" ;
+ f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
+ f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ compile_flags = "$(compile_flags) $(f_flags)" ;
+ }
+ }
+ # Cray
+ else if [ safe-shell-command "$(command) -v" ]
+ {
+ compile_flags = [ safe-shell-command "$(command) -###" ] ;
+ link_flags = [ safe-shell-command "$(command) -###" ] ;
+ # ECHO "Noel: compile_flags: $(compile_flags)" ;
+ # ECHO "Noel: link_flags: $(link_flags)" ;
+ result = " " ;
+ }
+
+ if $(result) || $(compile_flags) && $(link_flags)
+ {
+ if $(result)
+ {
+ result = [ strip-eol $(result) ] ;
+ options = [ cmdline_to_features $(result) ] ;
+ }
+ else
+ {
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ link_flags = [ strip-eol $(link_flags) ] ;
+
+ # Separately process compilation and link features, then combine
+ # them at the end.
+ local compile_features = [ cmdline_to_features $(compile_flags)
+ : "<cxxflags>" ] ;
+ local link_features = [ cmdline_to_features $(link_flags)
+ : "<linkflags>" ] ;
+ options = $(compile_features) $(link_features) ;
+ }
+
+ # If requested, display MPI configuration information.
+ if $(.debug-configuration)
+ {
+ if $(result)
+ {
+ ECHO " Wrapper compiler command line: $(result)" ;
+ }
+ else
+ {
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(compile_flags) ] ;
+ ECHO "MPI compilation flags: $(match[2])" ;
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(link_flags) ] ;
+ ECHO "MPI link flags: $(match[2])" ;
+ }
+ }
+ }
+ else
+ {
+ if $(command)
+ {
+ ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
+ ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
+ }
+ else if $(mpicxx)
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
+ }
+ else
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
+ }
+ ECHO "You will need to manually configure MPI support." ;
+ }
+
+ }
+
+ # Find mpirun (or its equivalent) and its flags
+ if ! $(.mpirun)
+ {
+ .mpirun =
+ [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
+ .mpirun_flags = $(mpirun-with-options[2-]) ;
+ .mpirun_flags ?= -np ;
+ }
+
+ if $(.debug-configuration)
+ {
+ if $(options)
+ {
+ echo "MPI build features: " ;
+ ECHO $(options) ;
+ }
+
+ if $(.mpirun)
+ {
+ echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
+ }
+
+ ECHO "====================================================" ;
+ }
+
+ if $(options)
+ {
+ .configured = true ;
+
+ # Set up the "mpi" alias
+ alias mpi : : : : $(options) ;
+ }
+}
+
+# States whether MPI has been configured.
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+# Returns the "extra" requirements needed to build MPI. These requirements are
+# part of the /mpi//mpi library target, but they need to be added to anything
+# that uses MPI directly to work around bugs in BBv2's propagation of
+# requirements.
+rule extra-requirements ( )
+{
+ return $(MPI_EXTRA_REQUIREMENTS) ;
+}
+
+# Support for testing; borrowed from Python
+type.register RUN_MPI_OUTPUT ;
+type.register RUN_MPI : : TEST ;
+
+class mpi-test-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ # Generate an executable from the sources. This is the executable we will run.
+ local executable =
+ [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
+
+ result =
+ [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
+ }
+}
+
+# Use mpi-test-generator to generate MPI tests from sources
+generators.register
+ [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_MPI_OUTPUT : RUN_MPI ;
+
+# The number of processes to spawn when executing an MPI test.
+feature "mpi:processes" : : free incidental ;
+
+# The flag settings on testing.capture-output do not
+# apply to mpi.capture-output at the moment.
+# Redo this explicitly.
+toolset.flags mpi.capture-output ARGS <testing.arg> ;
+toolset.uses-features mpi.capture-output :
+ <testing.launcher> <testing.execute> <dll-path> <xdll-path> <target-os>
+ <mpi:processes> ;
+
+rule capture-output ( target : sources * : properties * )
+{
+ # Use the standard capture-output rule to run the tests
+ testing.capture-output $(target) : $(sources[1]) : $(properties) ;
+
+ # Determine the number of processes we should run on.
+ local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
+ num_processes = $(num_processes:G=) ;
+
+ # serialize the MPI tests to avoid overloading systems
+ JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
+
+ # We launch MPI processes using the "mpirun" equivalent specified by the user.
+ LAUNCHER on $(target) =
+ [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
+}
+
+# Creates a set of test cases to be run through the MPI launcher. The name, sources,
+# and requirements are the same as for any other test generator. However, schedule is
+# a list of numbers, which indicates how many processes each test run will use. For
+# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
+# 7 processes. The name provided is just the base name: the actual tests will be
+# the name followed by a hyphen, then the number of processes.
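+#
+# For example (the test name and source file are invented for illustration):
+#
+#   mpi-test all_gather : all_gather_test.cpp : : 2 4 ;
+#
+# would create the test targets all_gather-2 and all_gather-4.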
+rule mpi-test ( name : sources * : requirements * : schedule * )
+{
+ sources ?= $(name).cpp ;
+ schedule ?= 1 2 3 4 7 8 13 17 ;
+
+ local result ;
+ for processes in $(schedule)
+ {
+ result += [ testing.make-test
+ run-mpi : $(sources) /boost/mpi//boost_mpi
+ : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
+ }
+ return $(result) ;
+}
diff --git a/src/boost/tools/build/src/tools/msvc-config.jam b/src/boost/tools/build/src/tools/msvc-config.jam
new file mode 100644
index 000000000..6c71e3b00
--- /dev/null
+++ b/src/boost/tools/build/src/tools/msvc-config.jam
@@ -0,0 +1,12 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for VisualStudio toolset. To use, just import this module.
+
+import toolset : using ;
+
+ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ;
+
+using msvc : all ;
+
diff --git a/src/boost/tools/build/src/tools/msvc.jam b/src/boost/tools/build/src/tools/msvc.jam
new file mode 100644
index 000000000..4b0fed892
--- /dev/null
+++ b/src/boost/tools/build/src/tools/msvc.jam
@@ -0,0 +1,2092 @@
+# Copyright (c) 2003 David Abrahams
+# Copyright (c) 2005 Vladimir Prus
+# Copyright (c) 2005 Alexey Pakhunov
+# Copyright (c) 2006 Bojan Resnik
+# Copyright (c) 2006 Ilya Sokolov
+# Copyright (c) 2007-2017 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+# Copyright (c) 2014 Microsoft Corporation
+# Copyright (c) 2019 Michał Janiszewski
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.msvc]]
+= Microsoft Visual C++
+
+The `msvc` module supports the
+http://msdn.microsoft.com/visualc/[Microsoft Visual C++] command-line
+tools on Microsoft Windows. The supported products and versions of
+command line tools are listed below:
+
+* Visual Studio 2019-14.2
+* Visual Studio 2017—14.1
+* Visual Studio 2015—14.0
+* Visual Studio 2013—12.0
+* Visual Studio 2012—11.0
+* Visual Studio 2010—10.0
+* Visual Studio 2008—9.0
+* Visual Studio 2005—8.0
+* Visual Studio .NET 2003—7.1
+* Visual Studio .NET—7.0
+* Visual Studio 6.0, Service Pack 5--6.5
+
+The user would then invoke the B2 executable with the toolset set
+to `msvc-[version number]`. For example, to build with Visual Studio
+2019 one could run:
+
+----
+.\b2 toolset=msvc-14.2 target
+----
+
+The `msvc` module is initialized using the following syntax:
+
+----
+using msvc : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the version is not explicitly specified, the most recent version
+found in the registry will be used instead. If the special value `all`
+is passed as the version, all versions found in the registry will be
+configured. If a version is specified, but the command is not, the
+compiler binary will be searched for in the standard installation paths
+for that version, followed by PATH.
+
+The compiler command should be specified using forward slashes, and
+quoted.
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax (a combined example follows the list):
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
+`assembler`::
+The command that compiles assembler sources. If not specified, `ml`
+will be used. The command will be invoked after the setup script has
+been executed and has adjusted the PATH variable.
+
+`compiler`::
+The command that compiles C and C++ sources. If not specified, `cl`
+will be used. The command will be invoked after the setup script has
+been executed and has adjusted the PATH variable.
+
+`compiler-filter`::
+Command through which to pipe the output of running the compiler, for
+example to pass the output to STLfilt.
+
+`idl-compiler`::
+The command that compiles Microsoft COM interface definition files. If
+not specified, `midl` will be used. The command will be invoked after
+the setup script has been executed and has adjusted the PATH variable.
+
+`linker`::
+The command that links executables and dynamic libraries. If not
+specified, `link` will be used. The command will be invoked after the
+setup script has been executed and has adjusted the PATH variable.
+
+`mc-compiler`::
+The command that compiles Microsoft message catalog files. If not
+specified, `mc` will be used. The command will be invoked after the
+setup script has been executed and has adjusted the PATH variable.
+
+`resource-compiler`::
+The command that compiles resource files. If not specified, `rc` will
+be used. The command will be invoked after the setup script has been
+executed and has adjusted the PATH variable.
+
+`setup`::
+The filename of the global environment setup script to run before
+invoking any of the tools defined in this toolset. It will not be used
+if a target-platform-specific script has been explicitly specified for
+the current target platform. The setup script used will be passed the
+target platform identifier (x86, x86_amd64, x86_ia64, amd64 or ia64)
+as a parameter. If not specified, a default script is chosen based on
+the compiler binary used, e.g. `vcvars32.bat` or `vsvars32.bat`.
+
+`setup-amd64`; `setup-i386`; `setup-ia64`::
+The filename of the target-platform-specific environment setup script
+to run before invoking any of the tools defined in this toolset. If
+not specified, the global environment setup script is used.
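+
+As an illustrative sketch only (the path and flag below are chosen purely for
+the example), a configuration that pins the compiler binary and adds an extra
+compiler option could look like:
+
+----
+using msvc : 14.1 : "C:/Tools/MSVC/bin/cl.exe" : <cxxflags>/utf-8 ;
+----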
+
+[[bbv2.reference.tools.compiler.msvc.64]]
+== 64-bit support
+
+Starting with version 8.0, Microsoft Visual Studio can generate binaries
+for 64-bit processors, both the 64-bit flavour of x86 (codenamed
+AMD64/EM64T) and Itanium (codenamed IA64). In addition, compilers that
+themselves run in 64-bit mode, for better performance, are provided. The
+complete list of compiler configurations is as follows (we abbreviate
+AMD64/EM64T to just AMD64):
+
+* 32-bit x86 host, 32-bit x86 target
+* 32-bit x86 host, 64-bit AMD64 target
+* 32-bit x86 host, 64-bit IA64 target
+* 64-bit AMD64 host, 64-bit AMD64 target
+* 64-bit IA64 host, 64-bit IA64 target
+
+The 32-bit host compilers can always be used, even on 64-bit Windows. In
+contrast, 64-bit host compilers require both a 64-bit host processor
+and 64-bit Windows, but can be faster. By default, only the 32-bit host,
+32-bit target compiler is installed, and additional compilers need to be
+installed explicitly.
+
+To use 64-bit compilation you should:
+
+1. Configure your compiler as usual. If you provide a path to the
+compiler explicitly, provide the path to the 32-bit compiler. If you try
+to specify the path to any of the 64-bit compilers, configuration will
+not work.
+2. When compiling, use `address-model=64` to generate AMD64 code.
+3. To generate IA64 code, use `architecture=ia64`.
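+
+For instance, assuming the `msvc-14.2` toolset configured earlier, an AMD64
+build could be requested with:
+
+----
+.\b2 toolset=msvc-14.2 address-model=64 target
+----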
+
+The (AMD64 host, AMD64 target) compiler will be used automatically when
+you are generating AMD64 code and are running 64-bit Windows on AMD64.
+The (IA64 host, IA64 target) compiler will never be used, since nobody
+has an IA64 machine to test.
+
+It is believed that AMD64 and EM64T targets are essentially compatible.
+The compiler options `/favor:AMD64` and `/favor:EM64T`, which are
+accepted only by AMD64 targeting compilers, cause the generated code to
+be tuned to a specific flavor of 64-bit x86. B2 will make use
+of those options depending on the value of the `instruction-set` feature.
+
+[[bbv2.reference.tools.compiler.msvc.winrt]]
+== Windows Runtime support
+
+Starting with version 11.0, Microsoft Visual Studio can produce binaries
+for Windows Store and Phone in addition to traditional Win32 desktop. To
+specify which Windows API set to target, use the `windows-api` feature.
+Available options are `desktop`, `store`, or `phone`. If not specified,
+`desktop` will be used.
+
+When using `store` or `phone` the specified toolset determines what
+Windows version is targeted. The following options are available:
+
+* Windows 8.0: toolset=msvc-11.0 windows-api=store
+* Windows 8.1: toolset=msvc-12.0 windows-api=store
+* Windows Phone 8.0: toolset=msvc-11.0 windows-api=phone
+* Windows Phone 8.1: toolset=msvc-12.0 windows-api=phone
+
+For example, use the following to build for Windows Store 8.1 with the
+ARM architecture:
+
+----
+.\b2 toolset=msvc-12.0 windows-api=store architecture=arm
+----
+
+Note that when targeting Windows Phone 8.1, version 12.0 didn't include
+the vcvars phone setup scripts. They can be separately downloaded from
+http://blogs.msdn.com/b/vcblog/archive/2014/07/18/using-boost-libraries-in-windows-store-and-phone-applications.aspx[here].
+
+|# # end::doc[]
+
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+import "class" : new ;
+import common ;
+import feature ;
+import generators ;
+import mc ;
+import midl ;
+import os ;
+import path ;
+import pch ;
+import project ;
+import property ;
+import property-set ;
+import rc ;
+import sequence ;
+import set ;
+import toolset ;
+import type ;
+import virtual-target ;
+
+
+type.register MANIFEST : manifest ;
+
+#| tag::embed-doc[]
+
+[[bbv2.builtin.features.embed-manifest]]`embed-manifest`::
+*Allowed values:* `on`, `off`.
++
+This feature is specific to the `msvc` toolset (see <<Microsoft Visual C++>>),
+and controls whether the manifest files should be embedded inside executables
+and shared libraries, or placed alongside them. This feature corresponds to the
+IDE option found in the project settings dialog, under Configuration Properties
+-> Manifest Tool -> Input and Output -> Embed manifest.
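++
+For example, a hypothetical command line that places the manifests next to the
+produced binaries instead of embedding them:
++
+----
+b2 toolset=msvc embed-manifest=off
+----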
+
+|# # end::embed-doc[]
+
+feature.feature embed-manifest : on off : incidental propagated ;
+
+#| tag::embed-doc[]
+
+[[bbv2.builtin.features.embed-manifest-file]]`embed-manifest-file`::
+This feature is specific to the `msvc` toolset (see <<Microsoft Visual C++>>),
+and controls which manifest files should be embedded inside executables and
+shared libraries. This feature corresponds to the IDE option found in the
+project settings dialog, under Configuration Properties -> Manifest Tool ->
+Input and Output -> Additional Manifest Files.
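++
+For example, a hypothetical Jamfile target that embeds a manually written
+manifest file:
++
+----
+exe my_app : main.cpp : <embed-manifest-file>my_app.manifest ;
+----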
+
+|# # end::embed-doc[]
+
+feature.feature embed-manifest-file : : free dependency ;
+
+type.register PDB : pdb ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initialize a specific toolset version configuration. As a result, the path to
+# the compiler and, possibly, the program names are set up, and will be used
+# when that version of the compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be omitted:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
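+# Additional options (documented with the 'options' parameter below) may also
+# be passed; for example, a hypothetical configuration adding extra compiler
+# flags:
+#
+#     using msvc : 14.1 : : <cxxflags>/permissive- ;
+#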
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+# - default - this is an equivalent to an empty version.
+#
+# Depending on the supplied version, the detected configurations and the
+# presence of 'cl.exe' in the path, different results may be achieved. The
+# following table describes the possible scenarios:
+#
+# Nothing "x.y"
+# Passed Nothing "x.y" detected, detected,
+# version detected detected cl.exe in path cl.exe in path
+#
+# default Error Use "x.y" Create "default" Use "x.y"
+# all None Use all None Use all
+# x.y - Use "x.y" - Use "x.y"
+# a.b Error Error Create "a.b" Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+#
+rule init (
+ # The msvc version being configured. When omitted the tools invoked when no
+ # explicit version is given will be configured.
+ version ?
+
+ # The command used to invoke the compiler. If not specified:
+ # - if version is given, default location for that version will be
+ # searched
+ #
+ # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0
+ # and 6.* will be searched
+ #
+ # - if compiler is not found in the default locations, PATH will be
+ # searched.
+ : command *
+
+ # Options may include:
+ #
+ # All options shared by multiple toolset types as handled by the
+ # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>,
+ # <fflags> & <linkflags>.
+ #
+ # <assembler>
+ # <compiler>
+ # <idl-compiler>
+ # <linker>
+ # <mc-compiler>
+ # <resource-compiler>
+ # Exact tool names to be used by this msvc toolset configuration.
+ #
+ # <compiler-filter>
+ # Command through which to pipe the output of running the compiler.
+ # For example to pass the output to STLfilt.
+ #
+ # <setup>
+ # Global setup command to invoke before running any of the msvc tools.
+ # It will be passed additional option parameters depending on the actual
+ # target platform.
+ #
+ # <setup-amd64>
+ # <setup-i386>
+ # <setup-ia64>
+ # <setup-arm>
+ # <setup-phone-i386>
+ # <setup-phone-arm>
+ # Platform specific setup command to invoke before running any of the
+    #     msvc tools used when building a target for a specific platform, e.g.
+ # when building a 32 or 64 bit executable.
+ #
+ # <rewrite-setup-scripts>
+    #     Whether to rewrite setup scripts. New scripts will be output in the
+    #     build tree and will be used instead of the originals in build actions.
+ # Possible values:
+ # * on - rewrite scripts, if they do not already exist (default)
+ # * always - always rewrite scripts, even if they already exist
+ # * off - use original setup scripts
+ : options *
+)
+{
+ if $(command)
+ {
+ options += <command>$(command) ;
+ }
+ configure $(version) : $(options) ;
+}
+
+
+# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
+# a part of the 'options' list. See the 'init' rule comment for more detailed
+# information.
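+#
+# For example, a hypothetical direct call equivalent to the second 'init'
+# example above:
+#
+#     configure 7.0 : <command>Y:/foo/bar/cl.exe ;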
+#
+rule configure ( version ? : options * )
+{
+ switch $(version)
+ {
+ case "all" :
+ if $(options)
+ {
+ import errors ;
+ errors.error "MSVC toolset configuration: options should be"
+ "empty when '$(version)' is specified." ;
+ }
+
+ # Configure (i.e. mark as used) all registered versions.
+ local all-versions = [ $(.versions).all ] ;
+ if ! $(all-versions)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [msvc-cfg] Asked to configure all registered"
+ "msvc toolset versions when there are none currently"
+ "registered." ;
+ }
+ }
+ else
+ {
+ for local v in $(all-versions)
+ {
+ # Note that there is no need to skip already configured
+ # versions here as this will request configure-really rule
+ # to configure the version using default options which will
+ # in turn cause it to simply do nothing in case the version
+ # has already been configured.
+ configure-really $(v) ;
+ }
+ }
+
+ case "default" :
+ configure-really : $(options) ;
+
+ case * :
+ configure-really $(version) : $(options) ;
+ }
+}
+
+
+# Sets up flag definitions dependent on the compiler version used.
+# - 'version' is the version of the compiler in N.M format.
+# - 'conditions' is the property set to be used as flag conditions.
+# - 'toolset' is the toolset for which flag settings are to be defined.
+# This makes the rule reusable for other msvc-option-compatible compilers.
+#
+rule configure-version-specific ( toolset : version : conditions )
+{
+ toolset.push-checking-for-flags-module unchecked ;
+    # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+    # /Zc:wchar_t options that improve C++ standard conformance, but those
+    # options are off by default. If we are sure that the msvc version is at
+    # least 7.*, add those options explicitly. We can be sure either if the
+    # user specified version 7.* explicitly or if we auto-detected the version
+    # ourselves.
+ if ! [ MATCH ^(6\\.) : $(version) ]
+ {
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : "/Zc:forScope" "/Zc:wchar_t" ;
+ toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ;
+
+ # Explicitly disable the 'function is deprecated' warning. Some msvc
+ # versions have a bug, causing them to emit the deprecation warning even
+ # with /W0.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ;
+
+ if [ MATCH "^([78]\\.)" : $(version) ]
+ {
+ # 64-bit compatibility warning deprecated since 9.0, see
+ # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ;
+ }
+ }
+
+ #
+ # Processor-specific optimization.
+ #
+
+ if [ MATCH "^([67])" : $(version) ]
+ {
+ # 8.0 deprecates some of the options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ;
+
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ;
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ;
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+ }
+ else
+ {
+ # 8.0 and above adds some more options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : "/favor:blend" ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : "/favor:EM64T" ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : "/favor:AMD64" ;
+
+ # 8.0 and above only has multi-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
+
+ # Specify target machine type so the linker will not need to guess.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : "/MACHINE:X64" ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : "/MACHINE:X86" ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : "/MACHINE:IA64" ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-arm) : "/MACHINE:ARM" ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-arm64) : "/MACHINE:ARM64" ;
+
+        # Make sure that the manifest will be generated even if there are no
+        # dependencies to put in it.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions) : /MANIFEST ;
+ }
+
+ toolset.pop-checking-for-flags-module ;
+}
+
+# Feature for handling targeting different Windows API sets.
+feature.feature windows-api : desktop store phone : propagated composite link-incompatible ;
+feature.compose <windows-api>store : <define>WINAPI_FAMILY=WINAPI_FAMILY_APP <define>_WIN32_WINNT=0x0602
+ <linkflags>/APPCONTAINER ;
+feature.compose <windows-api>phone : <define>WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP <define>_WIN32_WINNT=0x0602
+ <linkflags>/APPCONTAINER <linkflags>"/NODEFAULTLIB:ole32.lib" <linkflags>"/NODEFAULTLIB:kernel32.lib" <linkflags>WindowsPhoneCore.lib ;
+feature.set-default windows-api : desktop ;
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+#
+rule register-toolset ( )
+{
+ if ! msvc in [ feature.values toolset ]
+ {
+ register-toolset-really ;
+ }
+}
+
+rule resolve-possible-msvc-version-alias ( version )
+{
+ if $(.version-alias-$(version))
+ {
+ version = $(.version-alias-$(version)) ;
+ }
+ return $(version) ;
+}
+
+
+# Declare the action for creating static libraries. If the library already
+# exists, remove it before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for the rationale.
+if [ os.name ] in NT
+{
+    # The 'DEL' command would issue a message to stdout if the file does not
+    # exist, so we need a check.
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.SETUP) $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+else
+{
+ actions archive
+ {
+ $(.RM) "$(<[1])"
+ $(.SETUP) $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+
+rule compile.asm ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+}
+
+actions compile.asm
+{
+ $(.SETUP) $(.ASM) -D$(ASMDEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) $(.ASM_OUTPUT) "$(<:W)" "$(>:W)"
+}
+
+
+rule compile.c ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.preprocess ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.pch ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets[1]) : -TC ;
+ get-rspline $(targets[2]) : -TC ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+toolset.flags msvc YLOPTION : "-Yl" ;
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes this in action with intel-win
+#
+# Notes regarding PDB generation, for when we use
+# <debug-symbols>on/<debug-store>database:
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring
+# that the /Fd flag is dropped if PDB_CFLAG is empty.
+#
+# 2. When compiling an executable's source files, PDB_NAME is set on a
+#    per-source file basis by rule compile-c-c++. The linker will pull these
+#    into the executable's PDB.
+#
+# 3. When compiling a library's source files, PDB_NAME is updated to
+#    <libname>.pdb for each source file by rule archive, as in this case the
+#    compiler must be used to create a single PDB for our library.
+#
+actions compile-c-c++ bind PDB_NAME
+{
+ $(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+actions preprocess-c-c++ bind PDB_NAME
+{
+ $(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
+}
+
+rule compile-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<[1]:S=.pdb) ;
+ LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
+}
+
+rule preprocess-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<:S=.pdb) ;
+ LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
+}
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+#
+# The global .escaped-double-quote variable is used to avoid messing up Emacs
+# syntax highlighting in the messy N-quoted code below.
+actions compile-c-c++-pch
+{
+ $(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
+}
+
+
+# Action for running the C/C++ compiler using precompiled headers. An already
+# built source file for compiling the precompiled headers is expected to be
+# given as one of the source parameters.
+actions compile-c-c++-pch-s
+{
+ $(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+
+rule compile.c++ ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ get-rspline $(targets) : -TP ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+rule compile.c++.preprocess ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ get-rspline $(targets) : -TP ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c++.pch ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ get-rspline $(targets[1]) : -TP ;
+ get-rspline $(targets[2]) : -TP ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+rule compile.idl ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+}
+
+# See midl.jam for details.
+#
+actions compile.idl
+{
+ $(.SETUP) $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
+ $(.TOUCH_FILE) "$(<[4]:W)"
+ $(.TOUCH_FILE) "$(<[5]:W)"
+}
+
+rule compile.mc ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+}
+
+actions compile.mc
+{
+ $(.SETUP) $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
+
+
+rule compile.rc ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+}
+
+actions compile.rc
+{
+ $(.SETUP) $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
+}
+
+toolset.uses-features msvc.link : <embed-manifest> <embed-manifest-file> ;
+
+rule link ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ if <embed-manifest>on in $(properties)
+ {
+ if [ feature.get-values <embed-manifest-file> : $(properties) ]
+ {
+ DEPENDS $(<) : [ on $(<) return $(EMBED_MANIFEST_FILE) ] ;
+ msvc.manifest.user $(targets) $(EMBED_MANIFEST_FILE) : $(sources) : $(properties) ;
+ }
+ else
+ {
+ msvc.manifest $(targets) : $(sources) : $(properties) ;
+ }
+ }
+}
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+ local import-lib ;
+ if ! <suppress-import-lib>true in $(properties)
+ {
+ import-lib = $(targets[2]) ;
+ IMPORT_LIB on $(targets) = $(import-lib) ;
+ }
+ # On msvc-14.1, the linker might not touch the import library
+ # if the exports do not change. (Apparently this could also
+ # happen for incremental linking, which is why we disable it,
+ # but that no longer seems to be enough).
+    # Therefore, don't update the import library just because
+    # it is out of date. It will be forcibly updated when the dll
+    # is updated. Also, make it so that anything that depends
+    # on it depends on the dll as well.
+ NOUPDATE $(import-lib) ;
+ INCLUDES $(import-lib) : $(targets[1]) ;
+ if <embed-manifest>on in $(properties)
+ {
+ if [ feature.get-values <embed-manifest-file> : $(properties) ]
+ {
+ DEPENDS $(<) : [ on $(<) return $(EMBED_MANIFEST_FILE) ] ;
+ msvc.manifest.dll.user $(targets) $(EMBED_MANIFEST_FILE) : $(sources) : $(properties) ;
+ }
+ else
+ {
+ msvc.manifest.dll $(targets) : $(sources) : $(properties) ;
+ }
+ }
+}
+
+# Incremental linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# Windows manifest is a new way to specify dependencies on managed DotNet
+# assemblies and Windows native DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if [ os.name ] in NT
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.SETUP) $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest
+ {
+ if exist "$(<[1]).manifest" (
+ $(.SETUP) $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ )
+ }
+
+ actions manifest.user bind EMBED_MANIFEST_FILE
+ {
+ $(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);1"
+ }
+
+ actions link.dll bind IMPORT_LIB DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.SETUP) $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(IMPORT_LIB:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest.dll
+ {
+ if exist "$(<[1]).manifest" (
+ $(.SETUP) $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ )
+ }
+ actions manifest.dll.user bind EMBED_MANIFEST_FILE
+ {
+ $(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
+ }
+}
+else
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.SETUP) $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1"
+ fi
+ }
+
+ actions link.dll bind IMPORT_LIB DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.SETUP) $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(IMPORT_LIB:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest.dll
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.SETUP) $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
+ fi
+ }
+
+ actions manifest.dll.user bind EMBED_MANIFEST_FILE
+ {
+ $(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
+ }
+}
+
+# This rule sets up the pdb file that will be used when generating static
+# libraries and the debug-store option is database, so that the compiler puts
+# all the debug info into a single .pdb file named after the library.
+#
+# Poking at source targets this way is probably not clean, but it is the
+# easiest approach.
+#
+rule archive ( targets + : sources * : properties * )
+{
+ set-setup-command $(targets) : $(properties) ;
+ PDB_NAME on $(>) = $(<[1]:S=.pdb) ;
+ LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
+}
+
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class msvc-pch-generator : pch-generator
+{
+ import property-set ;
+
+ rule run-pch ( project name ? : property-set : sources * )
+ {
+ # Searching for the header and source file in the sources.
+ local pch-header ;
+ local pch-source ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ pch-header = $(s) ;
+ }
+ else if
+ [ type.is-derived [ $(s).type ] CPP ] ||
+ [ type.is-derived [ $(s).type ] C ]
+ {
+ pch-source = $(s) ;
+ }
+ }
+
+ if ! $(pch-header)
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error "can not build pch without pch-header" ;
+ }
+
+ # If we do not have the PCH source - that is fine. We will just create a
+ # temporary .cpp file in the action.
+
+ local generated = [ generator.run $(project) $(name)
+ : [ property-set.create
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ <pch-source>$(pch-source)
+ [ $(property-set).raw ] ]
+ : $(pch-header) ] ;
+
+ local pch-file ;
+ for local g in $(generated[2-])
+ {
+ if [ type.is-derived [ $(g).type ] PCH ]
+ {
+ pch-file = $(g) ;
+ }
+ }
+
+ return [ $(generated[1]).add-raw <pch-header>$(pch-header)
+ <pch-file>$(pch-file) ] $(generated[2-]) ;
+ }
+}
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects versions listed in '.known-versions' by checking registry information,
+# environment variables & default paths. Supports both native Windows and
+# Cygwin.
+#
+local rule auto-detect-toolset-versions ( )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ # Get installation paths from the registry.
+ for local i in $(.known-versions)
+ {
+ if $(.version-$(i)-reg)
+ {
+ local vc-path ;
+ for local x in "" "Wow6432Node\\"
+ {
+ vc-path += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
+ : "ProductDir" ] ;
+ }
+
+ if $(vc-path)
+ {
+ vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ;
+ register-configuration $(i) : [ path.native $(vc-path[1]) ] ;
+ }
+ }
+ }
+ }
+
+ # Check environment and default installation paths.
+ for local i in $(.known-versions)
+ {
+ if ! $(i) in [ $(.versions).all ]
+ {
+ register-configuration $(i) : [ default-path $(i) ] ;
+ }
+ }
+}
+
+actions write-setup-script
+{
+ @($(STDOUT):E=$(FILE_CONTENTS:J=$(.nl))) > "$(<)"
+}
+
+if [ os.name ] = NT
+{
+ local rule call-batch-script ( command )
+ {
+ return "call $(command) >nul$(.nl)" ;
+ }
+}
+else
+{
+ # On cygwin, we need to run both the batch script
+ # and the following command in the same instance
+ # of cmd.exe.
+ local rule call-batch-script ( command )
+ {
+ return "cmd.exe /S /C call $(command) \">nul\" \"&&\" " ;
+ }
+}
+
+# Local helper rule to create the vcvars setup command for given architecture
+# and options.
+#
+local rule generate-setup-cmd ( version : command : parent : options * : cpu : global-setup ? : default-global-setup-options : default-setup )
+{
+ local setup-options ;
+ local setup = [ feature.get-values <setup-$(cpu)> : $(options) ] ;
+
+ if ! $(setup)-is-defined
+ {
+ if $(global-setup)-is-defined
+ {
+ setup = $(global-setup) ;
+
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ;
+ setup-options ?= $(default-global-setup-options) ;
+ }
+ else
+ {
+ if [ MATCH "(14.2)" : $(version) ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [generate-setup-cmd] $(version) is 14.2" ;
+ }
+ parent = [ path.native [ path.join $(parent) "..\\..\\..\\..\\..\\Auxiliary\\Build" ] ] ;
+ }
+ else if [ MATCH "(14.1)" : $(version) ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [generate-setup-cmd] $(version) is 14.1" ;
+ }
+ parent = [ path.native [ path.join $(parent) "..\\..\\..\\..\\..\\Auxiliary\\Build" ] ] ;
+ }
+ setup = [ locate-default-setup $(command) : $(parent) : $(default-setup) ] ;
+ setup ?= [ path.join $(parent) "vcvarsall.bat" ] ;
+ }
+ }
+
+ return $(setup) "$(setup-options:J= )" ;
+}
+
+# Worker for set-setup-command. Usable in a virtual-target.action.
+rule adjust-setup-command ( new-setup : setup : properties * )
+{
+ local internal = $(new-setup:S=.read) ;
+ NOTFILE $(internal) ;
+ local setup-options = [ property.select <msvc.setup-options> : $(properties) ] ;
+ setup-options = $(setup-options:G=:E=) ;
+ DEPENDS $(internal) : $(setup) ;
+ DEPENDS $(new-setup) : $(internal) ;
+ REBUILDS $(new-setup) : $(internal) ;
+ msvc.read-setup $(internal) : $(setup) ;
+ msvc.write-setup-script $(new-setup) : $(setup) ;
+ __ACTION_RULE__ on $(internal) = msvc.rewrite-setup $(setup) $(setup-options) $(new-setup) ;
+}
+
+# This doesn't actually do anything. It's merely
+# used as a trigger for __ACTION_RULE__.
+actions quietly read-setup { }
+
+# Calculates the changes made to the environment by the setup script.
+# Should be used as a callback for __ACTION_RULE__.
+local rule rewrite-setup ( setup-script setup-options new-setup : target : * )
+{
+ local setup-path = [ on $(setup-script) return $(LOCATE) $(SEARCH) ] ;
+ setup-path = $(setup-path[1]) ;
+ local command = "\"$(setup-script:G=:R=$(setup-path))\" $(setup-options)" ;
+ local original-vars = [ SPLIT_BY_CHARACTERS [ SHELL set ] : "\n" ] ;
+ local new-vars = [ SPLIT_BY_CHARACTERS [ SHELL "$(command) >nul && set" ] : "\n" ] ;
+ local diff-vars = [ set.difference $(new-vars) : $(original-vars) ] ;
+ if $(diff-vars)
+ {
+ FILE_CONTENTS on $(new-setup) = "REM $(command)" "SET "$(diff-vars) ;
+ }
+}
+
+IMPORT msvc : rewrite-setup : : msvc.rewrite-setup ;
+
+# Helper rule to generate a faster alternative to the MSVC setup scripts.
+# We used to call the MSVC setup scripts directly in every action; however, in
+# newer MSVC versions (10.0+) they make long-running registry queries
+# that have a significant impact on build time.
+local rule set-setup-command ( targets * : properties * )
+{
+ if ! [ on $(targets) return $(.SETUP) ]
+ {
+ local setup-script = [ on $(targets) return $(.SETUP-SCRIPT) ] ;
+ # If no setup script was given, then we don't need to do anything.
+ if ! $(setup-script)
+ {
+ return ;
+ }
+ local setup-options = [ on $(targets) return $(.SETUP-OPTIONS) ] ;
+ local key = .setup-command-$(setup-script:E=)-$(setup-options:E=) ;
+ if ! $($(key))
+ {
+ properties = [ feature.expand $(properties) ] ;
+ properties = [ property.select <toolset> <toolset-msvc:version> <architecture> <address-model> <windows-api> <relevant> : $(properties) ] ;
+ local ps = [ property-set.create $(properties) <msvc.setup-options>$(setup-options) ] ;
+ local original = [ virtual-target.from-file $(setup-script) : [ path.pwd ] : $(.project) ] ;
+ local action = [ new non-scanning-action $(original) : msvc.adjust-setup-command : $(ps) ] ;
+ local new-setup = [ virtual-target.register [ new file-target msvc-setup.bat exact : : $(.project) : $(action) ] ] ;
+ local command = [ $(new-setup).actualize ] ;
+ local path = [ on $(command) return $(LOCATE) ] ;
+ local block-update = $(command:S=.nup) ;
+ NOUPDATE $(block-update) ;
+ NOTFILE $(block-update) ;
+ DEPENDS $(block-update) : $(command) ;
+ if [ on $(targets) return $(.REWRITE-SETUP) ]
+ {
+ ALWAYS $(command) ;
+ }
+ $(key) = [ call-batch-script "\"$(command:WG=:R=$(path))\" $(setup-options:E=)" ] $(block-update) ;
+ }
+ DEPENDS $(targets) : $($(key)[2]) ;
+ .SETUP on $(targets) = $($(key)[1]) ;
+ }
+}
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+local rule configure-really ( version ? : options * )
+{
+ local command = [ feature.get-values <command> : $(options) ] ;
+
+ if ! $(version) && ! $(command)
+ {
+ # We were given neither a command, nor a version.
+ # Take the best registered (i.e. auto-detected) version.
+ # FIXME: consider whether an explicitly specified setup script
+ # should disable this logic. We already won't get here if
+ # there is a user specified command.
+ version = [ $(.versions).all ] ;
+ for local known in $(.known-versions)
+ {
+ if $(known) in $(version)
+ {
+ version = $(known) ;
+ break ;
+ }
+ }
+ # version might still have multiple elements if no versions
+ # were auto-detected, but an unknown version was configured
+ # manually.
+ version = $(version[1]) ;
+ }
+
+ # Handle a user-provided command, and deduce the version if necessary.
+ # If the user-requested version was not autodetected and no command
+ # was given, attempt to find it in PATH
+ if $(command) || ! ( $(version:E=default) in [ $(.versions).all ] )
+ {
+ local found-command = [ common.get-invocation-command-nodefault msvc : cl.exe : $(command) ] ;
+
+ if $(found-command)
+ {
+ command = $(found-command) ;
+ if ! $(command:D)
+ {
+ local path = [ common.get-absolute-tool-path $(command) ] ;
+ command = $(command:R=$(path)) ;
+ }
+ }
+ else
+ {
+ # If we still failed to find cl.exe, bail out.
+ ECHO ;
+ ECHO warning\:
+ "Did not find command for MSVC toolset."
+ "If you have Visual Studio 2017 installed you will need to"
+ "specify the full path to the command,"
+ "set VS150COMNTOOLS for your installation,"
+ "or"
+ "build from the 'Visual Studio Command Prompt for VS 2017'."
+ ;
+ ECHO ;
+ command ?= cl.exe ;
+ }
+
+ if ! $(version)
+ {
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
+ # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+ # 9.0express as 9.0 here.
+ if [ MATCH "(MSVC\\\\14.2)" : $(command) ]
+ {
+ version = 14.2 ;
+ }
+ else if [ MATCH "(MSVC\\\\14.1)" : $(command) ]
+ {
+ version = 14.1 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 14)" : $(command) ]
+ {
+ version = 14.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 12)" : $(command) ]
+ {
+ version = 12.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 11)" : $(command) ]
+ {
+ version = 11.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
+ {
+ version = 10.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
+ {
+ version = 9.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
+ {
+ version = 8.0 ;
+ }
+ else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.1 ;
+ }
+ else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" :
+ $(command) ]
+ {
+ version = 7.1toolkit ;
+ }
+ else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.0 ;
+ }
+ else
+ {
+ version = 6.0 ;
+ }
+ }
+ }
+
+ # Version alias -> real version number.
+ version = [ resolve-possible-msvc-version-alias $(version) ] ;
+
+ # Check whether the selected configuration is already in use.
+ if $(version) in [ $(.versions).used ]
+ {
+ # Allow multiple 'toolset.using' calls for the same configuration if the
+ # identical sets of options are used.
+ if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
+ {
+ import errors ;
+ errors.user-error "MSVC toolset configuration: Toolset version"
+ "'$(version)' already configured." ;
+ }
+ }
+ else
+ {
+ # Register a new configuration.
+ $(.versions).register $(version) ;
+ $(.versions).set $(version) : options : $(options) ;
+
+ # Mark the configuration as 'used'.
+ $(.versions).use $(version) ;
+
+ # Generate conditions and save them.
+ local conditions = [ common.check-init-parameters msvc : version $(version) ] ;
+
+ $(.versions).set $(version) : conditions : $(conditions) ;
+
+ command ?= [ $(.versions).get $(version) : default-command ] ;
+
+ # For 14.1+ we need the exact version as MS is planning rolling updates
+ # that will cause our `setup-cmd` to become invalid
+ exact-version = [ MATCH "(14\.[1-9][0-9]\.[0-9\.]+)" : $(command) ] ;
+
+ common.handle-options msvc : $(conditions) : $(command) : $(options) ;
+
+ # Generate and register setup command.
+
+ local below-8.0 = [ MATCH "^([67]\\.)" : $(version) ] ;
+ local below-11.0 = [ MATCH "^([6789]\\.|10\\.)" : $(version) ] ;
+
+ local cpu = i386 amd64 ia64 arm arm64 ;
+ if $(below-8.0)
+ {
+ cpu = i386 ;
+ }
+ else if $(below-11.0)
+ {
+ cpu = i386 amd64 ia64 ;
+ }
+
+ local setup-amd64 ;
+ local setup-i386 ;
+ local setup-ia64 ;
+ local setup-arm ;
+ local setup-arm64 ;
+ local setup-phone-i386 ;
+ local setup-phone-arm ;
+
+ if $(command)
+ {
+            # TODO: Note that if we specify a non-existent toolset version then
+ # this rule may find and use a corresponding compiler executable
+ # belonging to an incorrect toolset version. For example, if you
+ # have only MSVC 7.1 installed, have its executable on the path and
+ # specify you want Boost Build to use MSVC 9.0, then you want Boost
+ # Build to report an error but this may cause it to silently use the
+ # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
+ # toolset version.
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+
+ if $(command)
+ {
+ local parent = [ path.make $(command) ] ;
+ parent = [ path.parent $(parent) ] ;
+ parent = [ path.native $(parent) ] ;
+
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+ # be used instead. Setup scripts may be global or architecture/
+            # platform/cpu specific. Setup options are used only in case of
+ # global setup scripts.
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top level script but to
+ # support those we need to fall back to using the worker scripts
+ # directly in case the top level script can not be found.
+
+ local global-setup = [ feature.get-values <setup> : $(options) ] ;
+ global-setup = $(global-setup[1]) ;
+ local global-setup-phone = $(global-setup) ;
+ if ! $(below-8.0)
+ {
+ global-setup ?= [ locate-default-setup $(command) : $(parent) :
+ vcvarsall.bat ] ;
+ }
+
+ local default-setup-amd64 = vcvarsx86_amd64.bat ;
+ local default-setup-i386 = vcvars32.bat ;
+ local default-setup-ia64 = vcvarsx86_ia64.bat ;
+ local default-setup-arm = vcvarsx86_arm.bat ;
+ local default-setup-arm64 = vcvarsx86_arm64.bat ;
+ local default-setup-phone-i386 = vcvarsphonex86.bat ;
+ local default-setup-phone-arm = vcvarsphonex86_arm.bat ;
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, that seems to be a documentation bug
+ # and x86_ia64 is the correct option.
+ local default-global-setup-options-amd64 = x86_amd64 ;
+ local default-global-setup-options-i386 = x86 ;
+ local default-global-setup-options-ia64 = x86_ia64 ;
+ local default-global-setup-options-arm = x86_arm ;
+ local default-global-setup-options-arm64 = x86_arm64 ;
+
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+ # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+            # 'x86' when running 32-bit Windows, no matter which processor is
+            # used, and 'AMD64' when running 64-bit Windows on x86 (either
+            # AMD64 or EM64T).
+ #
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
+ {
+ default-global-setup-options-amd64 = amd64 ;
+ }
+ # When B2 itself is running as a 32-bit process on 64-bit
+ # Windows, the above test will fail (since WOW64 simulates a 32-bit
+ # environment, including environment values). So check the WOW64
+ # variable PROCESSOR_ARCHITEW6432 as well.
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITEW6432 ] ]
+ {
+ default-global-setup-options-amd64 = amd64 ;
+ }
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ else if $(somehow-detect-the-itanium-platform)
+ {
+ default-global-setup-options-ia64 = ia64 ;
+ }
+
+ for local c in $(cpu)
+ {
+ exact-version ?= $(version) ;
+ setup-$(c) = [ generate-setup-cmd $(exact-version) : $(command) : $(parent) : $(options) : $(c) : $(global-setup) : $(default-global-setup-options-$(c)) : $(default-setup-$(c)) ] ;
+ }
+
+ # Windows phone has different setup scripts, located in a different directory hierarchy.
+ # The 11.0 toolset can target Windows Phone 8.0 and the 12.0 toolset can target Windows Phone 8.1,
+            # each of which has a different directory for its vcvars setup scripts.
+ local phone-parent = [ path.native [ path.join $(parent) WPSDK ] ] ;
+ local phone-directory = $(phone-parent) ;
+ if [ MATCH "(11.0)" : $(version) ]
+ {
+ phone-directory = [ path.native [ path.join $(phone-directory) WP80 ] ] ;
+ }
+ else if [ MATCH "(12.0)" : $(version) ]
+ {
+ phone-directory = [ path.native [ path.join $(phone-directory) WP81 ] ] ;
+ }
+ global-setup-phone ?= [ locate-default-setup $(phone-directory) : $(phone-parent) : vcvarsphoneall.bat ] ;
+
+            # If we can't locate the default phone setup script, then this VS version doesn't support Windows Phone.
+ if $(global-setup-phone)-is-defined
+ {
+ # i386 CPU is for the Windows Phone emulator in Visual Studio.
+ local phone-cpu = i386 arm ;
+ for local c in $(phone-cpu)
+ {
+ setup-phone-$(c) = [ generate-setup-cmd $(version) : $(phone-directory) : $(phone-parent) : $(options) : $(c) : $(global-setup-phone) : $(default-global-setup-options-$(c)) : $(default-setup-phone-$(c)) ] ;
+ }
+ }
+ }
+
+ # Get tool names (if any) and finish setup.
+
+ compiler = [ feature.get-values <compiler> : $(options) ] ;
+ compiler ?= cl ;
+
+ linker = [ feature.get-values <linker> : $(options) ] ;
+ linker ?= link ;
+
+ resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
+ resource-compiler ?= rc ;
+
+ # Turn on some options for i386 assembler
+ # -coff generate COFF format object file (compatible with cl.exe output)
+ local default-assembler-amd64 = ml64 ;
+ local default-assembler-i386 = "ml -coff" ;
+ local default-assembler-ia64 = ias ;
+ local default-assembler-arm = armasm ;
+ local default-assembler-arm64 = armasm64 ;
+
+ # For the assembler the following options are turned on by default:
+ #
+ # -Zp4 align structures to 4 bytes
+ # -Cp preserve case of user identifiers
+ # -Cx preserve case in publics, externs
+ #
+ local assembler-flags-amd64 = "-c -Zp4 -Cp -Cx" ;
+ local assembler-flags-i386 = "-c -Zp4 -Cp -Cx" ;
+ local assembler-flags-ia64 = "-c -Zp4 -Cp -Cx" ;
+ local assembler-flags-arm = "" ;
+ local assembler-flags-arm64 = "" ;
+
+ local assembler-output-flag-amd64 = -Fo ;
+ local assembler-output-flag-i386 = -Fo ;
+ local assembler-output-flag-ia64 = -Fo ;
+ local assembler-output-flag-arm = -o ;
+ local assembler-output-flag-arm64 = -o ;
+
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
+
+ idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
+ idl-compiler ?= midl ;
+
+ mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
+ mc-compiler ?= mc ;
+
+ manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
+ manifest-tool ?= mt ;
+
+ local cc-filter = [ feature.get-values <compiler-filter> : $(options) ]
+ ;
+
+ for local c in $(cpu)
+ {
+ # Setup script is not required in some configurations.
+ setup-$(c) ?= "" ;
+
+ local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ;
+
+ if $(.debug-configuration)
+ {
+ for local cpu-condition in $(cpu-conditions)
+ {
+ ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c):J= )'" ;
+ }
+ }
+
+ local cpu-assembler = $(assembler) ;
+ cpu-assembler ?= $(default-assembler-$(c)) ;
+ local assembler-flags = $(assembler-flags-$(c)) ;
+ local assembler-output-flag = $(assembler-output-flag-$(c)) ;
+
+ for local api in desktop store phone
+ {
+ local setup-script = $(setup-$(c)) ;
+ if $(api) = phone
+ {
+ setup-script = $(setup-phone-$(c)) ;
+ }
+
+ if <rewrite-setup-scripts>always in $(options)
+ {
+ toolset.flags msvc .REWRITE-SETUP <windows-api>$(api)/$(cpu-conditions) : true ;
+ }
+
+ if ! $(setup-script)
+ {
+ # Should we try to set up some error handling or fallbacks here?
+ }
+ else if <rewrite-setup-scripts>off in $(options) || [ os.name ] != NT
+ {
+ toolset.flags msvc .SETUP <windows-api>$(api)/$(cpu-conditions) : [ call-batch-script "\"$(setup-script[1]:W)\" $(setup-script[2-]:E=)" ] ;
+ }
+ else
+ {
+ toolset.flags msvc .SETUP-SCRIPT <windows-api>$(api)/$(cpu-conditions) : $(setup-script[1]) ;
+ toolset.flags msvc .SETUP-OPTIONS <windows-api>$(api)/$(cpu-conditions) : $(setup-script[2-]) ;
+ }
+
+ toolset.flags msvc.compile .RC <windows-api>$(api)/$(cpu-conditions) : $(resource-compiler) ;
+ toolset.flags msvc.compile .IDL <windows-api>$(api)/$(cpu-conditions) : $(idl-compiler) ;
+ toolset.flags msvc.compile .MC <windows-api>$(api)/$(cpu-conditions) : $(mc-compiler) ;
+ toolset.flags msvc.link .MT <windows-api>$(api)/$(cpu-conditions) : $(manifest-tool) -nologo ;
+
+ if $(api) = desktop
+ {
+ toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(compiler) /Zm800 -nologo ;
+ }
+ else
+ {
+ toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(compiler) /Zm800 /ZW /EHsc -nologo ;
+ }
+ toolset.flags msvc.compile .ASM <windows-api>$(api)/$(cpu-conditions) : $(cpu-assembler) $(assembler-flags) -nologo ;
+ toolset.flags msvc.compile .ASM_OUTPUT <windows-api>$(api)/$(cpu-conditions) : $(assembler-output-flag) ;
+ toolset.flags msvc.link .LD <windows-api>$(api)/$(cpu-conditions) : $(linker) /NOLOGO "/INCREMENTAL:NO" ;
+ toolset.flags msvc.archive .LD <windows-api>$(api)/$(cpu-conditions) : $(linker) /lib /NOLOGO ;
+ }
+
+ if $(cc-filter)
+ {
+ toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ;
+ }
+ }
+
+        # Starting with Visual Studio 2013, the CRT is split into desktop and app DLLs.
+        # If targeting WinRT with version 12.0, set the lib path to link against the app CRT.
+ if [ MATCH "(12)" : $(version) ]
+ {
+ local storeLibPath = [ path.join $(parent) "lib/store" ] ;
+ toolset.flags msvc.link LINKPATH $(conditions)/<windows-api>store/$(.cpu-arch-i386) : [ path.native $(storeLibPath) ] ;
+ toolset.flags msvc.link LINKPATH $(conditions)/<windows-api>store/$(.cpu-arch-amd64) : [ path.native [ path.join $(storeLibPath) "amd64" ] ] ;
+ toolset.flags msvc.link LINKPATH $(conditions)/<windows-api>store/$(.cpu-arch-arm) : [ path.native [ path.join $(storeLibPath) "arm" ] ] ;
+ }
+
+ # LTO
+ toolset.flags msvc.compile OPTIONS $(conditions)/<lto>on : /GL ;
+ toolset.flags msvc.link OPTIONS $(conditions)/<lto>on : /LTCG ;
+
+ # Set version-specific flags.
+ configure-version-specific msvc : $(version) : $(conditions) ;
+ }
+}
+
+
+# Returns the default installation path for the given version.
+#
+local rule default-path ( version )
+{
+ local result ;
+ {
+ # try to use vswhere
+ local pseudo_env_VSCOMNTOOLS ;
+ local all-env-paths ;
+ local root = [ os.environ "ProgramFiles(x86)" ] ;
+ if ( ! $(root) )
+ {
+ root = [ os.environ "ProgramFiles" ] ;
+ }
+ if ( ! $(root) ) && [ os.name ] in CYGWIN
+ {
+ # We probably are in an 'env -i' Cygwin session, where the user
+            # was unable to restore the "ProgramFiles(x86)" environment variable,
+ # because it is an invalid environment variable name in Cygwin.
+ # However, we can try to query cygpath instead.
+ root = [ SHELL "cygpath -w -F 42" : strip-eol ] ; # CSIDL_PROGRAM_FILESX86
+ if ( ! $(root) )
+ {
+ root = [ SHELL "cygpath -w -F 38" : strip-eol ] ; # CSIDL_PROGRAM_FILES
+ }
+ }
+        # When we are a Cygwin build, [ SHELL ] executes using "/bin/sh -c".
+        # When /bin/sh finds a forward slash, no PATH search is performed,
+        # causing [ SHELL "C:\\...\\Installer/vswhere.exe" ] to succeed.
+        # Fortunately, forward slashes also work in native Windows.
+ local vswhere = "$(root)/Microsoft Visual Studio/Installer/vswhere.exe" ;
+ # The check for $(root) is to avoid a segmentation fault if not found.
+ if $(version) in 14.1 14.2 default && $(root) && [ path.exists $(vswhere) ]
+ {
+ local req = "-requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64" ;
+ local prop = "-property installationPath" ;
+ local limit ;
+
+ if $(version) = 14.2 || $(version) = "default"
+ {
+ limit = "-version \"[16.0,17.0)\"" ;
+ }
+ else if $(version) = 14.1
+ {
+ limit = "-version \"[15.0,16.0)\"" ;
+ }
+
+ # Quoting the "*" is for when we are a Cygwin build, to bypass /bin/sh.
+ local vswhere_cmd = "\"$(vswhere)\" -latest -products \"*\" $(req) $(prop) $(limit)" ;
+ # The split character "\r" is for when we are a Cygwin build.
+ local shell_ret = [ SPLIT_BY_CHARACTERS [ SHELL $(vswhere_cmd) ] : "\r\n" ] ;
+ pseudo_env_VSCOMNTOOLS = [ path.native [ path.join $(shell_ret) "\\Common7\\Tools" ] ] ;
+ if ! [ path.exists $(pseudo_env_VSCOMNTOOLS) ]
+ {
+ return ; # Not found. If we have vswhere, assume that it works.
+ }
+ all-env-paths = $(pseudo_env_VSCOMNTOOLS) ;
+ }
+ else
+ {
+ all-env-paths = [ sequence.transform os.environ
+ : $(.version-$(version)-env) ] ;
+ }
+
+ # Check environment or previous path_VS150
+ for local env-path in $(all-env-paths)
+ {
+ if $(env-path) && $(.version-$(version)-path)
+ {
+ for local bin-path in $(.version-$(version)-path)
+ {
+ result = [ path.glob [ path.make $(env-path) ] : $(bin-path) ] ;
+ if $(result)
+ {
+ result = [ path.native $(result[1]) ] ;
+ break ;
+ }
+ }
+ }
+ if $(result)
+ {
+ break ;
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+
+rule get-rspline ( target : lang-opt )
+{
+ CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS)
+ $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES)
+ $(.nl)\"-I$(INCLUDES:W)\" ] ;
+}
+
+class msvc-linking-generator : linking-generator
+{
+    # Calls the base version. If necessary, also creates additional targets
+    # for the PDB and the manifest file produced alongside the main target,
+    # forcing their exact names so that they match the main target's name.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local result = [ linking-generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+
+ if $(result)
+ {
+ local name-main = [ $(result[1]).name ] ;
+ local action = [ $(result[1]).action ] ;
+
+ if [ $(property-set).get <debug-symbols> ] = "on"
+ {
+ # We force the exact name on PDB. The reason is tagging -- the
+ # tag rule may reasonably special case some target types, like
+ # SHARED_LIB. The tag rule will not catch PDBs, and it cannot
+ # even easily figure out if a PDB is paired with a SHARED_LIB,
+ # EXE or something else. Because PDBs always get the same name
+ # as the main target, with .pdb as extension, just force it.
+ local target = [ class.new file-target $(name-main:S=.pdb) exact
+ : PDB : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ]
+ ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+
+ if [ $(property-set).get <embed-manifest> ] = "off"
+ {
+                # Manifest is an evil target. It has .manifest appended to the
+ # name of the main target, including extension, e.g.
+ # a.exe.manifest. We use the 'exact' name to achieve this
+ # effect.
+ local target = [ class.new file-target $(name-main).manifest
+ exact : MANIFEST : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ]
+ ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+#
+local rule register-toolset-really ( )
+{
+ feature.extend toolset : msvc ;
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature toolset msvc : vendor : intel : propagated optional ;
+
+ # Inherit MIDL flags.
+ toolset.inherit-flags msvc : midl ;
+
+ # Inherit MC flags.
+ toolset.inherit-flags msvc : mc ;
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add-requirements
+ <toolset>msvc,<runtime-link>shared:<threading>multi ;
+
+ # Declare msvc toolset specific features.
+ {
+ feature.feature debug-store : object database : propagated ;
+ feature.feature pch-source : : dependency free ;
+ }
+
+ # Declare generators.
+ {
+ # TODO: Is it possible to combine these? Make the generators
+ # non-composing so that they do not convert each source into a separate
+ # .rsp file.
+ generators.register [ new msvc-linking-generator msvc.link :
+ OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ;
+ generators.register [ new msvc-linking-generator msvc.link.dll :
+ OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB :
+ <toolset>msvc <suppress-import-lib>false ] ;
+ generators.register [ new msvc-linking-generator msvc.link.dll :
+ OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB :
+ <toolset>msvc <suppress-import-lib>true ] ;
+
+ generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ;
+
+ # Using 'register-c-compiler' adds the build directory to INCLUDES.
+ generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ;
+ generators.override msvc.compile.rc : rc.compile.resource ;
+ generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ;
+
+ generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ;
+ generators.override msvc.compile.idl : midl.compile.idl ;
+
+ generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ;
+ generators.override msvc.compile.mc : mc.compile ;
+
+ # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
+ # the latter have their HPP type derived from H. The type of compilation
+ # is determined entirely by the destination type.
+ generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
+ generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
+
+ generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
+ generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
+ }
+
+ toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
+ toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
+ toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
+
+ #
+ # Declare flags for compilation.
+ #
+
+ toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ;
+ toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ;
+
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ;
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ;
+
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ;
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ;
+ toolset.flags msvc.compile CFLAGS <optimization>off : /Od ;
+ toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ;
+ toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ;
+ toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ;
+
+ toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile CFLAGS <warnings>extra : /W4 ;
+ toolset.flags msvc.compile CFLAGS <warnings>pedantic : /W4 ;
+ toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
+
+ toolset.flags msvc.compile C++FLAGS <cxxstd>14 : "/std:c++14" ;
+ toolset.flags msvc.compile C++FLAGS <cxxstd>17 : "/std:c++17" ;
+ toolset.flags msvc.compile C++FLAGS <cxxstd>latest : "/std:c++latest" ;
+
+ # By default 8.0 enables rtti support while prior versions disabled it. We
+ # simply enable or disable it explicitly so we do not have to depend on this
+ # default behaviour.
+ toolset.flags msvc.compile CFLAGS <rtti>on : /GR ;
+ toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+ toolset.flags msvc.compile OPTIONS <cflags> : ;
+ toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ;
+
+ toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ;
+
+ toolset.flags msvc.compile DEFINES <define> ;
+ toolset.flags msvc.compile UNDEFS <undef> ;
+ toolset.flags msvc.compile INCLUDES <include> ;
+
+ # Declare flags for the assembler.
+ toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <architecture>x86/<debug-symbols>on : "/Zi /Zd" ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <architecture>x86/<warnings>on : /W3 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <architecture>x86/<warnings>off : /W0 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <architecture>x86/<warnings>all : /W4 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <architecture>x86/<warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile.asm ASMDEFINES <architecture>x86 : <define> ;
+
+ # Declare flags for linking.
+ {
+ toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : "/PDB:" ; # not used yet
+ toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
+ toolset.flags msvc.link DEF_FILE <def-file> ;
+
+ # The linker disables the default optimizations when using /DEBUG so we
+ # have to enable them manually for release builds with debug symbols.
+ toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : "/OPT:REF,ICF" ;
+
+ toolset.flags msvc LINKFLAGS <user-interface>console : "/subsystem:console" ;
+ toolset.flags msvc LINKFLAGS <user-interface>gui : "/subsystem:windows" ;
+ toolset.flags msvc LINKFLAGS <user-interface>wince : "/subsystem:windowsce" ;
+ toolset.flags msvc LINKFLAGS <user-interface>native : "/subsystem:native" ;
+ toolset.flags msvc LINKFLAGS <user-interface>auto : "/subsystem:posix" ;
+
+ toolset.flags msvc.link OPTIONS <linkflags> ;
+ toolset.flags msvc.link LINKPATH <library-path> ;
+
+ toolset.flags msvc.link FINDLIBS_ST <find-static-library> ;
+ toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ;
+ toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
+ toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+
+ toolset.flags msvc.link.dll LINKFLAGS <suppress-import-lib>true : /NOENTRY ;
+ }
+
+ toolset.flags msvc.archive AROPTIONS <archiveflags> ;
+
+ # Create a project to allow building the setup scripts
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project msvc ;
+
+ feature.feature msvc.setup-options : : free ;
+}
+
+
+# Locates the requested setup script under the given folder and returns its full
+# path or nothing in case the script can not be found. In case multiple scripts
+# are found only the first one is returned.
+#
+# TODO: There used to exist a code comment for the msvc.init rule stating that
+# we do not correctly detect the location of the vcvars32.bat setup script for
+# the free VC7.1 tools in case user explicitly provides a path. This should be
+# tested or simply remove this whole comment in case this toolset version is no
+# longer important.
+#
+local rule locate-default-setup ( command : parent : setup-name )
+{
+ local result = [ GLOB $(command) $(parent) : $(setup-name) ] ;
+ if $(result[1])
+ {
+ return $(result[1]) ;
+ }
+}
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+local rule register-configuration ( version : path ? )
+{
+ if $(path)
+ {
+ local command = [ GLOB $(path) : cl.exe ] ;
+
+ if $(command)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice\: "[msvc-cfg]" msvc-$(version) detected, command\:
+ '$(command)' ;
+ }
+
+ $(.versions).register $(version) ;
+ $(.versions).set $(version) : default-command : $(command) ;
+ }
+ }
+}
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Miscellaneous constants.
+.RM = [ common.rm-command ] ;
+.nl = "
+" ;
+.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ;
+.escaped-double-quote = "\"" ;
+.TOUCH_FILE = [ common.file-touch-command ] ;
+
+# List of all registered configurations.
+.versions = [ new configurations ] ;
+
+# Supported CPU architectures.
+.cpu-arch-i386 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-amd64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-ia64 =
+ <architecture>ia64/<address-model>
+ <architecture>ia64/<address-model>64 ;
+
+.cpu-arch-arm =
+ <architecture>arm/<address-model>
+ <architecture>arm/<address-model>32 ;
+
+.cpu-arch-arm64 =
+ <architecture>arm/<address-model>
+ <architecture>arm/<address-model>64 ;
+
+# Supported CPU types (only Itanium optimization options are supported from
+# VC++ 2005 on). See
+# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
+# detailed information.
+.cpu-type-g5 = i586 pentium pentium-mmx ;
+.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6
+ k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ;
+.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom
+ merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell
+ broadwell skylake skylake-avx512 cannonlake icelake ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3
+ athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3
+ bdver4 btver1 btver2 znver1 ;
+.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+.cpu-type-itanium = itanium itanium1 merced ;
+.cpu-type-itanium2 = itanium2 mckinley ;
+.cpu-type-arm = armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
+ armv7 armv7s ;
+
+# Known toolset versions, in order of preference.
+.known-versions = 14.2 14.1 14.0 12.0 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1
+ 7.1toolkit 7.0 6.0 ;
+
+# Version aliases.
+.version-alias-6 = 6.0 ;
+.version-alias-6.5 = 6.0 ;
+.version-alias-7 = 7.0 ;
+.version-alias-8 = 8.0 ;
+.version-alias-9 = 9.0 ;
+.version-alias-10 = 10.0 ;
+.version-alias-11 = 11.0 ;
+.version-alias-12 = 12.0 ;
+.version-alias-14 = 14.0 ;
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
+.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ;
+.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ;
+.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ;
+.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ;
+.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ;
+.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
+.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
+.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
+.version-11.0-reg = "VisualStudio\\11.0\\Setup\\VC" ;
+.version-12.0-reg = "VisualStudio\\12.0\\Setup\\VC" ;
+.version-14.0-reg = "VisualStudio\\14.0\\Setup\\VC" ;
+
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003/bin" ;
+.version-7.1toolkit-env = VCToolkitInstallDir ;
+# Visual Studio 2017 does not use the registry at all, and the suggested methods
+# of discovery involve using a compiled helper program. So as a fallback we search
+# known installation paths for VS2017 (i.e. msvc >= 14.1).
+.version-14.1-path =
+ "../../VC/Tools/MSVC/*/bin/Host*/*"
+ "Microsoft Visual Studio/2017/*/VC/Tools/MSVC/*/bin/Host*/*"
+ ;
+.version-14.1-env = VS150COMNTOOLS ProgramFiles ProgramFiles(x86) ;
+.version-14.2-path =
+ "../../VC/Tools/MSVC/*/bin/Host*/*"
+ "Microsoft Visual Studio/2019/*/VC/Tools/MSVC/*/bin/Host*/*"
+ ;
+.version-14.2-env = VS160COMNTOOLS ProgramFiles ProgramFiles(x86) ;
+
+# Auto-detect all the available msvc installations on the system.
+auto-detect-toolset-versions ;
+
+
+# And finally trigger the actual Boost Build toolset registration.
+register-toolset ;
diff --git a/src/boost/tools/build/src/tools/msvc.py b/src/boost/tools/build/src/tools/msvc.py
new file mode 100644
index 000000000..efd3a908a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/msvc.py
@@ -0,0 +1,1313 @@
+# Copyright (c) 2003 David Abrahams.
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2006 Bojan Resnik.
+# Copyright (c) 2006 Ilya Sokolov.
+# Copyright (c) 2007 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+from os import environ
+import os.path
+import re
+import _winreg
+
+import bjam
+
+from b2.tools import common, rc, pch, builtin, mc, midl
+from b2.build import feature, type, toolset, generators, property_set, virtual_target
+from b2.build.property import Property
+from b2.build.virtual_target import FileTarget
+from b2.util import path
+from b2.manager import get_manager
+from b2.build.generators import Generator
+from b2.build.toolset import flags
+from b2.util.utility import to_seq, on_windows
+from b2.tools.common import Configurations
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+
+# It is not yet clear what to do with Cygwin on python port.
+def on_cygwin():
+ return False
+
+
+type.register('MANIFEST', ['manifest'])
+feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated'])
+
+type.register('PDB',['pdb'])
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initializes a specific toolset version configuration. As a result, the path to
+# the compiler and, possibly, program names are set up and will be used when that
+# version of the compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be omitted:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+# - default - this is equivalent to an empty version.
+#
+# Depending on the supplied version, the detected configurations and the presence
+# of 'cl.exe' in the path, different results may be achieved. The following table
+# describes the possible scenarios:
+#
+#   Passed    |  Nothing   |   "x.y"    | Nothing detected,  | "x.y" detected,
+#   version   |  detected  |  detected  | cl.exe in path     | cl.exe in path
+#   ----------+------------+------------+--------------------+-----------------
+#   default   |  Error     |  Use "x.y" | Create "default"   | Use "x.y"
+#   all       |  None      |  Use all   | None               | Use all
+#   x.y       |  -         |  Use "x.y" | -                  | Use "x.y"
+#   a.b       |  Error     |  Error     | Create "a.b"       | Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+
+def init(version = None, command = None, options = None):
+ # When initialized from
+ # using msvc : x.0 ;
+ # we get version as a single element list i.e. ['x.0'],
+ # but when specified from the command line we get a string i.e. 'x.0'.
+ # We want to work with a string, so unpack the list if needed.
+ is_single_element_list = (isinstance(version,list) and len(version) == 1)
+ assert(version==None or isinstance(version,str) or is_single_element_list)
+ if is_single_element_list:
+ version = version[0]
+
+ options = to_seq(options)
+ command = to_seq(command)
+
+ if command:
+ options.extend("<command>"+cmd for cmd in command)
+ configure(version,options)
+
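+# Dispatches a configuration request: 'all' configures every registered
+# (i.e. auto-detected) version, 'default' (or no version) configures the
+# default toolset version, and any other value configures that specific
+# version.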
+def configure(version=None, options=None):
+ if version == "all":
+ if options:
+ raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version))
+
+ # Configure (i.e. mark as used) all registered versions.
+ all_versions = __versions.all()
+ if not all_versions:
+ if debug():
+ print "notice: [msvc-cfg] Asked to configure all registered" \
+ "msvc toolset versions when there are none currently" \
+ "registered." ;
+ else:
+ for v in all_versions:
+ # Note that there is no need to skip already configured
+ # versions here as this will request configure-really rule
+ # to configure the version using default options which will
+ # in turn cause it to simply do nothing in case the version
+ # has already been configured.
+ configure_really(v)
+ elif version == "default":
+ configure_really(None,options)
+ else:
+ configure_really(version, options)
+
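+# Returns the cross product of the given conditions and property extensions,
+# joining each pair with '/'.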
+def extend_conditions(conditions,exts):
+ return [ cond + '/' + ext for cond in conditions for ext in exts ]
+
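+# Sets up version-dependent compiler and linker flags for the given toolset,
+# applied under the supplied property conditions.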
+def configure_version_specific(toolset_arg, version, conditions):
+ # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+ # /Zc:wchar_t options that improve C++ standard conformance, but those
+ # options are off by default. If we are sure that the msvc version is at
+ # least 7.*, add those options explicitly. We can be sure either if the user
+ # specified version 7.* explicitly or if we auto-detected the version ourselves.
+ if not re.search('^6\\.', version):
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',conditions, ['/Zc:forScope','/Zc:wchar_t'])
+ toolset.flags('{}.compile.c++'.format(toolset_arg), 'C++FLAGS',conditions, ['/wd4675'])
+
+ # Explicitly disable the 'function is deprecated' warning. Some msvc
+ # versions have a bug, causing them to emit the deprecation warning even
+ # with /W0.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>off']), ['/wd4996'])
+ if re.search('^[78]\.', version):
+ # 64-bit compatibility warning deprecated since 9.0, see
+ # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>all']), ['/Wp64'])
+
+ #
+ # Processor-specific optimization.
+ #
+ if re.search('^[67]', version ):
+ # 8.0 deprecates some of the options.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed','<optimization>space']), ['/Ogiy', '/Gs'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed']), ['/Ot'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>space']), ['/Os'])
+
+ cpu_arch_i386_cond = extend_conditions(conditions, __cpu_arch_i386)
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>']),['/GB'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i486']),['/G4'])
+
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g5]), ['/G5'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g6]), ['/G6'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g7]), ['/G7'])
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', conditions, ['/Op'])
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/ML'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MLd'])
+ else:
+ # 8.0 and above adds some more options.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' for a in __cpu_arch_amd64]), ['/favor:blend'])
+
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_em64t]), ['/favor:EM64T'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_amd64]), ['/favor:AMD64'])
+
+ # 8.0 and above only has multi-threaded static RTL.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/MT'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MTd'])
+
+ # Specify target machine type so the linker will not need to guess.
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64'])
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86'])
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64'])
+
+ # Make sure that a manifest will be generated even if there are no
+ # dependencies to put in it.
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', conditions, ['/MANIFEST'])
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+
+def register_toolset():
+ if not 'msvc' in feature.values('toolset'):
+ register_toolset_really()
+
+
+engine = get_manager().engine()
+
+# This rule sets up the PDB file that will be used when generating static
+# libraries and the debug-store option is 'database', so that the compiler
+# puts all debug info into a single .pdb file named after the library.
+#
+# Poking at source targets this way is probably not clean, but it is the
+# easiest approach.
+def archive(targets, sources=None, properties=None):
+ bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
+
+# Declare action for creating static libraries. If library exists, remove it
+# before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
+if not on_cygwin():
+ engine.register_action(
+ 'msvc.archive',
+ '''if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+ function=archive)
+else:
+ engine.register_action(
+ 'msvc.archive',
+ '''{rm} "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()),
+ function=archive)
+
+# For the assembler the following options are turned on by default:
+#
+# -Zp4 align structures to 4 bytes
+# -Cp preserve case of user identifiers
+# -Cx preserve case in publics, externs
+#
+engine.register_action(
+ 'msvc.compile.asm',
+ '$(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"' )
+
+
+# Equivalent to [ on $(target) return $(prefix)$(var)$(suffix) ]. Note that $(var) can be a list.
+def expand_target_variable(target,var,prefix=None,suffix=None):
+ list = bjam.call( 'get-target-variable', target, var )
+ return " ".join([ ("" if prefix is None else prefix) + elem + ("" if suffix is None else suffix) for elem in list ])
+
+
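+# Builds the compiler response-file contents for the given targets from their
+# UNDEFS, CFLAGS, C++FLAGS, OPTIONS, DEFINES and INCLUDES variables and stores
+# the result in the CC_RSPLINE target variable used by the compile actions
+# below.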
+def get_rspline(targets, lang_opt):
+ result = lang_opt + '\n' + \
+ expand_target_variable(targets, 'UNDEFS' , '\n-U' ) + \
+ expand_target_variable(targets, 'CFLAGS' , '\n' ) + \
+ expand_target_variable(targets, 'C++FLAGS', '\n' ) + \
+ expand_target_variable(targets, 'OPTIONS' , '\n' ) + '\n-c' + \
+ expand_target_variable(targets, 'DEFINES' , '\n-D' , '\n' ) + \
+ expand_target_variable(targets, 'INCLUDES', '\n"-I', '"\n' )
+ bjam.call('set-target-variable', targets, 'CC_RSPLINE', result)
+
+def compile_c(targets, sources = [], properties = None):
+ get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' )
+ get_rspline(targets, '-TC')
+ compile_c_cpp(targets,sources)
+
+def compile_c_preprocess(targets, sources = [], properties = None):
+ get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' )
+ get_rspline(targets, '-TC')
+ preprocess_c_cpp(targets,sources)
+
+def compile_c_pch(targets, sources = [], properties = []):
+ get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' )
+ get_rspline([targets[0]], '-TC')
+ get_rspline([targets[1]], '-TC')
+
+toolset.flags( 'msvc', 'YLOPTION', [], ['-Yl'] )
+
+def compile_cpp(targets,sources=[],properties=None):
+ get_rspline(targets,'-TP')
+ bjam.call('set-target-variable', targets, 'PCH_FILE', sources)
+ compile_c_cpp(targets,sources)
+
+def compile_cpp_preprocess(targets,sources=[],properties=None):
+ get_rspline(targets,'-TP')
+ preprocess_c_cpp(targets,sources)
+
+def compile_cpp_pch(targets,sources=[],properties=None):
+ get_rspline([targets[0]], '-TP')
+ get_rspline([targets[1]], '-TP')
+
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes to this action with intel-win.
+#
+# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
+#
+# 2. When compiling an executable's source files, PDB_NAME is set on a per-source-file basis by the compile-c-c++ rule.
+# The linker will pull these into the executable's PDB.
+#
+# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by the archive rule,
+# as in this case the compiler must be used to create a single PDB for our library.
+#
+
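+# Wraps a setup function together with an optional per-target function. When
+# invoked, the setup function selects (and may adjust the sources for) the
+# concrete bjam action to run, which is then bound via 'set-update-action'.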
+class SetupAction:
+ def __init__(self, setup_func, function):
+ self.setup_func = setup_func
+ self.function = function
+
+ def __call__(self, targets, sources, property_set):
+ assert(callable(self.setup_func))
+ # This can modify sources.
+ action_name = self.setup_func(targets, sources, property_set)
+ # Bjam actions defined from Python have only the command
+ # to execute, and no associated jam procedural code. So
+ # passing 'property_set' to it is not necessary.
+ bjam.call("set-update-action", action_name, targets, sources, [])
+ if self.function:
+ self.function(targets, sources, property_set)
+
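+# Registers a SetupAction under the given name, refusing to overwrite an
+# already existing action.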
+def register_setup_action(action_name,setup_function,function=None):
+ global engine
+ if action_name in engine.actions:
+ raise "Bjam action %s is already defined" % action_name
+ engine.actions[action_name] = SetupAction(setup_function, function)
+
+
+engine.register_action('compile-c-c++',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)''',
+bound_list=['PDB_NAME'])
+
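+# Appends the precompiled header binary and header (if any) to the action's
+# sources so the compile-c-c++ action above can reference them (as $(>[2]) and
+# $(>[3])).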
+def setup_compile_c_cpp_action(targets, sources, properties):
+ sources += bjam.call('get-target-variable',targets,'PCH_FILE')
+ sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
+ return 'compile-c-c++'
+
+
+register_setup_action(
+ 'msvc.compile.c',
+ setup_compile_c_cpp_action,
+ function=compile_c)
+
+register_setup_action(
+ 'msvc.compile.c++',
+ setup_compile_c_cpp_action,
+ function=compile_cpp)
+
+
+engine.register_action('preprocess-c-c++',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"',
+bound_list=['PDB_NAME'])
+
+def setup_preprocess_c_cpp_action(targets, sources, properties):
+ sources += bjam.call('get-target-variable',targets,'PCH_FILE')
+ sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
+ return 'preprocess-c-c++'
+
+register_setup_action(
+ 'msvc.compile.c.preprocess',
+ setup_preprocess_c_cpp_action,
+ function=compile_c_preprocess)
+
+register_setup_action(
+ 'msvc.compile.c++.preprocess',
+ setup_preprocess_c_cpp_action,
+ function=compile_cpp_preprocess)
+
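+# Common per-target setup for C/C++ compilation: adds dependencies on the
+# precompiled header file and header (if any) and derives the per-source PDB
+# name from the object file name.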
+def compile_c_cpp(targets,sources=None):
+ pch_header = bjam.call('get-target-variable',targets[0],'PCH_HEADER')
+ pch_file = bjam.call('get-target-variable',targets[0],'PCH_FILE')
+ if pch_header: get_manager().engine().add_dependency(targets[0],pch_header)
+ if pch_file: get_manager().engine().add_dependency(targets[0],pch_file)
+ bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
+
+def preprocess_c_cpp(targets,sources=None):
+ # Same as above.
+ return compile_c_cpp(targets,sources)
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+
+
+engine.register_action('compile-c-c++-pch',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include "$(>[1]:D=)"\n)" $(.CC.FILTER)')
+
+engine.register_action('compile-c-c++-pch-s',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)')
+
+def setup_c_cpp_pch(targets, sources, properties):
+ pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE')
+ if pch_source:
+ sources += pch_source
+ get_manager().engine().add_dependency(targets,pch_source)
+ return 'compile-c-c++-pch-s'
+ else:
+ return 'compile-c-c++-pch'
+
+register_setup_action(
+ 'msvc.compile.c.pch',
+ setup_c_cpp_pch,
+ function=compile_c_pch)
+
+register_setup_action(
+ 'msvc.compile.c++.pch',
+ setup_c_cpp_pch,
+ function=compile_cpp_pch)
+
+
+# See midl.py for details.
+#
+engine.register_action(
+ 'msvc.compile.idl',
+ '''$(.IDL) /nologo @"@($(<[1]:W).rsp:E=
+"$(>:W)"
+-D$(DEFINES)
+"-I$(INCLUDES:W)"
+-U$(UNDEFS)
+$(MIDLFLAGS)
+/tlb "$(<[1]:W)"
+/h "$(<[2]:W)"
+/iid "$(<[3]:W)"
+/proxy "$(<[4]:W)"
+/dlldata "$(<[5]:W)")"
+ {touch} "$(<[4]:W)"
+ {touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
+
+engine.register_action(
+ 'msvc.compile.mc',
+ '$(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"')
+
+engine.register_action(
+ 'msvc.compile.rc',
+ '$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"')
+
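+# Adds a dependency on the module definition (.def) file, if one was
+# specified, then falls through to the manifest handling below.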
+def link_dll(targets,sources=None,properties=None):
+ get_manager().engine().add_dependency(targets,bjam.call('get-target-variable',targets,'DEF_FILE'))
+ manifest(targets, sources, properties)
+
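+# Schedules the msvc.manifest action for the given targets when manifest
+# embedding is enabled via <embed-manifest>on.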
+def manifest(targets,sources=None,properties=None):
+ if 'on' in properties.get('<embed-manifest>'):
+ get_manager().engine().set_update_action('msvc.manifest', targets, sources, properties)
+
+
+# Incrementally linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# Windows manifests are a way to specify dependencies on managed .NET
+# assemblies and native Windows DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if not on_cygwin():
+ engine.register_action(
+ 'msvc.link',
+ '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
+ function=manifest,
+ bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest',
+ '''if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ )''')
+
+ engine.register_action(
+ 'msvc.link.dll',
+ '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
+ function=link_dll,
+ bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest.dll',
+ '''if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ )''')
+else:
+ engine.register_action(
+ 'msvc.link',
+ '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+ function=manifest,
+ bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest',
+ '''if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ fi''')
+
+ engine.register_action(
+ 'msvc.link.dll',
+ '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+ function=link_dll,
+ bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest.dll',
+ '''if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ fi''')
+
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class MsvcPchGenerator(pch.PchGenerator):
+
+ # Inherit the __init__ method
+ def run_pch(self, project, name, prop_set, sources):
+ # Find the header in sources. Ignore any CPP sources.
+ pch_header = None
+ pch_source = None
+ for s in sources:
+ if type.is_derived(s.type(), 'H'):
+ pch_header = s
+ elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'):
+ pch_source = s
+
+ if not pch_header:
+ raise RuntimeError( "can not build pch without pch-header" )
+
+ # If we do not have the PCH source - that is fine. We will just create a
+ # temporary .cpp file in the action.
+ properties = prop_set.all()
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ if pch_source:
+ properties.append(Property('pch-source',pch_source))
+ generated = Generator.run(self,project,name,property_set.create(properties),[pch_header])
+ pch_file = None
+ for g in generated:
+ if type.is_derived(g.type(), 'PCH'):
+ pch_file = g
+ result_props = []
+ if pch_header:
+ result_props.append(Property('pch-header', pch_header))
+ if pch_file:
+ result_props.append(Property('pch-file', pch_file))
+
+ return property_set.PropertySet(result_props), generated
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects the versions listed in '_known_versions' by checking registry information,
+# environment variables & default paths. Supports both native Windows and
+# Cygwin.
+def auto_detect_toolset_versions():
+ if on_windows() or on_cygwin():
+ for version in _known_versions:
+ versionVarName = '__version_{}_reg'.format(version.replace('.','_'))
+ if versionVarName in globals():
+ vc_path = None
+ for x64elt in [ '', 'Wow6432Node\\' ]:
+ try:
+ with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\{}Microsoft\\{}'.format(x64elt, globals()[versionVarName])) as reg_key:
+ vc_path = _winreg.QueryValueEx(reg_key, "ProductDir")[0]
+ except:
+ pass
+ if vc_path:
+ vc_path = os.path.join(vc_path,'bin')
+ register_configuration(version,os.path.normpath(vc_path))
+
+ for i in _known_versions:
+ if not i in __versions.all():
+ register_configuration(i,default_path(i))
+
+
+def maybe_rewrite_setup(toolset, setup_script, setup_options, version, rewrite_setup='off'):
+ """
+ Helper rule to generate a faster alternative to MSVC setup scripts.
+
+ We used to call MSVC setup scripts directly in every action, however in
+ newer MSVC versions (10.0+) they make long-lasting registry queries
+ which have a significant impact on build time.
+ """
+ result = '"{}" {}'.format(setup_script, setup_options)
+
+ # At the moment we only know how to rewrite scripts with cmd shell.
+ if os.name == 'nt' and rewrite_setup != 'off':
+ basename = os.path.basename(setup_script)
+ filename, _ = os.path.splitext(basename)
+ setup_script_id = 'b2_{}_{}_{}'.format(toolset, version, filename)
+ if setup_options:
+ setup_script_id = '{}_{}'.format(setup_script_id, setup_options)
+
+ tempdir = os.environ.get('TEMP')
+ replacement = os.path.join(tempdir, setup_script_id + '.cmd')
+ if rewrite_setup == 'always' or not os.path.exists(replacement):
+ import subprocess
+ # call the setup script and print the environment after doing so
+ p = subprocess.Popen([
+ setup_script, setup_options, '>', 'nul', '&&', 'set',
+ ], stdout=subprocess.PIPE, shell=True
+ )
+ stdout, _ = p.communicate()
+
+ diff_vars = []
+ for var in stdout.splitlines():
+ # returns a tuple of ('var-name', '=', 'value').
+ # partition is being used here (over something like .split())
+ # for two reasons:
+ # 1) an environment variable may have a value that contains an '=';
+ # .partition() will still return the correct key and value pair.
+ # 2) if the line doesn't contain an '=' at all, then the returned
+ # tuple will contain only empty strings rather than raising
+ # an exception.
+ key, _, value = var.partition('=')
+ # os.environ handles casing differences here. Usually the
+ # call to "set" above will produce pascal-cased environment
+ # variable names, so a normal python dict can't be used here.
+ # check for the existence of key in case the partitioning() above
+ # returned an empty key value pair.
+ if key and os.environ.get(key) != value:
+ diff_vars.append('SET {}={}'.format(key, value))
+
+ if diff_vars:
+ with open(replacement, 'wb') as f:
+ f.write(os.linesep.join(diff_vars))
+
+ result = '"{}"'.format(replacement)
+ else:
+ result = '"{}"'.format(replacement)
+
+ return result
+
+
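+# Builds the command prefix that invokes the appropriate vcvars setup script
+# (possibly rewritten via maybe_rewrite_setup) for the given toolset version
+# and target CPU.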
+def generate_setup_cmd(version, command, parent, options, cpu, global_setup,
+ default_global_setup_options, default_setup):
+ setup_prefix = "call "
+ setup_suffix = """ >nul\n"""
+ if on_cygwin():
+ setup_prefix = "cmd.exe /S /C call "
+ setup_suffix = " \">nul\" \"&&\" "
+
+ setup_options = ''
+ setup_cpu = feature.get_values('<setup-{}>'.format(cpu), options)
+
+ if not setup_cpu:
+ if global_setup:
+ setup_cpu = global_setup
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup_options = feature.get_values('<setup-options-{}>'.format(cpu),options)
+ if not setup_options:
+ setup_options = default_global_setup_options[cpu]
+ else:
+ setup_cpu = locate_default_setup(command, parent, default_setup[cpu])
+ else:
+ setup_cpu = setup_cpu[0]
+
+ # Cygwin to Windows path translation.
+ # setup-$(c) = "\""$(setup-$(c):W)"\"" ;
+
+ # Append setup options to the setup name and add the final setup
+ # prefix & suffix.
+ rewrite = feature.get_values('<rewrite-setup-scripts>', options)
+ rewrite = rewrite[0] if rewrite else ''
+ setup = maybe_rewrite_setup(
+ 'msvc', setup_cpu, setup_options, version, rewrite)
+ return '{}{}{}'.format(setup_prefix, setup, setup_suffix)
+
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+
+def configure_really(version=None, options=[]):
+ v = version
+ if not v:
+ # Take the first registered (i.e. auto-detected) version.
+ version = __versions.first()
+ v = version
+
+ # Note: 'version' can still be empty at this point if no versions have
+ # been auto-detected.
+ if not version:
+ version = "default"
+
+ # Version alias -> real version number.
+ version = globals().get("__version_alias_{}".format(version), version)
+
+ # Check whether the selected configuration is already in use.
+ if version in __versions.used():
+ # Allow multiple 'toolset.using' calls for the same configuration if the
+ # identical sets of options are used.
+ if options and options != __versions.get(version,'options'):
+ raise RuntimeError("MSVC toolset configuration: Toolset version '$(version)' already configured.".format(version))
+ else:
+ # Register a new configuration.
+ __versions.register(version)
+
+ # Add user-supplied options to the auto-detected ones.
+ version_opts = __versions.get(version, 'options')
+ if (version_opts):
+ options = version_opts + options
+
+ # Mark the configuration as 'used'.
+ __versions.use(version)
+ # Generate conditions and save them.
+ conditions = common.check_init_parameters('msvc', None, ('version', v))
+ __versions.set(version, 'conditions', conditions)
+ command = feature.get_values('<command>', options)
+
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
+ command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version))
+ common.handle_options('msvc', conditions, command, options)
+
+ if not version:
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
+ # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+ # 9.0express as 9.0 here.
+ if re.search("Microsoft Visual Studio[\/\\]2017", command):
+ version = '15.0'
+ elif re.search("Microsoft Visual Studio 14", command):
+ version = '14.0'
+ elif re.search("Microsoft Visual Studio 12", command):
+ version = '12.0'
+ elif re.search("Microsoft Visual Studio 11", command):
+ version = '11.0'
+ elif re.search("Microsoft Visual Studio 10", command):
+ version = '10.0'
+ elif re.search("Microsoft Visual Studio 9", command):
+ version = '9.0'
+ elif re.search("Microsoft Visual Studio 8", command):
+ version = '8.0'
+ elif re.search("NET 2003[\/\\]VC7", command):
+ version = '7.1'
+ elif re.search("Microsoft Visual C\\+\\+ Toolkit 2003", command):
+ version = '7.1toolkit'
+ elif re.search(".NET[\/\\]VC7", command):
+ version = '7.0'
+ else:
+ version = '6.0'
+
+ # Generate and register setup command.
+
+ below_8_0 = re.search("^[67]\\.",version) != None
+
+ if below_8_0:
+ cpu = ['i386']
+ else:
+ cpu = ['i386', 'amd64', 'ia64']
+
+ setup_scripts = {}
+
+ if command:
+ # TODO: Note that if we specify a non-existent toolset version then
+ # this rule may find and use a corresponding compiler executable
+ # belonging to an incorrect toolset version. For example, if you
+ # have only MSVC 7.1 installed, have its executable on the path and
+ # specify you want Boost Build to use MSVC 9.0, then you want Boost
+ # Build to report an error but this may cause it to silently use the
+ # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
+ # toolset version.
+ command = common.get_absolute_tool_path(command)
+
+ if command:
+ parent = os.path.dirname(os.path.normpath(command))
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+ # be used instead. Setup scripts may be global or architecture/
+ # platform/CPU specific. Setup options are used only in case of
+ # global setup scripts.
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top level script but to
+ # support those we need to fall back to using the worker scripts
+ # directly in case the top level script can not be found.
+
+ global_setup = feature.get_values('<setup>',options)
+ if global_setup:
+ global_setup = global_setup[0]
+ else:
+ global_setup = None
+
+ if not below_8_0 and not global_setup:
+ global_setup = locate_default_setup(command,parent,'vcvarsall.bat')
+
+
+ default_setup = {
+ 'amd64' : 'vcvarsx86_amd64.bat',
+ 'i386' : 'vcvars32.bat',
+ 'ia64' : 'vcvarsx86_ia64.bat' }
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, that seems to be a documentation bug
+ # and x86_ia64 is the correct option.
+ default_global_setup_options = {
+ 'amd64' : 'x86_amd64',
+ 'i386' : 'x86',
+ 'ia64' : 'x86_ia64' }
+
+ somehow_detect_the_itanium_platform = None
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+ # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+ # 'x86' when running 32-bit Windows, no matter which processor is
+ # used, and 'AMD64' when running 64-bit Windows on x86-64 (either
+ # AMD64 or EM64T) hardware.
+ #
+ if re.search( 'AMD64', environ[ "PROCESSOR_ARCHITECTURE" ] ) != None:
+ default_global_setup_options[ 'amd64' ] = 'amd64'
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ elif somehow_detect_the_itanium_platform:
+ default_global_setup_options[ 'ia64' ] = 'ia64'
+
+ for c in cpu:
+ setup_scripts[c] = generate_setup_cmd(
+ version, command, parent, options, c, global_setup,
+ default_global_setup_options, default_setup
+ )
+
+ # Get tool names (if any) and finish setup.
+ compiler = feature.get_values("<compiler>", options)
+ compiler = compiler[0] if compiler else 'cl'
+
+ linker = feature.get_values("<linker>", options)
+ if not linker:
+ linker = "link"
+
+ resource_compiler = feature.get_values("<resource-compiler>", options)
+ if not resource_compiler:
+ resource_compiler = "rc"
+
+ # Turn on some options for i386 assembler
+ # -coff generate COFF format object file (compatible with cl.exe output)
+ default_assembler_amd64 = 'ml64'
+ default_assembler_i386 = 'ml -coff'
+ default_assembler_ia64 = 'ias'
+
+ assembler = feature.get_values('<assembler>',options)
+
+ idl_compiler = feature.get_values('<idl-compiler>',options)
+ if not idl_compiler:
+ idl_compiler = 'midl'
+
+ mc_compiler = feature.get_values('<mc-compiler>',options)
+ if not mc_compiler:
+ mc_compiler = 'mc'
+
+ manifest_tool = feature.get_values('<manifest-tool>',options)
+ if not manifest_tool:
+ manifest_tool = 'mt'
+
+ cc_filter = feature.get_values('<compiler-filter>',options)
+
+ for c in cpu:
+ cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ]
+
+ setup_script = setup_scripts.get(c, '')
+
+ if debug():
+ for cpu_condition in cpu_conditions:
+ print "notice: [msvc-cfg] condition: '{}', setup: '{}'".format(cpu_condition,setup_script)
+
+ cpu_assembler = assembler
+ if not cpu_assembler:
+ cpu_assembler = locals()['default_assembler_{}'.format(c)]
+
+ toolset.flags('msvc.compile', '.CC' , cpu_conditions, ['{}{} /Zm800 -nologo' .format(setup_script, compiler)])
+ toolset.flags('msvc.compile', '.RC' , cpu_conditions, ['{}{}' .format(setup_script, resource_compiler)])
+ toolset.flags('msvc.compile', '.ASM', cpu_conditions, ['{}{} -nologo' .format(setup_script, cpu_assembler)])
+ toolset.flags('msvc.link' , '.LD' , cpu_conditions, ['{}{} /NOLOGO /INCREMENTAL:NO'.format(setup_script, linker)])
+ toolset.flags('msvc.archive', '.LD' , cpu_conditions, ['{}{} /lib /NOLOGO' .format(setup_script, linker)])
+ toolset.flags('msvc.compile', '.IDL', cpu_conditions, ['{}{}' .format(setup_script, idl_compiler)])
+ toolset.flags('msvc.compile', '.MC' , cpu_conditions, ['{}{}' .format(setup_script, mc_compiler)])
+ toolset.flags('msvc.link' , '.MT' , cpu_conditions, ['{}{} -nologo' .format(setup_script, manifest_tool)])
+
+ if cc_filter:
+ toolset.flags('msvc', '.CC.FILTER', cpu_conditions, ['"|" {}'.format(cc_filter)])
+
+ # Set version-specific flags.
+ configure_version_specific('msvc', version, conditions)
+
+
+# Returns the default installation path for the given version.
+#
+def default_path(version):
+ # Use auto-detected path if possible.
+ options = __versions.get(version, 'options')
+ tmp_path = None
+ if options:
+ tmp_path = feature.get_values('<command>', options)
+
+ if tmp_path:
+ tmp_path="".join(tmp_path)
+ tmp_path=os.path.dirname(tmp_path)
+ else:
+ env_var_var_name = '__version_{}_env'.format(version.replace('.','_'))
+ vc_path = None
+ if env_var_var_name in globals():
+ env_var_name = globals()[env_var_var_name]
+ if env_var_name in os.environ:
+ vc_path = environ[env_var_name]
+ if vc_path:
+ vc_path = os.path.join(vc_path,globals()['__version_{}_envpath'.format(version.replace('.','_'))])
+ tmp_path = os.path.normpath(vc_path)
+
+ var_name = '__version_{}_path'.format(version.replace('.','_'))
+ if not tmp_path and var_name in globals():
+ tmp_path = os.path.normpath(os.path.join(common.get_program_files_dir(), globals()[var_name]))
+ return tmp_path
+
+
+# Returns either the default installation path (if 'version' is not empty) or
+# list of all known default paths (if no version is given)
+#
+def default_paths(version = None):
+ possible_paths = []
+ if version:
+ path = default_path(version)
+ if path:
+ possible_paths.append(path)
+ else:
+ for i in _known_versions:
+ path = default_path(i)
+ if path:
+ possible_paths.append(path)
+ return possible_paths
+
+
+class MsvcLinkingGenerator(builtin.LinkingGenerator):
+ # Calls the base version. If necessary, also creates targets for the
+ # PDB and manifest files, giving them exact names derived from the
+ # main target's name.
+ def generated_targets(self, sources, prop_set, project, name):
+ result = builtin.LinkingGenerator.generated_targets(self, sources, prop_set, project, name)
+ if result:
+ name_main = result[0].name()
+ action = result[0].action()
+
+ if prop_set.get('<debug-symbols>') == 'on':
+ # We force exact name on PDB. The reason is tagging -- the tag rule may
+ # reasonably special case some target types, like SHARED_LIB. The tag rule
+ # will not catch PDB, and it cannot even easily figure out whether the PDB is
+ # paired with a SHARED_LIB, an EXE or something else. Because the PDB always
+ # gets the same name as the main target, with .pdb as the extension, just force it.
+ target = FileTarget(name_main.split_ext()[0]+'.pdb','PDB',project,action,True)
+ registered_target = virtual_target.register(target)
+ if target != registered_target:
+ action.replace_targets(target,registered_target)
+ result.append(registered_target)
+ if prop_set.get('<embed-manifest>') == 'off':
+ # The manifest is an evil target. It has '.manifest' appended to the name
+ # of the main target, including the extension, e.g. a.exe.manifest. We use
+ # an 'exact' name to achieve this effect.
+ target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True)
+ registered_target = virtual_target.register(target)
+ if target != registered_target:
+ action.replace_targets(target,registered_target)
+ result.append(registered_target)
+ return result
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+
+def register_toolset_really():
+ feature.extend('toolset', ['msvc'])
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature( 'toolset', 'msvc', 'vendor', ['intel'], ['propagated', 'optional'])
+
+ # Inherit MIDL flags.
+ toolset.inherit_flags('msvc', 'midl')
+
+ # Inherit MC flags.
+ toolset.inherit_flags('msvc','mc')
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add_requirements(['<toolset>msvc,<runtime-link>shared:<threading>multi'])
+
+ # Declare msvc toolset specific features.
+ feature.feature('debug-store', ['object', 'database'], ['propagated'])
+ feature.feature('pch-source', [], ['dependency', 'free'])
+
+ # Declare generators.
+
+ # TODO: Is it possible to combine these? Make the generators
+ # non-composing so that they do not convert each source into a separate
+ # .rsp file.
+ generators.register(MsvcLinkingGenerator('msvc.link', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['EXE'], ['<toolset>msvc']))
+ generators.register(MsvcLinkingGenerator('msvc.link.dll', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['SHARED_LIB','IMPORT_LIB'], ['<toolset>msvc']))
+
+ builtin.register_archiver('msvc.archive', ['OBJ'], ['STATIC_LIB'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c', ['C'], ['OBJ'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['<toolset>msvc'])
+
+ # Using 'register-c-compiler' adds the build directory to INCLUDES.
+ builtin.register_c_compiler('msvc.compile.rc', ['RC'], ['OBJ(%_res)'], ['<toolset>msvc'])
+ generators.override('msvc.compile.rc', 'rc.compile.resource')
+ generators.register_standard('msvc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>msvc'])
+
+ builtin.register_c_compiler('msvc.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], ['<toolset>msvc'])
+ generators.override('msvc.compile.idl', 'midl.compile.idl')
+
+ generators.register_standard('msvc.compile.mc', ['MC'], ['H','RC'], ['<toolset>msvc'])
+ generators.override('msvc.compile.mc', 'mc.compile')
+
+ # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
+ # the latter have their HPP type derived from H. The type of compilation
+ # is determined entirely by the destination type.
+ generators.register(MsvcPchGenerator('msvc.compile.c.pch', False, ['H'], ['C_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
+ generators.register(MsvcPchGenerator('msvc.compile.c++.pch', False, ['H'], ['CPP_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
+
+ generators.override('msvc.compile.c.pch', 'pch.default-c-pch-generator')
+ generators.override('msvc.compile.c++.pch', 'pch.default-cpp-pch-generator')
+
+ toolset.flags('msvc.compile', 'PCH_FILE' , ['<pch>on'], ['<pch-file>' ])
+ toolset.flags('msvc.compile', 'PCH_SOURCE', ['<pch>on'], ['<pch-source>'])
+ toolset.flags('msvc.compile', 'PCH_HEADER', ['<pch>on'], ['<pch-header>'])
+
+ #
+ # Declare flags for compilation.
+ #
+ toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>speed'], ['/O2'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>space'], ['/O1'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium ], ['/G1'])
+ toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium2 ], ['/G2'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>object'], ['/Z7'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>database'], ['/Zi'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>off'], ['/Od'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>off'], ['/Ob0'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>on'], ['/Ob1'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>full'], ['/Ob2'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>on'], ['/W3'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>off'], ['/W0'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>all'], ['/W4'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings-as-errors>on'], ['/WX'])
+
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off'], ['/EHs'])
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on'], ['/EHsc'])
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off'], ['/EHa'])
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on'], ['/EHac'])
+
+ # By default 8.0 enables rtti support while prior versions disabled it. We
+ # simply enable or disable it explicitly so we do not have to depend on this
+ # default behaviour.
+ toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>on'], ['/GR'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>off'], ['/GR-'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>shared'], ['/MD'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>shared'], ['/MDd'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>static/<threading>multi'], ['/MT'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>static/<threading>multi'], ['/MTd'])
+
+ toolset.flags('msvc.compile', 'OPTIONS', [], ['<cflags>'])
+ toolset.flags('msvc.compile.c++', 'OPTIONS', [], ['<cxxflags>'])
+
+ toolset.flags('msvc.compile', 'PDB_CFLAG', ['<debug-symbols>on/<debug-store>database'],['/Fd'])
+
+ toolset.flags('msvc.compile', 'DEFINES', [], ['<define>'])
+ toolset.flags('msvc.compile', 'UNDEFS', [], ['<undef>'])
+ toolset.flags('msvc.compile', 'INCLUDES', [], ['<include>'])
+
+ # Declare flags for the assembler.
+ toolset.flags('msvc.compile.asm', 'USER_ASMFLAGS', [], ['<asmflags>'])
+
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<debug-symbols>on'], ['/Zi', '/Zd'])
+
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>on'], ['/W3'])
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>off'], ['/W0'])
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>all'], ['/W4'])
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings-as-errors>on'], ['/WX'])
+
+ toolset.flags('msvc.compile.asm', 'DEFINES', [], ['<define>'])
+
+ # Declare flags for linking.
+ toolset.flags('msvc.link', 'PDB_LINKFLAG', ['<debug-symbols>on/<debug-store>database'], ['/PDB']) # not used yet
+ toolset.flags('msvc.link', 'LINKFLAGS', ['<debug-symbols>on'], ['/DEBUG'])
+ toolset.flags('msvc.link', 'DEF_FILE', [], ['<def-file>'])
+
+ # The linker disables the default optimizations when using /DEBUG so we
+ # have to enable them manually for release builds with debug symbols.
+ toolset.flags('msvc', 'LINKFLAGS', ['<debug-symbols>on/<runtime-debugging>off'], ['/OPT:REF,ICF'])
+
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>console'], ['/subsystem:console'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>gui'], ['/subsystem:windows'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>wince'], ['/subsystem:windowsce'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>native'], ['/subsystem:native'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>auto'], ['/subsystem:posix'])
+
+ toolset.flags('msvc.link', 'OPTIONS', [], ['<linkflags>'])
+ toolset.flags('msvc.link', 'LINKPATH', [], ['<library-path>'])
+
+ toolset.flags('msvc.link', 'FINDLIBS_ST', [], ['<find-static-library>'])
+ toolset.flags('msvc.link', 'FINDLIBS_SA', [], ['<find-shared-library>'])
+ toolset.flags('msvc.link', 'LIBRARY_OPTION', ['<toolset>msvc'], [''])
+ toolset.flags('msvc.link', 'LIBRARIES_MENTIONED_BY_FILE', [], ['<library-file>'])
+
+ toolset.flags('msvc.archive', 'AROPTIONS', [], ['<archiveflags>'])
+
+
+# Locates the requested setup script under the given folders and returns its
+# full path, or nothing in case the script can not be found. If multiple
+# scripts are found, only the first one is returned.
+#
+# TODO: There used to exist a code comment for the msvc.init rule stating that
+# we do not correctly detect the location of the vcvars32.bat setup script for
+# the free VC7.1 tools when the user explicitly provides a path. This should
+# either be tested, or the whole comment should be removed if this toolset
+# version is no longer important.
+#
+def locate_default_setup(command, parent, setup_name):
+ for setup in [os.path.join(dir,setup_name) for dir in [command,parent]]:
+ if os.path.exists(setup):
+ return setup
+ return None
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+def register_configuration(version, path=None):
+ if path:
+ command = os.path.join(path, 'cl.exe')
+ if os.path.exists(command):
+ if debug():
+                print "notice: [msvc-cfg] msvc-{} detected, command: '{}'".format(version, command)
+ __versions.register(version)
+ __versions.set(version,'options',['<command>{}'.format(command)])
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+# Similar to Configurations, but remembers the first registered configuration.
+class MSVCConfigurations(Configurations):
+ def __init__(self):
+ Configurations.__init__(self)
+ self.first_ = None
+
+ def register(self, id):
+ Configurations.register(self,id)
+ if not self.first_:
+ self.first_ = id
+
+ def first(self):
+ return self.first_
+
+
+# List of all registered configurations.
+__versions = MSVCConfigurations()
+
+# Supported CPU architectures.
+__cpu_arch_i386 = [
+ '<architecture>/<address-model>',
+ '<architecture>/<address-model>32',
+ '<architecture>x86/<address-model>',
+ '<architecture>x86/<address-model>32']
+
+__cpu_arch_amd64 = [
+ '<architecture>/<address-model>64',
+ '<architecture>x86/<address-model>64']
+
+__cpu_arch_ia64 = [
+ '<architecture>ia64/<address-model>',
+ '<architecture>ia64/<address-model>64']
+
+
+# Supported CPU types (only Itanium optimization options are supported from
+# VC++ 2005 on). See
+# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
+# detailed information.
+__cpu_type_g5 = ['i586', 'pentium', 'pentium-mmx' ]
+__cpu_type_g6 = ['i686', 'pentiumpro', 'pentium2', 'pentium3', 'pentium3m', 'pentium-m', 'k6',
+ 'k6-2', 'k6-3', 'winchip-c6', 'winchip2', 'c3', 'c3-2' ]
+__cpu_type_em64t = ['prescott', 'nocona', 'core2', 'corei7', 'corei7-avx', 'core-avx-i', 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'merom',
+ 'merom-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
+ 'yorksfield', 'nehalem', 'sandy-bridge', 'ivy-bridge', 'haswell' ]
+__cpu_type_amd64 = ['k8', 'opteron', 'athlon64', 'athlon-fx', 'k8-sse3', 'opteron-sse3', 'athlon64-sse3', 'amdfam10', 'barcelona',
+ 'bdver1', 'bdver2', 'bdver3', 'btver1', 'btver2' ]
+__cpu_type_g7 = ['pentium4', 'pentium4m', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
+                 'athlon-mp'] + __cpu_type_em64t + __cpu_type_amd64
+__cpu_type_itanium = ['itanium', 'itanium1', 'merced']
+__cpu_type_itanium2 = ['itanium2', 'mckinley']
+
+
+# Known toolset versions, in order of preference.
+_known_versions = ['15.0', '14.0', '12.0', '11.0', '10.0', '10.0express', '9.0', '9.0express', '8.0', '8.0express', '7.1', '7.1toolkit', '7.0', '6.0']
+
+# Version aliases.
+__version_alias_6 = '6.0'
+__version_alias_6_5 = '6.0'
+__version_alias_7 = '7.0'
+__version_alias_8 = '8.0'
+__version_alias_9 = '9.0'
+__version_alias_10 = '10.0'
+__version_alias_11 = '11.0'
+__version_alias_12 = '12.0'
+__version_alias_14 = '14.0'
+__version_alias_15 = '15.0'
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+__version_6_0_reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++"
+__version_7_0_reg = "VisualStudio\\7.0\\Setup\\VC"
+__version_7_1_reg = "VisualStudio\\7.1\\Setup\\VC"
+__version_8_0_reg = "VisualStudio\\8.0\\Setup\\VC"
+__version_8_0express_reg = "VCExpress\\8.0\\Setup\\VC"
+__version_9_0_reg = "VisualStudio\\9.0\\Setup\\VC"
+__version_9_0express_reg = "VCExpress\\9.0\\Setup\\VC"
+__version_10_0_reg = "VisualStudio\\10.0\\Setup\\VC"
+__version_10_0express_reg = "VCExpress\\10.0\\Setup\\VC"
+__version_11_0_reg = "VisualStudio\\11.0\\Setup\\VC"
+__version_12_0_reg = "VisualStudio\\12.0\\Setup\\VC"
+__version_14_0_reg = "VisualStudio\\14.0\\Setup\\VC"
+__version_15_0_reg = "VisualStudio\\15.0\\Setup\\VC"
+
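+# The following helper is an illustrative sketch only and is not used by this
+# module: it shows roughly how one of the keys above could be resolved to an
+# installation directory with the Python 2 _winreg module. The "ProductDir"
+# value name and the error handling are assumptions made for the example.
+def _example_vc_product_dir(relative_key):
+    try:
+        import _winreg
+        key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
+                              "SOFTWARE\\Microsoft\\" + relative_key)
+        return _winreg.QueryValueEx(key, "ProductDir")[0]
+    except Exception:
+        return None
+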
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+__version_7_1toolkit_path = 'Microsoft Visual C++ Toolkit 2003\\bin'
+__version_7_1toolkit_env = 'VCToolkitInstallDir'
+
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
+__version_7_1toolkit_envpath = 'bin'
+#
+#
+# Auto-detect all the available msvc installations on the system.
+auto_detect_toolset_versions()
+
+# And finally trigger the actual Boost Build toolset registration.
+register_toolset()
diff --git a/src/boost/tools/build/src/tools/notfile.jam b/src/boost/tools/build/src/tools/notfile.jam
new file mode 100644
index 000000000..7d0985b45
--- /dev/null
+++ b/src/boost/tools/build/src/tools/notfile.jam
@@ -0,0 +1,65 @@
+# Copyright (c) 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+import project ;
+import targets ;
+import toolset ;
+import type ;
+
+
+type.register NOTFILE_MAIN ;
+
+
+class notfile-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8)
+ : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) :
+ $(17) : $(18) : $(19) ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local action ;
+ local action-name = [ $(property-set).get <action> ] ;
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+ if $(m)
+ {
+ action = [ new action $(sources) : $(m[1]) : $(property-set) ] ;
+ }
+ else
+ {
+ action = [ new action $(sources) : notfile.run : $(property-set) ] ;
+ }
+ local t = [ new notfile-target $(name) : $(project) : $(action) ] ;
+ return [ virtual-target.register $(t) ] ;
+ }
+}
+
+
+generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
+
+
+toolset.flags notfile.run ACTION : <action> ;
+
+
+actions run
+{
+ $(ACTION)
+}
+
+
+rule notfile ( target-name : action + : sources * : requirements * :
+ default-build * )
+{
+ targets.create-typed-target NOTFILE_MAIN : [ project.current ] :
+ $(target-name) : $(sources) : $(requirements) <action>$(action) :
+ $(default-build) ;
+}
+
+IMPORT $(__name__) : notfile : : notfile ;
diff --git a/src/boost/tools/build/src/tools/notfile.py b/src/boost/tools/build/src/tools/notfile.py
new file mode 100644
index 000000000..afbf68fb0
--- /dev/null
+++ b/src/boost/tools/build/src/tools/notfile.py
@@ -0,0 +1,51 @@
+# Status: ported.
+# Base revision: 64429.
+#
+# Copyright (c) 2005-2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+
+import b2.build.type as type
+import b2.build.generators as generators
+import b2.build.virtual_target as virtual_target
+import b2.build.toolset as toolset
+import b2.build.targets as targets
+
+from b2.manager import get_manager
+from b2.util import bjam_signature
+
+type.register("NOTFILE_MAIN")
+
+class NotfileGenerator(generators.Generator):
+
+ def run(self, project, name, ps, sources):
+ action_name = ps.get('action')[0]
+ if action_name[0] == '@':
+ action = virtual_target.Action(get_manager(), sources, action_name[1:], ps)
+ else:
+ action = virtual_target.Action(get_manager(), sources, "notfile.run", ps)
+
+ return [get_manager().virtual_targets().register(
+ virtual_target.NotFileTarget(name, project, action))]
+
+generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"]))
+
+toolset.flags("notfile.run", "ACTION", [], ["<action>"])
+
+get_manager().engine().register_action("notfile.run", "$(ACTION)")
+
+@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"]))
+def notfile(target_name, action, sources, requirements, default_build):
+
+ requirements.append("<action>" + action)
+
+ return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements,
+ default_build, [])
+
+
+get_manager().projects().add_rule("notfile", notfile)
diff --git a/src/boost/tools/build/src/tools/openssl.jam b/src/boost/tools/build/src/tools/openssl.jam
new file mode 100644
index 000000000..8efcd69a6
--- /dev/null
+++ b/src/boost/tools/build/src/tools/openssl.jam
@@ -0,0 +1,140 @@
+# Copyright (c) 2019 Damian Jarek
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the openssl library
+#
+# After 'using openssl', the following targets are available:
+#
+# /openssl//ssl -- The SSL/TLS library
+# /openssl//crypto -- The cryptography library
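+#
+# For instance (an illustrative sketch; the target and source names below are
+# placeholders), a Jamfile could link against them with:
+#
+#   exe tls-client : client.cpp /openssl//ssl /openssl//crypto ;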
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import os ;
+import property ;
+import property-set ;
+
+header = openssl.h ;
+ssl_names = ssl ssleay32 ;
+crypto_names = crypto libeay32 ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the openssl library.
+#
+# openssl can be configured to use pre-existing binaries.
+#
+# Options for configuring a prebuilt openssl::
+#
+# <search>
+# The directory containing the openssl binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the openssl headers.
+#
+# If none of these options is specified, then the environment
+# variables OPENSSL_LIBRARY_PATH, OPENSSL_NAME, and OPENSSL_INCLUDE will
+# be used instead.
+#
+# Examples::
+#
+# # Find openssl in the default system location
+# using openssl ;
+# # Find openssl in /usr/local
+# using openssl : 1.2.7
+# : <include>/usr/local/include <search>/usr/local/lib ;
+#
+rule init (
+ version ?
+ # The OpenSSL version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the openssl target
+
+ : is-default ?
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project openssl ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+
+ if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
+ {
+ is-default = true ;
+ }
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [openssl] openssl is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "openssl is already configured" ;
+ }
+ return ;
+ }
+ else
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [openssl] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [openssl] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local ssl_lib = [ new ac-library ssl : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : ssl ] ;
+ $(ssl_lib).set-header openssl/ssl.h ;
+ $(ssl_lib).set-default-names $(ssl_names) ;
+
+ local crypto_lib = [ new ac-library crypto : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : crypto ] ;
+ $(crypto_lib).set-header openssl/crypto.h ;
+ $(crypto_lib).set-default-names $(crypto_names) ;
+
+ targets.main-target-alternative $(ssl_lib) ;
+ targets.main-target-alternative $(crypto_lib) ;
+ }
+ .configured.$(condition) = true ;
+}
+
diff --git a/src/boost/tools/build/src/tools/package.jam b/src/boost/tools/build/src/tools/package.jam
new file mode 100644
index 000000000..7ae9dc54a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/package.jam
@@ -0,0 +1,274 @@
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides mechanism for installing whole packages into a specific directory
+# structure. This is opposed to the 'install' rule, that installs a number of
+# targets to a single directory, and does not care about directory structure at
+# all.
+
+# Example usage:
+#
+# package.install boost : <properties>
+# : <binaries>
+# : <libraries>
+# : <headers>
+# ;
+#
+# This will install binaries, libraries and headers to the 'proper' location,
+# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
+# --includedir.
+#
+# The rule is just a convenient wrapper, avoiding the need to define several
+# 'install' targets.
+#
+# The only install-related feature is <install-source-root>. It applies to
+# headers only; if present, the paths of headers relative to the source root
+# will be retained after installing. If it is not specified, then "." is
+# assumed, so relative paths in headers are always preserved.
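+#
+# For example (the paths below are illustrative), with
+#
+#   package.install foo : <install-source-root>libs/foo/include
+#       : : : libs/foo/include/foo/foo.hpp ;
+#
+# the header is installed as <includedir>/foo/foo.hpp rather than with its full
+# source-relative path.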
+
+import "class" : new ;
+import option ;
+import project ;
+import feature ;
+import path ;
+import property ;
+import stage ;
+import targets ;
+import modules ;
+import os ;
+
+feature.feature install-default-prefix : : free incidental ;
+
+class package-paths
+{
+ import feature ;
+ import modules ;
+ import option ;
+ import os ;
+ import path ;
+ rule __init__ ( default-prefix )
+ {
+ local explicit-options = [ MATCH --(prefix|bindir|libdir|includedir|datarootdir)=.*
+ : [ modules.peek : ARGV ] ] ;
+ self.has-$(explicit-options) = true ;
+ if prefix in $(explicit-options)
+ {
+ # If --prefix is explicitly specified on the command line,
+            # then we need to wipe away any settings of bindir, libdir,
+            # includedir and datarootdir that are specified via options in
+            # config files.
+ option.set bindir : ;
+ option.set libdir : ;
+ option.set includedir : ;
+ option.set datarootdir : ;
+ }
+
+ handle-path prefix : $(default-prefix) ;
+ handle-path libdir : $(self.prefix)/lib ;
+ handle-path bindir : $(self.prefix)/bin ;
+ handle-path includedir : $(self.prefix)/include ;
+ handle-path datarootdir : $(self.prefix)/share ;
+ }
+
+ local rule handle-path ( option : default-value )
+ {
+ local opt = [ option.get $(option) ] ;
+ if $(opt)
+ {
+ opt = [ path.root [ path.make $(opt) ] [ path.pwd ] ] ;
+ }
+ else
+ {
+ opt = $(default-value) ;
+ }
+ self.$(option) = $(opt) ;
+ }
+
+ rule prefix ( )
+ {
+ return $(self.prefix) ;
+ }
+
+ rule libdir ( )
+ {
+ return $(self.libdir) ;
+ }
+
+ rule bindir ( )
+ {
+ return $(self.bindir) ;
+ }
+
+ rule includedir ( )
+ {
+ return $(self.includedir) ;
+ }
+
+ rule datarootdir ( )
+ {
+ return $(self.datarootdir) ;
+ }
+
+ rule get ( option )
+ {
+ if ! $(self.$(option))
+ {
+ local info = [ modules.peek package : .options.$(option) ] ;
+ local default-value = $(info[1]) ;
+ local relative-to = $(info[2]) ;
+ if $(self.has-$(relative-to))
+ {
+ option.set $(option) ;
+ self.has-$(option) = true ;
+ }
+ if [ MATCH --$(option)=(.*) : [ modules.peek : ARGV ] ]
+ {
+ self.has-$(option) = true ;
+ }
+ local adjusted-default =
+ [ path.join [ get $(relative-to) ] $(default-value) ] ;
+ handle-path $(option) : $(adjusted-default) ;
+ }
+ return $(self.$(option)) ;
+ }
+}
+
+# Registers an additional path option. The option name
+# can then be used with a package-paths object.
+#
+# default-path is the default path that will be used if
+# the option is not set explicitly. It will be interpreted
+# relative to another option. This allows options to be
+# defined hierarchically with --prefix as the root.
+#
+# relative-to should be the name of another option. It defaults
+# to prefix.
+#
+# Example::
+#
+# package.add-path-option cmakedir : cmake : libdir ;
+# cmakedir = [ $(mypaths).get cmakedir ] ; # defaults to /usr/local/lib/cmake
+#
+rule add-path-option ( name : default-path : relative-to ? )
+{
+ local value = $(default-path) $(relative-to:E=prefix) ;
+ if $(.options.$(name)) && $(.options.$(name)) != $(value)
+ {
+ import errors ;
+ errors.error Duplicate definition of $(name) ;
+ }
+ .options.$(name) = $(value) ;
+}
+
+
+# Returns a package-paths object that can be used
+# to find the various install paths. If requirements
+# contains <install-default-prefix> then that will be used
+# as the default prefix, otherwise a platform specific
+# default prefix will be used. All other properties
+# in requirements are ignored.
+#
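+# Example (illustrative):
+#
+#   local p = [ package.paths mypkg : <install-default-prefix>/opt/mypkg ] ;
+#   local lib = [ $(p).libdir ] ;   # /opt/mypkg/lib unless --libdir was given
+#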
+rule paths ( package-name : requirements * )
+{
+ local default-prefix = [ feature.get-values <install-default-prefix> : $(requirements) ] ;
+ # Or some likely defaults if neither is given.
+ if ! $(default-prefix)
+ {
+ if [ os.name ] = NT { default-prefix = C:\\$(package-name) ; }
+ else { default-prefix = /usr/local ; }
+ }
+ default-prefix = [ path.make $(default-prefix) ] ;
+ if ! $(.package-paths.$(default-prefix))
+ {
+ .package-paths.$(default-prefix) = [ new package-paths $(default-prefix) ] ;
+ }
+ return $(.package-paths.$(default-prefix)) ;
+}
+
+rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * )
+{
+ package-name ?= $(name) ;
+
+ # If <install-source-root> is not specified, all headers are installed to
+ # prefix/include, no matter what their relative path is. Sometimes that is
+ # what is needed.
+ local install-source-root = [ property.select <install-source-root> :
+ $(requirements) ] ;
+ install-source-root = $(install-source-root:G=) ;
+ requirements = [ property.change $(requirements) : <install-source-root> ] ;
+
+ local install-header-subdir = [ property.select <install-header-subdir> :
+ $(requirements) ] ;
+ install-header-subdir = /$(install-header-subdir:G=) ;
+ install-header-subdir ?= "" ;
+ requirements = [ property.change $(requirements) : <install-header-subdir> ]
+ ;
+
+ # First, figure out all locations. Use the default if no prefix option
+ # given.
+ local paths = [ paths $(package-name) : $(requirements) ] ;
+
+ # Binaries.
+ local bin-locate = [ $(paths).bindir ] ;
+
+ # Object code libraries.
+ local lib-locate = [ $(paths).libdir ] ;
+
+ # Source header files.
+ local include-locate = [ $(paths).includedir ] ;
+
+ stage.install $(name)-bin : $(binaries) : $(requirements)
+ <location>$(bin-locate) ;
+ alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ;
+
+ # Since the install location of shared libraries differs on universe
+ # and cygwin, use target alternatives to make different targets.
+    # We should have used indirect conditional requirements, but it's
+    # awkward to pass bin-locate and lib-locate from there to another rule.
+ alias $(name)-lib-shared : $(name)-lib-shared-universe ;
+ alias $(name)-lib-shared : $(name)-lib-shared-cygwin : <target-os>cygwin ;
+
+ # For shared libraries, we install both explicitly specified one and the
+ # shared libraries that the installed executables depend on.
+ stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements)
+ <location>$(lib-locate) <install-dependencies>on <install-type>SHARED_LIB ;
+ stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements)
+ <location>$(bin-locate) <install-dependencies>on <install-type>SHARED_LIB ;
+
+ # For static libraries, we do not care about executable dependencies, since
+ # static libraries are already incorporated into them.
+ stage.install $(name)-lib-static : $(libraries) : $(requirements)
+ <location>$(lib-locate) <install-dependencies>on <install-type>STATIC_LIB ;
+ stage.install $(name)-headers : $(headers) : $(requirements)
+ <location>$(include-locate)$(install-header-subdir)
+ <install-source-root>$(install-source-root) ;
+ alias $(name) : $(name)-bin $(name)-lib $(name)-headers ;
+
+ local c = [ project.current ] ;
+ modules.call-in [ $(c).project-module ] : explicit $(name) $(name)-bin
+ $(name)-lib $(name)-headers $(name)-lib-shared $(name)-lib-static
+ $(name)-lib-shared-universe $(name)-lib-shared-cygwin ;
+}
+
+rule install-data ( target-name : package-name : data * : requirements * )
+{
+    package-name ?= $(target-name) ;
+
+ local paths = [ paths $(package-name) : $(requirements) ] ;
+ local datadir = [ $(paths).datarootdir ] ;
+
+ stage.install $(target-name)
+ : $(data)
+ : $(requirements) <location>$(datadir)/$(package-name)
+ ;
+
+ local c = [ project.current ] ;
+ local project-module = [ $(c).project-module ] ;
+ module $(project-module)
+ {
+ explicit $(1) ;
+ }
+}
diff --git a/src/boost/tools/build/src/tools/package.py b/src/boost/tools/build/src/tools/package.py
new file mode 100644
index 000000000..a3b1baef4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/package.py
@@ -0,0 +1,168 @@
+# Status: ported
+# Base revision: 64488
+#
+# Copyright (c) 2005, 2010 Vladimir Prus.
+# Copyright 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides mechanism for installing whole packages into a specific directory
+# structure. This is opposed to the 'install' rule, that installs a number of
+# targets to a single directory, and does not care about directory structure at
+# all.
+
+# Example usage:
+#
+# package.install boost : <properties>
+# : <binaries>
+# : <libraries>
+# : <headers>
+# ;
+#
+# This will install binaries, libraries and headers to the 'proper' location,
+# given by command line options --prefix, --exec-prefix, --bindir, --libdir and
+# --includedir.
+#
+# The rule is just a convenient wrapper, avoiding the need to define several
+# 'install' targets.
+#
+# The only install-related feature is <install-source-root>. It applies to
+# headers only; if present, the paths of headers relative to the source root
+# will be retained after installing. If it is not specified, then "." is
+# assumed, so relative paths in headers are always preserved.
+
+import b2.build.feature as feature
+import b2.build.property as property
+import b2.util.option as option
+import b2.tools.stage as stage
+
+from b2.build.alias import alias
+
+from b2.manager import get_manager
+
+from b2.util import bjam_signature
+from b2.util.utility import ungrist
+
+
+import os
+
+feature.feature("install-default-prefix", [], ["free", "incidental"])
+
+@bjam_signature((["name", "package_name", "?"], ["requirements", "*"],
+ ["binaries", "*"], ["libraries", "*"], ["headers", "*"]))
+def install(name, package_name=None, requirements=[], binaries=[], libraries=[], headers=[]):
+
+ requirements = requirements[:]
+ binaries = binaries[:]
+    libraries = libraries[:]
+
+ if not package_name:
+ package_name = name
+
+ if option.get("prefix"):
+ # If --prefix is explicitly specified on the command line,
+        # then we need to wipe away any settings of bindir/libdir/includedir
+        # that are specified via options in config files.
+ option.set("bindir", None)
+ option.set("libdir", None)
+ option.set("includedir", None)
+
+ # If <install-source-root> is not specified, all headers are installed to
+ # prefix/include, no matter what their relative path is. Sometimes that is
+ # what is needed.
+ install_source_root = property.select('install-source-root', requirements)
+ if install_source_root:
+ requirements = property.change(requirements, 'install-source-root', None)
+
+ install_header_subdir = property.select('install-header-subdir', requirements)
+ if install_header_subdir:
+ install_header_subdir = ungrist(install_header_subdir[0])
+ requirements = property.change(requirements, 'install-header-subdir', None)
+
+ # First, figure out all locations. Use the default if no prefix option
+ # given.
+ prefix = get_prefix(name, requirements)
+
+ # Architecture dependent files.
+ exec_locate = option.get("exec-prefix", prefix)
+
+ # Binaries.
+ bin_locate = option.get("bindir", os.path.join(prefix, "bin"))
+
+ # Object code libraries.
+ lib_locate = option.get("libdir", os.path.join(prefix, "lib"))
+
+ # Source header files.
+ include_locate = option.get("includedir", os.path.join(prefix, "include"))
+
+ stage.install(name + "-bin", binaries, requirements + ["<location>" + bin_locate])
+
+ alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"])
+
+ # Since the install location of shared libraries differs on universe
+ # and cygwin, use target alternatives to make different targets.
+    # We should have used indirect conditional requirements, but it's
+    # awkward to pass bin-locate and lib-locate from there to another rule.
+ alias(name + "-lib-shared", [name + "-lib-shared-universe"])
+ alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["<target-os>cygwin"])
+
+ # For shared libraries, we install both explicitly specified one and the
+ # shared libraries that the installed executables depend on.
+ stage.install(name + "-lib-shared-universe", binaries + libraries,
+ requirements + ["<location>" + lib_locate, "<install-dependencies>on",
+ "<install-type>SHARED_LIB"])
+ stage.install(name + "-lib-shared-cygwin", binaries + libraries,
+ requirements + ["<location>" + bin_locate, "<install-dependencies>on",
+ "<install-type>SHARED_LIB"])
+
+ # For static libraries, we do not care about executable dependencies, since
+ # static libraries are already incorporated into them.
+ stage.install(name + "-lib-static", libraries, requirements +
+ ["<location>" + lib_locate, "<install-dependencies>on", "<install-type>STATIC_LIB"])
+ stage.install(name + "-headers", headers, requirements \
+ + ["<location>" + os.path.join(include_locate, s) for s in install_header_subdir]
+ + install_source_root)
+
+ alias(name, [name + "-bin", name + "-lib", name + "-headers"])
+
+ pt = get_manager().projects().current()
+
+ for subname in ["bin", "lib", "headers", "lib-shared", "lib-static", "lib-shared-universe", "lib-shared-cygwin"]:
+ pt.mark_targets_as_explicit([name + "-" + subname])
+
+@bjam_signature((["target_name"], ["package_name"], ["data", "*"], ["requirements", "*"]))
+def install_data(target_name, package_name, data, requirements):
+ if not package_name:
+ package_name = target_name
+
+ if option.get("prefix"):
+ # If --prefix is explicitly specified on the command line,
+        # then we need to wipe away any setting of datarootdir
+ option.set("datarootdir", None)
+
+ prefix = get_prefix(package_name, requirements)
+ datadir = option.get("datarootdir", os.path.join(prefix, "share"))
+
+ stage.install(target_name, data,
+ requirements + ["<location>" + os.path.join(datadir, package_name)])
+
+ get_manager().projects().current().mark_targets_as_explicit([target_name])
+
+def get_prefix(package_name, requirements):
+
+ specified = property.select("install-default-prefix", requirements)
+ if specified:
+ specified = ungrist(specified[0])
+ prefix = option.get("prefix", specified)
+ requirements = property.change(requirements, "install-default-prefix", None)
+ # Or some likely defaults if neither is given.
+ if not prefix:
+ if os.name == "nt":
+ prefix = "C:\\" + package_name
+ elif os.name == "posix":
+ prefix = "/usr/local"
+
+ return prefix
+
diff --git a/src/boost/tools/build/src/tools/pathscale.jam b/src/boost/tools/build/src/tools/pathscale.jam
new file mode 100644
index 000000000..94abcf1c8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/pathscale.jam
@@ -0,0 +1,178 @@
+# Copyright 2006 Noel Belcourt
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import toolset : flags ;
+import feature ;
+import type ;
+import os ;
+import common ;
+import fortran ;
+
+feature.extend toolset : pathscale ;
+toolset.inherit pathscale : unix ;
+generators.override pathscale.prebuilt : builtin.prebuilt ;
+generators.override pathscale.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.pathscale.com/docs.html
+
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command pathscale : pathCC : $(command)
+ : /opt/ekopath/bin ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters pathscale
+ : version $(version) ] ;
+
+ common.handle-options pathscale : $(condition) : $(command) : $(options) ;
+
+ toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
+
+ toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
+ local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+ local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+
+ toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
+ toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pathscale.link FINDLIBS-SA : rt : unchecked ;
+
+ switch [ os.name ]
+ {
+ case SOLARIS :
+ toolset.flags pathscale.link RPATH_OPTION $(condition) : -Wl,-R, -Wl, : unchecked ;
+
+ case * : # GNU
+ toolset.flags pathscale.link RPATH_OPTION $(condition) : -Wl,-rpath= : unchecked ;
+ }
+}
+
+# Declare generators
+generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
+generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
+generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
+generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
+
+# Declare flags and actions for compilation
+flags pathscale.compile OPTIONS <optimization>off : -O0 ;
+flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
+flags pathscale.compile OPTIONS <optimization>space : -Os ;
+
+flags pathscale.compile OPTIONS <inlining>off : -noinline ;
+flags pathscale.compile OPTIONS <inlining>on : -inline ;
+flags pathscale.compile OPTIONS <inlining>full : -inline ;
+
+flags pathscale.compile OPTIONS <warnings>off : -woffall ;
+flags pathscale.compile OPTIONS <warnings>on : -Wall ;
+flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
+flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
+flags pathscale.compile OPTIONS <profiling>on : -pg ;
+flags pathscale.compile OPTIONS <link>shared : -fPIC ;
+flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
+flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
+
+flags pathscale.compile USER_OPTIONS <cflags> ;
+flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
+flags pathscale.compile DEFINES <define> ;
+flags pathscale.compile INCLUDES <include> ;
+
+flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
+flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.fortran90 ( targets * : sources * : properties * )
+{
+  # The SPACE variable is used to insert spaces between targets; this is necessary.
+ SPACE on $(targets) = " " ;
+  # Serialize execution of the compile.fortran90 action: F90 sources must be
+  # compiled in a particular order, so we serialize the build because a
+  # parallel F90 compile might fail.
+ JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
+}
+
+actions compile.fortran90
+{
+ "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
+# Strip the binary when no debugging is needed
+flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
+flags pathscale.link OPTIONS <profiling>on : -pg ;
+flags pathscale.link USER_OPTIONS <linkflags> ;
+flags pathscale.link LINKPATH <library-path> ;
+flags pathscale.link FINDLIBS-ST <find-static-library> ;
+flags pathscale.link FINDLIBS-SA <find-shared-library> ;
+flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
+flags pathscale.link LIBRARIES <library-file> ;
+flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
+flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pathscale.link RPATH <dll-path> ;
+# On gcc, there are separate options for dll path at runtime and
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it's a bad idea.
+flags pathscale.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" $(RPATH_OPTION:E=-Wl,-rpath=)"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" $(RPATH_OPTION:E=-Wl,-rpath=)"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Declare action for creating static libraries
+# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
+actions piecemeal archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/pch.jam b/src/boost/tools/build/src/tools/pch.jam
new file mode 100644
index 000000000..4737bda30
--- /dev/null
+++ b/src/boost/tools/build/src/tools/pch.jam
@@ -0,0 +1,95 @@
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##### Using Precompiled Headers (Quick Guide) #####
+#
+# Make precompiled mypch.hpp:
+#
+# import pch ;
+#
+# cpp-pch mypch
+# : # sources
+# mypch.hpp
+#   : # requirements
+# <toolset>msvc:<source>mypch.cpp
+# ;
+#
+# Add cpp-pch to sources:
+#
+# exe hello
+# : main.cpp hello.cpp mypch
+# ;
+
+import "class" : new ;
+import type ;
+import feature ;
+import generators ;
+
+type.register PCH : pch ;
+
+type.register C_PCH : : PCH ;
+type.register CPP_PCH : : PCH ;
+
+# Control precompiled header (PCH) generation.
+feature.feature pch :
+ on
+ off
+ : propagated ;
+
+
+feature.feature pch-header : : free dependency ;
+feature.feature pch-file : : free dependency ;
+
+# Base PCH generator. The 'run' method has the logic to prevent this generator
+# from being run unless it's being used for a top-level PCH target.
+class pch-generator : generator
+{
+ import property-set ;
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ if ! $(name)
+ {
+            # Unless this generator is invoked as the top-most generator for a
+            # main target, fail. This allows using the 'H' type as an input
+            # type for this generator, while preventing B2 from trying this
+            # generator when it is not explicitly asked for.
+            #
+            # One bad example is msvc, where the pch generator produces both a
+            # PCH target and an OBJ target, so if any header is generated (e.g.
+            # by bison or by msidl), we would try to use the pch generator to
+            # get an OBJ from that H, which is completely wrong. By restricting
+            # this generator to the pch main target only, this problem is
+            # avoided.
+ }
+ else
+ {
+ local r = [ run-pch $(project) $(name)
+ : [ $(property-set).add-raw <define>BOOST_BUILD_PCH_ENABLED ]
+ : $(sources) ] ;
+ return [ generators.add-usage-requirements $(r)
+ : <define>BOOST_BUILD_PCH_ENABLED ] ;
+ }
+ }
+
+ # This rule must be overridden by the derived classes.
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ }
+}
+
+
+# NOTE: requirements are empty, so the default pch generator can be applied
+# even when pch=off.
+generators.register
+ [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
+generators.register
+ [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
diff --git a/src/boost/tools/build/src/tools/pch.py b/src/boost/tools/build/src/tools/pch.py
new file mode 100644
index 000000000..71cb7166e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/pch.py
@@ -0,0 +1,83 @@
+# Status: Being ported by Steven Watanabe
+# Base revision: 47077
+#
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov
+# Copyright (c) 2008 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##### Using Precompiled Headers (Quick Guide) #####
+#
+# Make precompiled mypch.hpp:
+#
+# import pch ;
+#
+# cpp-pch mypch
+# : # sources
+# mypch.hpp
+#   : # requirements
+# <toolset>msvc:<source>mypch.cpp
+# ;
+#
+# Add cpp-pch to sources:
+#
+# exe hello
+# : main.cpp hello.cpp mypch
+# ;
+
+from b2.build import type, feature, generators
+from b2.tools import builtin
+
+type.register('PCH', ['pch'])
+type.register('C_PCH', [], 'PCH')
+type.register('CPP_PCH', [], 'PCH')
+
+# Control precompiled header (PCH) generation.
+feature.feature('pch',
+ ['on', 'off'],
+ ['propagated'])
+
+feature.feature('pch-header', [], ['free', 'dependency'])
+feature.feature('pch-file', [], ['free', 'dependency'])
+
+class PchGenerator(generators.Generator):
+ """
+ Base PCH generator. The 'run' method has the logic to prevent this generator
+ from being run unless it's being used for a top-level PCH target.
+ """
+ def action_class(self):
+ return builtin.CompileAction
+
+ def run(self, project, name, prop_set, sources):
+ if not name:
+            # Unless this generator is invoked as the top-most generator for a
+            # main target, fail. This allows using the 'H' type as an input
+            # type for this generator, while preventing Boost.Build from trying
+            # this generator when it is not explicitly asked for.
+            #
+            # One bad example is msvc, where the pch generator produces both a
+            # PCH target and an OBJ target, so if any header is generated (e.g.
+            # by bison or by msidl), we would try to use the pch generator to
+            # get an OBJ from that H, which is completely wrong. By restricting
+            # this generator to the pch main target only, this problem is
+            # avoided.
+ pass
+ else:
+ r = self.run_pch(project, name,
+ prop_set.add_raw(['<define>BOOST_BUILD_PCH_ENABLED']),
+ sources)
+ return generators.add_usage_requirements(
+ r, ['<define>BOOST_BUILD_PCH_ENABLED'])
+
+ # This rule must be overridden by the derived classes.
+ def run_pch(self, project, name, prop_set, sources):
+ pass
+
+# NOTE: requirements are empty, so the default pch generator can be applied
+# even when pch=off.
+generators.register(builtin.DummyGenerator(
+ "pch.default-c-pch-generator", False, [], ['C_PCH'], []))
+generators.register(builtin.DummyGenerator(
+ "pch.default-cpp-pch-generator", False, [], ['CPP_PCH'], []))
diff --git a/src/boost/tools/build/src/tools/pgi.jam b/src/boost/tools/build/src/tools/pgi.jam
new file mode 100644
index 000000000..e40fcd09a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/pgi.jam
@@ -0,0 +1,138 @@
+# Copyright Noel Belcourt 2007.
+# Copyright 2017, NVIDIA CORPORATION.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+import gcc ;
+
+feature.extend toolset : pgi ;
+toolset.inherit pgi : unix ;
+generators.override pgi.prebuilt : builtin.lib-generator ;
+generators.override pgi.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.pgroup.com/resources/docs.htm
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters pgi : version $(version) ] ;
+
+ local l_command = [ common.get-invocation-command pgi : pgc++ : $(command) ] ;
+
+ common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(l_command[-1]:B=pgcc) ;
+
+ toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ flags pgi.compile DEFINES $(condition) :
+ [ feature.get-values <define> : $(options) ] : unchecked ;
+
+ # set link flags
+ flags pgi.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+}
+
+# Declare generators
+generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
+generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
+generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
+
+# Declare flags and actions for compilation
+flags pgi.compile.c++ OPTIONS <cxxstd>98 : -std=c++03 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>03 : -std=c++03 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>0x : -std=c++11 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>11 : -std=c++11 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>1y : -std=c++14 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>14 : -std=c++14 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>1z : -std=c++17 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>17 : -std=c++17 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>2a : -std=c++17 ;
+flags pgi.compile.c++ OPTIONS <cxxstd>latest : -std=c++17 ;
+
+flags pgi.compile OPTIONS <link>shared : -fpic ;
+flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
+flags pgi.compile OPTIONS <optimization>off : -O0 ;
+flags pgi.compile OPTIONS <optimization>speed : -fast ;
+flags pgi.compile OPTIONS <optimization>space : -fast ;
+
+flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
+flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
+flags pgi.compile OPTIONS <warnings>all : -Minform=warn ;
+flags pgi.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+flags pgi.compile.c++ OPTIONS <rtti>off : --no_rtti ;
+flags pgi.compile.c++ OPTIONS <exception-handling>off : --no_exceptions ;
+
+flags pgi.compile OPTIONS <cflags> ;
+flags pgi.compile.c++ OPTIONS <cxxflags> ;
+flags pgi.compile DEFINES <define> ;
+flags pgi.compile INCLUDES <include> ;
+
+flags pgi.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
+# Strip the binary when no debugging is needed
+flags pgi.link OPTIONS <debug-symbols>off : -s ;
+flags pgi.link OPTIONS <linkflags> ;
+flags pgi.link OPTIONS <link>shared : -fpic ;
+flags pgi.link LINKPATH <library-path> ;
+flags pgi.link FINDLIBS-ST <find-static-library> ;
+flags pgi.link FINDLIBS-SA <find-shared-library> ;
+flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
+flags pgi.link LIBRARIES <library-file> ;
+flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
+flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pgi.link RPATH <dll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -soname $(<[-1]:D=) -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+actions updated together piecemeal pgi.archive
+{
+ ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
+}
+
diff --git a/src/boost/tools/build/src/tools/pkg-config.jam b/src/boost/tools/build/src/tools/pkg-config.jam
new file mode 100644
index 000000000..2efa9cf3a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/pkg-config.jam
@@ -0,0 +1,485 @@
+#|
+Copyright 2019 Dmitry Arkhipov
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import os ;
+import param ;
+import project ;
+import regex ;
+import sequence ;
+import string ;
+import targets ;
+
+
+#| tag::doc[]
+
+= pkg-config
+The *pkg-config* program is used to retrieve information about installed
+libraries in the system. It retrieves information about packages from special
+metadata files. These files are named after the package, and have a `.pc`
+extension. The package name specified to *pkg-config* is defined to be the name
+of the metadata file, minus the `.pc` extension.
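+
+For example, assuming a package whose metadata file is named `zlib.pc` is
+installed, its compile and link flags could be queried on the command line as:
+
+[source, shell]
+----
+pkg-config --cflags --libs zlib
+----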
+
+|# # end::doc[]
+
+
+#| tag::doc[]
+
+== Feature: `pkg-config`
+
+Selects one of the initialized `pkg-config` configurations. This feature is
+`propagated` to dependencies. Its use is discussed in
+section <<pkg-config-init>>.
+
+|# # end::doc[]
+
+feature.feature pkg-config : : propagated ;
+
+
+#| tag::doc[]
+
+== Feature: `pkg-config-define`
+
+This `free` feature adds a variable assignment to pkg-config invocation. For
+example,
+
+[source, jam]
+----
+pkg-config.import mypackage : requirements <pkg-config-define>key=value ;
+----
+
+is equivalent to invoking on the command line
+
+[source, shell]
+----
+pkg-config --define-variable=key=value mypackage
+----
+
+|# # end::doc[]
+
+feature.feature pkg-config-define : : free ;
+
+
+#| tag::doc[]
+
+== Rule: `import`
+
+Main target rule that imports a *pkg-config* package. When its consumer targets
+are built, the *pkg-config* command will be invoked with arguments that depend
+on the current property set. The features that have an effect are:
+
+* `<pkg-config-define>`: adds a `--define-variable` argument;
+* `<link>`: adds `--static` argument when `<link>static`;
+* `<name>`: specifies package name (target name is used instead if the property
+ is not present);
+* `<version>`: specifies package version range, can be used multiple times and
+ should be a dot-separated sequence of numbers optionally prefixed with `=`,
+ `<`, `>`, `<=` or `>=`.
+
+Example:
+
+[source, jam]
+----
+pkg-config.import my-package
+ : requirements <name>my_package <version><4 <version>>=3.1 ;
+----
+
+|# # end::doc[]
+
+
+rule import
+ ( target-name
+ : sources *
+ : requirements *
+ : default-build *
+ : usage-requirements *
+ )
+{
+ param.handle-named-params
+ sources requirements default-build usage-requirements ;
+ targets.create-metatarget pkg-config-target
+ : [ project.current ]
+ : $(target-name)
+ : $(sources)
+ : $(requirements)
+ : $(default-build)
+ : $(usage-requirements)
+ ;
+}
+
+
+#| tag::doc[]
+
+== Initialization [[ pkg-config-init ]]
+
+To use the `pkg-config` tool you need to declare it in a configuration file
+with the `using` rule:
+
+[source, jam]
+----
+using pkg-config : [config] : [command] ... : [ options ] ... ;
+----
+
+
+* `config`: the name of the initialized configuration. The name can be omitted,
+  in which case the configuration will become the default one.
+* `command`: the command, with any extra arguments, to execute. If no command
+  is given, the `PKG_CONFIG` environment variable is checked first, and if it
+  is empty the string `pkg-config` is used.
+* `options`: options that modify `pkg-config` behavior. Allowed options are:
+  * `<path>`: sets the `PKG_CONFIG_PATH` environment variable;
+    multiple occurrences are allowed.
+  * `<libdir>`: sets the `PKG_CONFIG_LIBDIR` environment variable;
+    multiple occurrences are allowed.
+  * `<allow-system-cflags>`: sets the `PKG_CONFIG_ALLOW_SYSTEM_CFLAGS`
+    environment variable; multiple occurrences are allowed.
+  * `<allow-system-libs>`: sets the `PKG_CONFIG_ALLOW_SYSTEM_LIBS`
+    environment variable; multiple occurrences are allowed.
+  * `<sysroot>`: sets the `PKG_CONFIG_SYSROOT_DIR` environment variable;
+    multiple occurrences are allowed.
+  * `<variable>`: adds a variable definition argument to the command invocation;
+    multiple occurrences are allowed.
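+
+For example (the configuration name and directory below are illustrative), a
+configuration that points at a private collection of `.pc` files could be
+declared as:
+
+[source, jam]
+----
+using pkg-config : local : : <libdir>/opt/mylibs/lib/pkgconfig ;
+----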
+
+|# # end::doc[]
+
+rule init ( config ? : command * : options * )
+{
+ config ?= [ default-config ] ;
+
+ local tool = [ os.environ PKG_CONFIG ] ;
+ tool ?= pkg-config ;
+ command =
+ [ common.get-invocation-command pkg-config : $(tool) : $(command) ] ;
+
+ configure $(config) : $(command) : $(options) ;
+ $(.configs).use $(config) ;
+}
+
+
+rule run ( config ? : args * )
+{
+ config ?= [ default-config ] ;
+
+ local command = [ $(.configs).get $(config) : command ] ;
+ command = "$(command) $(args:J= )" ;
+
+ local output = [ SHELL "$(command)" : exit-status ] ;
+ if 0 != $(output[2])
+ {
+ errors.error "pkg-config: command '$(command)' resulted in error:"
+ [ common.newline-char ] $(output[1]) ;
+ }
+
+ local ws = [ string.whitespace ] ;
+ output = [ regex.split $(output[1]) "[$(ws)]" ] ;
+ return [ sequence.filter non-empty : $(output) ] ;
+}
+
+
+#| tag::doc[]
+
+== Class `pkg-config-target`
+
+[source, jam]
+----
+class pkg-config-target : alias-target-class {
+ rule construct ( name : sources * : property-set )
+ rule version ( property-set )
+ rule variable ( name : property-set )
+}
+----
+
+The class of objects returned by the `import` rule. The objects themselves can be
+useful in situations that require more complicated logic for consuming a
+package. See <<pkg-config-tips>> for examples.
+
+. `rule construct ( name : sources * : property-set )`
+ Overrides `alias-target.construct`.
+
+. `rule version ( property-set )`
+ Returns the package's version, in the context of `property-set`.
+
+. `rule variable ( name : property-set )`
+ Returns the value of variable `name` in the package, in the context of
+ `property-set`.
+
+
+|# # end::doc[]
+
+class pkg-config-target : alias-target-class
+{
+ import pkg-config ;
+ import regex ;
+
+ rule construct ( name : sources * : property-set )
+ {
+ local config = [ $(property-set).get <pkg-config> ] ;
+ local args = [ common-arguments $(name) : $(property-set) ] ;
+ return
+ [ property-set.create
+ [ compile-flags $(config) $(property-set) : $(args) ]
+ [ link-flags $(config) $(property-set) : $(args) ]
+ ] ;
+ }
+
+ rule version ( property-set )
+ {
+ local config = [ $(property-set).get <pkg-config> ] ;
+ local args = [ common-arguments [ name ] : $(property-set) ] ;
+ local version = [ pkg-config.run $(config) : --modversion $(args) ] ;
+ return [ regex.split $(version) "\\." ] ;
+ }
+
+ rule variable ( name : property-set )
+ {
+ local config = [ $(property-set).get <pkg-config> ] ;
+ local args = [ common-arguments [ name ] : $(property-set) ] ;
+ return [ pkg-config.run $(config) : --variable=$(name) $(args) ] ;
+ }
+
+ local rule common-arguments ( name : property-set )
+ {
+ local defines = [ $(property-set).get <pkg-config-define> ] ;
+ local args = --define-variable=$(defines) ;
+ if [ $(property-set).get <link> ] = static
+ {
+ args += --static ;
+ }
+ return $(args) [ get-package-request $(property-set) $(name) ] ;
+ }
+
+ local rule get-package-request ( property-set name )
+ {
+ local pkg-name = [ $(property-set).get <name> ] ;
+ pkg-name ?= $(name) ;
+ if $(pkg-name[2])
+ {
+ errors.error "multiple package names were specified for target "
+ "'$(name)': $(pkg-name)" ;
+ }
+
+ local versions ;
+ for local version in [ $(property-set).get <version> ]
+ {
+ local match = [ MATCH "^(<=)(.*)" : $(version) ] ;
+ match ?= [ MATCH "^(>=)(.*)" : $(version) ] ;
+ match ?= [ MATCH "^([><=])(.*)" : $(version) ] ;
+ if $(match)
+ {
+ version = " $(match:J= )" ;
+ }
+ else
+ {
+ version = " = $(version)" ;
+ }
+ versions += $(version) ;
+ }
+ versions ?= "" ;
+
+ return "'$(pkg-name)"$(versions)"'" ;
+ }
+
+ local rule link-flags ( config property-set : args * )
+ {
+ local flags = [ pkg-config.run $(config) : --libs $(args) ] ;
+ return <linkflags>$(flags) ;
+ }
+
+ local rule compile-flags ( config property-set : args * )
+ {
+ local flags = [ pkg-config.run $(config) : --cflags $(args) ] ;
+ return <cflags>$(flags) ;
+ }
+}
+
+
+local rule default-config ( )
+{
+ return default ;
+}
+
+
+local rule configure ( config : command + : options * )
+{
+ $(.configs).register $(config) ;
+
+ local path ;
+ local libdir ;
+ local allow-system-cflags ;
+ local allow-system-libs ;
+ local sysroot ;
+ local defines ;
+ for local opt in $(options)
+ {
+ switch $(opt:G)
+ {
+ case <path> : path += $(opt:G=) ;
+ case <libdir> : libdir += $(opt:G=) ;
+ case <allow-system-cflags> : allow-system-cflags += $(opt:G=) ;
+ case <allow-system-libs> : allow-system-libs += $(opt:G=) ;
+ case <sysroot> : sysroot += $(opt:G=) ;
+ case <variable> : defines += $(opt:G=) ;
+ case * :
+ errors.error "pkg-config: invalid property '$(opt)' was "
+ "specified for configuration '$(config)'." ;
+ }
+ }
+
+ for local opt in allow-system-cflags allow-system-libs
+ {
+ if ! $($(opt)) in "on" off
+ {
+ errors.error "pkg-config: invalid value '$($(opt))' was specified "
+ "for option <$(opt)> of configuration '$(config)'."
+ [ common.newline-char ] "Available values are 'on' and 'off'" ;
+ }
+ }
+
+ if $(sysroot[2])
+ {
+ errors.error "pkg-config: several values were specified for option "
+ "<sysroot> of configuration '$(config)'."
+ [ common.newline-char ] "Only one value is allowed." ;
+ }
+
+ local sep = [ os.path-separator ] ;
+ path = [ envar-set-command PKG_CONFIG_PATH : $(path:J=$(sep)) ] ;
+ libdir = [ envar-set-command PKG_CONFIG_LIBDIR : $(libdir:J=$(sep)) ] ;
+ sysroot = [ envar-set-command PKG_CONFIG_SYSROOT_DIR : $(sysroot) ] ;
+ allow-cflags =
+ [ envar-set-command PKG_CONFIG_ALLOW_SYSTEM_CFLAGS
+ : $(allow-system-cflags)
+ : 1
+ ] ;
+ allow-libs =
+ [ envar-set-command PKG_CONFIG_ALLOW_SYSTEM_LIBS
+ : $(allow-system-libs)
+ : 1
+ ] ;
+
+ command += --print-errors --errors-to-stdout --define-variable=$(defines) ;
+ $(.configs).set $(config)
+ : command
+ : "$(path)$(libdir)$(sysroot)$(allow-cflags)$(allow-libs)$(command:J= )"
+ ;
+
+ feature.extend pkg-config : $(config) ;
+}
+
+
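+# Returns a command that sets the environment variable 'envar' to the given
+# value (or to 'implied-value' when one is supplied), or an empty string when
+# no value was given.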
+local rule envar-set-command ( envar : value * : implied-value * )
+{
+ if $(value)
+ {
+ if $(implied-value)
+ {
+ value = $(implied-value) ;
+ }
+ return [ common.path-variable-setting-command $(envar) : $(value) ] ;
+ }
+ else
+ {
+ return "" ;
+ }
+}
+
+
+local rule non-empty ( string )
+{
+ if $(string) != "" { return true ; }
+}
+
+
+.configs = [ new configurations ] ;
+
+
+#| tag::doc[]
+
+== Tips [[pkg-config-tips]]
+
+
+=== Using several configurations
+
+Suppose you have two collections of `.pc` files: one for platform A and another
+for platform B. You can initialize two configurations of the `pkg-config` tool,
+each corresponding to a specific collection:
+
+[source, jam]
+----
+using pkg-config : A : : <libdir>path/to/collection/A ;
+using pkg-config : B : : <libdir>path/to/collection/B ;
+----
+
+Then, you can specify that builds for platform A should use configuration A,
+while builds for B should use configuration B:
+
+[source, jam]
+----
+project
+ : requirements
+ <target-os>A-os,<architecture>A-arch:<pkg-config>A
+ <target-os>B-os,<architecture>B-arch:<pkg-config>B
+ ;
+----
+
+Because the `project-config`, `user-config` and `site-config` modules are
+parents of the jamroot module, you can put this in any of those files.
+
+
+=== Choosing the package name based on the property set
+
+Since the file for a package should be named after the package and suffixed
+with `.pc`, some projects have come up with naming schemes that allow
+simultaneous installation of several major versions or build variants. To pick
+the specific name corresponding to the build request, you can use the
+`<conditional>` property in requirements:
+
+[source, jam]
+----
+pkg-config.import mypackage : requirements <conditional>@infer-name ;
+
+rule infer-name ( properties * )
+{
+ local name = mypackage ;
+ local variant = [ property.select <variant> : $(properties) ] ;
+ if $(variant) = debug
+ {
+ name = $(name)-d ;
+ }
+ return <name>$(name) ;
+}
+----
+
+The `common.format-name` rule can be very useful in this situation.
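+
+For illustration, here is a sketch of the same rule written with
+`common.format-name` (the format tokens and the exact signature used below are
+assumptions; consult `common.jam` for the authoritative interface):
+
+[source, jam]
+----
+import common ;
+
+rule infer-name ( properties * )
+{
+    local ps = [ property-set.create $(properties) ] ;
+    # Decorate the base package name with the standard threading and
+    # runtime tags derived from the property set.
+    local name = [ common.format-name <base> <threading> <runtime>
+        : mypackage : : $(ps) ] ;
+    return <name>$(name) ;
+}
+----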
+
+
+=== Modify usage requirements based on package version or variable
+
+Sometimes you need to apply some logic based on a package's version or on a
+variable that it defines. For that you can use the `<conditional>` property in
+usage requirements:
+
+[source, jam]
+----
+mypackage =
+ [ pkg-config.import mypackage : usage-requirements <conditional>@extra-props
+ ] ;
+
+rule extra-props ( properties * )
+{
+ local ps = [ property-set.create $(properties) ] ;
+ local prefix = [ $(mypackage).variable name_prefix : $(ps) ] ;
+ prefix += [ $(mypackage).version $(ps) ] ;
+ return <define>$(prefix:J=_) ;
+}
+----
+
+|# # end::doc[]
diff --git a/src/boost/tools/build/src/tools/python-config.jam b/src/boost/tools/build/src/tools/python-config.jam
new file mode 100644
index 000000000..40aa825bc
--- /dev/null
+++ b/src/boost/tools/build/src/tools/python-config.jam
@@ -0,0 +1,27 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for Python tools and libraries. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ for local R in 2.4 2.3 2.2
+ {
+ local python-path = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ;
+ local python-version = $(R) ;
+
+ if $(python-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ;
+ }
+ using python : $(python-version) : $(python-path) ;
+ }
+ }
+}
diff --git a/src/boost/tools/build/src/tools/python.jam b/src/boost/tools/build/src/tools/python.jam
new file mode 100644
index 000000000..d510fcfb8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/python.jam
@@ -0,0 +1,1333 @@
+# Copyright 2004 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Python and the Boost.Python library.
+#
+# This module defines
+#
+# - a project 'python' with a target 'python' in it, that corresponds to the
+# python library
+#
+# - a main target rule 'python-extension' which can be used to build a python
+# extension.
+#
+# Extensions that use Boost.Python must explicitly link to it.
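+#
+# For example (the target and file names below are illustrative):
+#
+#   import python ;
+#   python-extension hello_ext : hello.cpp /boost/python//boost_python ;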
+
+import type ;
+import testing ;
+import generators ;
+import project ;
+import errors ;
+import targets ;
+import "class" : new ;
+import os ;
+import common ;
+import toolset ;
+import regex ;
+import numbers ;
+import string ;
+import property ;
+import sequence ;
+import path ;
+import feature ;
+import set ;
+import builtin ;
+import property-set ;
+
+
+# Make this module a project.
+project.initialize $(__name__) ;
+project python ;
+
+# Save the project so that if 'init' is called several times we define new
+# targets in the python project, not in whatever project we were called by.
+.project = [ project.current ] ;
+
+# Dynamic linker lib. Necessary to specify it explicitly on some platforms.
+lib dl ;
+# This contains the 'openpty' function needed by python. Again, on some systems
+# it needs to be passed to the linker explicitly.
+lib util ;
+# Python uses pthread symbols.
+lib pthread ;
+# Extra library needed by pthread on some platforms.
+lib rt ;
+
+# The pythonpath feature specifies additional elements for the PYTHONPATH
+# environment variable, set by run-pyd. For example, pythonpath can be used to
+# access Python modules that are part of the product being built, but are not
+# installed in the development system's default paths.
+feature.feature pythonpath : : free optional path ;
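+
+# For example (illustrative names and path; bpl-test is defined later in this
+# module), a test that imports modules from the build tree could be declared
+# as:
+#
+#   bpl-test my_test : my_test.py my_ext.cpp : <pythonpath>build/pylib ;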
+
+# The best configured version of Python 2 and 3.
+py2-version = ;
+py3-version = ;
+
+# Initializes the Python toolset. Note that all parameters are optional.
+#
+# - version -- the version of Python to use. Should be in Major.Minor format,
+# for example 2.3. Do not include the subminor version.
+#
+# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter.
+# Alternatively, the installation prefix for Python libraries and includes. If
+# empty, will be guessed from the version, the platform's installation
+# patterns, and the python executables that can be found in PATH.
+#
+# - includes: the include path to Python headers. If empty, will be guessed.
+#
+# - libraries: the path to Python library binaries. If empty, will be guessed.
+# On MacOS/Darwin, you can also pass the path of the Python framework.
+#
+# - condition: if specified, should be a set of properties that are matched
+# against the build configuration when B2 selects a Python
+# configuration to use.
+#
+# - extension-suffix: A string to append to the name of extension modules before
+# the true filename extension. Ordinarily we would just compute this based on
+# the value of the <python-debugging> feature. However ubuntu's python-dbg
+# package uses the windows convention of appending _d to debug-build extension
+# modules. We have no way of detecting ubuntu, or of probing python for the
+# "_d" requirement, and if you configure and build python using
+# --with-pydebug, you'll be using the standard *nix convention. Defaults to ""
+# (or "_d" when targeting windows and <python-debugging> is set).
+#
+# Example usage:
+#
+# using python : 2.3 ;
+# using python : 2.3 : /usr/local/bin/python ;
+#
+rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
+ : condition * : extension-suffix ? )
+{
+ project.push-current $(.project) ;
+
+ debug-message Configuring python... ;
+ for local v in version cmd-or-prefix includes libraries condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified $(v):" \"$($(v))\" ;
+ }
+ }
+
+ configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
+
+ project.pop-current ;
+}
+
+# A simpler version of SHELL that grabs stderr as well as stdout, but returns
+# nothing if there was an error.
+#
+local rule shell-cmd ( cmd )
+{
+ debug-message running command '$(cmd)" 2>&1"' ;
+ x = [ SHELL $(cmd)" 2>&1" : exit-status ] ;
+ if $(x[2]) = 0
+ {
+ return $(x[1]) ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Try to identify Cygwin symlinks. Invoking such a file directly as an NT
+# executable from a native Windows build of bjam would be fatal to the bjam
+# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove
+# that those are not also symlinks. ;-)
+#
+# If a symlink is found returns non-empty; we try to extract the target of the
+# symlink from the file and return that.
+#
+# Note: 1. only works on NT 2. path is a native path.
+local rule is-cygwin-symlink ( path )
+{
+ local is-symlink = ;
+
+ # Look for a file with the given path having the S attribute set, as cygwin
+ # symlinks do. /-C means "do not use thousands separators in file sizes."
+ local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ;
+
+ if $(dir-listing)
+ {
+ # Escape any special regex characters in the base part of the path.
+ local base-pat = [ regex.escape $(path:D=) : "].[()*+?|\\$^" : \\ ] ;
+
+ # Extract the file's size from the directory listing.
+ local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ;
+
+ # If the file has a reasonably small size, look for the special symlink
+ # identification text.
+ if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ]
+ {
+ local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ;
+ if $(link[2]) != 0
+ {
+ local nl = "
+
+" ;
+ is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ;
+ if $(is-symlink)
+ {
+ is-symlink = [ *nix-path-to-native $(is-symlink) ] ;
+ is-symlink = $(is-symlink:R=$(path:D)) ;
+ }
+
+ }
+ }
+ }
+ return $(is-symlink) ;
+}
+
+
+# Append ext to each member of names that does not contain '.'.
+#
+local rule default-extension ( names * : ext * )
+{
+ local result ;
+ for local n in $(names)
+ {
+ switch $(n)
+ {
+ case *.* : result += $(n) ;
+ case * : result += $(n)$(ext) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Tries to determine whether invoking "cmd" would actually attempt to launch a
+# cygwin symlink.
+#
+# Note: only works on NT.
+#
+local rule invokes-cygwin-symlink ( cmd )
+{
+ local dirs = $(cmd:D) ;
+ if ! $(dirs)
+ {
+ dirs = . [ os.executable-path ] ;
+ }
+ local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ;
+ local paths = [ GLOB $(dirs) : $(base) ] ;
+ if $(paths)
+ {
+ # Make sure we have not run into a Cygwin symlink. Invoking such a file
+ # as an NT executable would be fatal for the bjam process.
+ return [ is-cygwin-symlink $(paths[1]) ] ;
+ }
+}
+
+
+local rule debug-message ( message * )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" "[python-cfg]" $(message) ;
+ }
+}
+
+
+# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and
+# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result
+# found. Also accounts for the fact that on 64-bit machines, 32-bit software has
+# its own area, under SOFTWARE\Wow6432node.
+#
+local rule software-registry-value ( path : data ? )
+{
+ local result ;
+ for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE
+ {
+ for local x64elt in "" Wow6432node\\ # Account for 64-bit windows
+ {
+ if ! $(result)
+ {
+ result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ;
+ }
+ }
+
+ }
+ return $(result) ;
+}
+
+
+.windows-drive-letter-re = "^([A-Za-z]):[\\/](.*)" ;
+.cygwin-drive-letter-re = "^/cygdrive/([a-z])/(.*)" ;
+
+.working-directory = [ PWD ] ;
+.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ;
+.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ;
+
+
+local rule windows-to-cygwin-path ( path )
+{
+ # If path is rooted with a drive letter, rewrite it using the /cygdrive
+ # mountpoint.
+ local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ;
+
+ # Else if path is rooted without a drive letter, use the working directory.
+ p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$1 ] ;
+
+ # Else return the path unchanged.
+ return $(p:E=$(path:T)) ;
+}
+
+
+# :W only works in Cygwin builds of bjam. This one works on NT builds as well.
+#
+local rule cygwin-to-windows-path ( path )
+{
+ path = $(path:R="") ; # strip any trailing slash
+
+ local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) "$1:/$2" ] ;
+ if $(drive-letter)
+ {
+ path = $(drive-letter) ;
+ }
+ else if $(path:R=/x) = $(path) # already rooted?
+ {
+ # Look for a cygwin mount that includes each head sequence in $(path).
+ local head = $(path) ;
+ local tail = "" ;
+
+ while $(head)
+ {
+ local root = [ software-registry-value
+ "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ;
+
+ if $(root)
+ {
+ path = $(tail:R=$(root)) ;
+ head = ;
+ }
+ tail = $(tail:R=$(head:D=)) ;
+
+ if $(head) = /
+ {
+ head = ;
+ }
+ else
+ {
+ head = $(head:D) ;
+ }
+ }
+ }
+ return [ regex.replace $(path:R="") / \\ ] ;
+}
+
+
+# Convert a *nix path to native.
+#
+local rule *nix-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ path = [ cygwin-to-windows-path $(path) ] ;
+ }
+ return $(path) ;
+}
+
+
+# Convert an NT path to native.
+#
+local rule windows-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ windows-to-cygwin-path $(path) ] ;
+ }
+}
+
+
+# Return nonempty if path looks like a windows path, i.e. it starts with a drive
+# letter or contains backslashes.
+#
+local rule guess-windows-path ( path )
+{
+ return [ SUBST $(path) "($(.windows-drive-letter-re)|.*([\\]).*)" $1 ] ;
+}
+
+
+local rule path-to-native ( paths * )
+{
+ local result ;
+
+ for local p in $(paths)
+ {
+ if [ guess-windows-path $(p) ]
+ {
+ result += [ windows-path-to-native $(p) ] ;
+ }
+ else
+ {
+ result += [ *nix-path-to-native $(p:T) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH "^([0-9]+)\.([0-9]+)(.*)$" : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+
+# Build a list of versions from 3.4 down to 1.5. Because bjam can not enumerate
+# registry sub-keys, we have no way of finding a version with a 2-digit minor
+# version, e.g. 2.10 -- let us hope that never happens.
+#
+.version-countdown = ;
+for local v in [ numbers.range 15 34 ]
+{
+ .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ;
+}
+
+
+local rule windows-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+ local interpreters ;
+
+ for local v in $(version)
+ {
+ local install-path = [
+ software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ;
+
+ if $(install-path)
+ {
+ install-path = [ windows-path-to-native $(install-path) ] ;
+ debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ;
+ }
+
+ interpreters += $(:E=python:R=$(install-path)) ;
+ }
+ return $(interpreters) ;
+}
+
+
+local rule darwin-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+
+ local prefix
+ = [ GLOB /System/Library/Frameworks /Library/Frameworks
+ : Python.framework ] ;
+
+ return $(prefix)/Versions/$(version)/bin/python ;
+}
+
+
+# Assume "python-cmd" invokes a python interpreter and invoke it to extract all
+# the information we care about from its "sys" module. Returns void if
+# unsuccessful.
+#
+local rule probe ( python-cmd )
+{
+ # Avoid invoking a Cygwin symlink on NT.
+ local skip-symlink ;
+ if [ os.name ] = NT
+ {
+ skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ;
+ }
+
+ if $(skip-symlink)
+ {
+ debug-message -------------------------------------------------------------------- ;
+ debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ;
+ debug-message causing a bjam built for Windows to hang. ;
+ debug-message ;
+ debug-message If you intend to target a Cygwin build of Python, please ;
+ debug-message replace the path to the link with the path to a real executable ;
+ debug-message "(guessing:" \"$(skip-symlink)\") "in" your 'using python' line ;
+ debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ;
+ debug-message backslashes ;
+ debug-message -------------------------------------------------------------------- ;
+ }
+ else
+ {
+ # Prepare a List of Python format strings and expressions that can be
+ # used to print the constants we want from the sys module.
+
+ # We do not really want sys.version since that is a complicated string,
+ # so get the information from sys.version_info instead.
+ local format = "version=%d.%d" ;
+ local exprs = "version_info[0]" "version_info[1]" ;
+
+ for local s in $(sys-elements[2-])
+ {
+ format += $(s)=%s ;
+ exprs += $(s) ;
+ }
+
+ # Invoke Python and ask it for all those values.
+ local full-cmd =
+ $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ;
+
+ local output = [ shell-cmd $(full-cmd) ] ;
+ if $(output)
+ {
+ # Parse the output to get all the results.
+ local nl = "
+
+" ;
+ for s in $(sys-elements)
+ {
+ # These variables are expected to be declared local in the
+ # caller, so Jam's dynamic scoping will set their values there.
+ sys.$(s) = [ SUBST $(output) "\\<$(s)=([^$(nl)]+)" $1 ] ;
+ }
+ }
+ return $(output) ;
+ }
+}
+
+
+# Make sure the "libraries" and "includes" variables (in an enclosing scope)
+# have a value based on the information given.
+#
+local rule compute-default-paths ( target-os : version ? : prefix ? :
+ exec-prefix ? )
+{
+ exec-prefix ?= $(prefix) ;
+
+ if $(target-os) = windows
+ {
+ # The exec_prefix is where you're supposed to look for machine-specific
+ # libraries.
+ local default-library-path = $(exec-prefix)\\libs ;
+ local default-include-path = $(:E=Include:R=$(prefix)) ;
+
+ # If the interpreter was found in a directory called "PCBuild" or
+ # "PCBuild8," assume we're looking at a Python built from the source
+ # distro, and go up one additional level to the default root. Otherwise,
+ # the default root is the directory where the interpreter was found.
+
+ # We ask Python itself what the executable path is in case of
+ # intermediate symlinks or shell scripts.
+ local executable-dir = $(sys.executable:D) ;
+
+ if [ MATCH ^(PCBuild) : $(executable-dir:D=) ]
+ {
+ debug-message "This Python appears to reside in a source distribution;" ;
+ debug-message "prepending \""$(executable-dir)"\" to default library search path" ;
+
+ default-library-path = $(executable-dir) $(default-library-path) ;
+
+ default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ;
+
+ debug-message "and \""$(default-include-path[1])"\" to default #include path" ;
+ }
+
+ libraries ?= $(default-library-path) ;
+ includes ?= $(default-include-path) ;
+ }
+ else
+ {
+ includes ?= $(prefix)/include/python$(version) ;
+
+ local lib = $(exec-prefix)/lib ;
+ libraries ?= $(lib)/python$(version)/config $(lib) ;
+ }
+}
+
+# The version of the python interpreter to use.
+feature.feature python : : propagated symmetric ;
+feature.feature python.interpreter : : free ;
+
+toolset.flags python.capture-output PYTHON : <python.interpreter> ;
+
+#
+# Support for Python configured --with-pydebug
+#
+feature.feature python-debugging : off on : propagated ;
+variant debug-python : debug : <python-debugging>on ;
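+
+# For example, either of the following (illustrative) command lines selects the
+# debugging build of Python:
+#
+#   b2 python-debugging=on ...
+#   b2 debug-python ...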
+
+
+# Return a list of candidate commands to try when looking for a Python
+# interpreter. prefix is expected to be a native path.
+#
+local rule candidate-interpreters ( version ? : prefix ? : target-os )
+{
+ local bin-path = bin ;
+ if $(target-os) = windows
+ {
+ # On Windows, look in the root directory itself and, to work with the
+ # result of a build-from-source, the PCBuild directory.
+ bin-path = PCBuild8 PCBuild "" ;
+ }
+
+ bin-path = $(bin-path:R=$(prefix)) ;
+
+ if $(target-os) in windows darwin
+ {
+ return # Search:
+ $(:E=python:R=$(bin-path)) # Relative to the prefix, if any
+ python # In the PATH
+ [ $(target-os)-installed-pythons $(version) ] # Standard install locations
+ ;
+ }
+ else
+ {
+ # Search relative to the prefix, or if none supplied, in PATH.
+ local unversioned = $(:E=python:R=$(bin-path:E=)) ;
+
+ # If a version was specified, look for a python with that specific
+ # version appended before looking for one called, simply, "python"
+ return $(unversioned)$(version) $(unversioned) ;
+ }
+}
+
+
+# Compute system library dependencies for targets linking with static Python
+# libraries.
+#
+# On many systems, Python uses libraries such as pthreads or libdl. Since static
+# libraries carry no library dependency information of their own that the linker
+# can extract, these extra dependencies have to be given explicitly on the link
+# line of the client. The information about these dependencies is packaged into
+# the "python" target below.
+#
+# Even where Python itself uses pthreads, it never allows extension modules to
+# be entered concurrently (unless they explicitly give up the interpreter lock).
+# Therefore, extension modules do not need the efficiency overhead of threadsafe
+# code as produced by <threading>multi, and we handle libpthread along with
+# other libraries here. Note: this optimization is based on an assumption that
+# the compiler generates link-compatible code in both the single- and
+# multi-threaded cases, and that system libraries do not change their ABIs
+# either.
+#
+# Returns a list of usage-requirements that link to the necessary system
+# libraries.
+#
+local rule system-library-dependencies ( target-os )
+{
+ switch $(target-os)
+ {
+ case s[uo][nl]* : # solaris, sun, sunos
+ # Add a librt dependency for the gcc toolset on SunOS (the sun
+ # toolset adds -lrt unconditionally). While this appears to
+ # duplicate the logic already in gcc.jam, it does not as long as
+ # we are not forcing <threading>multi.
+
+ # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields
+ # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem
+ # to be the right list for extension modules. For example, on my
+ # installation, adding -ldl causes at least one test to fail because
+ # the library can not be found and removing it causes no failures.
+
+ # Apparently, though, we need to add -lrt for gcc.
+ return <toolset>gcc:<library>rt ;
+
+ case osf : return <library>pthread <toolset>gcc:<library>rt ;
+
+ case qnx* : return ;
+ case darwin : return ;
+ case windows : return ;
+ case haiku : return ;
+
+ case hpux : return <library>rt ;
+ case *bsd : return <library>pthread <toolset>gcc:<library>util ;
+
+ case aix : return <library>pthread <library>dl ;
+
+ case * : return <library>pthread <library>dl
+ <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ;
+ }
+}
+
+
+# Define a version suffix for libraries depending on Python.
+# For example, Boost.Python built for Python 2.7 uses the suffix "27"
+rule version-suffix ( version )
+{
+ local major-minor = [ split-version $(version) ] ;
+ local suffix = $(major-minor:J="") ;
+ return $(suffix) ;
+}
+
+# Declare a target to represent Python's library.
+#
+local rule declare-libpython-target ( version ? : requirements * )
+{
+ # Compute the representation of Python version in the name of Python's
+ # library file.
+ local lib-version = $(version) ;
+ if <target-os>windows in $(requirements)
+ {
+ local major-minor = [ split-version $(version) ] ;
+ lib-version = $(major-minor:J="") ;
+ if <python-debugging>on in $(requirements)
+ {
+ lib-version = $(lib-version)_d ;
+ }
+ }
+
+ if ! $(lib-version)
+ {
+ ECHO *** "warning:" could not determine Python version, which will ;
+ ECHO *** "warning:" probably prevent us from linking with the python ;
+ ECHO *** "warning:" library. Consider explicitly passing the version ;
+ ECHO *** "warning:" to 'using python'. ;
+ }
+
+ # Declare it.
+ lib python.lib : : <name>python$(lib-version) $(requirements) ;
+}
+
+
+# Implementation of init.
+local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
+ condition * : extension-suffix ? )
+{
+ local prefix ;
+ local exec-prefix ;
+ local cmds-to-try ;
+ local interpreter-cmd ;
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+
+ if $(target-os) = windows && <python-debugging>on in $(condition)
+ {
+ extension-suffix ?= _d ;
+ }
+ extension-suffix ?= "" ;
+
+ local cmds-to-try ;
+
+ if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ]
+ {
+ # If the user did not pass a command, whatever we got was a prefix.
+ prefix = $(cmd-or-prefix) ;
+ cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ;
+ }
+ else
+ {
+ # Work with the command the user gave us.
+ cmds-to-try = $(cmd-or-prefix) ;
+
+ # On Windows, do not nail down the interpreter command just yet in case
+ # the user specified something that turns out to be a cygwin symlink,
+ # which could bring down bjam if we invoke it.
+ if $(target-os) != windows
+ {
+ interpreter-cmd = $(cmd-or-prefix) ;
+ }
+ }
+
+ # Values to use in case we can not really find anything in the system.
+ local fallback-cmd = $(cmds-to-try[1]) ;
+ local fallback-version ;
+
+ # Anything left to find or check?
+ if ! ( $(interpreter-cmd) && $(version) && $(includes) && $(libraries) )
+ {
+ # Values to be extracted from python's sys module. These will be set by
+ # the probe rule, above, using Jam's dynamic scoping.
+ local sys-elements = version platform prefix exec_prefix executable ;
+ local sys.$(sys-elements) ;
+
+ # Compute the string Python's sys.platform needs to match. If not
+ # targeting Windows or cygwin we will assume only native builds can
+ # possibly run, so we will not require a match and we leave sys.platform
+ # blank.
+ local platform ;
+ switch $(target-os)
+ {
+ case windows : platform = win32 ;
+ case cygwin : platform = cygwin ;
+ }
+
+ while $(cmds-to-try)
+ {
+ # Pop top command.
+ local cmd = $(cmds-to-try[1]) ;
+ cmds-to-try = $(cmds-to-try[2-]) ;
+
+ debug-message Checking interpreter command \"$(cmd)\"... ;
+ if [ probe $(cmd) ]
+ {
+ fallback-version ?= $(sys.version) ;
+
+ # Check for version/platform validity.
+ for local x in version platform
+ {
+ if $($(x)) && $($(x)) != $(sys.$(x))
+ {
+ debug-message ...$(x) "mismatch (looking for"
+ $($(x)) but found $(sys.$(x))")" ;
+ cmd = ;
+ }
+ }
+
+ if $(cmd)
+ {
+ debug-message ...requested configuration matched! ;
+
+ exec-prefix = $(sys.exec_prefix) ;
+
+ compute-default-paths $(target-os) : $(sys.version) :
+ $(sys.prefix) : $(sys.exec_prefix) ;
+
+ version = $(sys.version) ;
+ interpreter-cmd ?= $(cmd) ;
+ cmds-to-try = ; # All done.
+ }
+ }
+ else
+ {
+ debug-message ...does not invoke a working interpreter ;
+ }
+ }
+ }
+
+ # Check whether configuration succeeded.
+ if ! ( $(includes) && $(libraries) )
+ {
+ debug-message Python headers and libraries not found. ;
+ return ;
+ }
+
+ .configured = true ;
+
+ if ! $(interpreter-cmd)
+ {
+ fallback-cmd ?= python ;
+ debug-message No working Python interpreter found. ;
+ if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ]
+ {
+ interpreter-cmd = $(fallback-cmd) ;
+ debug-message falling back to \"$(interpreter-cmd)\" ;
+ }
+ }
+
+ includes = [ path-to-native $(includes) ] ;
+ libraries = [ path-to-native $(libraries) ] ;
+
+ debug-message "Details of this Python configuration:" ;
+ debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ;
+ debug-message " include path:" \"$(includes:E=<empty>)\" ;
+ debug-message " library path:" \"$(libraries:E=<empty>)\" ;
+ if $(target-os) = windows
+ {
+ debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ;
+ }
+
+ #
+ # Discover the presence of NumPy
+ #
+ debug-message "Checking for NumPy..." ;
+ local full-cmd = "import sys; sys.stderr = sys.stdout; import numpy; print(numpy.get_include())" ;
+ local full-cmd = $(interpreter-cmd)" -c \"$(full-cmd)\"" ;
+ debug-message "running command '$(full-cmd)'" ;
+ local result = [ SHELL $(full-cmd) : strip-eol : exit-status ] ;
+ if $(result[2]) = 0
+ {
+ .numpy = true ;
+ .numpy-include = $(result[1]) ;
+ debug-message "NumPy enabled" ;
+ }
+ else
+ {
+ debug-message "NumPy disabled. Reason:" ;
+ debug-message " $(full-cmd) aborted with " ;
+ debug-message " $(result[1])" ;
+ }
+
+ #
+ # End autoconfiguration sequence.
+ #
+
+ # Normalize and dissect any version number.
+ local major-minor ;
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values python ]
+ {
+ feature.extend python : $(version) ;
+ py$(major-minor[1])-version ?= $(version) ;
+ if $(py$(major-minor[1])-version) < $(version)
+ {
+ py$(major-minor[1])-version = $(version) ;
+ }
+ }
+ target-requirements += <python>$(version:E=default) ;
+ }
+
+ target-requirements += <target-os>$(target-os) ;
+
+ # See if we can find a framework directory on darwin.
+ local framework-directory ;
+ if $(target-os) = darwin
+ {
+ # Search upward for the framework directory.
+ local framework-directory = $(libraries[-1]) ;
+ while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
+ {
+ framework-directory = $(framework-directory:D) ;
+ }
+
+ if $(framework-directory:D=) = Python.framework
+ {
+ debug-message framework directory is \"$(framework-directory)\" ;
+ }
+ else
+ {
+ debug-message "no framework directory found; using library path" ;
+ framework-directory = ;
+ }
+ }
+
+ local dll-path = $(libraries) ;
+
+ # Make sure that we can find the Python DLL on Windows.
+ if ( $(target-os) = windows ) && $(exec-prefix)
+ {
+ dll-path += $(exec-prefix) ;
+ }
+
+ #
+ # Prepare usage requirements.
+ #
+ local usage-requirements = [ system-library-dependencies $(target-os) ] ;
+ usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ;
+ if <python-debugging>on in $(condition)
+ {
+ if $(target-os) = windows
+ {
+ # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define
+ # Py_DEBUG we will get multiple definition warnings.
+ usage-requirements += <define>_DEBUG ;
+ }
+ else
+ {
+ usage-requirements += <define>Py_DEBUG ;
+ }
+ }
+
+ # In case we added duplicate requirements from what the user specified.
+ target-requirements = [ sequence.unique $(target-requirements) ] ;
+
+ # Global, but conditional, requirements to give access to the interpreter
+ # for general utilities, like other toolsets, that run Python scripts.
+ toolset.add-requirements
+ "$(target-requirements:J=,):<python.interpreter>$(interpreter-cmd)" ;
+
+ # Register the right suffix for extensions.
+ register-extension-suffix $(extension-suffix) : $(target-requirements) ;
+
+ # Make sure that the python feature is always considered
+ # relevant for any targets that depend on python. Without
+ # this, it would only be considered relevant when there are
+ # multiple configurations defined within the same build.
+ target-requirements += <relevant>python ;
+
+ #
+ # Declare the "python" target. This should really be called
+ # python_for_embedding.
+ #
+
+ if $(framework-directory)
+ {
+ alias python
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <framework>$(framework-directory)
+ ;
+ }
+ else
+ {
+ declare-libpython-target $(version) : $(target-requirements) ;
+
+ # This is an evil hack. On Windows, when Python is embedded, nothing
+ # seems to set up sys.path to include Python's standard library
+ # (http://article.gmane.org/gmane.comp.python.general/544986). The evil
+ # here, aside from the workaround necessitated by Python's bug, is that:
+ #
+ # a. we're guessing the location of the python standard library from the
+ # location of pythonXX.lib
+ #
+ # b. we're hijacking the <testing.launcher> property to get the
+ # environment variable set up, and the user may want to use it for
+ # something else (e.g. launch the debugger).
+ local set-PYTHONPATH ;
+ if $(target-os) = windows
+ {
+ set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH :
+ $(libraries:D)/Lib ] ;
+ }
+
+ alias python
+ :
+ : $(target-requirements)
+ :
+ # Why python.lib must be listed here instead of along with the
+ # system libs is a mystery, but if we do not do it, on cygwin,
+ # -lpythonX.Y never appears in the command line (although it does on
+ # linux).
+ : $(usage-requirements)
+ <testing.launcher>$(set-PYTHONPATH)
+ <library-path>$(libraries) <dll-path>$(dll-path) <library>python.lib
+ ;
+ }
+
+ # On *nix, we do not want to link either Boost.Python or Python extensions
+ # to libpython, because the Python interpreter itself provides all those
+ # symbols. If we linked to libpython, we would get duplicate symbols. So
+ # declare two targets -- one for building extensions and another for
+ # embedding.
+ if $(target-os) in windows cygwin
+ {
+ alias python_for_extensions : python : $(target-requirements) ;
+ }
+ else if $(target-os) = darwin {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <linkflags>"-undefined dynamic_lookup"
+ ;
+ }
+ # On AIX we need Python extensions and Boost.Python to import symbols from
+ # the Python interpreter. Dynamic libraries opened with dlopen() do not
+ # inherit the symbols from the Python interpreter.
+ else if $(target-os) = aix
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <linkflags>"-Wl,-bI:$(libraries[1])/python.exp"
+ ;
+ }
+ else
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements)
+ ;
+ }
+
+}
+
+# Conditional rule specification that will prevent building of a target
+# if there is no matching python configuration available with the given
+# required properties.
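+#
+# For example (hypothetical usage), attaching it as a conditional requirement
+# makes a target skip gracefully when no suitable Python is configured:
+#
+#   exe uses-python : app.cpp /python//python : <conditional>@python.require-py ;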
+rule require-py ( properties * )
+{
+ local py-ext-target = [ $(.project).find python_for_extensions : no-error ] ;
+ if ! $(py-ext-target)
+ {
+ return <build>no ;
+ }
+ local property-set = [ property-set.create $(properties) ] ;
+ property-set = [ $(property-set).expand ] ;
+ local py-ext-alternative = [ $(py-ext-target).select-alternatives $(property-set) ] ;
+ if ! $(py-ext-alternative)
+ {
+ return <build>no ;
+ }
+}
+
+
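+# Returns non-empty if a Python configuration has been set up successfully.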
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
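+# Returns non-empty if NumPy headers were detected for the configured
+# interpreter; the companion rule numpy-include returns the detected include
+# directory.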
+rule numpy ( )
+{
+ return $(.numpy) ;
+}
+
+rule numpy-include ( )
+{
+ return $(.numpy-include) ;
+}
+
+
+type.register PYTHON_EXTENSION : : SHARED_LIB ;
+
+
+local rule register-extension-suffix ( root : condition * )
+{
+ local suffix ;
+
+ switch [ feature.get-values target-os : $(condition) ]
+ {
+ case windows : suffix = pyd ;
+ case cygwin : suffix = dll ;
+ case hpux :
+ {
+ if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
+ {
+ suffix = sl ;
+ }
+ else
+ {
+ suffix = so ;
+ }
+ }
+ case * : suffix = so ;
+ }
+
+ type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
+}
+
+
+# Unset 'lib' prefix for PYTHON_EXTENSION
+type.set-generated-target-prefix PYTHON_EXTENSION : : "" ;
+
+
+rule python-extension ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if [ configured ]
+ {
+ requirements += <use>/python//python_for_extensions ;
+ }
+ requirements += <suppress-import-lib>true ;
+
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
+
+IMPORT python : python-extension : : python-extension ;
+
+# Support for testing.
+type.register PY : py ;
+type.register RUN_PYD_OUTPUT ;
+type.register RUN_PYD : : TEST ;
+
+
+class python-test-generator : generator
+{
+ import set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local pyversion = [ $(property-set).get <python> ] ;
+ local python ;
+ local other-pythons ;
+
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PY
+ {
+ if ! $(python)
+ {
+ # First Python source ends up on command line.
+ python = $(s) ;
+
+ }
+ else
+ {
+ # Other Python sources become dependencies.
+ other-pythons += $(s) ;
+ }
+ }
+ }
+
+ local extensions ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PYTHON_EXTENSION
+ {
+ extensions += $(s) ;
+ }
+ }
+
+ local libs ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] LIB ]
+ && ! $(s) in $(extensions)
+ {
+ libs += $(s) ;
+ }
+ }
+
+ local new-sources ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] CPP ]
+ {
+ local name = [ utility.basename [ $(s).name ] ] ;
+ if $(name) = [ utility.basename [ $(python).name ] ]
+ {
+ name = $(name)_ext ;
+ }
+ local extension = [ generators.construct $(project) $(name) :
+ PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
+
+ # The important part of usage requirements returned from
+ # PYTHON_EXTENSION generator are xdll-path properties that will
+ # allow us to find the python extension at runtime.
+ property-set = [ $(property-set).add $(extension[1]) ] ;
+
+ # Ignore usage requirements. We're a top-level generator and
+ # nobody is going to use what we generate.
+ new-sources += $(extension[2-]) ;
+ }
+ }
+
+ property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
+
+ return [ construct-result $(python) $(extensions) $(new-sources) :
+ $(project) $(name) : $(property-set) ] ;
+ }
+}
+
+
+generators.register
+ [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_PYD_OUTPUT : RUN_PYD ;
+
+
+# There are two different ways of spelling OS names. One is used for [ os.name ]
+# and the other is used for the <host-os> and <target-os> properties. Until that
+# is remedied, this sets up a crude mapping from the latter to the former, that
+# will work *for the purposes of cygwin/NT cross-builds only*. Could not think
+# of a better name than "translate".
+#
+.translate-os-windows = NT ;
+.translate-os-cygwin = CYGWIN ;
+local rule translate-os ( src-os )
+{
+ local x = $(.translate-os-$(src-os)) [ os.name ] ;
+ return $(x[1]) ;
+}
+
+
+# Extract the path to a single ".pyd" source. This is used to build the
+# PYTHONPATH for running bpl tests.
+#
+local rule pyd-pythonpath ( source )
+{
+ return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
+}
+
+
+# The flag settings on testing.capture-output do not apply to
+# python.capture-output at the moment. Redo this explicitly.
+toolset.flags python.capture-output ARGS <testing.arg> ;
+toolset.flags python.capture-output INPUT_FILES <testing.input-file> ;
+
+toolset.uses-features python.capture-output :
+ <testing.launcher> <testing.execute> <dll-path> <xdll-path> <target-os>
+ <pythonpath> ;
+
+rule capture-output ( target : sources * : properties * )
+{
+ # Setup up a proper DLL search path. Here, $(sources[1]) is a python module
+ # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to
+ # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not
+ # consulted. Move it over explicitly.
+ RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
+
+ PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
+ PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
+
+ # After the test is run, we remove the Python module, but not the Python script.
+ testing.capture-output $(target) : $(sources[1]) : $(properties) ;
+
+ # PYTHONPATH is different; it will be interpreted by whichever Python is
+ # invoked and so must follow path rules for the target os. The only OSes
+ # where we can run python for other OSes currently are NT and CYGWIN so we
+ # only need to handle those cases.
+ local target-os = [ feature.get-values target-os : $(properties) ] ;
+ # Oddly, host-os is not in properties, so grab the default value.
+ local host-os = [ feature.defaults host-os ] ;
+ host-os = $(host-os:G=) ;
+ if $(target-os) != $(host-os) && $(target-os) in windows cygwin && $(host-os) in windows cygwin
+ {
+ PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path :
+ $(PYTHONPATH) ] ;
+ }
+ local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
+ local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
+ $(PYTHONPATH:E=:J=$(path-separator)) ] ;
+ LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
+}
+
+
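+# Declares a Boost.Python run test: the C++ sources are built into a Python
+# extension that is then exercised by the Python script. When no sources are
+# given, they default to <name>.py and <name>.cpp.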
+rule bpl-test ( name : sources * : requirements * )
+{
+ local s ;
+ sources ?= $(name).py $(name).cpp ;
+ return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python
+ : $(requirements) : $(name) ] ;
+}
+
+# The same as bpl-test but additionally require (and link to) boost_numpy.
+# Masked whenever NumPy is not enabled.
+rule numpy-test ( name : sources * : requirements * )
+{
+ numpy-include = [ python.numpy-include ] ;
+ # yuk !
+ if ! $(.numpy) { requirements += <build>no ; }
+ sources ?= $(name).py $(name).cpp ;
+ name = [ regex.replace $(name) "[/]" "~" ] ;
+ return [ testing.make-test run-pyd
+ : $(sources) /boost/python//boost_numpy /boost/python//boost_python
+ : $(requirements) <include>$(numpy-include)
+ : $(name) ] ;
+}
+
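+# Returns the best configured Python version with the given major version 'n'
+# (e.g. 2 or 3), if any.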
+rule py-version ( n )
+{
+ return $(py$(n)-version) ;
+}
+
+IMPORT $(__name__) : bpl-test : : bpl-test ;
+IMPORT $(__name__) : numpy-test : : numpy-test ;
+IMPORT $(__name__) : py-version : : py-version ;
diff --git a/src/boost/tools/build/src/tools/qcc.jam b/src/boost/tools/build/src/tools/qcc.jam
new file mode 100644
index 000000000..155b1ac3d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/qcc.jam
@@ -0,0 +1,242 @@
+# Copyright (c) 2001 David Abrahams.
+# Copyright (c) 2002-2003 Rene Rivera.
+# Copyright (c) 2002-2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import property ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+
+feature.extend toolset : qcc ;
+
+toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
+toolset.inherit-flags qcc : unix ;
+toolset.inherit-rules qcc : unix ;
+
+# Initializes the qcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one being specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
+# default.
+#
+# Example:
+# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters qcc : version $(version) ] ;
+ local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
+ common.handle-options qcc : $(condition) : $(command) : $(options) ;
+}
+
+
+generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
+
+
+# Declare flags for compilation.
+toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
+
+# Declare flags and action for compilation.
+toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
+
+toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
+toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
+
+toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
+
+toolset.flags qcc.compile OPTIONS <local-visibility>hidden : -fvisibility=hidden ;
+toolset.flags qcc.compile.c++ OPTIONS <local-visibility>hidden : -fvisibility-inlines-hidden ;
+toolset.flags qcc.compile OPTIONS <local-visibility>protected : -fvisibility=protected ;
+toolset.flags qcc.compile OPTIONS <local-visibility>global : -fvisibility=default ;
+
+toolset.flags qcc.compile OPTIONS <cflags> ;
+toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
+toolset.flags qcc.compile DEFINES <define> ;
+toolset.flags qcc.compile INCLUDES <include> ;
+
+toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
+
+toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+rule compile.c++
+{
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(1) = 128 ;
+ }
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -lang-c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+
+# The class checking that we do not try to use the <runtime-link>static property
+# while creating or using a shared library, since it is not supported by qcc/
+# /libc.
+#
+class qcc-linking-generator : unix-linking-generator
+{
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if <runtime-link>static in [ $(property-set).raw ]
+ {
+ local m ;
+ if [ id ] = "qcc.link.dll"
+ {
+ m = "on qcc, DLL can't be build with <runtime-link>static" ;
+ }
+ if ! $(m)
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ m = "on qcc, using DLLS together with the <runtime-link>static options is not possible " ;
+ }
+ }
+ }
+ if $(m)
+ {
+ errors.user-error $(m) : "It is suggested to use"
+ "<runtime-link>static together with <link>static." ;
+ }
+ }
+
+ return [ unix-linking-generator.generated-targets
+ $(sources) : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
+ : <toolset>qcc ] ;
+
+generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
+ : SHARED_LIB : <toolset>qcc ] ;
+
+generators.override qcc.prebuilt : builtin.prebuilt ;
+generators.override qcc.searched-lib-generator : searched-lib-generator ;
+
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
+toolset.flags qcc.link OPTIONS <profiling>on : -p ;
+toolset.flags qcc.link OPTIONS <linkflags> ;
+toolset.flags qcc.link LINKPATH <library-path> ;
+toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags qcc.link LIBRARIES <library-file> ;
+
+toolset.flags qcc.link FINDLIBS-SA : m ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link.
+toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
+
+# Assuming this is just like with gcc.
+toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
+toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
+
+
+# Declare actions for linking.
+#
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only qcc links, although it
+ # might be a good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
+
+
+# Always remove archive and start again. Here is the rationale from Andre Hentz:
+# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
+# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
+# After some debugging I traced it back to the fact that a1.o was *still* in
+# liba.a
+RM = [ common.rm-command ] ;
+if [ os.name ] = NT
+{
+ RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
+}
+
+
+# Declare action for creating static libraries. The 'r' letter means to add
+# files to the archive with replacement. Since we remove the archive, we do not
+# care about replacement, but there is no option to "add without replacement".
+# The 'c' letter suppresses warnings in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reasons.
+#
+# Use qcc driver to create archive, see
+# http://www.qnx.com/developers/docs/6.3.2/neutrino/utilities/q/qcc.html
+actions piecemeal archive
+{
+ $(RM) "$(<)"
+ "$(CONFIG_COMMAND)" -A "$(<)" "$(>)"
+}
+
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+
+# Differ from 'link' above only by -shared.
+#
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
diff --git a/src/boost/tools/build/src/tools/qt.jam b/src/boost/tools/build/src/tools/qt.jam
new file mode 100644
index 000000000..8aa7ca266
--- /dev/null
+++ b/src/boost/tools/build/src/tools/qt.jam
@@ -0,0 +1,17 @@
+# Copyright (c) 2006 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Forwarding toolset file for the Qt GUI library. Forwards to the toolset file
+# for the current version of Qt.
+
+import qt4 ;
+
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ;
+}
+
+
diff --git a/src/boost/tools/build/src/tools/qt3.jam b/src/boost/tools/build/src/tools/qt3.jam
new file mode 100644
index 000000000..9b2e8f6f4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/qt3.jam
@@ -0,0 +1,209 @@
+# Copyright 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for the Qt GUI library version 3
+# (http://www.trolltech.com/products/qt3/index.html).
+# For new developments, it is recommended to use Qt4 via the qt4 B2
+# module.
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+
+# Convert this module into a project, so that we can declare targets here.
+project.initialize $(__name__) ;
+project qt3 ;
+
+
+# Initializes the Qt support module. The 'prefix' parameter tells where Qt is
+# installed. When not given, the QTDIR environment variable should be set.
+#
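+# Example (the installation prefix shown is illustrative):
+#
+#   using qt3 : /usr/local/qt3 ;
+#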
+rule init ( prefix ? )
+{
+ if ! $(prefix)
+ {
+ prefix = [ modules.peek : QTDIR ] ;
+ if ! $(prefix)
+ {
+ errors.error
+ "QT installation prefix not given and QTDIR variable is empty" ;
+ }
+ }
+
+ if $(.initialized)
+ {
+ if $(prefix) != $(.prefix)
+ {
+ errors.error
+ "Attempt the reinitialize QT with different installation prefix" ;
+ }
+ }
+ else
+ {
+ .initialized = true ;
+ .prefix = $(prefix) ;
+
+ generators.register-standard qt3.moc : H : CPP(moc_%) : <allow>qt3 ;
+ # Note: the OBJ target type here is fake, take a look at
+ # qt4.jam/uic-h-generator for explanations that apply in this case as
+ # well.
+ generators.register [ new moc-h-generator-qt3
+ qt3.moc.cpp : MOCCABLE_CPP : OBJ : <allow>qt3 ] ;
+
+ # The UI type is defined in types/qt.jam, and UIC_H is only used in
+ # qt.jam, but not in qt4.jam, so define it here.
+ type.register UIC_H : : H ;
+
+ generators.register-standard qt3.uic-h : UI : UIC_H : <allow>qt3 ;
+
+ # The following generator is used to convert UI files to CPP. It creates
+ # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also
+ # returns UIC_H target, so that it can be mocced.
+ class "qt::uic-cpp-generator" : generator
+ {
+ rule __init__ ( )
+ {
+ generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : <allow>qt3 ;
+ }
+
+ rule run ( project name ? : properties * : sources + )
+ {
+ # Consider this:
+ # obj test : test_a.cpp : <optimization>off ;
+ #
+ # This generator will somehow be called in this case, and,
+ # will fail -- which is okay. However, if there are <library>
+ # properties they will be converted to sources, so the size of
+ # 'sources' will be more than 1. In this case, the base generator
+ # will just crash -- and that's not good. Just use a quick test
+ # here.
+
+ local result ;
+ if ! $(sources[2])
+ {
+ # Construct CPP as usual
+ result = [ generator.run $(project) $(name)
+ : $(properties) : $(sources) ] ;
+
+ # If OK, process UIC_H with moc. It's pretty clear that
+ # the object generated with UIC will have Q_OBJECT macro.
+ if $(result)
+ {
+ local action = [ $(result[1]).action ] ;
+ local sources = [ $(action).sources ] ;
+ local mocced = [ generators.construct $(project) $(name)
+ : CPP : $(properties) : $(sources[2]) ] ;
+ result += $(mocced[2-]) ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+
+ generators.register [ new "qt::uic-cpp-generator" ] ;
+
+ # Finally, declare prebuilt target for QT library.
+ local usage-requirements =
+ <include>$(.prefix)/include
+ <dll-path>$(.prefix)/lib
+ <library-path>$(.prefix)/lib
+ <allow>qt3
+ ;
+ lib qt : : <name>qt-mt <threading>multi : : $(usage-requirements) ;
+ lib qt : : <name>qt <threading>single : : $(usage-requirements) ;
+ }
+}
+
+class moc-h-generator-qt3 : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+ name = [ $(sources[1]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt3.moc.cpp :
+ $(property-set) ] ;
+
+ local target = [
+ new file-target $(name) : MOC : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that the dependency from
+ # sources to this generated header is detected -- if Jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.prefix) ;
+}
+
+# -f forces moc to include the processed source file. Without it, it would think
+# that .qpp is not a header and would not include it from the generated file.
+#
+actions moc
+{
+ $(.prefix)/bin/moc -f $(>) -o $(<)
+}
+
+# When moccing .cpp files, we don't need -f, otherwise generated code will
+# include .cpp and we'll get duplicated symbols.
+#
+actions moc.cpp
+{
+ $(.prefix)/bin/moc $(>) -o $(<)
+}
+
+
+space = " " ;
+
+# Sometimes it is required to make 'plugins' available during uic invocation. To
+# help with this we add the paths of all dependency libraries to the uic command
+# line. The intention is that it is possible to write
+#
+# exe a : ... a.ui ... : <uses>some_plugin ;
+#
+# and have everything work. We may add quite a few unrelated paths, but that
+# won't hurt.
+#
+flags qt3.uic-h LIBRARY_PATH <xdll-path> ;
+actions uic-h
+{
+ $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH)
+}
+
+
+flags qt3.uic-cpp LIBRARY_PATH <xdll-path> ;
+# The second target is the uic-generated header name. It is placed in the build
+# dir, but we want to include it using only its basename.
+actions uic-cpp
+{
+ $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH)
+}
diff --git a/src/boost/tools/build/src/tools/qt4.jam b/src/boost/tools/build/src/tools/qt4.jam
new file mode 100644
index 000000000..69b13bd55
--- /dev/null
+++ b/src/boost/tools/build/src/tools/qt4.jam
@@ -0,0 +1,755 @@
+# Copyright 2002-2006 Vladimir Prus
+# Copyright 2005 Alo Sarv
+# Copyright 2005-2009 Juergen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Qt4 library support module
+#
+# The module attempts to auto-detect QT installation location from QTDIR
+# environment variable; failing that, installation location can be passed as
+# argument:
+#
+# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
+#
+# The module supports code generation from .ui and .qrc files, as well as
+# running the moc preprocessor on headers. Note that you must list all your
+# moc-able headers in sources.
+#
+# Example:
+#
+# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
+# /qt4//QtGui /qt4//QtNetwork ;
+#
+# It's also possible to run moc on cpp sources:
+#
+# import cast ;
+#
+# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
+#
+# When moccing source file myapp.cpp you need to include "myapp.moc" from
+# myapp.cpp. When moccing .h files, the output of moc will be automatically
+# compiled and linked in, you don't need any includes.
+#
+# This is consistent with Qt guidelines:
+# http://qt-project.org/doc/qt-4.8/moc.html
+#
+# The .qrc processing utility supports various command line options (see
+# http://qt-project.org/doc/qt-4.8/rcc.html for a complete list). The
+# module provides default arguments for the "output file" and
+# "initialization function name" options. Other options can be set through
+# the <rccflags> build property. E.g. if you wish the compression settings
+# to be more aggressive than the defaults, you can apply them to all .qrc
+# files like this:
+#
+# project my-qt-project :
+# requirements
+# <rccflags>"-compress 9 -threshold 10"
+# ;
+#
+# Of course, this property can also be specified on individual targets.
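+#
+# For instance, a hypothetical sketch only (the target name, source files and
+# the "-no-compress" flag below are assumptions, not defaults of this module):
+#
+#   exe viewer : viewer.cpp resources.qrc /qt4//QtGui
+#       : <rccflags>"-no-compress" ;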
+
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+
+# Qt3Support control feature
+#
+# Qt4 configure defaults to build Qt4 libraries with Qt3Support.
+# The autodetection is missing, so we default to disable Qt3Support.
+# This prevents the user from inadvertently using a deprecated API.
+#
+# The Qt3Support library can be activated by adding
+# "<qt3support>on" to requirements
+#
+# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
+# to get warnings about deprecated Qt3 support functions and classes.
+# Files ported by the "qt3to4" conversion tool contain _tons_ of
+# warnings, so this define is not set as default.
+#
+# Todo: Detect Qt3Support from Qt's configure data.
+# Or add more auto-configuration (like python).
+feature.feature qt3support : off on : propagated link-incompatible ;
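+
+# For example (an illustrative sketch only; the project name is hypothetical),
+# a project that still relies on the Qt3 compatibility API could request it in
+# its requirements, following the advice above:
+#
+#   project legacy-app : requirements
+#       <qt3support>on
+#       <qt3support>on:<define>QT3_SUPPORT_WARNINGS ;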
+
+# The Qt version used for requirements
+# Valid are <qt>4.4 or <qt>4.5.0
+# Auto-detection via qmake sets '<qt>major.minor.patch'
+feature.feature qt : : propagated ;
+
+# Extra flags for rcc
+feature.feature rccflags : : free ;
+
+project.initialize $(__name__) ;
+project qt ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+
+# Helper utils for easy debug output
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = TRUE ;
+}
+
+local rule debug-message ( message * )
+{
+ if $(.debug-configuration) = TRUE
+ {
+ ECHO notice\: "[qt4-cfg]" $(message) ;
+ }
+}
+
+# Capture qmake output line by line
+local rule read-output ( content )
+{
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ return $(lines) ;
+}
+
+# Capture Qt version from qmake
+local rule check-version ( bin_prefix )
+{
+ full-cmd = $(bin_prefix)"/qmake -v" ;
+ debug-message Running '$(full-cmd)' ;
+ local output = [ SHELL $(full-cmd) ] ;
+ for line in [ read-output $(output) ]
+ {
+ # Parse the output to get all the results.
+ if [ MATCH "QMake" : $(line) ]
+ {
+ # Skip first line of output
+ }
+ else
+ {
+ temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
+ }
+ }
+ return $(temp) ;
+}
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH "^([0-9]+)\.([0-9]+)(.*)$" : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
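+
+# Illustrative behaviour of the rule above (examples only, not exhaustive):
+#
+#   [ split-version 4.8 ]    ->  4 8
+#   [ split-version 4.8.7 ]  ->  4 8   (the warning about the extra part is shown)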
+
+# Initialize the QT support module.
+# Parameters:
+# - 'prefix' parameter tells where Qt is installed.
+# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
+# - 'full_inc' optional full path to Qt top-level include directory
+# - 'full_lib' optional full path to Qt library directory
+# - 'version' optional version of Qt, else autodetected via 'qmake -v'
+# - 'condition' optional requirements
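+#
+# A hypothetical user-config.jam entry passing the optional parameters
+# explicitly (the paths, version and condition below are assumptions, not
+# defaults):
+#
+#   using qt4 : /opt/qt-4.8
+#       : /opt/qt-4.8/bin : /opt/qt-4.8/include : /opt/qt-4.8/lib
+#       : 4.8 : <target-os>linux ;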
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ project.push-current $(.project) ;
+
+ debug-message "==== Configuring Qt ... ====" ;
+ for local v in prefix full_bin full_inc full_lib version condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified $(v):" '$($(v))' ;
+ }
+ }
+
+ # Needed as default value
+ .prefix = $(prefix) ;
+
+ # Pre-build paths to detect reinitialization changes
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ # Globally needed variables
+ .incprefix = $(inc_prefix) ;
+ .libprefix = $(lib_prefix) ;
+ .binprefix = $(bin_prefix) ;
+
+ if ! $(.initialized)
+ {
+ # Make sure this is initialised only once
+ .initialized = true ;
+
+ # Generates cpp files from header files using "moc" tool
+ generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
+
+ # The OBJ result type is a fake, 'H' will be really produced. See
+ # comments on the generator class, defined below the 'init' function.
+ generators.register [ new uic-generator qt4.uic : UI : OBJ :
+ <allow>qt4 ] ;
+
+ # The OBJ result type is a fake here too.
+ generators.register [ new moc-h-generator
+ qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
+
+ generators.register [ new moc-inc-generator
+ qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
+
+ # Generates .cpp files from .qrc files.
+ generators.register-standard qt4.rcc : QRC : CPP(qrc_%) : <allow>qt4 ;
+
+ # dependency scanner for wrapped files.
+ type.set-scanner QRC : qrc-scanner ;
+
+ # Save value of first occurring prefix
+ .PREFIX = $(prefix) ;
+ }
+
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+ else
+ {
+ version = [ check-version $(bin_prefix) ] ;
+ if $(version)
+ {
+ version = $(version:J=.) ;
+ }
+ debug-message Detected version '$(version)' ;
+ }
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values qt ]
+ {
+ feature.extend qt : $(version) ;
+ }
+ target-requirements += <qt>$(version:E=default) ;
+ }
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ if ! $(target-os)
+ {
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+ target-requirements += <target-os>$(target-os) ;
+ }
+
+ # Build exact requirements for the tools
+ local tools-requirements = $(target-requirements:J=/) ;
+
+ debug-message "Details of this Qt configuration:" ;
+ debug-message " prefix: " '$(prefix:E=<empty>)' ;
+ debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
+ debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
+ debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
+ debug-message " target requirements:" '$(target-requirements)' ;
+ debug-message " tool requirements: " '$(tools-requirements)' ;
+
+ # setup the paths for the tools
+ toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+
+ # TODO: 2009-02-12: Better support for directories
+ # Most likely needed are separate getters for: include,libraries,binaries and sources.
+ toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
+
+ # Test for a buildable Qt.
+ if [ glob $(.prefix)/Jamroot ]
+ {
+ .bjam-qt = true ;
+
+ # this will declare QtCore (and qtmain on <target-os>windows)
+ add-shared-library QtCore ;
+ }
+ else
+ # Setup common pre-built Qt.
+ # Special setup for QtCore on which everything depends
+ {
+ local link = [ feature.get-values link : $(condition) ] ;
+
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <threading>multi
+ <allow>qt4 ;
+
+ if $(link) in shared
+ {
+ usage-requirements += <dll-path>$(.libprefix) ;
+ }
+
+ local suffix ;
+
+ # Since Qt-4.2, debug versions on unix have to be built
+ # separately and therefore have no suffix.
+ .suffix_version = "" ;
+ .suffix_debug = "" ;
+
+ # Control flag for auto-configuration of the debug libraries.
+ # This setup requires Qt 'configure -debug-and-release'.
+ # Only available on some platforms.
+ # ToDo: 2009-02-12: Maybe throw this away and
+ # require separate setup with <variant>debug as condition.
+ .have_separate_debug = FALSE ;
+
+ # Setup other platforms
+ if $(target-os) in windows cygwin
+ {
+ .have_separate_debug = TRUE ;
+
+ # On NT, the shared libs have "4" suffix, and "d" suffix in debug builds.
+ if $(link) in shared
+ {
+ .suffix_version = "4" ;
+ }
+ .suffix_debug = "d" ;
+
+ # On Windows we must link against the qtmain library
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ ;
+
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain
+ $(target-requirements)
+ ;
+ }
+ else if $(target-os) = darwin
+ {
+ # On MacOS X, both debug and release libraries are available.
+ .suffix_debug = "_debug" ;
+
+ .have_separate_debug = TRUE ;
+
+ alias qtmain ;
+ }
+ else
+ {
+ alias qtmain : : $(target-requirements) ;
+ }
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_version)
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <define>QT_NO_DEBUG
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_debug)$(.suffix_version)
+ <variant>debug
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+ }
+ }
+
+ # Initialising the remaining libraries is canonical
+ # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include'
+ # 'include' only for non-canonical include paths.
+ add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
+ add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
+ add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
+ add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
+
+ add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
+ : QT_QT3SUPPORT_LIB QT3_SUPPORT
+ : <qt3support>on $(target-requirements) ;
+
+ # Dummy target to enable "<qt3support>off" and
+ # "<library>/qt//Qt3Support" at the same time. This enables quick
+ # switching from one to the other for test/porting purposes.
+ alias Qt3Support : : <qt3support>off $(target-requirements) ;
+
+ # OpenGl Support
+ add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
+
+ # SVG-Support (Qt 4.1)
+ add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
+
+ # Test-Support (Qt 4.1)
+ add-shared-library QtTest : QtCore : : $(target-requirements) ;
+
+ # Qt designer library
+ add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
+ add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ;
+
+ # Support for dynamic Widgets (Qt 4.1)
+ add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
+
+ # DBus-Support (Qt 4.2)
+ add-shared-library QtDBus : QtXml : : $(target-requirements) ;
+
+ # Script-Engine (Qt 4.3)
+ add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
+
+ # Tools for the Script-Engine (Qt 4.5)
+ add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
+
+ # WebKit (Qt 4.4)
+ add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
+
+ # Phonon Multimedia (Qt 4.4)
+ add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
+
+ # Multimedia engine (Qt 4.6)
+ add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
+
+ # XmlPatterns-Engine (Qt 4.4)
+ add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
+
+ # Help-Engine (Qt 4.4)
+ add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
+ add-shared-library QtCLucene : QtCore QtSql QtXml : : $(target-requirements) ;
+
+ # QML-Engine (Qt 4.7)
+ add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ;
+
+ # AssistantClient Support
+ # Compat library removed in 4.7.0
+ # Pre-4.4 help system, use QtHelp for new programs
+ if $(version) < "4.7"
+ {
+ add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
+ }
+ debug-message "==== Configured Qt-$(version) ====" ;
+
+ project.pop-current ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+
+# This custom generator is needed because in QT4, UI files are translated only
+# into H files, and no C++ files are created. Further, the H files need not be
+# passed via MOC. The header is used only via inclusion. If we define a standard
+# UI -> H generator, B2 will run MOC on H, and then compile the
+# resulting cpp. It will give a warning, since output from moc will be empty.
+#
+# This generator is declared with a UI -> OBJ signature, so it gets invoked when
+# linking generator tries to convert sources to OBJ, but it produces target of
+# type H. This is non-standard, but allowed. That header won't be mocced.
+#
+class uic-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(name)
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+ }
+
+ local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
+
+ # The 'ui_' prefix is to match qmake's default behavior.
+ local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However, we
+ # need the target to be seen by bjam, so that dependency from sources to
+ # this generated header is detected -- if jam does not know about this
+ # target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+}
+
+
+class moc-h-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target $(name) : MOC : $(project) : $(a)
+ ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+class moc-inc-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target moc_$(name) : CPP : $(project) :
+ $(a) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(target).actualize ] ;
+
+ return [ virtual-target.register $(target) ] ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.PREFIX) ;
+}
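+
+# For illustration only (a hypothetical sketch, not part of this module's API
+# docs): a Jamfile could use this rule to install a custom plugin into the Qt
+# tree, e.g.
+#
+#   import qt4 ;
+#   local qt-dir = [ qt4.directory ] ;
+#   install qt-plugin : my_plugin : <location>$(qt-dir)/plugins/designer ;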
+
+# Add a shared Qt library.
+rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a static Qt library.
+rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a Qt library.
+# Static libs are unversioned, whereas shared libs have the major number as suffix.
+# Creates both release and debug versions on platforms where both are enabled by Qt configure.
+# Flags:
+# - lib-name Qt library Name
+# - version Qt major number used as shared library suffix (QtCore4.so)
+# - depends-on other Qt libraries
+# - usage-defines those are set by qmake, so set them when using this library
+# - requirements additional requirements
+# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
+rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
+{
+ if $(.bjam-qt)
+ {
+ # Import Qt module
+ # Everything will be set up there
+ alias $(lib-name)
+ : $(.prefix)//$(lib-name)
+ :
+ :
+ : <allow>qt4 ;
+ }
+ else
+ {
+ local real_include ;
+ real_include ?= $(include) ;
+ real_include ?= $(lib-name) ;
+
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(version)
+ $(requirements)
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(.suffix_debug)$(version)
+ $(requirements)
+ <variant>debug
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+ }
+ }
+
+ # Make library explicit so that a simple <use>qt4 will not bring in everything.
+ # And some components like QtDBus/Phonon may not be available on all platforms.
+ explicit $(lib-name) ;
+}
+
+# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
+# The exact match is the last one.
+
+# Get <include> and <defines> from current toolset.
+flags qt4.moc INCLUDES <include> ;
+flags qt4.moc DEFINES <define> ;
+
+# need a newline for expansion of DEFINES and INCLUDES in the response file.
+.nl = "
+" ;
+
+# Processes headers to create Qt MetaObject information. Qt4's moc has its own
+# C++ parser, so pass INCLUDES and DEFINES to it.
+# We use a response file with one INCLUDE/DEFINE per line.
+#
+actions moc
+{
+ $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+# When moccing files for include only, we don't need -f, otherwise the generated
+# code will include the .cpp and we'll get duplicated symbols.
+#
+actions moc.inc
+{
+ $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+
+# Get extra options for RCC
+flags qt4.rcc RCC_OPTIONS <rccflags> ;
+
+# Generates source files from resource files.
+#
+actions rcc
+{
+ $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) $(RCC_OPTIONS) -o $(<)
+}
+
+
+# Generates user-interface source from .ui files.
+#
+actions uic
+{
+ $(.BINPREFIX[-1])/uic $(>) -o $(<)
+}
+
+
+# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
+# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
+# detailed documentation of the Qt Resource System.
+#
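+#
+# For example (illustrative), given a .qrc entry such as
+#
+#   <file alias="cut-img.png">images/cut.png</file>
+#
+# the scanner extracts "images/cut.png" and registers it as an included
+# dependency of the .qrc file.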
+class qrc-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<file.*>(.*)</file>" ;
+ }
+}
+
+
+# Wrapped files are "included".
+scanner.register qrc-scanner : include ;
diff --git a/src/boost/tools/build/src/tools/qt5.jam b/src/boost/tools/build/src/tools/qt5.jam
new file mode 100644
index 000000000..eb2d6ddf3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/qt5.jam
@@ -0,0 +1,800 @@
+# Copyright 2002-2006 Vladimir Prus
+# Copyright 2005 Alo Sarv
+# Copyright 2005-2012 Juergen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Qt5 library support module
+#
+# The module attempts to auto-detect QT installation location from QTDIR
+# environment variable; failing that, installation location can be passed as
+# argument:
+#
+# toolset.using qt5 : /usr/local/Trolltech/Qt-5.0.0 ;
+#
+# The module supports code generation from .ui and .qrc files, as well as
+# running the moc preprocessor on headers. Note that you must list all your
+# moc-able headers in sources.
+#
+# Example:
+#
+# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
+# /qt5//QtGui /qt5//QtNetwork ;
+#
+# It's also possible to run moc on cpp sources:
+#
+# import cast ;
+#
+# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt5//QtGui ;
+#
+# When moccing source file myapp.cpp you need to include "myapp.moc" from
+# myapp.cpp. When moccing .h files, the output of moc will be automatically
+# compiled and linked in, you don't need any includes.
+#
+# This is consistent with Qt guidelines:
+# http://qt-project.org/doc/qt-5.0/moc.html
+
+# The .qrc processing utility supports various command line options (see
+# http://qt-project.org/doc/qt-5.0/rcc.html for a complete list). The
+# module provides default arguments for the "output file" and
+# "initialization function name" options. Other options can be set through
+# the <rccflags> build property. E.g. if you wish the compression settings
+# to be more aggressive than the defaults, you can apply them to all .qrc
+# files like this:
+#
+# project my-qt-project :
+# requirements
+# <rccflags>"-compress 9 -threshold 10"
+# ;
+#
+# Of course, this property can also be specified on individual targets.
+
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+
+# The Qt version used for requirements
+# Valid are <qt>5.0 or <qt>5.1.0
+# Auto-detection via qmake sets '<qt>major.minor.patch'
+feature.feature qt5 : : propagated ;
+
+# Extra flags for rcc
+# $TODO: figure out how to declare this only once
+# feature.feature rccflags : : free ;
+
+project.initialize $(__name__) ;
+project qt5 ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+
+# Helper utils for easy debug output
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = TRUE ;
+}
+
+local rule debug-message ( message * )
+{
+ if $(.debug-configuration) = TRUE
+ {
+ ECHO notice\: "[qt5-cfg]" $(message) ;
+ }
+}
+
+# Capture qmake output line by line
+local rule read-output ( content )
+{
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ return $(lines) ;
+}
+
+# Capture Qt version from qmake
+local rule check-version ( bin_prefix )
+{
+ full-cmd = $(bin_prefix)"/qmake -v" ;
+ debug-message Running '$(full-cmd)' ;
+ local output = [ SHELL $(full-cmd) ] ;
+ for line in [ read-output $(output) ]
+ {
+ # Parse the output to get all the results.
+ if [ MATCH "QMake" : $(line) ]
+ {
+ # Skip first line of output
+ }
+ else
+ {
+ temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
+ }
+ }
+ return $(temp) ;
+}
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH "^([0-9]+)\.([0-9]+)(.*)$" : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+# Initialize the QT support module.
+# Parameters:
+# - 'prefix' parameter tells where Qt is installed.
+# - 'version' optional version of Qt, else autodetected via 'qmake -v'
+# - 'condition' optional requirements
+# - 'namespace' optional support for configure -qtnamespace
+# - 'infix' optional support for configure -qtlibinfix
+# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
+# - 'full_inc' optional full path to Qt top-level include directory
+# - 'full_lib' optional full path to Qt library directory
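+#
+# A hypothetical user-config.jam entry (the path, version and condition below
+# are assumptions, not defaults):
+#
+#   using qt5 : /opt/qt-5.6 : 5.6 : <target-os>linux ;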
+rule init ( prefix : version ? : condition * : namespace ? : infix ? : full_bin ? : full_inc ? : full_lib ? )
+{
+ project.push-current $(.project) ;
+
+ debug-message "==== Configuring Qt ... ====" ;
+ for local v in version prefix condition namespace infix full_bin full_inc full_lib
+ {
+ if $($(v))
+ {
+ debug-message " user-specified $(v):" '$($(v))' ;
+ }
+ }
+
+ # Needed as default value
+ .prefix = $(prefix) ;
+
+ # Pre-build paths to detect reinitialization changes
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ # Globally needed variables
+ .incprefix = $(inc_prefix) ;
+ .libprefix = $(lib_prefix) ;
+ .binprefix = $(bin_prefix) ;
+
+ if ! $(.initialized)
+ {
+ # Make sure this is initialised only once
+ .initialized = true ;
+
+ # Generates cpp files from header files using "moc" tool
+ generators.register-standard qt5.moc : H : CPP(moc_%) : <allow>qt5 ;
+
+ # The OBJ result type is a fake, 'H' will be really produced. See
+ # comments on the generator class, defined below the 'init' function.
+ generators.register [ new uic-5-generator qt5.uic : UI : OBJ :
+ <allow>qt5 ] ;
+
+ # The OBJ result type is a fake here too.
+ generators.register [ new moc-h-5-generator
+ qt5.moc.inc : MOCCABLE5_CPP : OBJ : <allow>qt5 ] ;
+
+ generators.register [ new moc-inc-5-generator
+ qt5.moc.inc : MOCCABLE5_H : OBJ : <allow>qt5 ] ;
+
+ # Generates .cpp files from .qrc files.
+ generators.register-standard qt5.rcc : QRC : CPP(qrc_%) : <allow>qt5 ;
+
+ # dependency scanner for wrapped files.
+ type.set-scanner QRC : qrc-5-scanner ;
+
+ # Save value of first occurring prefix
+ .PREFIX = $(prefix) ;
+ }
+
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+ else
+ {
+ version = [ check-version $(bin_prefix) ] ;
+ if $(version)
+ {
+ version = $(version:J=.) ;
+ }
+ debug-message Detected version '$(version)' ;
+ }
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values qt5 ]
+ {
+ feature.extend qt5 : $(version) ;
+ }
+ target-requirements += <qt5>$(version:E=default) ;
+ }
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ if ! $(target-os)
+ {
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+ target-requirements += <target-os>$(target-os) ;
+ }
+
+ # Build exact requirements for the tools
+ local tools-requirements = $(target-requirements:J=/) ;
+
+ debug-message "Details of this Qt configuration:" ;
+ debug-message " prefix: " '$(prefix:E=<empty>)' ;
+ debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
+ debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
+ debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
+ debug-message " target requirements:" '$(target-requirements)' ;
+ debug-message " tool requirements: " '$(tools-requirements)' ;
+
+ # setup the paths for the tools
+ toolset.flags qt5.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt5.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt5.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+
+ # TODO: 2009-02-12: Better support for directories
+ # Most likely needed are separate getters for: include,libraries,binaries and sources.
+ toolset.flags qt5.directory .PREFIX $(tools-requirements) : $(prefix) ;
+
+ # Test for a buildable Qt.
+ if [ glob $(.prefix)/Jamroot ]
+ {
+ .bjam-qt = true ;
+
+ # this will declare QtCore (and qtmain on <target-os>windows)
+ add-shared-library QtCore ;
+ }
+ else
+ # Setup common pre-built Qt.
+ # Special setup for QtCore on which everything depends
+ {
+ local link = [ feature.get-values link : $(condition) ] ;
+
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <threading>multi
+ <allow>qt5 ;
+
+ if $(link) in shared
+ {
+ usage-requirements += <dll-path>$(.libprefix) ;
+ usage-requirements += <target-os>windows:<dll-path>$(.binprefix) ;
+ }
+
+ local suffix ;
+
+ # debug versions on unix have to be built
+ # separately and therefore have no suffix.
+ .infix_version = "" ;
+ .suffix_debug = "" ;
+
+ # Control flag for auto-configuration of the debug libraries.
+ # This setup requires Qt 'configure -debug-and-release'.
+ # Only available on some platforms.
+ # ToDo: 2009-02-12: Maybe throw this away and
+ # require separate setup with <variant>debug as condition.
+ .have_separate_debug = FALSE ;
+
+ # Setup other platforms
+ if $(target-os) in windows cygwin
+ {
+ .have_separate_debug = TRUE ;
+
+ # On NT, the libs have "d" suffix in debug builds.
+ .suffix_debug = "d" ;
+
+ .infix_version = "5" ;
+
+ # On Windows we must link against the qtmain library
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ ;
+
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain
+ $(target-requirements)
+ ;
+ }
+ else if $(target-os) = darwin
+ {
+ # On MacOS X, both debug and release libraries are available.
+ .suffix_debug = "_debug" ;
+
+ .have_separate_debug = TRUE ;
+
+ alias qtmain ;
+ }
+ else
+ {
+ alias qtmain : : $(target-requirements) ;
+ .infix_version = "5" ;
+ }
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>Qt$(.infix_version)Core
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <define>QT_NO_DEBUG
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>Qt$(.infix_version)Core$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+ }
+ }
+
+ if [ glob $(.incprefix)/QtAngle ]
+ {
+ # Setup support of ANGLE builds.
+ alias QtAngle
+ : # sources
+ : # requirements
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_OPENGL_ES_2
+ <define>QT_OPENGL_ES_2_ANGLE
+ <include>$(.incprefix)/QtAngle
+ $(usage-requirements)
+ ;
+ }
+ else
+ {
+ alias QtAngle
+ : # sources
+ : # requirements
+ $(target-requirements)
+ ;
+ }
+
+ # Initialising the remaining libraries is canonical
+ # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include'
+ # 'include' only for non-canonical include paths.
+ add-shared-library QtGui : QtCore QtAngle : QT_GUI_LIB : $(target-requirements) ;
+ add-shared-library QtWidgets : QtGui : QT_WIDGETS_LIB : $(target-requirements) ;
+ add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
+ add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
+ add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
+ add-shared-library QtPrintSupport : QtGui : QT_PRINTSUPPORT_LIB : $(target-requirements) ;
+ add-shared-library QtConcurrent : QtCore : QT_CONCURRENT_LIB : $(target-requirements) ;
+
+ add-shared-library QtPositioning : QtCore : QT_POSITIONING_LIB : $(target-requirements) ;
+
+ add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
+ add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
+
+ add-shared-library QtTest : QtCore : : $(target-requirements) ;
+
+ # Qt designer library et. al.
+ add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
+ add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ;
+ add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
+
+ # DBus-Support
+ add-shared-library QtDBus : QtXml : : $(target-requirements) ;
+
+ # Script-Engine and Tools
+ add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
+ add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
+
+ # WebKit
+ add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
+ add-shared-library QtWebKitWidgets : QtGui : QT_WEBKITWIDGETS_LIB : $(target-requirements) ;
+
+ # Multimedia engine
+ add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
+ add-shared-library QtMultimediaWidgets : QtMultimedia : QT_MULTIMEDIAWIDGETS_LIB : $(target-requirements) ;
+
+ # XmlPatterns engine
+ add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
+
+ # Help-Engine
+ add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
+ add-shared-library QtCLucene : QtCore QtSql QtXml : : $(target-requirements) ;
+
+ # QtQuick
+ add-shared-library QtQml : QtCore QtNetwork QtGui : QT_QML_LIB : $(target-requirements) ;
+ add-shared-library QtQuick : QtQml : QT_QUICK_LIB : $(target-requirements) ;
+ add-shared-library QtQuickParticles : QtQml : : $(target-requirements) ;
+ add-shared-library QtQuickTest : QtQml : : $(target-requirements) ;
+
+ add-shared-library QtSerialPort : QtCore : QT_SERIALPORT_LIB : $(target-requirements) ;
+
+ # QtLocation (since 5.4)
+ add-shared-library QtLocation : QtQuick QtPositioning : QT_LOCATION_LIB : $(target-requirements) ;
+
+ # Webengine support (since 5.4)
+ add-shared-library QtWebEngine : QtGui : QT_WEBENGINE_LIB : $(target-requirements) ;
+ add-shared-library QtWebEngineCore : QtWebEngine : QT_WEBENGINECORE_LIB : $(target-requirements) ;
+ add-shared-library QtWebEngineWidgets : QtWebEngineCore QtWidgets : QT_WEBENGINEWIDGETS_LIB : $(target-requirements) ;
+
+ add-shared-library QtWebChannel : QtQml : QT_WEBCHANNEL_LIB : $(target-requirements) ;
+ add-shared-library QtWebSockets : QtNetwork : QT_WEBSOCKETS_LIB : $(target-requirements) ;
+
+ add-shared-library QtWebView : QtWebEngineCore QtWebChannel : QT_WEBVIEW_LIB : $(target-requirements) ;
+
+ # Qt3d libraries (since 5.6)
+ add-shared-library Qt3DCore : QtGui : QT_3DCORE_LIB : $(target-requirements) ;
+ add-shared-library Qt3DRender : Qt3DCore QtConcurrent : QT_3DRENDER_LIB : $(target-requirements) ;
+ add-shared-library Qt3DLogic : Qt3DCore : QT_3DLOGIC_LIB : $(target-requirements) ;
+ add-shared-library Qt3DInput : Qt3DRender : QT_3DINPUT_LIB : $(target-requirements) ;
+
+ # QtCharts (since 5.7)
+ add-shared-library QtCharts : QtWidgets : QT_CHARTS_LIB : $(target-requirements) ;
+
+ # 3D data visualization (since 5.7)
+ add-shared-library QtDataVisualization : QtGui : QT_DATAVISUALIZATION_LIB : $(target-requirements) ;
+
+ # In-App purchase API (since 5.7)
+ add-shared-library QtPurchasing : QtCore : QT_PURCHASING_LIB : $(target-requirements) ;
+
+ # Qt Connectivity (since 5.3)
+ add-shared-library QtBluetooth : QtCore : QT_BLUETOOTH_LIB : $(target-requirements) ;
+ add-shared-library QtNfc : QtCore : QT_NFC_LIB : $(target-requirements) ;
+
+ # Gamepad (since 5.7)
+ add-shared-library QtGamepad : QtCore : QT_GAMEPAD_LIB : $(target-requirements) ;
+
+ # SCXML state machine (since 5.7)
+ add-shared-library QtScxml : QtCore : QT_SCXML_LIB : $(target-requirements) ;
+
+ # Tech Preview QtQuick
+ # SerialBus (since 5.7)
+ add-shared-library QtSerialBus : QtCore : QT_SERIALBUS_LIB : $(target-requirements) ;
+
+ # Platform dependent libraries
+ # Regular expression support
+ add-shared-library QtV8 : QtCore : : $(target-requirements) ;
+
+ # QML-Engine version1
+ add-shared-library QtDeclarative : QtXml : : $(target-requirements) ;
+
+ debug-message "==== Configured Qt-$(version) ====" ;
+
+ project.pop-current ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+
+# This custom generator is needed because in QT5, UI files are translated only
+# into H files, and no C++ files are created. Further, the H files need not be
+# passed via MOC. The header is used only via inclusion. If we define a standard
+# UI -> H generator, B2 will run MOC on H, and then compile the
+# resulting cpp. It will give a warning, since output from moc will be empty.
+#
+# This generator is declared with a UI -> OBJ signature, so it gets invoked when
+# linking generator tries to convert sources to OBJ, but it produces target of
+# type H. This is non-standard, but allowed. That header won't be mocced.
+#
+class uic-5-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(name)
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+ }
+
+ local a = [ new action $(sources[1]) : qt5.uic : $(property-set) ] ;
+
+ # The 'ui_' prefix is to match qmake's default behavior.
+ local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However, we
+ # need the target to be seen by bjam, so that dependency from sources to
+ # this generated header is detected -- if jam does not know about this
+ # target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+}
+
+
+class moc-h-5-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE5_CPP
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt5.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target $(name) : MOC : $(project) : $(a)
+ ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+class moc-inc-5-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE5_H
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt5.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target moc_$(name) : CPP : $(project) :
+ $(a) ] ;
+
+ # Since this generator will return a H target, the linking generator
+ # won't use it at all, and won't set any dependency on it. However,
+ # we need the target to be seen by bjam, so that dependency from
+ # sources to this generated header is detected -- if jam does not
+ # know about this target, it won't do anything.
+ DEPENDS all : [ $(target).actualize ] ;
+
+ return [ virtual-target.register $(target) ] ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.PREFIX) ;
+}
+
+# Add a shared Qt library.
+rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.infix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a static Qt library.
+rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.infix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a Qt library.
+# Static libs are unversioned, whereas shared libs have the major number as suffix.
+# Creates both release and debug versions on platforms where both are enabled by Qt configure.
+# Flags:
+# - lib-name Qt library Name
+# - version Qt major number used as shared library suffix (QtCore5.so)
+# - depends-on other Qt libraries
+# - usage-defines those are set by qmake, so set them when using this library
+# - requirements additional requirements
+# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
+rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
+{
+ if $(.bjam-qt)
+ {
+ # Import Qt module
+ # Everything will be set up there
+ alias $(lib-name)
+ : $(.prefix)//$(lib-name)
+ :
+ :
+ : <allow>qt5 ;
+ }
+ else
+ {
+ local real_include ;
+ real_include ?= $(include) ;
+ real_include ?= $(lib-name) ;
+
+ local real_name = [ MATCH ^Qt(.*) : $(lib-name) ] ;
+
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>Qt$(version)$(real_name)
+ $(requirements)
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>Qt$(version)$(real_name)$(.suffix_debug)
+ $(requirements)
+ <variant>debug
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+ }
+ }
+
+ # Make library explicit so that a simple <use>qt5 will not bring in everything.
+ # And some components like QtDBus/Phonon may not be available on all platforms.
+ explicit $(lib-name) ;
+}
+
+# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
+# The exact match is the last one.
+
+# Get <include> and <defines> from current toolset.
+flags qt5.moc INCLUDES <include> ;
+flags qt5.moc DEFINES <define> ;
+
+# need a newline for expansion of DEFINES and INCLUDES in the response file.
+.nl = "
+" ;
+
+# Processes headers to create Qt MetaObject information. Qt5's moc has its own
+# C++ parser, so pass INCLUDES and DEFINES to it.
+# We use a response file with one INCLUDE/DEFINE per line.
+#
+actions moc
+{
+ $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+# When moccing files for include only, we don't need -f, otherwise the generated
+# code will include the .cpp and we'll get duplicated symbols.
+#
+actions moc.inc
+{
+ $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+
+# Get extra options for RCC
+flags qt5.rcc RCC_OPTIONS <rccflags> ;
+
+# Generates source files from resource files.
+#
+actions rcc
+{
+ $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) $(RCC_OPTIONS) -o $(<)
+}
+
+
+# Generates user-interface source from .ui files.
+#
+actions uic
+{
+ $(.BINPREFIX[-1])/uic $(>) -o $(<)
+}
+
+
+# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
+# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
+# detailed documentation of the Qt Resource System.
+#
+class qrc-5-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<file.*>(.*)</file>" ;
+ }
+}
+
+
+# Wrapped files are "included".
+scanner.register qrc-5-scanner : include ;
diff --git a/src/boost/tools/build/src/tools/quickbook-config.jam b/src/boost/tools/build/src/tools/quickbook-config.jam
new file mode 100644
index 000000000..e983a78a8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/quickbook-config.jam
@@ -0,0 +1,44 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the QuickBook tool. To use, just import this module.
+
+import os ;
+import toolset : using ;
+
+if [ os.name ] = NT
+{
+ local boost-dir = ;
+ for local R in snapshot cvs 1.33.0
+ {
+ boost-dir += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)"
+ : "InstallRoot" ] ;
+ }
+ local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ;
+ quickbook-path = $(quickbook-path[1]) ;
+
+ if $(quickbook-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using quickbook ":" $(quickbook-path) ;
+ }
+ using quickbook : $(quickbook-path) ;
+ }
+}
+else
+{
+ local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ;
+ quickbook-path = $(quickbook-path[1]) ;
+
+ if $(quickbook-path)
+ {
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using quickbook ":" $(quickbook-path) ;
+ }
+ using quickbook : $(quickbook-path) ;
+ }
+}
diff --git a/src/boost/tools/build/src/tools/quickbook.jam b/src/boost/tools/build/src/tools/quickbook.jam
new file mode 100644
index 000000000..9870e9e41
--- /dev/null
+++ b/src/boost/tools/build/src/tools/quickbook.jam
@@ -0,0 +1,363 @@
+#
+# Copyright (c) 2005 João Abecasis
+# Copyright (c) 2005 Vladimir Prus
+# Copyright (c) 2006 Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+
+# This toolset defines a generator to translate QuickBook to BoostBook. It can
+# be used to generate nice (!) user documentation in different formats
+# (pdf/html/...), from a single text file with simple markup.
+#
+# The toolset defines the QUICKBOOK type (file extension 'qbk') and
+# a QUICKBOOK to XML (BOOSTBOOK) generator.
+#
+#
+# ===========================================================================
+# Q & A
+# ===========================================================================
+#
+# If you don't know what this is all about, some Q & A will hopefully get you
+# up to speed with QuickBook and this toolset.
+#
+#
+# What is QuickBook ?
+#
+# QuickBook is a WikiWiki style documentation tool geared towards C++
+# documentation using simple rules and markup for simple formatting tasks.
+# QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook
+# documents are simple text files. A single QuickBook document can
+# generate a fully linked set of nice HTML and PostScript/PDF documents
+# complete with images and syntax-colorized source code.
+#
+#
+# Where can I get QuickBook ?
+#
+# QuickBook can be found in Boost's repository, under the tools/quickbook
+# directory. It was added there in January 2005, some time after the release of
+# Boost v1.32.0, and has been an integral part of the Boost distribution
+# since v1.33.
+#
+# Here's a link to the SVN repository:
+# https://svn.boost.org/svn/boost/trunk/tools/quickbook
+#
+# And to QuickBook's QuickBook-generated docs:
+# http://www.boost.org/doc/libs/release/tools/quickbook/index.html
+#
+#
+# How do I use QuickBook and this toolset in my projects ?
+#
+# The minimal example is:
+#
+# using boostbook ;
+# import quickbook ;
+#
+# boostbook my_docs : my_docs_source.qbk ;
+#
+# where my_docs is a target name and my_docs_source.qbk is a QuickBook
+# file. The documentation format to be generated is determined by the
+# boostbook toolset. By default html documentation should be generated,
+# but you should check BoostBook's docs to be sure.
+#
+#
+# What do I need ?
+#
+# You should start by setting up the BoostBook toolset. Please refer to
+# boostbook.jam and the BoostBook documentation for information on how to
+# do this.
+#
+# A QuickBook executable is also needed. The toolset will generate this
+# executable if it can find the QuickBook sources. The following
+# directories will be searched:
+#
+# BOOST_ROOT/tools/quickbook/
+# BOOST_BUILD_PATH/../../quickbook/
+#
+# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables)
+#
+# If QuickBook sources are not found the toolset will then try to use
+# the shell command 'quickbook'.
+#
+#
+# How do I provide a custom QuickBook executable ?
+#
+# You may put the following in your user-config.jam or site-config.jam:
+#
+# using quickbook : /path/to/quickbook ;
+#
+# or, if 'quickbook' can be found in your PATH,
+#
+# using quickbook : quickbook ;
+#
+#
+# For convenience three alternatives are tried to get a QuickBook executable:
+#
+# 1. If the user points us to a QuickBook executable, that is used.
+#
+# 2. Otherwise, we search for the QuickBook sources and compile QuickBook
+# using the default toolset.
+#
+# 3. As a last resort, we rely on the shell for finding 'quickbook'.
+#
+
+import boostbook ;
+import "class" : new ;
+import feature ;
+import generators ;
+import toolset ;
+import type ;
+import scanner ;
+import project ;
+import targets ;
+import build-system ;
+import path ;
+import common ;
+import errors ;
+
+# The one and only QUICKBOOK type!
+type.register QUICKBOOK : qbk ;
+
+# <quickbook-binary> shell command to run QuickBook
+# <quickbook-binary-dependencies> targets to build QuickBook from sources.
+feature.feature <quickbook-binary> : : free ;
+feature.feature <quickbook-binary-dependencies> : : free dependency ;
+feature.feature <quickbook-define> : : free ;
+feature.feature <quickbook-indent> : : free ;
+feature.feature <quickbook-line-width> : : free ;
+feature.feature <quickbook-strict-mode> : : free ;
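+
+# These free features can be attached to individual documentation targets. A
+# hypothetical sketch only (the target and file names are placeholders, and the
+# define value is an assumption):
+#
+#   boostbook my_docs : my_docs_source.qbk
+#       : <quickbook-define>enable_index <quickbook-line-width>100 ;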
+
+
+# quickbook-binary-generator handles generation of the QuickBook executable, by
+# marking it as a dependency for QuickBook docs.
+#
+# If the user supplied the QuickBook command, that will be used.
+#
+# Otherwise we search some sensible places for the QuickBook sources and compile
+# from scratch using the default toolset.
+#
+# As a last resort we rely on the shell to find 'quickbook'.
+#
+class quickbook-binary-generator : generator
+{
+ import modules path targets quickbook ;
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ quickbook.freeze-config ;
+ # QuickBook invocation command and dependencies.
+ local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ;
+ local quickbook-binary-dependencies ;
+
+ if ! $(quickbook-binary)
+ {
+ # If the QuickBook source directory was found, mark its main target
+ # as a dependency for the current project. Otherwise, try to find
+ # 'quickbook' in the user's PATH.
+ local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ;
+ if $(quickbook-dir)
+ {
+ # Get the main-target in QuickBook directory.
+ local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ;
+
+ # The first element holds the actual targets, the second any
+ # properties found in the target id. We do not care about those
+ # since we have passed the id ourselves.
+ quickbook-main-target =
+ [ $(quickbook-main-target[1]).main-target quickbook ] ;
+
+ quickbook-binary-dependencies =
+ [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ;
+
+ # Ignore usage-requirements returned as first element.
+ quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ;
+
+ # Some toolsets generate extra targets (e.g. RSP). We must mark
+ # all targets as dependencies for the project, but we will only
+ # use the EXE target for quickbook-to-boostbook translation.
+ for local target in $(quickbook-binary-dependencies)
+ {
+ if [ $(target).type ] = EXE
+ {
+ quickbook-binary =
+ [ path.native
+ [ path.join
+ [ $(target).path ]
+ [ $(target).name ]
+ ]
+ ] ;
+ }
+ }
+ }
+ }
+
+ # Add $(quickbook-binary-dependencies) as a dependency of the current
+ # project and set it as the <quickbook-binary> feature for the
+ # quickbook-to-boostbook rule, below.
+ property-set = [ $(property-set).add-raw
+ <dependency>$(quickbook-binary-dependencies)
+ <quickbook-binary>$(quickbook-binary)
+ <quickbook-binary-dependencies>$(quickbook-binary-dependencies)
+ ] ;
+
+ return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ;
+ }
+}
+
+
+# Define a scanner for tracking QBK include dependencies.
+#
+class qbk-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "\\[[ ]*include[ ]+([^]]+)\\]"
+ "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]"
+ "\\[[ ]*import[ ]+([^]]+)\\]" ;
+ }
+}
+
+
+scanner.register qbk-scanner : include ;
+
+type.set-scanner QUICKBOOK : qbk-scanner ;
+
+
+# Initialization of toolset.
+#
+# Parameters:
+# command ? -> path to QuickBook executable.
+#
+# When command is not supplied toolset will search for QuickBook directory and
+# compile the executable from source. If that fails we still search the path for
+# 'quickbook'.
+#
+rule init (
+ command ? # path to the QuickBook executable.
+ )
+{
+ if $(command)
+ {
+ if $(.config-frozen)
+ {
+ errors.user-error "quickbook: configuration cannot be changed after it has been used." ;
+ }
+ .command = $(command) ;
+ }
+}
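+
+# For example, a user-config.jam or site-config.jam entry might look like the
+# following (the path is illustrative):
+#
+#   using quickbook : /usr/local/bin/quickbook ;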
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+
+ # QuickBook invocation command and dependencies.
+
+ .quickbook-binary = $(.command) ;
+
+ if $(.quickbook-binary)
+ {
+ # Use user-supplied command.
+ .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ;
+ }
+ else
+ {
+ # Search for QuickBook sources in sensible places, like
+ # $(BOOST_ROOT)/tools/quickbook
+ # $(BOOST_BUILD_PATH)/../../quickbook
+
+ # And build quickbook executable from sources.
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ local boost-build-path = [ build-system.location ] ;
+
+ if $(boost-root)
+ {
+ .quickbook-dir += [ path.join $(boost-root) tools ] ;
+ }
+
+ if $(boost-build-path)
+ {
+ .quickbook-dir += $(boost-build-path)/../.. ;
+ }
+
+ .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ;
+
+        # If the QuickBook source directory was found, mark its main target
+        # as a dependency for the current project. Otherwise, try to find
+        # 'quickbook' in the user's PATH.
+ if $(.quickbook-dir)
+ {
+ .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ;
+ }
+ else
+ {
+ ECHO "QuickBook warning: The path to the quickbook executable was" ;
+ ECHO " not provided. Additionally, couldn't find QuickBook" ;
+ ECHO " sources searching in" ;
+ ECHO " * BOOST_ROOT/tools/quickbook" ;
+ ECHO " * BOOST_BUILD_PATH/../../quickbook" ;
+ ECHO " Will now try to find a precompiled executable by searching" ;
+ ECHO " the PATH for 'quickbook'." ;
+ ECHO " To disable this warning in the future, or to completely" ;
+ ECHO " avoid compilation of quickbook, you can explicitly set the" ;
+ ECHO " path to a quickbook executable command in user-config.jam" ;
+ ECHO " or site-config.jam with the call" ;
+ ECHO " using quickbook : /path/to/quickbook ;" ;
+
+ # As a last resort, search for 'quickbook' command in path. Note
+ # that even if the 'quickbook' command is not found,
+ # get-invocation-command will still return 'quickbook' and might
+ # generate an error while generating the virtual-target.
+
+ .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ;
+ }
+ }
+ }
+}
+
+
+generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ;
+
+
+# <quickbook-binary> shell command to run QuickBook
+# <quickbook-binary-dependencies> targets to build QuickBook from sources.
+toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND <quickbook-binary> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES <quickbook-binary-dependencies> ;
+toolset.flags quickbook.quickbook-to-boostbook INCLUDES <include> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES <quickbook-define> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-INDENT <quickbook-indent> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH <quickbook-line-width> ;
+toolset.flags quickbook.quickbook-to-boostbook QB-OPTIONS <quickbook-strict-mode>on : --strict ;
+
+
+rule quickbook-to-boostbook ( target : source : properties * )
+{
+ # Signal dependency of quickbook sources on <quickbook-binary-dependencies>
+ # upon invocation of quickbook-to-boostbook.
+ DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ;
+}
+
+
+actions quickbook-to-boostbook
+{
+ "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" $(QB-OPTIONS) --output-file="$(1)" "$(2)"
+}
+
+
+# Declare a main target to convert a quickbook source into a boostbook XML file.
+#
+rule to-boostbook ( target-name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(target-name) : $(project) : XML
+ : [ targets.main-target-sources $(sources) : $(target-name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
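+
+# Note that the QUICKBOOK -> XML generator registered above runs automatically,
+# so projects can usually just list a .qbk file as a source of a BoostBook
+# target. A hypothetical explicit use of this rule (target and source names are
+# illustrative):
+#
+#   to-boostbook manual-xml : manual.qbk ;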
diff --git a/src/boost/tools/build/src/tools/rc.jam b/src/boost/tools/build/src/tools/rc.jam
new file mode 100644
index 000000000..d7534bc6f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/rc.jam
@@ -0,0 +1,155 @@
+# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+#
+# Copyright (c) 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import generators ;
+import feature ;
+import scanner ;
+import toolset : flags ;
+import type ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+type.register RC : rc ;
+
+rule init ( )
+{
+}
+
+# Configures a new resource compilation command specific to a condition,
+# usually a toolset selection condition. The possible options are:
+#
+# * <rc-type>(rc|windres) - Indicates the type of options the command
+# accepts.
+#
+# Even though the arguments are all optional, the command is configured only
+# when a command, a condition and at minimum the rc-type option are given. This
+# is so that callers do not have to check auto-configuration values before
+# calling this, while still getting build failures when the resource compiler
+# cannot be found.
+#
+rule configure ( command ? : condition ? : options * )
+{
+ local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
+
+ if $(command) && $(condition) && $(rc-type)
+ {
+ flags rc.compile.resource .RC $(condition) : $(command) ;
+ flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
+ flags rc.compile.resource DEFINES <define> ;
+ flags rc.compile.resource INCLUDES <include> ;
+ if $(.debug-configuration)
+ {
+ ECHO "notice:" using rc compiler "::" $(condition) "::" $(command) ;
+ }
+ }
+}
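+
+# A hypothetical call from a toolset configuration (command and condition are
+# illustrative):
+#
+#   rc.configure windres : <toolset>gcc : <rc-type>windres ;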
+
+rule compile.resource ( target : sources * : properties * )
+{
+ local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
+ rc-type ?= null ;
+ compile.resource.$(rc-type) $(target) : $(sources[1]) ;
+}
+
+actions compile.resource.rc
+{
+ "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
+}
+
+actions compile.resource.windres
+{
+ "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
+}
+
+actions quietly compile.resource.null
+{
+ as /dev/null -o "$(<)"
+}
+
+# Since it is common practice to write
+#    exe hello : hello.cpp hello.rc
+# we change the name of the object created from the RC file to avoid a conflict
+# with hello.cpp. The reason we generate OBJ and not RES is that gcc does not
+# seem to like RES files, but works OK with OBJ (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/5643).
+#
+# Using 'register-c-compiler' adds the build directory to INCLUDES
+generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
+
+# Register scanner for resources
+class res-scanner : scanner
+{
+ import path ;
+ import regex ;
+ import scanner ;
+ import virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
+ local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
+ local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
+
+ # Icons and other includes may be referenced as
+ #
+ # IDR_MAINFRAME ICON "res\\icon.ico"
+ #
+        # so we have to replace double backslashes with forward slashes.
+ res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach binding of including file to included targets. When a target is
+ # directly created from a virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on the including file (we can not get literal "." in the
+ # include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+ res = $(res:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) $(res) ;
+
+ INCLUDES $(target) : $(all) ;
+ NOCARE $(all) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) $(res) = $(b) $(self.includes:G=) ;
+
+        # Just propagate the current scanner to includes, in the hope that
+        # includes do not change scanners.
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
+
+ ISFILE $(all) ;
+ }
+}
+
+scanner.register res-scanner : include ;
+type.set-scanner RC : res-scanner ;
diff --git a/src/boost/tools/build/src/tools/rc.py b/src/boost/tools/build/src/tools/rc.py
new file mode 100644
index 000000000..c7a02dbb6
--- /dev/null
+++ b/src/boost/tools/build/src/tools/rc.py
@@ -0,0 +1,197 @@
+# Status: being ported by Steven Watanabe
+# Base revision: 47077
+#
+# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+#
+# Copyright (c) 2006 Rene Rivera.
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##import type ;
+##import generators ;
+##import feature ;
+##import errors ;
+##import scanner ;
+##import toolset : flags ;
+
+import os.path
+import re
+
+import bjam
+
+from b2.build import type, toolset, generators, scanner, feature
+from b2.exceptions import AlreadyDefined
+from b2.tools import builtin
+from b2.util import regex
+from b2.build.toolset import flags
+from b2.manager import get_manager
+from b2.util import utility
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+type.register('RC', ['rc'])
+
+def init():
+ pass
+
+def configure (command = None, condition = None, options = None):
+ """
+ Configures a new resource compilation command specific to a condition,
+ usually a toolset selection condition. The possible options are:
+
+ * <rc-type>(rc|windres) - Indicates the type of options the command
+ accepts.
+
+ Even though the arguments are all optional, only when a command, condition,
+ and at minimum the rc-type option are given will the command be configured.
+ This is so that callers don't have to check auto-configuration values
+ before calling this. And still get the functionality of build failures when
+ the resource compiler can't be found.
+ """
+ rc_type = feature.get_values('<rc-type>', options)
+ if rc_type:
+ assert(len(rc_type) == 1)
+ rc_type = rc_type[0]
+
+ if command and condition and rc_type:
+ flags('rc.compile.resource', '.RC', condition, command)
+ flags('rc.compile.resource', '.RC_TYPE', condition, [rc_type.lower()])
+ flags('rc.compile.resource', 'DEFINES', [], ['<define>'])
+ flags('rc.compile.resource', 'INCLUDES', [], ['<include>'])
+ if debug():
+ print 'notice: using rc compiler ::', condition, '::', command
+
+engine = get_manager().engine()
+
+class RCAction:
+ """Class representing bjam action defined from Python.
+ The function must register the action to execute."""
+
+ def __init__(self, action_name, function):
+ self.action_name = action_name
+ self.function = function
+
+ def __call__(self, targets, sources, property_set):
+ if self.function:
+ self.function(targets, sources, property_set)
+
+# FIXME: What is the proper way to dispatch actions?
+def rc_register_action(action_name, function = None):
+ global engine
+ if action_name in engine.actions:
+ raise AlreadyDefined("Bjam action %s is already defined" % action_name)
+ engine.actions[action_name] = RCAction(action_name, function)
+
+def rc_compile_resource(targets, sources, properties):
+ rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE')
+ rc_type = rc_type[0] if rc_type else ''
+ global engine
+ engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties)
+
+rc_register_action('rc.compile.resource', rc_compile_resource)
+
+
+engine.register_action(
+ 'rc.compile.resource.rc',
+ '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"')
+
+engine.register_action(
+ 'rc.compile.resource.windres',
+ '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"')
+
+# FIXME: this was originally declared quietly
+engine.register_action(
+ 'compile.resource.null',
+ 'as /dev/null -o "$(<)"')
+
+# Since it's a common practice to write
+# exe hello : hello.cpp hello.rc
+# we change the name of object created from RC file, to
+# avoid conflict with hello.cpp.
+# The reason we generate OBJ and not RES, is that gcc does not
+# seem to like RES files, but works OK with OBJ.
+# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
+#
+# Using 'register-c-compiler' adds the build directory to INCLUDES
+# FIXME: switch to generators
+builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], [])
+
+__angle_include_re = "#include[ ]*<([^<]+)>"
+
+# Register scanner for resources
+class ResScanner(scanner.Scanner):
+
+ def __init__(self, includes):
+        # Initialize the base Scanner class.
+        scanner.Scanner.__init__(self)
+ self.includes = includes
+
+ def pattern(self):
+ return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
+ "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
+
+ def process(self, target, matches, binding):
+ binding = binding[0]
+ angle = regex.transform(matches, "#include[ ]*<([^<]+)>")
+ quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"")
+ res = regex.transform(matches,
+ "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
+ "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])
+
+        # Icons and other includes may be referenced as
+        #
+        #     IDR_MAINFRAME ICON "res\\icon.ico"
+        #
+        # so we have to replace double backslashes with forward slashes.
+ res = [ re.sub(r'\\\\', '/', match) for match in res if match is not None ]
+
+        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ g = bjam.call('get-target-variable', target, 'HDRGRIST')[0]
+ b = os.path.normpath(os.path.dirname(binding))
+
+        # Attach the binding of the including file to included targets. When a
+        # target is directly created from a virtual target this extra
+        # information is unnecessary. But in other cases, it allows us to
+        # distinguish between two headers of the same name included from
+        # different places. We do not need this extra information for angle
+        # includes, since they should not depend on the including file (we
+        # cannot get a literal "." in the include path).
+ g2 = g + "#" + b
+
+ g = "<" + g + ">"
+ g2 = "<" + g2 + ">"
+ angle = [g + x for x in angle]
+ quoted = [g2 + x for x in quoted]
+ res = [g2 + x for x in res]
+
+ all = angle + quoted
+
+ bjam.call('mark-included', target, all)
+
+ engine = get_manager().engine()
+
+ engine.add_dependency(target, res)
+ bjam.call('NOCARE', all + res)
+ engine.set_target_variable(angle, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+ engine.set_target_variable(quoted, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
+ engine.set_target_variable(res, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
+
+        # Just propagate the current scanner to includes, in the hope
+        # that includes do not change scanners.
+ get_manager().scanners().propagate(self, angle + quoted)
+
+scanner.register(ResScanner, 'include')
+type.set_scanner('RC', ResScanner)
diff --git a/src/boost/tools/build/src/tools/sass.jam b/src/boost/tools/build/src/tools/sass.jam
new file mode 100644
index 000000000..be56487ff
--- /dev/null
+++ b/src/boost/tools/build/src/tools/sass.jam
@@ -0,0 +1,193 @@
+#|
+Copyright 2017 Dmitry Arkhipov
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+import common ;
+import feature ;
+import generators ;
+import modules ;
+import sequence ;
+import toolset ;
+import "class" : new ;
+
+#| tag::doc[]
+
+= Sass
+
+This tool converts SASS and SCSS files into CSS. It explicitly supports both
+the version written in C (sassc) and the original Ruby implementation (scss),
+but other variants might also work. In addition to the tool-specific features
+described in this section, the tool recognizes the features `<flags>` and
+`<include>`.
+
+|# # end::doc[]
+
+feature.feature sass : : implicit propagated symmetric ;
+
+#| tag::doc[]
+
+== Feature: `sass-style`
+
+Sets the output style. Available values are
+
+* `nested`: each property is put on its own line, rules are indented based on
+ how deeply they are nested;
+* `expanded`: each property is put on its own line, rules are not indented;
+* `compact`: each rule is put on a single line, nested rules occupy adjacent
+ lines, while groups of unrelated rules are separated by newlines;
+* `compressed`: takes minimum amount of space: all unnecessary whitespace is
+ removed, property values are compressed to have minimal representation.
+
+The feature is `optional` and is not `propagated` to dependent targets. If no
+style is specified, then the `nested` style is selected when the property set
+contains `<optimization>off`, and the `compressed` style is selected otherwise.
+
+|# # end::doc[]
+
+feature.subfeature sass
+ : style
+ : nested expanded compact compressed
+ : optional
+ ;
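+
+# For example (illustrative), a stylesheet target could request a particular
+# style in its requirements with a property such as <sass-style>compressed.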
+
+#| tag::doc[]
+
+== Feature: `sass-line-numbers`
+
+Enables emitting comments showing original line numbers for rules. This can be
+useful for debugging a stylesheet. Available values are `on` and `off`. The
+feature is `optional` and is not `propagated` to dependent targets. If no value
+for this feature is specified, then one is copied from the feature
+`debug-symbols`.
+
+|# # end::doc[]
+
+feature.subfeature sass : line-numbers : on off : optional ;
+
+#| tag::doc[]
+
+== Initialization
+
+To use the `sass` tool you need to declare it in a configuration file with the
+`using` rule. The initialization takes the following arguments:
+
+* `command`: the command, with any extra arguments, to execute.
+
+For example you could insert the following in your `user-config.jam`:
+
+```
+using sass : /usr/local/bin/psass -p2 ; # Perl libsass-based version
+```
+
+If no `command` is given, `sassc` is tried, after which `scss` is tried.
+
+|# # end::doc[]
+
+rule init ( command * )
+{
+ if ! $(.initialized)
+ {
+ # Setup only if we were called via "using .. ;"
+ .initialized = true ;
+
+ # Register generators
+ generators.register [ new sass-generator sass.convert : SASS : CSS ] ;
+ }
+
+ # Setting up command
+ if ! $(command)
+ {
+ # If none was specified by the user, first try sassc, then scss
+ SASS = [ common.find-tool sassc ] ;
+ SASS ?= [ common.find-tool scss ] ;
+ }
+ else
+ {
+ # Otherwise we attempt to resolve each component of the command to
+ # account for script interpreter wrappers.
+ SASS = [ sequence.transform maybe-find-tool : $(command) ] ;
+ }
+}
+
+class sass-generator : generator
+{
+ import property-set ;
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local style = [ $(property-set).get <sass-style> ] ;
+ local line-numbers = [ $(property-set).get <sass-line-numbers> ] ;
+
+ # Only one source file is sensible; we accept only sass and scss files
+ if ( ! $(sources[2]) ) && ( [ $(sources[1]).type ] in SASS )
+ {
+ # If no output name was given, guess it from sources
+ if ! $(name)
+ {
+ name = [ generator.determine-output-name $(sources) ] ;
+ }
+
+ # If output style was not given, then it is determined by
+ # <optimization> feature
+ if ! $(style)
+ {
+ switch [ $(property-set).get <optimization> ]
+ {
+ case "off" : style = nested ;
+ case * : style = compressed ;
+ }
+ }
+
+ # If line-numbers feature wasn't specified, copy it from
+ # <debug-symbols>
+ line-numbers ?= [ $(property-set).get <debug-symbols> ] ;
+ }
+
+ # We build a reduced property set so that we are not toolset dependent.
+ local raw-set
+ = <sass-style>$(style)
+ <sass-line-numbers>$(line-numbers)
+ ;
+ raw-set +=
+ [ sequence.filter recognized-feature : [ $(property-set).raw ] ] ;
+ raw-set = [ feature.expand-composites $(raw-set) ] ;
+ raw-set += [ $(property-set).incidental ] ;
+ property-set = [ property-set.create $(raw-set) ] ;
+ return
+ [ generator.run $(project) $(name)
+ : $(property-set)
+ : $(sources)
+ ] ;
+ }
+
+ local rule recognized-feature ( feature )
+ {
+ local result ;
+ if $(feature:G) in <include> <flags>
+ {
+ result = true ;
+ }
+ return $(result) ;
+ }
+}
+
+_ = " " ;
+toolset.flags sass STYLE : <sass-style> ;
+toolset.flags sass LINE_NUMBERS <sass-line-numbers>on : --line-numbers ;
+toolset.flags sass INCLUDES : <include> ;
+toolset.flags sass FLAGS : <flags> ;
+
+actions convert
+{
+ "$(SASS)" -t$(_)"$(STYLE)" $(LINE_NUMBERS) -I$(_)"$(INCLUDES)" $(FLAGS) "$(>)" $(_)"$(<)"
+}
+
+local rule maybe-find-tool ( command )
+{
+ local tool = [ common.find-tool $(command) ] ;
+ tool ?= $(command) ;
+ return $(tool) ;
+}
diff --git a/src/boost/tools/build/src/tools/saxonhe.jam b/src/boost/tools/build/src/tools/saxonhe.jam
new file mode 100644
index 000000000..01213193b
--- /dev/null
+++ b/src/boost/tools/build/src/tools/saxonhe.jam
@@ -0,0 +1,53 @@
+#
+# Copyright (c) 2018 Damian Jarek (damian dot jarek93 at gmail dot com)
+# Copyright (c) 2019 Richard Hodges (hodges dot r at gmail dot com)
+#
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+#
+
+import common ;
+import os ;
+
+rule init ( saxonhe_jar ? : java_exe ? )
+{
+ .java_exe = [ common.get-invocation-command saxonhe : java : $(java_exe) : ] ;
+ if $(saxonhe_jar)
+ {
+ .saxonhe_jar = $(saxonhe_jar) ;
+ }
+ else
+ {
+ local jar = [ GLOB "/usr/share/java/saxon/" "/usr/share/java/" : Saxon-HE.jar ] ;
+ .saxonhe_jar = $(jar[1]) ;
+ }
+}
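+
+# A hypothetical configuration in user-config.jam (paths are illustrative):
+#
+#   using saxonhe : /usr/share/java/Saxon-HE.jar : /usr/bin/java ;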
+
+#
+# Execute the Saxon-HE jar file, passing files as inputs and outputs.
+#
+actions saxonhe
+{
+ "$(.java_exe)" -jar "$(.saxonhe_jar)" -o:"$(<)" -s:"$(>[1])" -xsl:"$(>[2])"
+}
+
+#
+# Execute the Saxon-HE jar file, passing directories as inputs and outputs.
+# Saxon-HE requires that the output directory already exists.
+#
+if [ os.on-windows ]
+{
+ actions saxonhe_dir
+ {
+ if not exist "$(<)\\" mkdir "$(<)"
+ "$(.java_exe)" -jar "$(.saxonhe_jar)" -o:"$(<)" -s:"$(>[1])" -xsl:"$(>[2])"
+ }
+}
+else
+{
+ actions saxonhe_dir
+ {
+ mkdir -p "$(<)"
+ "$(.java_exe)" -jar "$(.saxonhe_jar)" -o:"$(<)" -s:"$(>[1])" -xsl:"$(>[2])"
+ }
+}
diff --git a/src/boost/tools/build/src/tools/stage.jam b/src/boost/tools/build/src/tools/stage.jam
new file mode 100644
index 000000000..a20482f6d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/stage.jam
@@ -0,0 +1,519 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location.
+
+import "class" : new ;
+import feature ;
+import generators ;
+import path ;
+import project ;
+import targets ;
+import type ;
+import types/register ;
+import virtual-target ;
+
+
+feature.feature <install-dependencies> : off on : incidental ;
+feature.feature <install-type> : : free incidental ;
+feature.feature <install-source-root> : : free path ;
+feature.feature <so-version> : : free incidental ;
+
+# If 'on', version symlinks for shared libraries will not be created. Affects
+# Unix builds only.
+feature.feature <install-no-version-symlinks> : on : optional incidental ;
+
+
+class install-target-class : basic-target
+{
+ import "class" : new ;
+ import feature ;
+ import generators ;
+ import path ;
+ import project ;
+ import property ;
+ import property-set ;
+ import stage ;
+ import type ;
+
+ rule __init__ ( name-and-dir : project : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+        # The usage-requirements specified here are ignored but are accepted as
+        # a parameter so that this metatarget class has the same standard
+        # instantiation interface as all the other Boost Build metatarget
+        # classes.
+ basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
+ $(requirements) : $(default-build) ;
+ }
+
+ # If <location> is not set, sets it based on the project data.
+ #
+ rule update-location ( property-set )
+ {
+ local loc = [ $(property-set).get <location> ] ;
+ if ! $(loc)
+ {
+ loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
+ property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
+ }
+
+ return $(property-set) ;
+ }
+
+ # Takes a target that is installed and a property set which is used when
+ # installing.
+ #
+ rule adjust-properties ( target : build-property-set )
+ {
+ local ps-raw ;
+ local a = [ $(target).action ] ;
+ if $(a)
+ {
+ local ps = [ $(a).properties ] ;
+ ps-raw = [ $(ps).raw ] ;
+
+ # Unless <hardcode-dll-paths>true is in properties, which can happen
+ # only if the user has explicitly requested it, nuke all <dll-path>
+ # properties.
+ if [ $(build-property-set).get <hardcode-dll-paths> ] != true
+ {
+ ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
+ }
+
+ # If any <dll-path> properties were specified for installing, add
+ # them.
+ local l = [ $(build-property-set).get <dll-path> ] ;
+ ps-raw += $(l:G=<dll-path>) ;
+
+ # Also copy <linkflags> feature from current build set, to be used
+ # for relinking.
+ local l = [ $(build-property-set).get <linkflags> ] ;
+ ps-raw += $(l:G=<linkflags>) ;
+
+ # Remove the <tag> feature on original targets.
+ ps-raw = [ property.change $(ps-raw) : <tag> ] ;
+
+            # Also remove <location>. If a stage target has another stage
+            # target in its sources, then we shall get virtual targets with the
+            # <location> property set.
+ ps-raw = [ property.change $(ps-raw) : <location> ] ;
+ }
+
+ local d = [ $(build-property-set).get <dependency> ] ;
+ ps-raw += $(d:G=<dependency>) ;
+
+ local d = [ $(build-property-set).get <location> ] ;
+ ps-raw += $(d:G=<location>) ;
+
+ local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
+ ps-raw += $(ns:G=<install-no-version-symlinks>) ;
+
+ local d = [ $(build-property-set).get <install-source-root> ] ;
+ # Make the path absolute: we shall use it to compute relative paths and
+ # making the path absolute will help.
+ if $(d)
+ {
+ d = [ path.root $(d) [ path.pwd ] ] ;
+ ps-raw += $(d:G=<install-source-root>) ;
+ }
+
+ if $(ps-raw)
+ {
+ return [ property-set.create $(ps-raw) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ source-targets = [ targets-to-stage $(source-targets) :
+ $(property-set) ] ;
+
+ property-set = [ update-location $(property-set) ] ;
+
+ local ename = [ $(property-set).get <name> ] ;
+
+ if $(ename) && $(source-targets[2])
+ {
+ import errors : error : $(__name__) : errors.error ;
+ errors.error When <name> property is used "in" 'install', only one
+ source is allowed. ;
+ }
+
+ local result ;
+ for local i in $(source-targets)
+ {
+ local staged-targets ;
+
+ local new-properties = [ adjust-properties $(i) :
+ $(property-set) ] ;
+
+ # See if something special should be done when staging this type. It
+ # is indicated by the presence of a special "INSTALLED_" type.
+ local t = [ $(i).type ] ;
+ if $(t) && [ type.registered INSTALLED_$(t) ]
+ {
+ if $(ename)
+ {
+ import errors : error : $(__name__) : errors.error ;
+ errors.error In "'install':" <name> property specified with
+ target that requires relinking. ;
+ }
+ else
+ {
+ local targets = [ generators.construct $(self.project)
+ $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
+ staged-targets += $(targets[2-]) ;
+ }
+ }
+ else
+ {
+ staged-targets = [ stage.copy-file $(self.project) $(ename) :
+ $(i) : $(new-properties) ] ;
+ }
+
+ if ! $(staged-targets)
+ {
+ import errors : error : $(__name__) : errors.error ;
+ errors.error Unable to generate staged version of
+                    [ $(i).str ] ;
+ }
+
+ for t in $(staged-targets)
+ {
+ result += [ virtual-target.register $(t) ] ;
+ }
+ }
+
+ return [ property-set.empty ] $(result) ;
+ }
+
+ # Given the list of source targets explicitly passed to 'stage', returns the
+ # list of targets which must be staged.
+ #
+ rule targets-to-stage ( source-targets * : property-set )
+ {
+ local result ;
+
+ # Traverse the dependencies, if needed.
+ if [ $(property-set).get <install-dependencies> ] = "on"
+ {
+ source-targets = [ collect-targets $(source-targets) ] ;
+ }
+
+ # Filter the target types, if needed.
+ local included-types = [ $(property-set).get <install-type> ] ;
+ for local r in $(source-targets)
+ {
+ local ty = [ $(r).type ] ;
+ if $(ty)
+ {
+ # Do not stage searched libs.
+ if $(ty) != SEARCHED_LIB
+ {
+ if $(included-types)
+ {
+ if [ include-type $(ty) : $(included-types) ]
+ {
+ result += $(r) ;
+ }
+ }
+ else
+ {
+ result += $(r) ;
+ }
+ }
+ }
+ else if ! $(included-types)
+ {
+ # Do not install typeless targets if there is an explicit list
+ # of allowed types.
+ result += $(r) ;
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # CONSIDER: figure out why we can not use virtual-target.traverse here.
+ #
+ rule collect-targets ( targets * )
+ {
+ # Find subvariants
+ local s ;
+ for local t in $(targets)
+ {
+ s += [ $(t).creating-subvariant ] ;
+ }
+ s = [ sequence.unique $(s) ] ;
+
+ local result = [ new set ] ;
+ $(result).add $(targets) ;
+
+ for local i in $(s)
+ {
+ $(i).all-referenced-targets $(result) ;
+ }
+ local result2 ;
+ for local r in [ $(result).list ]
+ {
+ if $(r:G) != <use>
+ {
+ result2 += $(r:G=) ;
+ }
+ }
+ DELETE_MODULE $(result) ;
+ return [ sequence.unique $(result2) ] ;
+ }
+
+ # Returns true iff 'type' is subtype of some element of 'types-to-include'.
+ #
+ local rule include-type ( type : types-to-include * )
+ {
+ local found ;
+ while $(types-to-include) && ! $(found)
+ {
+ if [ type.is-subtype $(type) $(types-to-include[1]) ]
+ {
+ found = true ;
+ }
+ types-to-include = $(types-to-include[2-]) ;
+ }
+
+ return $(found) ;
+ }
+}
+
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
+#
+rule copy-file ( project name ? : source : properties )
+{
+ name ?= [ $(source).name ] ;
+ local relative ;
+
+ local new-a = [ new non-scanning-action $(source) : common.copy :
+ $(properties) ] ;
+ local source-root = [ $(properties).get <install-source-root> ] ;
+ if $(source-root)
+ {
+ # Get the real path of the target. We probably need to strip relative
+ # path from the target name at construction.
+ local path = [ $(source).path ] ;
+ path = [ path.root $(name:D) $(path) ] ;
+ # Make the path absolute. Otherwise, it would be hard to compute the
+ # relative path. The 'source-root' is already absolute, see the
+ # 'adjust-properties' method above.
+ path = [ path.root $(path) [ path.pwd ] ] ;
+
+ relative = [ path.relative-to $(source-root) $(path) ] ;
+ }
+
+ # Note: Using $(name:D=$(relative)) might be faster here, but then we would
+ # need to explicitly check that relative is not ".", otherwise we might get
+ # paths like '<prefix>/boost/.', try to create it and mkdir would obviously
+ # fail.
+ name = [ path.join $(relative) $(name:D=) ] ;
+
+ return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
+ $(new-a) ] ;
+}
+
+
+rule symlink ( name : project : source : properties )
+{
+ local a = [ new action $(source) : symlink.ln : $(properties) ] ;
+ local t = [ new file-target $(name) exact : [ $(source).type ] : $(project)
+ : $(a) ] ;
+ return [ virtual-target.register $(t) ] ;
+}
+
+
+rule relink-file ( project : source : property-set )
+{
+ local action = [ $(source).action ] ;
+ local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
+ "" : $(property-set) ] ;
+ return [ $(cloned-action).targets ] ;
+}
+
+
+# Declare an installed version of the EXE type. The generator for this type
+# will cause relinking to the new location.
+type.register INSTALLED_EXE : : EXE ;
+
+
+class installed-exe-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-exe : EXE : INSTALLED_EXE ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ local stage-rule = stage.copy-file ;
+
+ if ! [ $(property-set).get <os> ] in NT CYGWIN &&
+ ! [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ # If dll-path properties have been changed for the stage target,
+ # relink instead of copying.
+ local a = [ $(source).action ] ;
+ local p = [ $(a).properties ] ;
+ local original = [ $(p).get <dll-path> ] ;
+ local current = [ $(property-set).get <dll-path> ] ;
+
+ if $(current) != $(original)
+ {
+ stage-rule = stage.relink-file ;
+ }
+ }
+
+ return [ $(stage-rule) $(project) : $(source) : $(property-set) ] ;
+ }
+}
+
+
+generators.register [ new installed-exe-generator ] ;
+
+
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
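+#
+# For example (name illustrative), installing libfoo.so.1.2.3 will typically
+# also produce the symlinks libfoo.so, libfoo.so.1 and libfoo.so.1.2 (the
+# latter two are omitted when <install-no-version-symlinks>on is in effect).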
+type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
+
+
+class installed-shared-lib-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-shared-lib : SHARED_LIB :
+ INSTALLED_SHARED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ local copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ return [ virtual-target.register $(copied) ] ;
+ }
+ else
+ {
+ local a = [ $(source).action ] ;
+ local copied ;
+ if ! $(a)
+ {
+ # Non-derived file, just copy.
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ local cp = [ $(a).properties ] ;
+ local current-dll-path = [ $(cp).get <dll-path> ] ;
+ local new-dll-path = [ $(property-set).get <dll-path> ] ;
+
+ if $(current-dll-path) != $(new-dll-path)
+ {
+ # Rpath changed, need to relink.
+ copied = [ stage.relink-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ }
+
+ copied = [ virtual-target.register $(copied) ] ;
+
+ local result = $(copied) ;
+ # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
+ # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
+ # symbolic links.
+ local m = [ MATCH
+ "(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$" :
+ [ $(copied).name ] ] ;
+ if $(m)
+ {
+                # A symlink without any version is used to make
+                # -lsome_library work.
+ result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
+ $(property-set) ] ;
+
+                # Symlinks to libfoo.N and libfoo.N.M are used so that the
+                # library can be found at runtime, if libfoo.N.M.X has a soname
+                # of libfoo.N. That happens when the library makes some binary
+                # compatibility guarantees. If not, it is possible to skip those
+                # symlinks.
+ local suppress = [ $(property-set).get
+ <install-no-version-symlinks> ] ;
+
+ if $(suppress) != "on"
+ {
+ result += [ stage.symlink $(m[1]).$(m[2]) : $(project) :
+ $(copied) : $(property-set) ] ;
+ result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) :
+ $(project) : $(copied) : $(property-set) ] ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+}
+
+generators.register [ new installed-shared-lib-generator ] ;
+
+
+# Main target rule for 'install'.
+#
+rule install ( name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ # Unless the user has explicitly asked us to hardcode dll paths, add
+ # <hardcode-dll-paths>false in requirements, to override default value.
+ if ! <hardcode-dll-paths>true in $(requirements)
+ {
+ requirements += <hardcode-dll-paths>false ;
+ }
+
+ if <tag> in $(requirements:G)
+ {
+ import errors ;
+ errors.user-error The <tag> property is not allowed for the 'install'
+ rule. ;
+ }
+
+ targets.create-metatarget install-target-class : $(project) : $(name) :
+ $(sources) : $(requirements) : $(default-build) ;
+}
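+
+# A typical use from a Jamfile (target names and location are illustrative):
+#
+#   install dist : hello : <location>dist/bin <install-dependencies>on
+#       <install-type>EXE <install-type>LIB ;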
+
+
+IMPORT $(__name__) : install : : install ;
+IMPORT $(__name__) : install : : stage ;
diff --git a/src/boost/tools/build/src/tools/stage.py b/src/boost/tools/build/src/tools/stage.py
new file mode 100644
index 000000000..2a15dd18a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/stage.py
@@ -0,0 +1,350 @@
+# Status: ported.
+# Base revision 64444.
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location.
+
+import b2.build.feature as feature
+import b2.build.targets as targets
+import b2.build.property as property
+import b2.build.property_set as property_set
+import b2.build.generators as generators
+import b2.build.virtual_target as virtual_target
+
+from b2.manager import get_manager
+from b2.util.sequence import unique
+from b2.util import bjam_signature
+
+import b2.build.type
+
+import os.path
+import re
+import types
+
+feature.feature('install-dependencies', ['off', 'on'], ['incidental'])
+feature.feature('install-type', [], ['free', 'incidental'])
+feature.feature('install-source-root', [], ['free', 'path'])
+feature.feature('so-version', [], ['free', 'incidental'])
+
+# If 'on', version symlinks for shared libraries will not be created. Affects
+# Unix builds only.
+feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental'])
+
+class InstallTargetClass(targets.BasicTarget):
+
+ def update_location(self, ps):
+ """If <location> is not set, sets it based on the project data."""
+
+ loc = ps.get('location')
+ if not loc:
+ loc = os.path.join(self.project().get('location'), self.name())
+ ps = ps.add_raw(["<location>" + loc])
+
+ return ps
+
+ def adjust_properties(self, target, build_ps):
+ a = target.action()
+ properties = []
+ if a:
+ ps = a.properties()
+ properties = ps.all()
+
+ # Unless <hardcode-dll-paths>true is in properties, which can happen
+ # only if the user has explicitly requested it, nuke all <dll-path>
+ # properties.
+
+ if build_ps.get('hardcode-dll-paths') != ['true']:
+ properties = [p for p in properties if p.feature.name != 'dll-path']
+
+ # If any <dll-path> properties were specified for installing, add
+ # them.
+ properties.extend(build_ps.get_properties('dll-path'))
+
+ # Also copy <linkflags> feature from current build set, to be used
+ # for relinking.
+ properties.extend(build_ps.get_properties('linkflags'))
+
+ # Remove the <tag> feature on original targets.
+ # And <location>. If stage target has another stage target in
+ # sources, then we shall get virtual targets with the <location>
+ # property set.
+ properties = [p for p in properties
+ if not p.feature.name in ['tag', 'location']]
+
+ properties.extend(build_ps.get_properties('dependency'))
+
+ properties.extend(build_ps.get_properties('location'))
+
+
+ properties.extend(build_ps.get_properties('install-no-version-symlinks'))
+
+ d = build_ps.get_properties('install-source-root')
+
+ # Make the path absolute: we shall use it to compute relative paths and
+ # making the path absolute will help.
+ if d:
+ p = d[0]
+ properties.append(property.Property(p.feature, os.path.abspath(p.value)))
+
+ return property_set.create(properties)
+
+
+ def construct(self, name, source_targets, ps):
+
+ source_targets = self.targets_to_stage(source_targets, ps)
+ ps = self.update_location(ps)
+
+ ename = ps.get('name')
+ if ename:
+ ename = ename[0]
+ if ename and len(source_targets) > 1:
+ get_manager().errors()("When <name> property is used in 'install', only one source is allowed")
+
+ result = []
+
+ for i in source_targets:
+
+ staged_targets = []
+ new_ps = self.adjust_properties(i, ps)
+
+ # See if something special should be done when staging this type. It
+ # is indicated by the presence of a special "INSTALLED_" type.
+ t = i.type()
+ if t and b2.build.type.registered("INSTALLED_" + t):
+
+ if ename:
+ get_manager().errors()("In 'install': <name> property specified with target that requires relinking.")
+ else:
+ (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t,
+ new_ps, [i])
+ assert isinstance(r, property_set.PropertySet)
+ staged_targets.extend(targets)
+
+ else:
+ staged_targets.append(copy_file(self.project(), ename, i, new_ps))
+
+ if not staged_targets:
+ get_manager().errors()("Unable to generate staged version of " + i)
+
+ result.extend(get_manager().virtual_targets().register(t) for t in staged_targets)
+
+ return (property_set.empty(), result)
+
+ def targets_to_stage(self, source_targets, ps):
+ """Given the list of source targets explicitly passed to 'stage', returns the
+ list of targets which must be staged."""
+
+ result = []
+
+ # Traverse the dependencies, if needed.
+ if ps.get('install-dependencies') == ['on']:
+ source_targets = self.collect_targets(source_targets)
+
+ # Filter the target types, if needed.
+ included_types = ps.get('install-type')
+ for r in source_targets:
+ ty = r.type()
+ if ty:
+ # Do not stage searched libs.
+ if ty != "SEARCHED_LIB":
+ if included_types:
+ if self.include_type(ty, included_types):
+ result.append(r)
+ else:
+ result.append(r)
+ elif not included_types:
+ # Don't install typeless target if there is an explicit list of
+ # allowed types.
+ result.append(r)
+
+ return result
+
+ # CONSIDER: figure out why we can not use virtual-target.traverse here.
+ #
+ def collect_targets(self, targets):
+
+ s = [t.creating_subvariant() for t in targets]
+ s = unique(filter(lambda l: l != None,s))
+
+ result = set(targets)
+ for i in s:
+ i.all_referenced_targets(result)
+
+ result2 = []
+ for r in result:
+ if isinstance(r, property.Property):
+
+ if r.feature.name != 'use':
+ result2.append(r.value)
+ else:
+ result2.append(r)
+ result2 = unique(result2)
+ return result2
+
+ # Returns true iff 'type' is subtype of some element of 'types-to-include'.
+ #
+ def include_type(self, type, types_to_include):
+ return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include)
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
+#
+def copy_file(project, name, source, ps):
+
+ if not name:
+ name = source.name()
+
+ relative = ""
+
+ new_a = virtual_target.NonScanningAction([source], "common.copy", ps)
+ source_root = ps.get('install-source-root')
+ if source_root:
+ source_root = source_root[0]
+ # Get the real path of the target. We probably need to strip relative
+ # path from the target name at construction.
+ path = os.path.join(source.path(), os.path.dirname(name))
+ # Make the path absolute. Otherwise, it would be hard to compute the
+ # relative path. The 'source-root' is already absolute, see the
+ # 'adjust-properties' method above.
+ path = os.path.abspath(path)
+
+ relative = os.path.relpath(path, source_root)
+
+ name = os.path.join(relative, os.path.basename(name))
+ return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True)
+
+def symlink(name, project, source, ps):
+ a = virtual_target.Action([source], "symlink.ln", ps)
+ return virtual_target.FileTarget(name, source.type(), project, a, exact=True)
+
+def relink_file(project, source, ps):
+ action = source[0].action()
+ cloned_action = virtual_target.clone_action(action, project, "", ps)
+ targets = cloned_action.targets()
+ # We relink only on Unix, where exe or shared lib is always a single file.
+ assert len(targets) == 1
+ return targets[0]
+
+
+# Declare an installed version of the EXE type. The generator for this type
+# will cause relinking to the new location.
+b2.build.type.register('INSTALLED_EXE', [], 'EXE')
+
+class InstalledExeGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE'])
+
+ def run(self, project, name, ps, source):
+
+ need_relink = False;
+
+ if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
+ # Never relink
+ pass
+ else:
+            # See if the dll-path properties changed during install. If they
+            # did not, copy instead of relinking.
+ need_relink = source[0].action() and ps.get('dll-path') != source[0].action().properties().get('dll-path')
+
+ if need_relink:
+ return [relink_file(project, source, ps)]
+ else:
+ return [copy_file(project, None, source[0], ps)]
+
+generators.register(InstalledExeGenerator())
+
+
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
+b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB')
+
+class InstalledSharedLibGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB'])
+
+ def run(self, project, name, ps, source):
+
+ source = source[0]
+ if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
+ copied = copy_file(project, None, source, ps)
+ return [get_manager().virtual_targets().register(copied)]
+ else:
+ a = source.action()
+ if not a:
+ # Non-derived file, just copy.
+ copied = copy_file(project, None, source, ps)
+ else:
+
+ need_relink = ps.get('dll-path') != source.action().properties().get('dll-path')
+
+ if need_relink:
+ # Rpath changed, need to relink.
+ copied = relink_file(project, source, ps)
+ else:
+ copied = copy_file(project, None, source, ps)
+
+ result = [get_manager().virtual_targets().register(copied)]
+ # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
+ # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
+ # symbolic links.
+ m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$",
+ copied.name());
+ if m:
+                # A symlink without any version is used to make
+                # -lsome_library work.
+ result.append(symlink(m.group(1), project, copied, ps))
+
+                # Symlinks to libfoo.N and libfoo.N.M are used so that the
+                # library can be found at runtime, if libfoo.N.M.X has a soname
+                # of libfoo.N. That happens when the library makes some binary
+                # compatibility guarantees. If not, it is possible to skip those
+                # symlinks.
+ if ps.get('install-no-version-symlinks') != ['on']:
+
+ result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps))
+ result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3),
+ project, copied, ps))
+
+ return result
+
+generators.register(InstalledSharedLibGenerator())
+
+
+# Main target rule for 'install'.
+#
+@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"]))
+def install(name, sources, requirements=[], default_build=[], usage_requirements=[]):
+
+ requirements = requirements[:]
+ # Unless the user has explicitly asked us to hardcode dll paths, add
+ # <hardcode-dll-paths>false in requirements, to override default value.
+ if not '<hardcode-dll-paths>true' in requirements:
+ requirements.append('<hardcode-dll-paths>false')
+
+ if any(r.startswith('<tag>') for r in requirements):
+ get_manager().errors()("The <tag> property is not allowed for the 'install' rule")
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ InstallTargetClass(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+get_manager().projects().add_rule("install", install)
+get_manager().projects().add_rule("stage", install)
+
diff --git a/src/boost/tools/build/src/tools/stlport.jam b/src/boost/tools/build/src/tools/stlport.jam
new file mode 100644
index 000000000..4a08c8c9b
--- /dev/null
+++ b/src/boost/tools/build/src/tools/stlport.jam
@@ -0,0 +1,312 @@
+# Copyright Gennadiy Rozental
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# STLport is made usable by means of the 'stdlib' feature. When
+# stdlib=stlport is specified, the default version of STLport will be used,
+# while stdlib=stlport-4.5 will use a specific version.
+# The subfeature value 'hostios' means to use the host compiler's iostreams.
+#
+# The specific STLport library variant is selected by features:
+# The <runtime-link> feature selects between the static and shared library.
+# <runtime-debugging>on selects STLport with debug symbols
+# and STL debugging.
+# There is no way to use STLport with debug symbols but without
+# STL debugging.
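+#
+# For example (illustrative), a build could request a debug STLport variant
+# with properties such as:
+#
+#   b2 stdlib=stlport-5.2 runtime-link=static runtime-debugging=on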
+
+# TODO: must implement selection of different STLport installations based
+# on the toolset used.
+# Also, finish various flags:
+#
+# This is copied from V1 toolset, "+" means "implemented"
+#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
+# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
+
+
+import feature : feature subfeature ;
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+import common ;
+import type ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project stlport ;
+
+# The problem: how do we request the use of the host compiler's iostreams?
+#
+# Solution 1: A global 'stlport-iostream' feature.
+#    That is ugly; a subfeature makes more sense for an stlport-specific thing.
+# Solution 2: Use a subfeature with two values, one of which ("use STLport
+#    iostreams") is the default.
+#    The problem is that such a subfeature will appear in target paths, and
+#    that is ugly.
+# Solution 3: Use an optional subfeature with only one value.
+
+feature.extend stdlib : stlport ;
+feature.compose <stdlib>stlport : <library>/stlport//stlport ;
+
+# STLport iostreams or native iostreams
+subfeature stdlib stlport : iostream : hostios : optional propagated ;
+
+# STLport extensions
+subfeature stdlib stlport : extensions : noext : optional propagated ;
+
+# STLport anachronisms -- NOT YET SUPPORTED
+# subfeature stdlib stlport : anachronisms : on off ;
+
+# STLport debug allocation -- NOT YET SUPPORTED
+#subfeature stdlib stlport : debug-alloc : off on ;
+
+# Declare a special target class to handle the creation of search-lib-target
+# instances for STLport. We need a special class because otherwise we would
+# have to:
+# - declare prebuilt targets for all possible toolsets, and by the time 'init'
+#   is called we do not even know the list of toolsets that are registered;
+# - produce nothing when host iostreams are used, which would be hard or
+#   impossible to achieve using prebuilt targets.
+
+class stlport-target-class : basic-target
+{
+ import feature project type errors generators ;
+ import set : difference ;
+
+ rule __init__ ( project : headers ? : libraries * : version ? )
+ {
+ basic-target.__init__ stlport : $(project) ;
+ self.headers = $(headers) ;
+ self.libraries = $(libraries) ;
+ self.version = $(version) ;
+ self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
+
+ local requirements ;
+ requirements += <stdlib-stlport:version>$(self.version) ;
+ requirements += <relevant>runtime-debugging ;
+ requirements += <relevant>toolset ;
+ requirements += <relevant>runtime-link ;
+ self.requirements = [ property-set.create $(requirements) ] ;
+ }
+
+ rule generate ( property-set )
+ {
+ # Since this target is built with <stdlib>stlport, it will also
+ # have <library>/stlport//stlport in requirements, which will
+ # cause a loop in main target references. Remove that property
+ # manually.
+
+ property-set = [ property-set.create
+ [ difference
+ [ $(property-set).raw ] :
+ <library>/stlport//stlport
+ <stdlib>stlport
+ ]
+ ] ;
+ return [ basic-target.generate $(property-set) ] ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ # Deduce the name of stlport library, based on toolset and
+ # debug setting.
+ local raw = [ $(property-set).raw ] ;
+ local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
+ local toolset = [ feature.get-values <toolset> : $(raw) ] ;
+
+ if $(self.version.5)
+ {
+ # Version 5.x
+
+            # STLport host IO streams are no longer supported, so we always
+            # need libraries.
+
+ # name: stlport(stl)?[dg]?(_static)?.M.R
+ local name = stlport ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name += stl ;
+ switch $(toolset)
+ {
+ case gcc* : name += g ;
+ case darwin* : name += g ;
+ case * : name += d ;
+ }
+ }
+
+ if [ feature.get-values <runtime-link> : $(raw) ] = "static"
+ {
+ name += _static ;
+ }
+
+ # Starting with version 5.2.0, the STLport static libraries no
+ # longer include a version number in their name
+            local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(self.version) ] ;
+ if $(version.pre.5.2) || [ feature.get-values <runtime-link> :
+ $(raw) ] != "static"
+ {
+ name += .$(self.version.5) ;
+ }
+
+ name = $(name:J=) ;
+
+ if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
+ {
+ #~ Allow explicitly asking to install the STLport lib by
+ #~ referring to it directly:
+ #~ /stlport//stlport/<install-dependencies>on
+ #~ This allows for install packaging of all libs one might need
+ #~ for a standalone distribution.
+ import path : make : path-make ;
+ local runtime-link
+ = [ feature.get-values <runtime-link> : $(raw) ] ;
+ local lib-file.props
+ = [ property-set.create $(raw) <link>$(runtime-link) ] ;
+ local lib-file.prefix
+ = [ type.generated-target-prefix $(runtime-link:U)_LIB :
+ $(lib-file.props) ] ;
+ local lib-file.suffix
+ = [ type.generated-target-suffix $(runtime-link:U)_LIB :
+ $(lib-file.props) ] ;
+ lib-file.prefix
+ ?= "" "lib" ;
+ lib-file.suffix
+ ?= "" ;
+ local lib-file
+ = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
+ $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
+ lib-file
+ = [ new file-reference [ path-make $(lib-file[1]) ] :
+ $(self.project) ] ;
+ lib-file
+ = [ $(lib-file).generate "" ] ;
+ local lib-file.requirements
+ = [ targets.main-target-requirements
+ [ $(lib-file.props).raw ] <file>$(lib-file[-1])
+ : $(self.project) ] ;
+ return [ generators.construct $(self.project) $(name) : LIB :
+ $(lib-file.requirements) ] ;
+ }
+ else
+ {
+ #~ Otherwise, it is just regular library usage.
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ }
+ else if ! $(hostios) && $(toolset) != msvc
+ {
+            # We do not need libraries if host iostreams are used. For
+            # msvc, automatic library selection will be used.
+
+ # name: stlport_<toolset>(_stldebug)?
+ local name = stlport ;
+ name = $(name)_$(toolset) ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name = $(name)_stldebug ;
+ }
+
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local usage-requirements =
+ <include>$(self.headers)
+ <dll-path>$(self.libraries)
+ <library-path>$(self.libraries)
+ ;
+
+ local rproperties = [ $(subvariant).build-properties ] ;
+ # CONSIDER: should this "if" sequence be replaced with
+ # some use of 'property-map' class?
+ if [ $(rproperties).get <runtime-debugging> ] = "on"
+ {
+ usage-requirements +=
+ <define>_STLP_DEBUG=1
+ <define>_STLP_DEBUG_UNINITIALIZED=1 ;
+ }
+ if [ $(rproperties).get <runtime-link> ] = "shared"
+ {
+ usage-requirements +=
+ <define>_STLP_USE_DYNAMIC_LIB=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
+ {
+ usage-requirements +=
+ <define>_STLP_NO_EXTENSIONS=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
+ {
+ usage-requirements +=
+ <define>_STLP_NO_OWN_IOSTREAMS=1
+ <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+ }
+ if $(self.version.5)
+ {
+ # Version 5.x
+ if [ $(rproperties).get <threading> ] = "single"
+ {
+                # STLport 5 does not normally support single-threaded builds,
+                # so we force it into multi-threaded mode. As a result,
+                # single-threaded code from other libraries ends up linking
+                # against a multi-threaded STLport.
+ usage-requirements +=
+ <define>_STLP_THREADS=1 ;
+ }
+ }
+
+ return [ property-set.create $(usage-requirements) ] ;
+ }
+}
+
+rule stlport-target ( headers ? : libraries * : version ? )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new stlport-target-class $(project) : $(headers) : $(libraries)
+ : $(version)
+ ] ;
+}
+
+local .version-subfeature-defined ;
+
+# Initialize stlport support.
+rule init (
+ version ? :
+ headers : # Location of header files
+ libraries * # Location of libraries, lib and bin subdirs of STLport.
+ )
+{
+ # FIXME: need to use common.check-init-parameters here.
+ # At the moment, that rule always tries to define subfeature
+ # of the 'toolset' feature, while we need to define subfeature
+ # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
+ if $(version)
+ {
+ if ! $(.version-subfeature-defined)
+ {
+ feature.subfeature stdlib stlport : version : : propagated ;
+ .version-subfeature-defined = true ;
+ }
+ feature.extend-subfeature stdlib stlport : version : $(version) ;
+ }
+
+ # Declare the main target for this STLPort version.
+ stlport-target $(headers) : $(libraries) : $(version) ;
+}
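+
+# For example, a hypothetical configuration (the version and paths below are
+# illustrative only, not a required layout) might look like:
+#
+#   using stlport : 5.2.1 : /usr/local/include/stlport : /usr/local/lib ;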
+
diff --git a/src/boost/tools/build/src/tools/sun.jam b/src/boost/tools/build/src/tools/sun.jam
new file mode 100644
index 000000000..da7d7fe82
--- /dev/null
+++ b/src/boost/tools/build/src/tools/sun.jam
@@ -0,0 +1,224 @@
+# Copyright (C) Christopher Currie 2003. Permission to copy, use,
+# modify, sell and distribute this software is granted provided this
+# copyright notice appears in all copies. This software is provided
+# "as is" without express or implied warranty, and with no claim as
+# to its suitability for any purpose.
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.sun]]
+= Sun Studio
+
+The `sun` module supports the
+http://developers.sun.com/sunstudio/index.jsp[Sun Studio] C++ compilers
+for the Solaris OS.
+
+The module is initialized using the following syntax:
+
+----
+using sun : [version] : [c++-compile-command] : [compiler options] ;
+----
+
+This statement may be repeated several times, if you want to configure
+several versions of the compiler.
+
+If the command is not specified, B2 will search for a binary
+named `CC` in `/opt/SUNWspro/bin` and in PATH.
+
+When using this compiler on complex C++ code, such as the
+http://boost.org[Boost C++ library], it is recommended to specify the
+following options when initializing the `sun` module:
+
+----
+-library=stlport4 -features=tmplife -features=tmplrefstatic
+----
+
+See the http://blogs.sun.com/sga/entry/command_line_options[Sun C++
+Frontend Tales] for details.
+
+The following options can be provided, using
+_`<option-name>option-value`_ syntax:
+
+`cflags`::
+Specifies additional compiler flags that will be used when compiling C
+sources.
+
+`cxxflags`::
+Specifies additional compiler flags that will be used when compiling C++
+sources.
+
+`compileflags`::
+Specifies additional compiler flags that will be used when compiling both C
+and C++ sources.
+
+`linkflags`::
+Specifies additional command line options that will be passed to the linker.
+
+Starting with Sun Studio 12, you can create 64-bit applications by using
+the `address-model=64` property.
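+
+For example, a hypothetical configuration (the version, compiler path, and
+flags shown are illustrative only) might be:
+
+----
+using sun : 12.1 : /opt/SUNWspro/bin/CC :
+    <cxxflags>-library=stlport4
+    <cxxflags>-features=tmplife
+    <cxxflags>-features=tmplrefstatic ;
+----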
+
+|# # end::doc[]
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import type ;
+import common ;
+
+feature.extend toolset : sun ;
+toolset.inherit sun : unix ;
+generators.override sun.prebuilt : builtin.lib-generator ;
+generators.override sun.prebuilt : builtin.prebuilt ;
+generators.override sun.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters sun : version $(version) ] ;
+
+ command = [ common.get-invocation-command sun : CC
+ : $(command) : "/opt/SUNWspro/bin" ] ;
+
+    # Even if the real compiler is not found, put CC on the command line so
+    # that the user sees the command line that would have been executed.
+ command ?= CC ;
+
+ common.handle-options sun : $(condition) : $(command) : $(options) ;
+
+ command_c = $(command[1--2]) $(command[-1]:B=cc) ;
+
+ toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ;
+}
+
+# Declare generators
+generators.register-c-compiler sun.compile.c : C : OBJ : <toolset>sun ;
+generators.register-c-compiler sun.compile.c++ : CPP : OBJ : <toolset>sun ;
+
+# Declare flags and actions for compilation
+flags sun.compile OPTIONS <debug-symbols>on : -g ;
+flags sun.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags sun.compile OPTIONS <optimization>speed : -xO4 ;
+flags sun.compile OPTIONS <optimization>space : -xO2 -xspace ;
+flags sun.compile OPTIONS <threading>multi : -mt ;
+flags sun.compile OPTIONS <warnings>off : -erroff ;
+flags sun.compile OPTIONS <warnings>on : -erroff=%none ;
+flags sun.compile OPTIONS <warnings>all : -erroff=%none ;
+flags sun.compile OPTIONS <warnings-as-errors>on : -errwarn ;
+
+flags sun.compile OPTIONS <local-visibility>hidden : -xldscope=hidden ;
+flags sun.compile OPTIONS <local-visibility>protected : -xldscope=symbolic ;
+flags sun.compile OPTIONS <local-visibility>global : -xldscope=global ;
+
+flags sun.compile.c++ OPTIONS <inlining>off : +d ;
+
+# There are no fewer than five standard library options:
+# 1) The default, which uses an old version of the Rogue Wave std lib,
+# also available via -std=sun03.
+# 2) C++03 mode + STLport, selected via the -library option.
+# 3) C++03 mode plus the Apache std lib, selected via the -library option.
+# 4-5) C++03 or C++11 in g++ compatibility mode, with GNU libstdc++3, selected via -std=c++03 or -std=c++11.
+#
+# Note that the -std, -library and -compat compiler switches appear to be largely mutually
+# incompatible, and that going forward the -std switch seems to be the preferred one.
+#
+# See http://docs.oracle.com/cd/E37069_01/html/E37075/bkamw.html#OSSCPgnaof
+#
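+# As a usage sketch (assuming the corresponding standard library is actually
+# installed), one of these can be selected on the b2 command line, e.g.:
+#
+#   b2 toolset=sun stdlib=apache
+#   b2 toolset=sun stdlib=gnu11
+#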
+
+flags sun.compile.c++ OPTIONS <stdlib>sun-stlport : -library=stlport4 -compat=5 -features=zla ;
+flags sun.link OPTIONS <stdlib>sun-stlport : -library=stlport4 -compat=5 ;
+
+flags sun.compile.c++ OPTIONS <stdlib>apache : -library=stdcxx4 -compat=5 -features=zla ;
+flags sun.link OPTIONS <stdlib>apache : -library=stdcxx4 -compat=5 ;
+
+flags sun.compile.c++ OPTIONS <stdlib>gnu : -std=c++03 ;
+flags sun.compile.c++ DEFINES <stdlib>gnu : _GLIBCXX_USE_CXX11_ABI=0 ;
+flags sun.link OPTIONS <stdlib>gnu : -std=c++03 ;
+
+flags sun.compile.c++ OPTIONS <stdlib>gnu11 : -std=c++11 ;
+flags sun.compile.c++ DEFINES <stdlib>gnu11 : _GLIBCXX_USE_CXX11_ABI=1 ;
+flags sun.link OPTIONS <stdlib>gnu11 : -std=c++11 ;
+
+# The -m32 and -m64 options are supported starting
+# with Sun Studio 12. On earlier compilers, the
+# 'address-model' feature is not supported and should not
+# be used. Instead, use -xarch=generic64 command line
+# option.
+# See http://svn.boost.org/trac/boost/ticket/1186
+# for details.
+flags sun OPTIONS <address-model>32 : -m32 ;
+flags sun OPTIONS <address-model>64 : -m64 ;
+# On sparc, there's a difference between -Kpic
+# and -KPIC. The first is slightly more efficient,
+# but has limits on the size of the GOT table.
+# For minimal fuss on the user side, we use -KPIC here.
+# See http://svn.boost.org/trac/boost/ticket/1186#comment:6
+# for detailed explanation.
+flags sun OPTIONS <link>shared : -KPIC ;
+
+flags sun.compile OPTIONS <cflags> ;
+flags sun.compile.c++ OPTIONS <cxxflags> ;
+flags sun.compile DEFINES <define> ;
+flags sun.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags sun.link OPTIONS <debug-symbols>on : -g ;
+# Strip the binary when no debugging is needed
+flags sun.link OPTIONS <debug-symbols>off : -s ;
+flags sun.link OPTIONS <profiling>on : -xprofile=tcov ;
+flags sun.link OPTIONS <threading>multi : -mt ;
+flags sun.link OPTIONS <linkflags> ;
+flags sun.link LINKPATH <library-path> ;
+flags sun.link FINDLIBS-ST <find-static-library> ;
+flags sun.link FINDLIBS-SA <find-shared-library> ;
+flags sun.link LIBRARIES <library-file> ;
+flags sun.link LINK-RUNTIME <runtime-link>static : static ;
+flags sun.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags sun.link RPATH <dll-path> ;
+# On gcc, there are separate options for dll path at runtime and
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it's a bad idea.
+flags sun.link RPATH <xdll-path> ;
+
+# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.)
+flags sun.link FINDLIBS-SA : rt ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)"
+}
+
diff --git a/src/boost/tools/build/src/tools/symlink.jam b/src/boost/tools/build/src/tools/symlink.jam
new file mode 100644
index 000000000..b33e8260c
--- /dev/null
+++ b/src/boost/tools/build/src/tools/symlink.jam
@@ -0,0 +1,140 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the "symlink" special target. 'symlink' targets make symbolic links
+# to the sources.
+
+import targets modules path class os feature project property-set ;
+
+.count = 0 ;
+
+feature.feature symlink-location : project-relative build-relative : incidental ;
+
+# The class representing "symlink" targets.
+#
+class symlink-targets : basic-target
+{
+ import numbers modules class property project path ;
+
+ rule __init__ (
+ project
+ : targets *
+ : sources *
+ )
+ {
+ # Generate a fake name for now. Need unnamed targets eventually.
+ local c = [ modules.peek symlink : .count ] ;
+ modules.poke symlink : .count : [ numbers.increment $(c) ] ;
+ local fake-name = symlink#$(c) ;
+
+ basic-target.__init__ $(fake-name) : $(project) : $(sources) ;
+
+ # Remember the targets to map the sources onto. Pad or truncate
+ # to fit the sources given.
+ self.targets = ;
+ for local source in $(sources)
+ {
+ if $(targets)
+ {
+ self.targets += $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ self.targets += $(source) ;
+ }
+ }
+
+ # The virtual targets corresponding to the given targets.
+ self.virtual-targets = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local i = 1 ;
+ for local t in $(source-targets)
+ {
+ local s = $(self.targets[$(i)]) ;
+ local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ;
+ local vt = [ class.new file-target $(s:D=)
+ : [ $(t).type ] : $(self.project) : $(a) ] ;
+
+ # Place the symlink in the directory relative to the project
+ # location, instead of placing it in the build directory.
+ if [ property.select <symlink-location> : [ $(property-set).raw ] ] = <symlink-location>project-relative
+ {
+ $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ;
+ }
+
+ self.virtual-targets += $(vt) ;
+ i = [ numbers.increment $(i) ] ;
+ }
+ return [ property-set.empty ] $(self.virtual-targets) ;
+ }
+}
+
+# Creates a symbolic link from a set of targets to a set of sources.
+# The targets and sources map one to one. The symlinks generated are
+# limited to be the ones given as the sources. That is, the targets
+# are either padded or trimmed to equate to the sources. The padding
+# is done with the name of the corresponding source. For example::
+#
+# symlink : one two ;
+#
+# Is equal to::
+#
+# symlink one two : one two ;
+#
+# Names for symlink are relative to the project location. They cannot
+# include ".." path components.
+rule symlink (
+ targets *
+ : sources *
+ )
+{
+ local project = [ project.current ] ;
+
+ return [ targets.main-target-alternative
+ [ class.new symlink-targets $(project) : $(targets) :
+ # Note: inline targets are not supported for symlink, intentionally,
+          # since it is used for linking to existing non-local targets.
+ $(sources) ] ] ;
+}
+
+rule ln
+{
+ local os ;
+ if [ modules.peek : UNIX ] { os = UNIX ; }
+ else { os ?= [ os.name ] ; }
+ # Remember the path to make the link relative to where the symlink is located.
+ local path-to-source = [ path.relative-to
+ [ path.make [ on $(<) return $(LOCATE) ] ]
+ [ path.make [ on $(>) return $(LOCATE) ] ] ] ;
+ if $(path-to-source) = .
+ {
+ PATH_TO_SOURCE on $(<) = "" ;
+ }
+ else
+ {
+ PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ;
+ }
+ ln-$(os) $(<) : $(>) ;
+}
+
+actions ln-UNIX
+{
+ ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'
+}
+
+# there is a way to do this; we fall back to a copy for now
+actions ln-NT
+{
+ echo "NT symlinks not supported yet, making copy"
+ del /f /q "$(<)" 2>nul >nul
+ copy "$(>)" "$(<)" $(NULL_OUT)
+}
+
+IMPORT $(__name__) : symlink : : symlink ;
diff --git a/src/boost/tools/build/src/tools/symlink.py b/src/boost/tools/build/src/tools/symlink.py
new file mode 100644
index 000000000..ed5388977
--- /dev/null
+++ b/src/boost/tools/build/src/tools/symlink.py
@@ -0,0 +1,112 @@
+# Status: ported.
+# Base revision: 64488.
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the "symlink" special target. 'symlink' targets make symbolic links
+# to the sources.
+
+import b2.build.feature as feature
+import b2.build.targets as targets
+import b2.build.property_set as property_set
+import b2.build.virtual_target as virtual_target
+import b2.build.targets
+
+from b2.manager import get_manager
+
+import bjam
+
+import os
+
+
+feature.feature("symlink-location", ["project-relative", "build-relative"], ["incidental"])
+
+class SymlinkTarget(targets.BasicTarget):
+
+ _count = 0
+
+ def __init__(self, project, targets, sources):
+
+ # Generate a fake name for now. Need unnamed targets eventually.
+ fake_name = "symlink#%s" % SymlinkTarget._count
+ SymlinkTarget._count = SymlinkTarget._count + 1
+
+ b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources)
+
+ # Remember the targets to map the sources onto. Pad or truncate
+ # to fit the sources given.
+ assert len(targets) <= len(sources)
+ self.targets = targets[:] + sources[len(targets):]
+
+ # The virtual targets corresponding to the given targets.
+ self.virtual_targets = []
+
+ def construct(self, name, source_targets, ps):
+ i = 0
+ for t in source_targets:
+ s = self.targets[i]
+ a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps)
+ vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a)
+
+ # Place the symlink in the directory relative to the project
+ # location, instead of placing it in the build directory.
+ if not ps.get('symlink-location') == "project-relative":
+ vt.set_path(os.path.join(self.project().get('location'), os.path.dirname(s)))
+
+ vt = get_manager().virtual_targets().register(vt)
+ self.virtual_targets.append(vt)
+ i = i + 1
+
+ return (property_set.empty(), self.virtual_targets)
+
+# Creates a symbolic link from a set of targets to a set of sources.
+# The targets and sources map one to one. The symlinks generated are
+# limited to be the ones given as the sources. That is, the targets
+# are either padded or trimmed to equate to the sources. The padding
+# is done with the name of the corresponding source. For example::
+#
+# symlink : one two ;
+#
+# Is equal to::
+#
+# symlink one two : one two ;
+#
+# Names for symlink are relative to the project location. They cannot
+# include ".." path components.
+def symlink(targets, sources):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+ p = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ SymlinkTarget(p, targets,
+ # Note: inline targets are not supported for symlink, intentionally,
+                  # since it is used for linking to existing non-local targets.
+ sources))
+
+
+def setup_ln(targets, sources, ps):
+
+ source_path = bjam.call("get-target-variable", sources[0], "LOCATE")[0]
+ target_path = bjam.call("get-target-variable", targets[0], "LOCATE")[0]
+ rel = os.path.relpath(source_path, target_path)
+ if rel == ".":
+ bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", "")
+ else:
+ bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", rel)
+
+if os.name == 'nt':
+ ln_action = """echo "NT symlinks not supported yet, making copy"
+del /f /q "$(<)" 2>nul >nul
+copy "$(>)" "$(<)" $(NULL_OUT)"""
+else:
+ ln_action = "ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'"
+
+get_manager().engine().register_action("symlink.ln", ln_action, function=setup_ln)
+
+get_manager().projects().add_rule("symlink", symlink)
diff --git a/src/boost/tools/build/src/tools/testing-aux.jam b/src/boost/tools/build/src/tools/testing-aux.jam
new file mode 100644
index 000000000..30309fbb8
--- /dev/null
+++ b/src/boost/tools/build/src/tools/testing-aux.jam
@@ -0,0 +1,344 @@
+import feature ;
+
+# This module is imported by testing.py. The definitions here are
+# too tricky to do in Python
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+    **passed** $(target) : $(dependency) ;
+}
+IMPORT testing : expect-success : : testing.expect-success ;
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+IMPORT testing : expect-failure : : testing.expect-failure ;
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+
+if [ os.name ] = VMS
+{
+ actions **passed**
+ {
+ PIPE WRITE SYS$OUTPUT "passed" > $(<:W)
+ }
+
+ actions (failed-as-expected)
+ {
+ PIPE WRITE SYS$OUTPUT "failed as expected" > $(<:W)
+ }
+}
+
+
+# Runs the executable 'source' and stores its stdout in the file 'target'.
+# Unless the --preserve-test-targets command line option has been specified,
+# removes the executable. The 'targets-to-remove' parameter controls what
+# should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+    # The INCLUDES kills a warning about an independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into dependency graph, so we must tell jam it is
+ # OK if it cannot find the target or updating rule.
+ NOCARE $(target:S=.output) ;
+
+    # This has a two-fold effect. First, it adds input files to the dependency
+ # graph, preventing a warning. Second, it causes input files to be bound
+ # before target is created. Therefore, they are bound using SEARCH setting
+ # on them and not LOCATE setting of $(target), as in other case (due to jam
+ # bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
+ {
+ TEMPORARY $(targets-to-remove) ;
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we do not detect a case where removing files
+ # fails, but it is not likely to happen.
+ RmTemps $(target) : $(targets-to-remove) ;
+ }
+
+ if ! [ feature.get-values testing.launcher : $(properties) ]
+ {
+ ## On VMS set default launcher to MCR
+ if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; }
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .THEN = "(" ;
+ .EXIT_SUCCESS = "0" ;
+ .STATUS_0 = "%status% EQU 0 $(.THEN)" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 $(.THEN)" ;
+ .VERBOSE = "%verbose% EQU 1 $(.THEN)" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+ .NULLIN = ;
+}
+else if [ os.name ] = VMS
+{
+ local nl = "
+" ;
+
+ .STATUS = "''status'" ;
+ .SET_STATUS = "status=$STATUS" ;
+ .SAY = "pipe write sys$output" ; ## not really echo
+ .RUN_OUTPUT_NL = "$(.SAY) \"\"" ;
+ .THEN = "$(nl)then" ;
+ .EXIT_SUCCESS = "1" ;
+ .SUCCESS = "status .eq. $(.EXIT_SUCCESS) $(.THEN)" ;
+ .STATUS_0 = "status .eq. 0 $(.THEN)" ;
+ .STATUS_NOT_0 = "status .ne. 0 $(.THEN)" ;
+ .VERBOSE = "verbose .eq. 1 $(.THEN)" ;
+ .ENDIF = "endif" ;
+ .SHELL_SET = "" ;
+ .CATENATE = type ;
+ .CP = copy ;
+ .NULLIN = ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .THEN = "; then" ;
+ .EXIT_SUCCESS = "0" ;
+ .STATUS_0 = "test $status -eq 0 $(.THEN)" ;
+ .STATUS_NOT_0 = "test $status -ne 0 $(.THEN)" ;
+ .VERBOSE = "test $verbose -eq 1 $(.THEN)" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+ .NULLIN = "<" "/dev/null" ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+IMPORT testing : capture-output : : testing.capture-output ;
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+if [ os.name ] = VMS
+{
+ actions capture-output bind INPUT_FILES output-file
+ {
+ $(PATH_SETUP)
+ !! Execute twice - first for status, second for output
+ set noon
+ pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) 2>NL: >NL:
+ $(.SET_STATUS)
+ pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) | type sys$input /out=$(output-file:W)
+ set on
+ !! Harmonize VMS success status with POSIX
+ if $(.SUCCESS)
+ $(.SHELL_SET)status="0"
+ $(.ENDIF)
+ $(.RUN_OUTPUT_NL) | append /new sys$input $(output-file:W)
+ $(.SAY) "EXIT STATUS: $(.STATUS)" | append /new sys$input $(output-file:W)
+ if $(.STATUS_0)
+ $(.CP) $(output-file:W) $(<:W)
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ $(.SAY) "====== BEGIN OUTPUT ======"
+ $(.CATENATE) $(output-file:W)
+ $(.SAY) "====== END OUTPUT ======"
+ $(.ENDIF)
+ !! Harmonize VMS success status with POSIX on exit
+ if $(.STATUS_0)
+ $(.SHELL_SET)status="$(.EXIT_SUCCESS)"
+ $(.ENDIF)
+ exit "$(.STATUS)"
+ }
+
+ actions quietly updated ignore piecemeal together RmTemps
+ {
+ $(.RM) $(>:WJ=;*,);*
+ }
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+
+rule unit-test ( target : source : properties * )
+{
+ if ! [ feature.get-values testing.launcher : $(properties) ]
+ {
+ ## On VMS set default launcher to MCR
+ if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; }
+ }
+}
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions unit-test
+ {
+ $(PATH_SETUP)
+ pipe $(LAUNCHER) $(>:W) $(ARGS) && $(.MAKE_FILE) $(<:W)
+ }
+}
+
+# Note that this rule may be called multiple times for a single target in case
+# there are multiple actions operating on the same target in sequence. One such
+# example is msvc exe targets first created by a linker action and then updated
+# with an embedded manifest file by a separate action.
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+
+ # We need the following variables because attempting to perform such
+ # variable expansion in actions would not work due to quotes getting treated
+ # as regular characters.
+ USER_TIME_SECONDS on $(target) += $(src-string)$(user)" seconds" ;
+ SYSTEM_TIME_SECONDS on $(target) += $(src-string)$(system)" seconds" ;
+}
+
+# Calling this rule requests that Boost Build time how long it takes to build
+# the 'sources' targets and display the results both on the standard output and in
+# the 'target' file.
+#
+rule time ( target : sources + : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(sources) = testing.record-time $(target) ;
+
+ # Make sure the sources get rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(sources) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME_SECONDS) > "$(<)"
+ echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions time
+ {
+ WRITE SYS$OUTPUT "user: ", "$(USER_TIME)"
+        WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME)"
+
+ PIPE WRITE SYS$OUTPUT "user: ", "$(USER_TIME_SECONDS)" | TYPE SYS$INPUT /OUT=$(<:W)
+ PIPE WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W)
+ }
+}
diff --git a/src/boost/tools/build/src/tools/testing.jam b/src/boost/tools/build/src/tools/testing.jam
new file mode 100644
index 000000000..a6c5cc3d3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/testing.jam
@@ -0,0 +1,847 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2014-2015 Rene Rivera
+# Copyright 2014 Microsoft Corporation
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements a regression testing framework. It declares a number of
+# main target rules which perform some action and, if the results are OK,
+# creates an output file.
+#
+# The exact list of rules is:
+# 'compile' -- creates .test file if compilation of sources was
+# successful.
+# 'compile-fail' -- creates .test file if compilation of sources failed.
+# 'run' -- creates .test file if running of executable produced from
+# sources was successful. Also leaves behind .output file
+# with the output from program run.
+# 'run-fail' -- same as above, but .test file is created if running fails.
+#
+# In all cases, presence of .test file is an indication that the test passed.
+# For more convenient reporting, you might want to use C++ Boost regression
+# testing utilities (see http://www.boost.org/more/regression.html).
+#
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
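+#
+# A minimal Jamfile sketch using these rules (file and target names below are
+# hypothetical):
+#
+#   import testing ;
+#
+#   compile      compiles_ok.cpp ;     # passes if compilation succeeds
+#   compile-fail must-not-build.cpp ;  # passes if compilation fails
+#   run          app.cpp : --flag ;    # passes if the program exits successfully
+#   run-fail     aborts.cpp ;          # passes if the program exits with an error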
+
+# Things to do:
+# - Teach compiler_status to handle Jamfile.v2.
+# Notes:
+# - <no-warn> is not implemented, since it is Como-specific, and it is not
+# clear how to implement it
+# - std::locale-support is not implemented (it is used in one test).
+
+
+import alias ;
+import build-system ;
+import "class" ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import param ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import type ;
+import virtual-target ;
+
+
+rule init ( )
+{
+}
+
+
+# Feature controlling the command used to launch test programs.
+feature.feature testing.launcher : : free optional ;
+
+feature.feature test-info : : free incidental ;
+feature.feature testing.arg : : free incidental ;
+feature.feature testing.input-file : : free dependency ;
+
+feature.feature preserve-test-targets : on off : incidental propagated ;
+
+# Feature to control whether executable binaries are run as part of test.
+# This can be used to just compile test cases in cross compilation situations.
+feature.feature testing.execute : on off : incidental propagated ;
+feature.set-default testing.execute : on ;
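+
+# For example, to only build the test binaries without running them (useful
+# when cross-compiling), one might invoke b2 as in this usage sketch:
+#
+#   b2 testing.execute=off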
+
+# Register target types.
+type.register TEST : test ;
+type.register COMPILE : : TEST ;
+type.register COMPILE_FAIL : : TEST ;
+type.register RUN_OUTPUT : run ;
+type.register RUN : : TEST ;
+type.register RUN_FAIL : : TEST ;
+type.register LINK_FAIL : : TEST ;
+type.register LINK : : TEST ;
+type.register UNIT_TEST : passed : TEST ;
+
+
+# Suffix to denote test target directory
+#
+.TEST-DIR-SUFFIX = ".test" ;
+if [ os.name ] = VMS
+{
+ .TEST-DIR-SUFFIX = "$test" ;
+}
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: default
+# name of main target, so write our own versions.
+
+# Helper rule. Create a test target, using basename of first source if no target
+# name is explicitly passed. Remembers the created target in a global variable.
+#
+rule make-test ( target-type : sources + : requirements * : target-name ? )
+{
+ target-name ?= $(sources[1]:D=:S=) ;
+
+ # Having periods (".") in the target name is problematic because the typed
+ # generator will strip the suffix and use the bare name for the file
+    # targets. Even though the location-prefix averts problems most of the
+    # time, it does not prevent ambiguity issues when referring to the test
+    # targets, for example when using the XML log output. So we rename the
+    # target to remove the periods, and provide an alias for users.
+ local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
+
+ local project = [ project.current ] ;
+    # The <location-prefix> forces the build system to generate paths in the
+ # form '$build_dir/array1$(.TEST-DIR-SUFFIX)/gcc/debug'. This is necessary
+ # to allow post-processing tools to work.
+ local t = [ targets.create-typed-target [ type.type-from-rule-name
+ $(target-type) ] : $(project) : $(real-name) : $(sources) :
+ $(requirements) <location-prefix>$(real-name)$(.TEST-DIR-SUFFIX)
+ <relevant>toolset ] ;
+
+ # The alias to the real target, per period replacement above.
+ if $(real-name) != $(target-name)
+ {
+ alias $(target-name) : $(t) ;
+ }
+
+    # Remember the test (for --dump-tests). A better approach would be to
+    # collect all tests for a given project, but that has some technical
+    # problems: e.g. we can not call this dump from a Jamfile since projects
+    # referred to by 'build-project' are not available until the whole Jamfile
+    # has been loaded.
+ .all-tests += $(t) ;
+ return $(t) ;
+}
+
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp file
+# and a library target works.
+#
+rule compile ( sources + : requirements * : target-name ? )
+{
+ param.handle-named-params sources requirements target-name ;
+ return [ make-test compile : $(sources) : $(requirements) : $(target-name) ]
+ ;
+}
+
+
+rule compile-fail ( sources + : requirements * : target-name ? )
+{
+ param.handle-named-params sources requirements target-name ;
+ return [ make-test compile-fail : $(sources) : $(requirements) :
+ $(target-name) ] ;
+}
+
+
+rule link ( sources + : requirements * : target-name ? )
+{
+ param.handle-named-params sources requirements target-name ;
+ return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule link-fail ( sources + : requirements * : target-name ? )
+{
+ param.handle-named-params sources requirements target-name ;
+ return [ make-test link-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+rule handle-input-files ( input-files * )
+{
+ if $(input-files[2])
+ {
+ # Check that sorting made when creating property-set instance will not
+ # change the ordering.
+ if [ sequence.insertion-sort $(input-files) ] != $(input-files)
+ {
+ errors.user-error "Names of input files must be sorted alphabetically"
+ : "due to internal limitations" ;
+ }
+ }
+ return <testing.input-file>$(input-files) ;
+}
+
+
+rule run ( sources + : args * : input-files * : requirements * : target-name ? :
+ default-build * )
+{
+ param.handle-named-params sources args input-files requirements
+ target-name default-build ;
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule run-fail ( sources + : args * : input-files * : requirements * :
+ target-name ? : default-build * )
+{
+ param.handle-named-params sources args input-files requirements
+ target-name default-build ;
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
+IMPORT : alias : : test-suite ;
+
+
+# For all main targets in 'project-module', which are typed targets with type
+# derived from 'TEST', produce some interesting information.
+#
+rule dump-tests
+{
+ for local t in $(.all-tests)
+ {
+ dump-test $(t) ;
+ }
+}
+
+if ( --dump-tests in [ modules.peek : ARGV ] )
+{
+ IMPORT testing : dump-tests : : testing.dump-tests ;
+ build-system.add-pre-build-hook testing.dump-tests ;
+}
+
+# Given a project location in normalized form (slashes are forward), compute the
+# name of the Boost library.
+#
+local rule get-library-name ( path )
+{
+ # Path is in normalized form, so all slashes are forward.
+ local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
+ local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
+ local match3 = [ MATCH (/status$) : $(path) ] ;
+
+ if $(match1) { return $(match1[2]) ; }
+ else if $(match2) { return $(match2[2]) ; }
+ else if $(match3) { return "" ; }
+ else if --dump-tests in [ modules.peek : ARGV ]
+ {
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
+ return $(path) ;
+ }
+}
+
+
+# Was an XML dump requested?
+.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
+
+
+# Takes a target (instance of 'basic-target') and prints
+# - its type
+# - its name
+# - comments specified via the <test-info> property
+# - relative location of all source from the project root.
+#
+rule dump-test ( target )
+{
+ local type = [ $(target).type ] ;
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+
+ local project-root = [ $(project).get project-root ] ;
+ local library = [ get-library-name [ path.root [ $(project).get location ]
+ [ path.pwd ] ] ] ;
+ if $(library)
+ {
+ name = $(library)/$(name) ;
+ }
+
+ local sources = [ $(target).sources ] ;
+ local source-files ;
+ for local s in $(sources)
+ {
+ if [ class.is-a $(s) : file-reference ]
+ {
+ local location = [ path.root [ path.root [ $(s).name ]
+ [ $(s).location ] ] [ path.pwd ] ] ;
+
+ source-files += [ path.relative-to [ path.root $(project-root)
+ [ path.pwd ] ] $(location) ] ;
+ }
+ }
+
+ local target-name =
+ [ $(project).get location ] // [ $(target).name ] $(.TEST-DIR-SUFFIX) ;
+ target-name = $(target-name:J=) ;
+
+ local r = [ $(target).requirements ] ;
+ # Extract values of the <test-info> feature.
+ local test-info = [ $(r).get <test-info> ] ;
+
+ # If the user requested XML output on the command-line, add the test info to
+ # that XML file rather than dumping them to stdout.
+ if $(.out-xml)
+ {
+ local nl = "
+" ;
+ .contents on $(.out-xml) +=
+ "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
+ "$(nl) <target><![CDATA[$(target-name)]]></target>"
+ "$(nl) <info><![CDATA[$(test-info)]]></info>"
+ "$(nl) <source><![CDATA[$(source-files)]]></source>"
+ "$(nl) </test>"
+ ;
+ }
+ else
+ {
+ # Format them into a single string of quoted strings.
+ test-info = \"$(test-info:J=\"\ \")\" ;
+
+ ECHO boost-test($(type)) \"$(name)\" "[$(test-info)]" ":"
+ \"$(source-files)\" ;
+ }
+}
+
+class testing.expect-failure-generator : generator
+{
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ for local s in $(sources)
+ {
+ local a = [ $(s).action ] ;
+ if $(a)
+ {
+ for local t in [ $(a).targets ]
+ {
+ $(t).fail-expected ;
+ }
+ }
+ }
+ return [ generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+local rule register-fail-expected ( source-type : test-type )
+{
+ generators.register [ class.new testing.expect-failure-generator
+ testing.expect-failure : $(source-type) : $(test-type) ] ;
+}
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register-standard testing.expect-success : OBJ : COMPILE ;
+register-fail-expected OBJ : COMPILE_FAIL ;
+generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
+register-fail-expected RUN_OUTPUT : RUN_FAIL ;
+generators.register-standard testing.expect-success : EXE : LINK ;
+register-fail-expected EXE : LINK_FAIL ;
+
+# Generator which runs an EXE and captures output.
+generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
+
+# Generator which creates a target if sources run successfully. Differs from RUN
+# in that run output is not captured. The reason why it exists is that the 'run'
+# rule is much better for automated testing, but is not user-friendly (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
+generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
+
+toolset.uses-features testing.expect-success : <preserve-test-targets> ;
+toolset.uses-features testing.expect-failure : <preserve-test-targets> ;
+
+# The action rules called by generators.
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+ **passed** $(target) : $(dependency) : $(requirements) ;
+}
+
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) : $(properties) ;
+}
+
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed** ( target : sources * : properties * )
+{
+ if [ feature.get-values preserve-test-targets : $(properties) ] = off
+ {
+ remove-test-targets $(<) ;
+ }
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+
+if [ os.name ] = VMS
+{
+ actions **passed**
+ {
+ PIPE WRITE SYS$OUTPUT "passed" > $(<:W)
+ }
+
+ actions (failed-as-expected)
+ {
+ PIPE WRITE SYS$OUTPUT "failed as expected" > $(<:W)
+ }
+}
+
+rule run-path-setup ( target : source : properties * )
+{
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via
+ # xdll-path property) and add whatever explicit dll-path user has specified.
+ # The resulting paths are added to the environment on each test invocation.
+ local target-os = [ feature.get-values <target-os> : $(properties) ] ;
+ local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
+ dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
+ if $(target-os) != vxworks
+ {
+ dll-paths += [ on $(source) return $(RUN_PATH) ] ;
+ }
+ dll-paths = [ sequence.unique $(dll-paths) ] ;
+ if $(dll-paths)
+ {
+ translate-to-os = path.native ;
+ if [ os.name ] = VMS
+ {
+ translate-to-os = path.to-VMS ;
+ }
+ if $(target-os) = vxworks
+ {
+ # map <build-os> paths to <target-os> paths
+ local save-os = [ modules.peek os : .name ] ;
+ modules.poke os : .name : VXWORKS ;
+ local parent = [ os.environ PKG_SRC_BUILD_DIR ] ;
+ local prefix = [ os.environ LAYER_SRC_PATH ] ;
+ local target-dll-paths ;
+ for local e in $(dll-paths)
+ {
+ target-dll-paths += [ path.join $(prefix) [ path.relative $(e) $(parent) : noerror ] ] ;
+ }
+ PATH_SETUP on $(target) = [ common.prepend-path-variable-command
+ [ os.shared-library-path-variable ] : $(target-dll-paths) ] ;
+ modules.poke os : .name : $(save-os) ;
+ }
+ else
+ {
+ dll-paths = [ sequence.transform $(translate-to-os) : $(dll-paths) ] ;
+ PATH_SETUP on $(target) = [ common.prepend-path-variable-command
+ [ os.shared-library-path-variable ] : $(dll-paths) ] ;
+ }
+ }
+}
+
+
+local argv = [ modules.peek : ARGV ] ;
+
+toolset.flags testing.capture-output ARGS <testing.arg> ;
+toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
+toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
+
+toolset.uses-features testing.capture-output :
+ <testing.launcher> <testing.execute> <dll-path> <xdll-path> <target-os> ;
+
+if --remove-test-targets in [ modules.peek : ARGV ]
+{
+ feature.set-default preserve-test-targets : off ;
+}
+
+
+# Runs the executable 'source' and stores its stdout in the file 'target'.
+# Unless the --preserve-test-targets command line option has been specified,
+# removes the executable.
+#
+rule capture-output ( target : source : properties * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+    # The INCLUDES kills a warning about an independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into dependency graph, so we must tell jam it is
+ # OK if it cannot find the target or updating rule.
+ NOCARE $(target:S=.output) ;
+
+    # This has a two-fold effect. First, it adds input files to the dependency
+ # graph, preventing a warning. Second, it causes input files to be bound
+ # before target is created. Therefore, they are bound using SEARCH setting
+ # on them and not LOCATE setting of $(target), as in other case (due to jam
+ # bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ run-path-setup $(target) : $(source) : $(properties) ;
+
+ DISABLE_TEST_EXECUTION on $(target) = 0 ;
+ if [ feature.get-values testing.execute : $(properties) ] = off
+ {
+ DISABLE_TEST_EXECUTION on $(target) = 1 ;
+ }
+
+ if ! [ feature.get-values testing.launcher : $(properties) ]
+ {
+ ## On VMS set default launcher to MCR
+ if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; }
+ }
+}
+
+.types-to-remove = EXE OBJ ;
+
+local rule remove-test-targets ( target )
+{
+ local action = [ on $(target) return $(.action) ] ;
+ local associated-targets = [ virtual-target.traverse [ $(action).targets ] ] ;
+ local targets-to-remove ;
+ for local t in [ sequence.unique $(associated-targets) ]
+ {
+ if [ $(t).type ] in $(.types-to-remove)
+ {
+ targets-to-remove += [ $(t).actual-name ] ;
+ }
+ }
+ rmtemp-sources $(target) : $(targets-to-remove) ;
+}
+
+local rule rmtemp-sources ( target : sources * )
+{
+ if $(sources)
+ {
+ TEMPORARY $(sources) ;
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we do not detect a case where removing files
+ # fails, but it is not likely to happen.
+ RmTemps $(target) : $(sources) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .THEN = "(" ;
+ .EXIT_SUCCESS = "0" ;
+ .STATUS_0 = "%status% EQU 0 $(.THEN)" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 $(.THEN)" ;
+ .VERBOSE = "%verbose% EQU 1 $(.THEN)" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+ .NULLIN = ;
+}
+else if [ os.name ] = VMS
+{
+ local nl = "
+" ;
+
+ .STATUS = "''status'" ;
+ .SET_STATUS = "status=$STATUS" ;
+ .SAY = "pipe write sys$output" ; ## not really echo
+ .RUN_OUTPUT_NL = "$(.SAY) \"\"" ;
+ .THEN = "$(nl)then" ;
+ .EXIT_SUCCESS = "1" ;
+ .SUCCESS = "status .eq. $(.EXIT_SUCCESS) $(.THEN)" ;
+ .STATUS_0 = "status .eq. 0 $(.THEN)" ;
+ .STATUS_NOT_0 = "status .ne. 0 $(.THEN)" ;
+ .VERBOSE = "verbose .eq. 1 $(.THEN)" ;
+ .ENDIF = "endif" ;
+ .SHELL_SET = "" ;
+ .CATENATE = type ;
+ .CP = copy ;
+ .NULLIN = ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .THEN = "; then" ;
+ .EXIT_SUCCESS = "0" ;
+ .STATUS_0 = "test $status -eq 0 $(.THEN)" ;
+ .STATUS_NOT_0 = "test $status -ne 0 $(.THEN)" ;
+ .VERBOSE = "test $verbose -eq 1 $(.THEN)" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+ .NULLIN = "<" "/dev/null" ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION)
+ if $(.STATUS_NOT_0)
+ echo Skipping test execution due to testing.execute=off
+ exit $(.EXIT_SUCCESS)
+ $(.ENDIF)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 $(.NULLIN)
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+if [ os.name ] = VMS
+{
+ actions capture-output bind INPUT_FILES output-file
+ {
+ $(PATH_SETUP)
+ $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION)
+ if $(.STATUS_NOT_0)
+ $(.SAY) "Skipping test execution due to testing.execute=off"
+ exit "$(.EXIT_SUCCESS)"
+ $(.ENDIF)
+ !! Execute twice - first for status, second for output
+ set noon
+ pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) 2>NL: >NL:
+ $(.SET_STATUS)
+ pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) | type sys$input /out=$(output-file:W)
+ set on
+ !! Harmonize VMS success status with POSIX
+ if $(.SUCCESS)
+ $(.SHELL_SET)status="0"
+ $(.ENDIF)
+ $(.RUN_OUTPUT_NL) | append /new sys$input $(output-file:W)
+ $(.SAY) "EXIT STATUS: $(.STATUS)" | append /new sys$input $(output-file:W)
+ if $(.STATUS_0)
+ $(.CP) $(output-file:W) $(<:W)
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ $(.SAY) "====== BEGIN OUTPUT ======"
+ $(.CATENATE) $(output-file:W)
+ $(.SAY) "====== END OUTPUT ======"
+ $(.ENDIF)
+ !! Harmonize VMS success status with POSIX on exit
+ if $(.STATUS_0)
+ $(.SHELL_SET)status="$(.EXIT_SUCCESS)"
+ $(.ENDIF)
+ exit "$(.STATUS)"
+ }
+
+ actions quietly updated ignore piecemeal together RmTemps
+ {
+ $(.RM) $(>:WJ=;*,);*
+ }
+}
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
+toolset.flags testing.unit-test ARGS <testing.arg> ;
+
+
+rule unit-test ( target : source : properties * )
+{
+ run-path-setup $(target) : $(source) : $(properties) ;
+
+ if ! [ feature.get-values testing.launcher : $(properties) ]
+ {
+ ## On VMS set default launcher to MCR
+ if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; }
+ }
+}
+
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions unit-test
+ {
+ $(PATH_SETUP)
+ pipe $(LAUNCHER) $(>:W) $(ARGS) && $(.MAKE_FILE) $(<:W)
+ }
+}
+
+IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
+ : : compile compile-fail run run-fail link link-fail ;
+
+
+# This is a composing generator to support cases where a generator for the
+# specified target constructs other targets as well. One such example is msvc's
+# exe generator that constructs both EXE and PDB targets.
+type.register TIME : time ;
+generators.register-composing testing.time : : TIME ;
+
+
+# Note that this rule may be called multiple times for a single target in case
+# there are multiple actions operating on the same target in sequence. One such
+# example is msvc exe targets first created by a linker action and then updated
+# with an embedded manifest file by a separate action.
+rule record-time ( target : source : start end user system clock )
+{
+ local src-string = "[$(source:G=:J=,)] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+ CLOCK_TIME on $(target) += $(src-string)$(clock) ;
+
+ # We need the following variables because attempting to perform such
+ # variable expansion in actions would not work due to quotes getting treated
+ # as regular characters.
+ USER_TIME_SECONDS on $(target) += $(src-string)$(user)" seconds" ;
+ SYSTEM_TIME_SECONDS on $(target) += $(src-string)$(system)" seconds" ;
+ CLOCK_TIME_SECONDS on $(target) += $(src-string)$(clock)" seconds" ;
+}
+
+
+# Support for generating timing information for any main target. To use
+# declare a custom make target that uses the testing.time generator rule
+# specified here. For example:
+#
+# make main.cpp : main_cpp.pro : @do-something ;
+# time main.time : main.cpp ;
+# actions do-something
+# {
+# sleep 2 && echo "$(<)" > "$(<)"
+# }
+#
+# The above will generate a "main.time", and echo to output, timing
+# information for the action of source "main.cpp".
+
+
+IMPORT testing : record-time : : testing.record-time ;
+
+
+# Calling this rule requests that Boost Build time how long it takes to build
+# the 'sources' targets and display the results both on the standard output and in
+# the 'target' file.
+#
+rule time ( target : sources + : properties * )
+{
+ # Set up rule for recording timing information.
+ local action = [ on $(target) return $(.action) ] ;
+ for local action.source in [ $(action).sources ]
+ {
+ # Yes, this uses the private "actual-name" of the target action.
+ # But it's the only way to get at the real name of the sources
+ # given the context of header scanners.
+ __TIMING_RULE__ on [ $(action.source).actual-name ] = testing.record-time $(target) ;
+ }
+
+ # Make sure the sources get rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(sources) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+ echo clock: $(CLOCK_TIME)
+
+ echo user: $(USER_TIME_SECONDS) > "$(<)"
+ echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)"
+ echo clock: $(CLOCK_TIME_SECONDS) >> "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions time
+ {
+ WRITE SYS$OUTPUT "user: ", "$(USER_TIME)"
+        WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME)"
+        WRITE SYS$OUTPUT "clock: ", "$(CLOCK_TIME)"
+
+ PIPE WRITE SYS$OUTPUT "user: ", "$(USER_TIME_SECONDS)" | TYPE SYS$INPUT /OUT=$(<:W)
+ PIPE WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W)
+ PIPE WRITE SYS$OUTPUT "clock: ", "$(CLOCK_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W)
+ }
+}
diff --git a/src/boost/tools/build/src/tools/testing.py b/src/boost/tools/build/src/tools/testing.py
new file mode 100644
index 000000000..8f1c0b330
--- /dev/null
+++ b/src/boost/tools/build/src/tools/testing.py
@@ -0,0 +1,359 @@
+# Status: ported, except for --out-xml
+# Base revision: 64488
+#
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements a regression testing framework. It declares a number of
+# main target rules which perform some action and, if the results are OK,
+# creates an output file.
+#
+# The exact list of rules is:
+# 'compile' -- creates .test file if compilation of sources was
+# successful.
+# 'compile-fail' -- creates .test file if compilation of sources failed.
+# 'run' -- creates .test file if running of executable produced from
+# sources was successful. Also leaves behind .output file
+# with the output from program run.
+# 'run-fail' -- same as above, but .test file is created if running fails.
+#
+# In all cases, presence of .test file is an indication that the test passed.
+# For more convenient reporting, you might want to use C++ Boost regression
+# testing utilities (see http://www.boost.org/more/regression.html).
+#
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
+
+# Things to do:
+# - Teach compiler_status to handle Jamfile.v2.
+# Notes:
+# - <no-warn> is not implemented, since it is Como-specific, and it is not
+# clear how to implement it
+# - std::locale-support is not implemented (it is used in one test).
+
+import b2.build.feature as feature
+import b2.build.type as type
+import b2.build.targets as targets
+import b2.build.generators as generators
+import b2.build.toolset as toolset
+import b2.tools.common as common
+import b2.util.option as option
+import b2.build_system as build_system
+
+
+
+from b2.manager import get_manager
+from b2.util import stem, bjam_signature, is_iterable_typed
+from b2.util.sequence import unique
+
+import bjam
+
+import re
+import os.path
+import sys
+
+def init():
+ pass
+
+# Feature controlling the command used to launch test programs.
+feature.feature("testing.launcher", [], ["free", "optional"])
+
+feature.feature("test-info", [], ["free", "incidental"])
+feature.feature("testing.arg", [], ["free", "incidental"])
+feature.feature("testing.input-file", [], ["free", "dependency"])
+
+feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"])
+
+# Register target types.
+type.register("TEST", ["test"])
+type.register("COMPILE", [], "TEST")
+type.register("COMPILE_FAIL", [], "TEST")
+
+type.register("RUN_OUTPUT", ["run"])
+type.register("RUN", [], "TEST")
+type.register("RUN_FAIL", [], "TEST")
+
+type.register("LINK", [], "TEST")
+type.register("LINK_FAIL", [], "TEST")
+type.register("UNIT_TEST", ["passed"], "TEST")
+
+__all_tests = []
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: a
+# default name for the main target, so we write our own versions.
+
+# Helper rule. Create a test target, using basename of first source if no target
+# name is explicitly passed. Remembers the created target in a global variable.
+def make_test(target_type, sources, requirements, target_name=None):
+ assert isinstance(target_type, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert isinstance(target_type, basestring) or target_type is None
+ if not target_name:
+ target_name = stem(os.path.basename(sources[0]))
+
+ # Having periods (".") in the target name is problematic because the typed
+ # generator will strip the suffix and use the bare name for the file
+ # targets. Even though the location-prefix averts problems most times it
+ # does not prevent ambiguity issues when referring to the test targets. For
+ # example when using the XML log output. So we rename the target to remove
+ # the periods, and provide an alias for users.
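+ # For example, a test named "foo.bar" (an illustrative name) gets the file
+ # target name "foo~bar", and an alias named "foo.bar" is created for it.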
+ real_name = target_name.replace(".", "~")
+
+ project = get_manager().projects().current()
+ # The <location-prefix> forces the build system to generate paths in the
+ # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
+ # post-processing tools to work.
+ t = get_manager().targets().create_typed_target(
+ type.type_from_rule_name(target_type), project, real_name, sources,
+ requirements + ["<location-prefix>" + real_name + ".test"], [], [])
+
+ # The alias to the real target, per period replacement above.
+ if real_name != target_name:
+ get_manager().projects().project_rules().rules["alias"](
+ target_name, [t])
+
+ # Remember the test (for --dump-tests). A better approach would be to collect
+ # all tests for a given project, but that has some technical problems: e.g. we
+ # cannot call this dump from a Jamfile since projects referred to by
+ # 'build-project' are not available until the whole Jamfile has been loaded.
+ __all_tests.append(t)
+ return t
+
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp file
+# and a library target works.
+#
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def compile(sources, requirements, target_name=None):
+ return make_test("compile", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def compile_fail(sources, requirements, target_name=None):
+ return make_test("compile-fail", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def link(sources, requirements, target_name=None):
+ return make_test("link", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def link_fail(sources, requirements, target_name=None):
+ return make_test("link-fail", sources, requirements, target_name)
+
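+# Converts a list of input files into <testing.input-file> properties. When
+# more than one file is given, the names must already be sorted alphabetically,
+# e.g. "a.txt b.txt" is accepted while "b.txt a.txt" is rejected (file names
+# are illustrative).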
+def handle_input_files(input_files):
+ if len(input_files) > 1:
+ # Check that sorting made when creating property-set instance will not
+ # change the ordering.
+ if sorted(input_files) != input_files:
+ get_manager().errors()("Names of input files must be sorted alphabetically\n" +
+ "due to internal limitations")
+ return ["<testing.input-file>" + f for f in input_files]
+
+@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
+ ["requirements", "*"], ["target_name", "?"],
+ ["default_build", "*"]))
+def run(sources, args, input_files, requirements, target_name=None, default_build=[]):
+ if args:
+ requirements.append("<testing.arg>" + " ".join(args))
+ requirements.extend(handle_input_files(input_files))
+ return make_test("run", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
+ ["requirements", "*"], ["target_name", "?"],
+ ["default_build", "*"]))
+def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]):
+ if args:
+ requirements.append("<testing.arg>" + " ".join(args))
+ requirements.extend(handle_input_files(input_files))
+ return make_test("run-fail", sources, requirements, target_name)
+
+# Register all the rules
+for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]:
+ get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_")))
+
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
+from b2.build.alias import alias
+get_manager().projects().add_rule("test-suite", alias)
+
+# For all recorded test targets (typed targets with a type derived from
+# 'TEST'), produce some interesting information.
+#
+def dump_tests():
+ for t in __all_tests:
+ dump_test(t)
+
+# Given a project location in normalized form (slashes are forward), compute the
+# name of the Boost library.
+#
+__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)")
+__ln2 = re.compile("/(tools|libs)/(.*)$")
+__ln3 = re.compile("(/status$)")
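+# For example, a location ending in ".../libs/filesystem/test" is meant to map
+# to the library name "filesystem" (an illustrative path).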
+def get_library_name(path):
+ assert isinstance(path, basestring)
+
+ path = path.replace("\\", "/")
+ match1 = __ln1.match(path)
+ match2 = __ln2.match(path)
+ match3 = __ln3.match(path)
+
+ if match1:
+ return match1.group(2)
+ elif match2:
+ return match2.group(2)
+ elif match3:
+ return ""
+ elif option.get("dump-tests", False, True):
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
+ return path
+
+# Was an XML dump requested?
+__out_xml = option.get("out-xml", False, True)
+
+# Takes a target (instance of 'basic-target') and prints
+# - its type
+# - its name
+# - comments specified via the <test-info> property
+# - relative location of all sources from the project root.
+#
+def dump_test(target):
+ assert isinstance(target, targets.AbstractTarget)
+ type = target.type()
+ name = target.name()
+ project = target.project()
+
+ project_root = project.get('project-root')
+ library = get_library_name(os.path.abspath(project.get('location')))
+ if library:
+ name = library + "/" + name
+
+ sources = target.sources()
+ source_files = []
+ for s in sources:
+ if isinstance(s, targets.FileReference):
+ location = os.path.abspath(os.path.join(s.location(), s.name()))
+ source_files.append(os.path.relpath(location, os.path.abspath(project_root)))
+
+ target_name = project.get('location') + "//" + target.name() + ".test"
+
+ test_info = target.requirements().get('test-info')
+ test_info = " ".join('"' + ti + '"' for ti in test_info)
+
+ # If the user requested XML output on the command-line, add the test info to
+ # that XML file rather than dumping them to stdout.
+ #if $(.out-xml)
+ #{
+# local nl = "
+#" ;
+# .contents on $(.out-xml) +=
+# "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
+# "$(nl) <target><![CDATA[$(target-name)]]></target>"
+# "$(nl) <info><![CDATA[$(test-info)]]></info>"
+# "$(nl) <source><![CDATA[$(source-files)]]></source>"
+# "$(nl) </test>"
+# ;
+# }
+# else
+
+ source_files = " ".join('"' + s + '"' for s in source_files)
+ if test_info:
+ print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files)
+ else:
+ print 'boost-test(%s) "%s" : %s' % (type, name, source_files)
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"])
+generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"])
+generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"])
+generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"])
+generators.register_standard("testing.expect-success", ["EXE"], ["LINK"])
+generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"])
+
+# Generator which runs an EXE and captures output.
+generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"])
+
+# Generator which creates a target if sources run successfully. Differs from RUN
+# in that run output is not captured. The reason why it exists is that the 'run'
+# rule is much better for automated testing, but is not user-friendly (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
+generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"])
+
+# FIXME: if those calls are after bjam.call, then bjam will crash
+# when toolset.flags calls bjam.caller.
+toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"])
+toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"])
+toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"])
+
+toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"])
+toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"])
+
+# This is a composing generator to support cases where a generator for the
+# specified target constructs other targets as well. One such example is msvc's
+# exe generator that constructs both EXE and PDB targets.
+type.register("TIME", ["time"])
+generators.register_composing("testing.time", [], ["TIME"])
+
+
+# The following code sets up actions for this module. It's pretty convoluted,
+# but the basic point is that most of the actions are defined by Jam code
+# contained in testing-aux.jam, which we load into a Jam module named 'testing'.
+
+def run_path_setup(target, sources, ps):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert is_iterable_typed(target, basestring) or isinstance(target, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(ps, PropertySet)
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via
+ # xdll-path property) and add whatever explicit dll-path the user has specified.
+ # The resulting paths are added to the environment on each test invocation.
+ dll_paths = ps.get('dll-path')
+ dll_paths.extend(ps.get('xdll-path'))
+ dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH"))
+ dll_paths = unique(dll_paths)
+ if dll_paths:
+ bjam.call("set-target-variable", target, "PATH_SETUP",
+ common.prepend_path_variable_command(
+ common.shared_library_path_variable(), dll_paths))
+
+def capture_output_setup(target, sources, ps):
+ if __debug__:
+ from ..build.property_set import PropertySet
+ assert is_iterable_typed(target, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(ps, PropertySet)
+ run_path_setup(target[0], sources, ps)
+
+ if ps.get('preserve-test-targets') == ['off']:
+ bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1")
+
+get_manager().engine().register_bjam_action("testing.capture-output",
+ capture_output_setup)
+
+
+path = os.path.dirname(__file__)
+import b2.util.os_j
+get_manager().projects().project_rules()._import_rule("testing", "os.name",
+ b2.util.os_j.name)
+import b2.tools.common
+get_manager().projects().project_rules()._import_rule("testing", "common.rm-command",
+ b2.tools.common.rm_command)
+get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command",
+ b2.tools.common.file_creation_command)
+
+bjam.call("load", "testing", os.path.join(path, "testing-aux.jam"))
+
+
+for name in ["expect-success", "expect-failure", "time"]:
+ get_manager().engine().register_bjam_action("testing." + name)
+
+get_manager().engine().register_bjam_action("testing.unit-test",
+ run_path_setup)
+
+if option.get("dump-tests", False, True):
+ build_system.add_pre_build_hook(dump_tests)
diff --git a/src/boost/tools/build/src/tools/types/__init__.py b/src/boost/tools/build/src/tools/types/__init__.py
new file mode 100644
index 000000000..9ee31d13a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/__init__.py
@@ -0,0 +1,19 @@
+__all__ = [
+ 'asm',
+ 'cpp',
+ 'exe',
+ 'html',
+ 'lib',
+ 'obj',
+ 'preprocessed',
+ 'rsp',
+]
+
+def register_all ():
+ for i in __all__:
+ m = __import__ (__name__ + '.' + i)
+ reg = i + '.register ()'
+ #exec (reg)
+
+# TODO: (PF) I thought these would be imported automatically. Does anyone know why they aren't?
+register_all ()
diff --git a/src/boost/tools/build/src/tools/types/adoc.jam b/src/boost/tools/build/src/tools/types/adoc.jam
new file mode 100644
index 000000000..9e0c5309e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/adoc.jam
@@ -0,0 +1,26 @@
+#|
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+import scanner ;
+import type ;
+
+type ASCIIDOC : adoc asciidoc ;
+
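+# The scanner below picks up the targets of include:: and image: macros, e.g.
+# lines such as
+#
+#   include::intro.adoc[]
+#   image::diagram.png[]
+#
+# (the file names are illustrative).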
+class asciidoc-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return
+ "include::([^[]+)"
+ "image::([^[]+)"
+ "image:([^[]+)"
+ ;
+ }
+}
+
+scanner.register asciidoc-scanner : include ;
+type.set-scanner ASCIIDOC : asciidoc-scanner ;
diff --git a/src/boost/tools/build/src/tools/types/asm.jam b/src/boost/tools/build/src/tools/types/asm.jam
new file mode 100644
index 000000000..a340db36a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/asm.jam
@@ -0,0 +1,4 @@
+# Copyright Craig Rodrigues 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type ASM : s S asm ;
diff --git a/src/boost/tools/build/src/tools/types/asm.py b/src/boost/tools/build/src/tools/types/asm.py
new file mode 100644
index 000000000..d9a30152e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/asm.py
@@ -0,0 +1,33 @@
+# Copyright Craig Rodrigues 2005.
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+from b2.build import type as type_
+from b2.manager import get_manager
+from b2.tools.cast import cast
+from b2.util import bjam_signature
+
+
+MANAGER = get_manager()
+PROJECT_REGISTRY = MANAGER.projects()
+
+# maps project.name() + type to type
+_project_types = {}
+
+type_.register_type('ASM', ['s', 'S', 'asm'])
+
+
+@bjam_signature((['type_'], ['sources', '*'], ['name', '?']))
+def set_asm_type(type_, sources, name=''):
+ project = PROJECT_REGISTRY.current()
+ _project_types[project.name() + type_] = _project_types.get(
+ project.name() + type_, type_) + '_'
+
+ name = name if name else _project_types[project.name() + type_]
+ type_ += '.asm'
+ return cast(name, type_.upper(), sources, [], [], [])
+
+
+PROJECT_REGISTRY.add_rule("set-asm-type", set_asm_type)
diff --git a/src/boost/tools/build/src/tools/types/cpp.jam b/src/boost/tools/build/src/tools/types/cpp.jam
new file mode 100644
index 000000000..3fcf449a2
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/cpp.jam
@@ -0,0 +1,90 @@
+# Copyright 2004 David Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import scanner ;
+import type ;
+
+
+class c-scanner : scanner
+{
+ import path ;
+ import regex ;
+ import scanner ;
+ import sequence ;
+ import toolset ;
+ import virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ # toolset.handle-flag-value is a bit of overkill, but it
+ # does correctly handle the topological sort of && separated
+ # include paths
+ self.includes = [ toolset.handle-flag-value <include> : $(includes) ] ;
+ }
+
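+ # Matches both styles of include directive, e.g.
+ #   #include <vector>
+ #   #include "config.h"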
+ rule pattern ( )
+ {
+ return "#[ \t]*include[ \t]*(<(.*)>|\"(.*)\")" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
+ angle = [ sequence.transform path.native : $(angle) ] ;
+ local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
+ quoted = [ sequence.transform path.native : $(quoted) ] ;
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach binding of including file to included targets. When a target is
+ # directly created from a virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on the including file (we can not get literal "." in the
+ # include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) ;
+
+ INCLUDES $(target) : $(all) ;
+ NOCARE $(all) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
+
+ # Just propagate the current scanner to includes, in the hope that includes
+ # do not change scanners.
+ scanner.propagate $(__name__) : $(all) : $(target) ;
+
+ ISFILE $(all) ;
+ }
+}
+
+scanner.register c-scanner : include ;
+
+type.register CPP : cpp cxx cc ;
+type.register H : h ;
+type.register HPP : hpp : H ;
+type.register C : c ;
+
+# In most cases where a CPP file or an H file is a source of some action, we
+# should rebuild the result if any of the files included by the CPP/H file
+# change. One case where this is not needed is installation, which is handled
+# specially.
+type.set-scanner CPP : c-scanner ;
+type.set-scanner C : c-scanner ;
+# One case where scanning of H/HPP files is necessary is PCH generation -- if
+# any header included by the HPP being precompiled changes, we need to
+# recompile the header.
+type.set-scanner H : c-scanner ;
+type.set-scanner HPP : c-scanner ;
diff --git a/src/boost/tools/build/src/tools/types/cpp.py b/src/boost/tools/build/src/tools/types/cpp.py
new file mode 100644
index 000000000..50797bae4
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/cpp.py
@@ -0,0 +1,10 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+from b2.build import type as type_
+
+
+type_.register_type('CPP', ['cpp', 'cxx', 'cc'])
+type_.register_type('H', ['h'])
+type_.register_type('HPP', ['hpp'], 'H')
+type_.register_type('C', ['c'])
diff --git a/src/boost/tools/build/src/tools/types/css.jam b/src/boost/tools/build/src/tools/types/css.jam
new file mode 100644
index 000000000..81b55ff43
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/css.jam
@@ -0,0 +1,10 @@
+#|
+Copyright 2017 Dmitry Arkhipov
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+import type ;
+
+type CSS : css ;
diff --git a/src/boost/tools/build/src/tools/types/docbook.jam b/src/boost/tools/build/src/tools/types/docbook.jam
new file mode 100644
index 000000000..1ceb8d495
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/docbook.jam
@@ -0,0 +1,8 @@
+#|
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+type DOCBOOK : docbook : XML ;
diff --git a/src/boost/tools/build/src/tools/types/exe.jam b/src/boost/tools/build/src/tools/types/exe.jam
new file mode 100644
index 000000000..47109513a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/exe.jam
@@ -0,0 +1,9 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register EXE ;
+type.set-generated-target-suffix EXE : <target-os>windows : "exe" ;
+type.set-generated-target-suffix EXE : <target-os>cygwin : "exe" ;
diff --git a/src/boost/tools/build/src/tools/types/exe.py b/src/boost/tools/build/src/tools/types/exe.py
new file mode 100644
index 000000000..a4935e24e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/exe.py
@@ -0,0 +1,11 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
+def register ():
+ type.register_type ('EXE', ['exe'], None, ['NT', 'CYGWIN'])
+ type.register_type ('EXE', [], None, [])
+
+register ()
diff --git a/src/boost/tools/build/src/tools/types/html.jam b/src/boost/tools/build/src/tools/types/html.jam
new file mode 100644
index 000000000..5cd337d09
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/html.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type HTML : html ;
diff --git a/src/boost/tools/build/src/tools/types/html.py b/src/boost/tools/build/src/tools/types/html.py
new file mode 100644
index 000000000..63af4d907
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/html.py
@@ -0,0 +1,10 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
+def register ():
+ type.register_type ('HTML', ['html'])
+
+register ()
diff --git a/src/boost/tools/build/src/tools/types/lib.jam b/src/boost/tools/build/src/tools/types/lib.jam
new file mode 100644
index 000000000..854ab8fd5
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/lib.jam
@@ -0,0 +1,74 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ; # for set-generated-target-suffix
+import os ;
+
+# The following naming scheme is used for libraries.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (msvc)
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (mingw):
+# libxxx.a static library
+# libxxx.dll DLL
+# libxxx.dll.a import library
+#
+# On cygwin i.e. <target-os>cygwin
+# libxxx.a static library
+# cygxxx.dll DLL
+# libxxx.dll.a import library
+#
+
+type.register LIB ;
+
+# FIXME: should not register both extensions on both platforms.
+type.register STATIC_LIB : a lib : LIB ;
+
+# The 'lib' prefix is used everywhere
+type.set-generated-target-prefix STATIC_LIB : : lib ;
+
+# Use '.lib' suffix for windows
+type.set-generated-target-suffix STATIC_LIB : <target-os>windows : lib ;
+
+# Except with gcc.
+type.set-generated-target-suffix STATIC_LIB : <toolset>gcc <target-os>windows : a ;
+
+# Use xxx.lib for import libs
+type IMPORT_LIB : : STATIC_LIB ;
+type.set-generated-target-prefix IMPORT_LIB : : "" ;
+type.set-generated-target-suffix IMPORT_LIB : : lib ;
+
+# Except with gcc (mingw or cygwin), where we use libxxx.dll.a
+type.set-generated-target-prefix IMPORT_LIB : <toolset>gcc : lib ;
+type.set-generated-target-suffix IMPORT_LIB : <toolset>gcc : dll.a ;
+
+type.register SHARED_LIB : so dll dylib : LIB ;
+
+# Both mingw and cygwin use libxxx.dll naming scheme.
+# On Linux, use "lib" prefix
+type.set-generated-target-prefix SHARED_LIB : : lib ;
+# But don't use it on windows
+type.set-generated-target-prefix SHARED_LIB : <target-os>windows : "" ;
+# But use it again on mingw
+type.set-generated-target-prefix SHARED_LIB : <toolset>gcc <target-os>windows : lib ;
+# And use 'cyg' on cygwin
+type.set-generated-target-prefix SHARED_LIB : <target-os>cygwin : cyg ;
+
+
+type.set-generated-target-suffix SHARED_LIB : <target-os>windows : dll ;
+type.set-generated-target-suffix SHARED_LIB : <target-os>cygwin : dll ;
+type.set-generated-target-suffix SHARED_LIB : <target-os>darwin : dylib ;
+
+type SEARCHED_LIB : : LIB ;
+# This is needed so that when we create a target of SEARCHED_LIB
+# type, there's no prefix or suffix automatically added.
+type.set-generated-target-prefix SEARCHED_LIB : : "" ;
+type.set-generated-target-suffix SEARCHED_LIB : : "" ;
diff --git a/src/boost/tools/build/src/tools/types/lib.py b/src/boost/tools/build/src/tools/types/lib.py
new file mode 100644
index 000000000..d0ec1fb52
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/lib.py
@@ -0,0 +1,77 @@
+# Status: ported
+# Base revision: 64456.
+# Copyright David Abrahams 2004.
+# Copyright Vladimir Prus 2010.
+# Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import b2.build.type as type
+
+# The following naming scheme is used for libraries.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (msvc)
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (mingw):
+# libxxx.a static library
+# libxxx.dll DLL
+# libxxx.dll.a import library
+#
+# On cygwin i.e. <target-os>cygwin
+# libxxx.a static library
+# cygxxx.dll DLL
+# libxxx.dll.a import library
+#
+
+type.register('LIB')
+
+# FIXME: should not register both extensions on both platforms.
+type.register('STATIC_LIB', ['a', 'lib'], 'LIB')
+
+# The 'lib' prefix is used everywhere
+type.set_generated_target_prefix('STATIC_LIB', [], 'lib')
+
+# Use '.lib' suffix for windows
+type.set_generated_target_suffix('STATIC_LIB', ['<target-os>windows'], 'lib')
+
+# Except with gcc.
+type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>windows'], 'a')
+
+# Use xxx.lib for import libs
+type.register('IMPORT_LIB', [], 'STATIC_LIB')
+type.set_generated_target_prefix('IMPORT_LIB', [], '')
+type.set_generated_target_suffix('IMPORT_LIB', [], 'lib')
+
+# Except with gcc (mingw or cygwin), where we use libxxx.dll.a
+type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc'], 'lib')
+type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc'], 'dll.a')
+
+type.register('SHARED_LIB', ['so', 'dll', 'dylib'], 'LIB')
+
+# Both mingw and cygwin use libxxx.dll naming scheme.
+# On Linux, use "lib" prefix
+type.set_generated_target_prefix('SHARED_LIB', [], 'lib')
+# But don't use it on windows
+type.set_generated_target_prefix('SHARED_LIB', ['<target-os>windows'], '')
+# But use it again on mingw
+type.set_generated_target_prefix('SHARED_LIB', ['<toolset>gcc', '<target-os>windows'], 'lib')
+# And use 'cyg' on cygwin
+type.set_generated_target_prefix('SHARED_LIB', ['<target-os>cygwin'], 'cyg')
+
+
+type.set_generated_target_suffix('SHARED_LIB', ['<target-os>windows'], 'dll')
+type.set_generated_target_suffix('SHARED_LIB', ['<target-os>cygwin'], 'dll')
+type.set_generated_target_suffix('SHARED_LIB', ['<target-os>darwin'], 'dylib')
+
+type.register('SEARCHED_LIB', [], 'LIB')
+# This is needed so that when we create a target of SEARCHED_LIB
+# type, there's no prefix or suffix automatically added.
+type.set_generated_target_prefix('SEARCHED_LIB', [], '')
+type.set_generated_target_suffix('SEARCHED_LIB', [], '')
diff --git a/src/boost/tools/build/src/tools/types/man.jam b/src/boost/tools/build/src/tools/types/man.jam
new file mode 100644
index 000000000..4fb59a0fe
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/man.jam
@@ -0,0 +1,8 @@
+#|
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+type MANPAGE : man 1M n p x ;
diff --git a/src/boost/tools/build/src/tools/types/markdown.jam b/src/boost/tools/build/src/tools/types/markdown.jam
new file mode 100644
index 000000000..936dabfaa
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/markdown.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type MARKDOWN : md markdown ;
diff --git a/src/boost/tools/build/src/tools/types/markdown.py b/src/boost/tools/build/src/tools/types/markdown.py
new file mode 100644
index 000000000..c689c9a68
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/markdown.py
@@ -0,0 +1,10 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
+def register ():
+ type.register_type ('MARKDOWN', ['markdown', 'md'])
+
+register ()
diff --git a/src/boost/tools/build/src/tools/types/obj.jam b/src/boost/tools/build/src/tools/types/obj.jam
new file mode 100644
index 000000000..6afbcaa6f
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/obj.jam
@@ -0,0 +1,9 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register OBJ : o obj ;
+type.set-generated-target-suffix OBJ : <target-os>windows : obj ;
+type.set-generated-target-suffix OBJ : <target-os>cygwin : obj ;
diff --git a/src/boost/tools/build/src/tools/types/obj.py b/src/boost/tools/build/src/tools/types/obj.py
new file mode 100644
index 000000000..e61e99a81
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/obj.py
@@ -0,0 +1,11 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
+def register ():
+ type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN'])
+ type.register_type ('OBJ', ['o'])
+
+register ()
diff --git a/src/boost/tools/build/src/tools/types/objc.jam b/src/boost/tools/build/src/tools/types/objc.jam
new file mode 100644
index 000000000..709cbd0c7
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/objc.jam
@@ -0,0 +1,26 @@
+# Copyright Rene Rivera 2008, 2010.
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+import type ;
+import scanner ;
+import types/cpp ;
+
+class objc-scanner : c-scanner
+{
+ rule __init__ ( includes * )
+ {
+ c-scanner.__init__ $(includes) ;
+ }
+
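+ # Extends the C scanner to also pick up Objective-C #import directives, e.g.
+ #   #import <Foundation/Foundation.h>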
+ rule pattern ( )
+ {
+ return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ;
+ }
+}
+
+scanner.register objc-scanner : include ;
+
+type.register OBJECTIVE_C : m ;
+type.register OBJECTIVE_CPP : mm ;
+type.set-scanner OBJECTIVE_C : objc-scanner ;
+type.set-scanner OBJECTIVE_CPP : objc-scanner ;
diff --git a/src/boost/tools/build/src/tools/types/pdf.jam b/src/boost/tools/build/src/tools/types/pdf.jam
new file mode 100644
index 000000000..f5d64387e
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/pdf.jam
@@ -0,0 +1,8 @@
+#|
+Copyright 2017 Rene Rivera
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+type PDF : pdf ;
diff --git a/src/boost/tools/build/src/tools/types/preprocessed.jam b/src/boost/tools/build/src/tools/types/preprocessed.jam
new file mode 100644
index 000000000..c9187ba67
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/preprocessed.jam
@@ -0,0 +1,9 @@
+# Copyright Steven Watanabe 2011
+# Distributed under the Boost Software License Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import type ;
+
+type.register PREPROCESSED_C : i : C ;
+type.register PREPROCESSED_CPP : ii : CPP ;
diff --git a/src/boost/tools/build/src/tools/types/preprocessed.py b/src/boost/tools/build/src/tools/types/preprocessed.py
new file mode 100644
index 000000000..f59104334
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/preprocessed.py
@@ -0,0 +1,11 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
+def register ():
+ type.register_type('PREPROCESSED_C', ['i'], 'C')
+ type.register_type('PREPROCESSED_CPP', ['ii'], 'CPP')
+
+register ()
diff --git a/src/boost/tools/build/src/tools/types/qt.jam b/src/boost/tools/build/src/tools/types/qt.jam
new file mode 100644
index 000000000..4951063e3
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/qt.jam
@@ -0,0 +1,12 @@
+# Copyright Vladimir Prus 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+type UI : ui ;
+type QRC : qrc ;
+type MOCCABLE_CPP ;
+type MOCCABLE_H ;
+type MOCCABLE5_CPP ;
+type MOCCABLE5_H ;
+# Result of running moc.
+type MOC : moc : H ;
diff --git a/src/boost/tools/build/src/tools/types/register.jam b/src/boost/tools/build/src/tools/types/register.jam
new file mode 100644
index 000000000..daedfb701
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/register.jam
@@ -0,0 +1,39 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# This module's job is to automatically import all the type
+# registration modules in its directory.
+import type os path modules ;
+
+# Register the given type on the specified OSes, or on remaining OSes
+# if os is not specified. This rule is injected into each of the type
+# modules for the sake of convenience.
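+#
+# For example, asm.jam in this directory registers the assembler type simply
+# with:
+#
+#   type ASM : s S asm ;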
+local rule type ( type : suffixes * : base-type ? : os * )
+{
+ if ! [ type.registered $(type) ]
+ {
+ if ( ! $(os) ) || [ os.name ] in $(os)
+ {
+ type.register $(type) : $(suffixes) : $(base-type) ;
+ }
+ }
+}
+
+.this-module's-file = [ modules.binding $(__name__) ] ;
+.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ;
+.sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ;
+.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ;
+
+# A loop over all modules in this directory
+for m in $(.sibling-modules)
+{
+ m = [ path.basename $(m) ] ;
+ m = types/$(m) ;
+
+ # Inject the type rule into the new module
+ IMPORT $(__name__) : type : $(m:B) : type ;
+ import $(m) ;
+}
+
+
diff --git a/src/boost/tools/build/src/tools/types/rsp.jam b/src/boost/tools/build/src/tools/types/rsp.jam
new file mode 100644
index 000000000..bdf8a7c98
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/rsp.jam
@@ -0,0 +1,4 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+type RSP : rsp ;
diff --git a/src/boost/tools/build/src/tools/types/rsp.py b/src/boost/tools/build/src/tools/types/rsp.py
new file mode 100644
index 000000000..ccb379e95
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/rsp.py
@@ -0,0 +1,10 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.build import type
+
+def register ():
+ type.register_type ('RSP', ['rsp'])
+
+register ()
diff --git a/src/boost/tools/build/src/tools/types/sass-type.jam b/src/boost/tools/build/src/tools/types/sass-type.jam
new file mode 100644
index 000000000..c7f404025
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/sass-type.jam
@@ -0,0 +1,49 @@
+#|
+Copyright 2017 Dmitry Arkhipov
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+import scanner ;
+import type ;
+
+class sass-scanner : common-scanner
+{
+ import sequence ;
+
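+ # Map an @import target to candidate file names: a plain name such as
+ # "colors" resolves to both colors.sass and colors.scss, while a name that
+ # already has a .sass/.scss suffix is kept as-is ("colors" is illustrative).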
+ local rule import-to-file ( import )
+ {
+ if ! ( $(import:S) in .sass .scss )
+ {
+ return $(import).sass $(import).scss ;
+ }
+ else
+ {
+ return $(import) ;
+ }
+ }
+
+ rule pattern ( )
+ {
+ return
+ "@import[ \t]+\"([^\"]+)\""
+ "@import[ \t]+\'([^\']+)\'"
+ ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ common-scanner.process
+ $(target)
+ : [ sequence.transform import-to-file : $(matches) ]
+ : $(binding)
+ ;
+ }
+}
+
+scanner.register sass-scanner : include ;
+
+type SASS : sass scss ;
+
+type.set-scanner SASS : sass-scanner ;
diff --git a/src/boost/tools/build/src/tools/types/xml.jam b/src/boost/tools/build/src/tools/types/xml.jam
new file mode 100644
index 000000000..10149ea93
--- /dev/null
+++ b/src/boost/tools/build/src/tools/types/xml.jam
@@ -0,0 +1,49 @@
+#|
+Copyright 2017 Rene Rivera
+Copyright 2003, 2004, 2005 Dave Abrahams
+Copyright 2003, 2004, 2005 Douglas Gregor
+Copyright 2005, 2006, 2007 Rene Rivera
+Copyright 2003, 2004, 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0. (See
+accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+|#
+
+import scanner ;
+import type ;
+
+type XML : xml ;
+
+# XInclude scanner. Mostly stolen from c-scanner. :)
+# Note that this assumes an "xi" prefix for XIncludes. This is not always the
+# case for XML documents, but we assume it is true for anything we encounter.
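+#
+# For example, the href in
+#
+#   <xi:include href="chapter1.xml"/>
+#
+# is extracted and treated as an included file (the file name is illustrative).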
+#
+class xinclude-scanner : scanner
+{
+ import scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "xi:include[ ]*href=\"([^\"]*)\"" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
+
+scanner.register xinclude-scanner : "xsl:path" ;
+type.set-scanner XML : xinclude-scanner ;
diff --git a/src/boost/tools/build/src/tools/unix.jam b/src/boost/tools/build/src/tools/unix.jam
new file mode 100644
index 000000000..75949851a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/unix.jam
@@ -0,0 +1,224 @@
+# Copyright (c) 2004 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file implements linking semantics common to all unixes. On unix, static
+# libraries must be specified in a fixed order on the linker command line.
+# Generators declared here store information about the order and use it
+# properly.
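+#
+# For example, if an executable is built from main.o and liba, and liba itself
+# uses libb, the link line must list liba before libb; the 'order' class used
+# below records such before/after pairs and the generators reorder library
+# sources accordingly (the names here are illustrative).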
+
+import feature ;
+import "class" : new ;
+import generators ;
+import type ;
+import set ;
+import order ;
+import builtin ;
+
+class unix-linking-generator : linking-generator
+{
+ import property-set ;
+ import type ;
+ import unix ;
+
+ rule __init__ ( id
+ composing ? : # Specify if generator is composing. The generator will be
+ # composing if non-empty string is passed, or parameter is
+ # not given. To make generator non-composing, pass empty
+ # string ("")
+ source-types + : target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
+ $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ linking-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ;
+ local libraries ;
+ for local l in $(sources)
+ {
+ if [ type.is-derived [ $(l).type ] LIB ]
+ {
+ libraries += $(l) ;
+ }
+ else
+ {
+ sources2 += $(l) ;
+ }
+ }
+
+ sources = $(sources2) [ unix.order-libraries $(libraries) ] ;
+
+ return [ linking-generator.generated-targets $(sources) : $(property-set)
+ : $(project) $(name) ] ;
+ }
+
+}
+
+class unix-archive-generator : archive-generator
+{
+ import unix ;
+
+ rule __init__ ( id composing ? : source-types + : target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
+ $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ archive-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+
+ }
+}
+
+class unix-searched-lib-generator : searched-lib-generator
+{
+ import unix ;
+ rule __init__ ( * : * )
+ {
+ generator.__init__
+ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule optional-properties ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local result = [ searched-lib-generator.run $(project) $(name)
+ : $(property-set) : $(sources) ] ;
+
+ unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ;
+
+ return $(result) ;
+ }
+}
+
+class unix-prebuilt-lib-generator : generator
+{
+ import unix ;
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ unix.set-library-order-aux $(f) : $(sources) ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB
+ : <file> <toolset>unix ] ;
+
+generators.override unix.prebuilt : builtin.lib-generator ;
+
+
+# Declare generators
+generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE
+ : <toolset>unix ] ;
+
+generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB
+ : <toolset>unix ] ;
+
+generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB
+ : <toolset>unix ] ;
+
+generators.register [ new unix-searched-lib-generator
+ unix.searched-lib-generator : : SEARCHED_LIB : <toolset>unix ] ;
+
+
+# Derived toolsets must specify their own actions.
+actions link {
+}
+
+actions link.dll {
+}
+
+actions archive {
+}
+
+actions searched-lib-generator {
+}
+
+actions prebuilt {
+}
+
+
+
+
+
+.order = [ new order ] ;
+
+rule set-library-order-aux ( from * : to * )
+{
+ for local f in $(from)
+ {
+ for local t in $(to)
+ {
+ if $(f) != $(t)
+ {
+ $(.order).add-pair $(f) $(t) ;
+ }
+ }
+ }
+}
+
+rule set-library-order ( sources * : property-set : result * )
+{
+ local used-libraries ;
+ local deps = [ $(property-set).dependency ] ;
+ for local l in $(sources) $(deps:G=)
+ {
+ if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
+ {
+ used-libraries += $(l) ;
+ }
+ }
+
+ local created-libraries ;
+ for local l in $(result)
+ {
+ if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ]
+ {
+ created-libraries += $(l) ;
+ }
+ }
+
+ created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ;
+ set-library-order-aux $(created-libraries) : $(used-libraries) ;
+}
+
+rule order-libraries ( libraries * )
+{
+ local r = [ $(.order).order $(libraries) ] ;
+ return $(r) ;
+}
\ No newline at end of file
diff --git a/src/boost/tools/build/src/tools/unix.py b/src/boost/tools/build/src/tools/unix.py
new file mode 100644
index 000000000..298fc1dc5
--- /dev/null
+++ b/src/boost/tools/build/src/tools/unix.py
@@ -0,0 +1,155 @@
+# Copyright (c) 2004 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+""" This file implements linking semantics common to all unixes. On unix, static
+ libraries must be specified in a fixed order on the linker command line. Generators
+ declared here store information about the order and use it properly.
+"""
+
+import builtin
+from b2.build import generators, type
+from b2.util.utility import *
+from b2.util import set, sequence
+
+class UnixLinkingGenerator (builtin.LinkingGenerator):
+
+ def __init__ (self, id, composing, source_types, target_types, requirements):
+ builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements)
+
+ def run (self, project, name, prop_set, sources):
+ result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources)
+ if result:
+ set_library_order (project.manager (), sources, prop_set, result [1])
+
+ return result
+
+ def generated_targets (self, sources, prop_set, project, name):
+ sources2 = []
+ libraries = []
+ for l in sources:
+ if type.is_derived (l.type (), 'LIB'):
+ libraries.append (l)
+
+ else:
+ sources2.append (l)
+
+ sources = sources2 + order_libraries (libraries)
+
+ return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name)
+
+
+class UnixArchiveGenerator (builtin.ArchiveGenerator):
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run (self, project, name, prop_set, sources):
+ from b2.build.property_set import PropertySet
+ result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources)
+ if result and isinstance(result[0], PropertySet):
+ _, targets = result
+ else:
+ targets = result
+ set_library_order(project.manager(), sources, prop_set, targets)
+ return result
+
+class UnixSearchedLibGenerator (builtin.SearchedLibGenerator):
+
+ def __init__ (self):
+ builtin.SearchedLibGenerator.__init__ (self)
+
+ def optional_properties (self):
+ return self.requirements ()
+
+ def run (self, project, name, prop_set, sources):
+ result = builtin.SearchedLibGenerator.run (self, project, name, prop_set, sources)
+
+ set_library_order (project.manager (), sources, prop_set, result)
+
+ return result
+
+class UnixPrebuiltLibGenerator (generators.Generator):
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run (self, project, name, prop_set, sources):
+ f = prop_set.get ('<file>')
+ set_library_order_aux (f, sources)
+ return f + sources
+
+### # The derived toolset must specify their own rules and actions.
+# FIXME: restore?
+# action.register ('unix.prebuilt', None, None)
+
+
+generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['<file>', '<toolset>unix']))
+
+
+
+
+
+### # Declare generators
+### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE
+### : <toolset>unix ] ;
+generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['<toolset>unix']))
+
+### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB
+### : <toolset>unix ] ;
+###
+### generators.register [ new UnixSearchedLibGenerator
+### unix.SearchedLibGenerator : : SEARCHED_LIB : <toolset>unix ] ;
+###
+###
+### # The derived toolset must specify their own actions.
+### actions link {
+### }
+###
+### actions link.dll {
+### }
+
+def unix_archive (manager, targets, sources, properties):
+ pass
+
+# FIXME: restore?
+#action.register ('unix.archive', unix_archive, [''])
+
+### actions searched-lib-generator {
+### }
+###
+### actions prebuilt {
+### }
+
+
+from b2.util.order import Order
+__order = Order ()
+
+def set_library_order_aux (from_libs, to_libs):
+ for f in from_libs:
+ for t in to_libs:
+ if f != t:
+ __order.add_pair (f, t)
+
+def set_library_order (manager, sources, prop_set, result):
+ used_libraries = []
+ deps = prop_set.dependency ()
+
+ sources.extend(d.value for d in deps)
+ sources = sequence.unique(sources)
+
+ for l in sources:
+ if l.type () and type.is_derived (l.type (), 'LIB'):
+ used_libraries.append (l)
+
+ created_libraries = []
+ for l in result:
+ if l.type () and type.is_derived (l.type (), 'LIB'):
+ created_libraries.append (l)
+
+ created_libraries = set.difference (created_libraries, used_libraries)
+ set_library_order_aux (created_libraries, used_libraries)
+
+def order_libraries (libraries):
+ return __order.order (libraries)
+
diff --git a/src/boost/tools/build/src/tools/vacpp.jam b/src/boost/tools/build/src/tools/vacpp.jam
new file mode 100644
index 000000000..168f46564
--- /dev/null
+++ b/src/boost/tools/build/src/tools/vacpp.jam
@@ -0,0 +1,173 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#| tag::doc[]
+
+[[bbv2.reference.tools.compiler.vacpp]]
+= IBM Visual Age
+
+The `vacpp` module supports the http://www.ibm.com/software/ad/vacpp[IBM
+Visual Age] C++ Compiler, for the AIX operating system. Versions 7.1 and
+8.0 are known to work.
+
+The module is initialized using the following syntax:
+
+----
+using vacpp ;
+----
+
+The module does not accept any initialization options. The compiler
+should be installed in the `/usr/vacpp/bin` directory.
+
+Later versions of Visual Age are known as XL C/C++. They were not tested
+with the `vacpp` module.
+
+|# # end::doc[]
+
+#
+# B2 V2 toolset for the IBM XL C++ compiler
+#
+
+import toolset : flags ;
+import feature ;
+import common ;
+import generators ;
+import os ;
+
+feature.extend toolset : vacpp ;
+toolset.inherit vacpp : unix ;
+generators.override vacpp.prebuilt : builtin.prebuilt ;
+generators.override vacpp.searched-lib-generator : searched-lib-generator ;
+
+# Configure the vacpp toolset
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters vacpp : version $(version) ] ;
+
+ command = [ common.get-invocation-command vacpp : xlC
+ : $(command) : "/usr/vacpp/bin/xlC" ] ;
+
+ common.handle-options vacpp : $(condition) : $(command) : $(options) ;
+}
+
+# Declare generators
+generators.register-c-compiler vacpp.compile.c : C : OBJ : <toolset>vacpp ;
+generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : <toolset>vacpp ;
+
+# Allow C++ style comments in C files
+flags vacpp CFLAGS : -qcpluscmt ;
+
+# Declare flags
+flags vacpp CFLAGS <optimization>off : -qNOOPTimize ;
+flags vacpp CFLAGS <optimization>speed : -O3 -qstrict ;
+flags vacpp CFLAGS <optimization>space : -O2 -qcompact ;
+
+# Discretionary inlining (not recommended)
+flags vacpp CFLAGS <inlining>off : -qnoinline ;
+flags vacpp CFLAGS <inlining>on : -qinline ;
+#flags vacpp CFLAGS <inlining>full : -qinline ;
+flags vacpp CFLAGS <inlining>full : ;
+
+# Exception handling
+flags vacpp C++FLAGS <exception-handling>off : -qnoeh ;
+flags vacpp C++FLAGS <exception-handling>on : -qeh ;
+
+# Run-time Type Identification
+flags vacpp C++FLAGS <rtti>off : -qnortti ;
+flags vacpp C++FLAGS <rtti>on : -qrtti ;
+
+# Enable 64-bit memory addressing model
+flags vacpp CFLAGS <address-model>64 : -q64 ;
+flags vacpp LINKFLAGS <address-model>64 : -q64 ;
+flags vacpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ;
+
+# Use absolute path when generating debug information
+flags vacpp CFLAGS <debug-symbols>on : -g -qfullpath ;
+flags vacpp LINKFLAGS <debug-symbols>on : -g -qfullpath ;
+flags vacpp LINKFLAGS <debug-symbols>off : -s ;
+
+if [ os.name ] = AIX
+{
+ flags vacpp.compile C++FLAGS : -qfuncsect ;
+
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent, this is as close as it gets.
+ #
+ # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+ flags vacpp.link LINKFLAGS <link>shared : -bnoipath ;
+
+ # Run-time linking
+ flags vacpp.link EXE-LINKFLAGS <link>shared : -brtl ;
+}
+else
+{
+ # Linux PPC
+ flags vacpp.compile CFLAGS <link>shared : -qpic=large ;
+ flags vacpp FINDLIBS : rt ;
+}
+
+# Profiling
+flags vacpp CFLAGS <profiling>on : -pg ;
+flags vacpp LINKFLAGS <profiling>on : -pg ;
+
+flags vacpp.compile OPTIONS <cflags> ;
+flags vacpp.compile.c++ OPTIONS <cxxflags> ;
+flags vacpp DEFINES <define> ;
+flags vacpp UNDEFS <undef> ;
+flags vacpp HDRS <include> ;
+flags vacpp STDHDRS <sysinclude> ;
+flags vacpp.link OPTIONS <linkflags> ;
+flags vacpp ARFLAGS <arflags> ;
+
+flags vacpp LIBPATH <library-path> ;
+flags vacpp NEEDLIBS <library-file> ;
+flags vacpp FINDLIBS <find-shared-library> ;
+flags vacpp FINDLIBS <find-static-library> ;
+
+# Select the compiler name according to the threading model.
+flags vacpp VA_C_COMPILER <threading>single : xlc ;
+flags vacpp VA_C_COMPILER <threading>multi : xlc_r ;
+flags vacpp VA_CXX_COMPILER <threading>single : xlC ;
+flags vacpp VA_CXX_COMPILER <threading>multi : xlC_r ;
+
+SPACE = " " ;
+
+flags vacpp.link.dll HAVE_SONAME <target-os>linux : "" ;
+
+actions vacpp.link bind NEEDLIBS
+{
+ $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions vacpp.link.dll bind NEEDLIBS
+{
+ xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions vacpp.compile.c
+{
+ $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions vacpp.compile.c++
+{
+ $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions updated together piecemeal vacpp.archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/vmsdecc.jam b/src/boost/tools/build/src/tools/vmsdecc.jam
new file mode 100644
index 000000000..8f1e8ed17
--- /dev/null
+++ b/src/boost/tools/build/src/tools/vmsdecc.jam
@@ -0,0 +1,578 @@
+# Copyright (c) 2015 Artur Shepilko
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements the OpenVMS-based HP DECC/C++ toolset.
+# Relies on the POSIX-style path handling of the bjam/B2 implementation for VMS.
+
+import "class" : new ;
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import type ;
+import common ;
+import unix ;
+import path ;
+
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : vmsdecc ;
+
+toolset.inherit-generators vmsdecc : unix : unix.link unix.link.dll ;
+toolset.inherit-flags vmsdecc : unix ;
+toolset.inherit-rules vmsdecc : unix ;
+
+generators.override vmsdecc.archive-generator : builtin.archive-generator ;
+generators.override vmsdecc.prebuilt : builtin.prebuilt ;
+generators.override vmsdecc.searched-lib-generator : searched-lib-generator ;
+
+type.set-generated-target-suffix EXE : <toolset>vmsdecc <target-os>vms : exe ;
+type.set-generated-target-suffix OBJ : <toolset>vmsdecc <target-os>vms : obj ;
+type.set-generated-target-suffix PREPROCESSED_C : <toolset>vmsdecc <target-os>vms : i ;
+type.set-generated-target-suffix PREPROCESSED_CPP : <toolset>vmsdecc <target-os>vms : ixx ;
+type.set-generated-target-suffix STATIC_LIB : <toolset>vmsdecc <target-os>vms : olb ; ## xxx.olb
+
+type.register-suffixes exe : SHARED_LIB ;
+type.set-generated-target-prefix SHARED_LIB : <toolset>vmsdecc <target-os>vms : shr ; ## shrxxx.exe
+type.set-generated-target-suffix SHARED_LIB : <toolset>vmsdecc <target-os>vms : exe ; ## shrxxx.exe
+
+.OBJ = .obj ; ## suffix
+.nl = "
+" ;
+
+rule init ( version ? : command * : options * )
+{
+ local argv = [ modules.peek : ARGV ] ;
+
+ local condition = [
+ common.check-init-parameters vmsdecc : version $(version) ] ;
+
+ # CC and CXX are CLI commands, so no need to search for the executables
+ command = CXX ;
+ toolset.flags vmsdecc .CXX $(condition) : CXX ;
+ common.handle-options vmsdecc : $(condition) : $(command) : $(options) ;
+
+ local command_c = $(command[1--2]) $(command[-1]:B=CC) ;
+ toolset.flags vmsdecc .CC $(condition) : $(command_c) ;
+
+ local linker = [ feature.get-values <linker> : $(options) ] ;
+ linker ?= CXXLINK ;
+ toolset.flags vmsdecc.link .LD $(condition) : $(linker) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice\: using linker "::" $(condition) "::" $(linker[1]) ;
+ }
+
+ local archiver = LIB ;
+ toolset.flags vmsdecc.archive .AR $(condition) : $(archiver) ;
+
+ local b2 = $(argv[1]) ;
+ toolset.flags vmsdecc .B2 $(condition) : $(b2) ;
+}
+
+# Declare generators
+generators.register-c-compiler vmsdecc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>vmsdecc ;
+generators.register-c-compiler vmsdecc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>vmsdecc ;
+generators.register-c-compiler vmsdecc.compile.c : C : OBJ : <toolset>vmsdecc ;
+generators.register-c-compiler vmsdecc.compile.c++ : CPP : OBJ : <toolset>vmsdecc ;
+
+# Declare flags and actions for compilation
+flags vmsdecc.compile OPTIONS <debug-symbols>on : /DEBUG ;
+flags vmsdecc.compile OPTIONS <profiling>on : /DEBUG ; ## needs PCA link options
+flags vmsdecc.compile OPTIONS <optimization>off : /NOOPT ;
+flags vmsdecc.compile OPTIONS <optimization>speed : /OPT=INLINE=SPEED/OPT=NOINLINE ;
+flags vmsdecc.compile OPTIONS <optimization>space : /OPT=INLINE=SIZE/OPT=NOINLINE ;
+flags vmsdecc.compile OPTIONS <warnings>off : /NOWARN ;
+flags vmsdecc.compile OPTIONS <warnings>on : /WARN ;
+flags vmsdecc.compile OPTIONS <warnings>all : /WARN=ENABLE=ALL ;
+
+flags vmsdecc.compile.c++ OPTIONS <inlining>off : /OPT=NOINLINE ;
+
+flags vmsdecc OPTIONS <address-model>32 : /POINTER=32 ;
+flags vmsdecc OPTIONS <address-model>64 : /POINTER=64 ; ## /POINTER=64=ARGV argv-64
+
+flags vmsdecc.compile OPTIONS <cflags> ;
+flags vmsdecc.compile.c++ OPTIONS <cxxflags> ;
+flags vmsdecc.compile DEFINES <define> ;
+flags vmsdecc.compile UNDEFS <undef> ;
+flags vmsdecc.compile INCLUDES <include> ;
+flags vmsdecc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+feature.feature cxx-repository : : free path ; #order-sensitive ;
+flags vmsdecc CXX-REPOS <cxx-repository> ;
+
+
+local rule get-includes ( sources * : includes * )
+{
+ local result ;
+
+ ## Expect POSIX-style path, quote in double-quotes
+ for local d in $(sources:D) $(includes)
+ {
+ if $(d)
+ {
+ local QUOTE = \" ;
+ local SEP = / ;
+
+ local enquote = false ;
+ local addsep = false ;
+
+ s = [ SPLIT_BY_CHARACTERS $(d) : $(QUOTE) ] ;
+
+ if $(s) = $(d) { enquote = true ; }
+ if [ SPLIT_BY_CHARACTERS $(s) : $(SEP) ] = $(s) { addsep = true ; }
+
+ if $(addsep)
+ {
+ d = $(s)$(SEP) ;
+ enquote = true ;
+ }
+
+ if $(enquote)
+ {
+ d = $(QUOTE)$(d)$(QUOTE) ;
+ }
+
+ if ! $(d) in $(result)
+ {
+ result += $(d) ;
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+CXX-REPO-NAME = cxx_repository ;
+
+local rule get-target-cxx-repo ( target )
+{
+ return [ path.join $(target) $(CXX-REPO-NAME) ] ;
+}
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(SOURCE-INCLUDES) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ;
+
+ DEFINES on $(targets) = [ on $(targets) return "__USE_STD_IOSTREAM" $(DEFINES) ] ;
+
+ INCLUDES on $(targets) = [ on $(targets) get-includes $(sources) : $(INCLUDES) ] ;
+
+ TARGET-CXX-REPO on $(targets) = [ on $(targets[1]) get-target-cxx-repo $(LOCATE) ] ;
+ CXX-REPOS on $(targets) = [ on $(targets) return $(TARGET-CXX-REPO) $(CXX-REPOS) ] ;
+}
+
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(SOURCE-INCLUDES) ] ;
+
+ INCLUDES on $(targets) = [ on $(targets) get-includes $(sources) : $(INCLUDES) ] ;
+}
+
+actions compile.c
+{
+ $(.CC) $(OPTIONS) /DEF=("$(DEFINES:J=",")") /UNDEF=("$(UNDEFS:J=",")") /INC=($(INCLUDES:J=,)) /OBJ=$(<:W) $(>:W)
+}
+
+actions compile.c++
+{
+ $(.CXX) $(OPTIONS) /DEF=("$(DEFINES:J=",")") /UNDEF=("$(UNDEFS:J=",")") /INC=($(INCLUDES:J=,)) /REPO=($(CXX-REPOS:WJ=,)) /OBJ=$(<:W) $(>:W)
+}
+
+
+
+# Custom linking generator to separate dependency libraries and optfiles from
+# the list of sources. The objfiles, libraries, and optfiles are then referenced
+# via properties. This allows separate qualification of object files and libraries
+# on the linker command line.
+#
+class vmsdecc-linking-generator : linking-generator
+{
+ rule run ( project name ? : property-set : sources + )
+ {
+ local result = [ linking-generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ; # Sources to pass to inherited rule.
+ local properties2 ; # Properties to pass to inherited rule.
+ local objfiles ; # Object files.
+ local libraries ; # Library sources.
+
+ properties2 = [ $(property-set).raw ] ;
+
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] OBJ ]
+ {
+ objfiles += $(s) ;
+ properties2 += <link-objfile>$(s) ;
+ }
+ else if [ type.is-derived [ $(s).type ] STATIC_LIB ]
+ {
+ libraries += $(s) ;
+ properties2 += <link-staticlib>$(s) ;
+ }
+ else if [ type.is-derived [ $(s).type ] SHARED_LIB ]
+ {
+ libraries += $(s) ;
+ properties2 += <link-sharedlib>$(s) ;
+ }
+ }
+
+
+ return [ linking-generator.generated-targets $(sources)
+ : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
+ }
+}
+
+
+generators.register [ new vmsdecc-linking-generator vmsdecc.link :
+ OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : EXE : <toolset>vmsdecc ] ;
+
+generators.register [ new vmsdecc-linking-generator vmsdecc.link.dll :
+ OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : SHARED_LIB : <toolset>vmsdecc ] ;
+
+
+
+# Declare flags and actions for linking
+flags vmsdecc.link OPTIONS <debug-symbols>on : /DEBUG ;
+# Strip the binary when no debugging is needed
+flags vmsdecc.link OPTIONS <debug-symbols>off : /NODEBUG ;
+flags vmsdecc.link OPTIONS <profiling>on : /DEBUG ; ## need "DEFINE LIB$DEBUG PCA$COLLECTOR"
+flags vmsdecc.link OPTIONS <linkflags> ;
+flags vmsdecc.link LINKPATH <library-path> ;
+flags vmsdecc.link FINDLIBS-ST <find-static-library> ;
+flags vmsdecc.link FINDLIBS-SA <find-shared-library> ;
+flags vmsdecc.link LIBRARIES <library-file> ;
+flags vmsdecc.link LINK-RUNTIME <runtime-link>static : static ;
+flags vmsdecc.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags vmsdecc.link RPATH <dll-path> ;
+flags vmsdecc.link FINDLIBS-SA ;
+
+feature.feature "link-objfile" : : free dependency path incidental ;
+flags vmsdecc.link LINK-OBJS <link-objfile> ;
+
+feature.feature "link-libmodule" : : free dependency incidental ;
+flags vmsdecc.link LINK-LIBMODULES <link-libmodule> ;
+
+feature.feature "link-staticlib" : : free dependency path incidental ;
+flags vmsdecc.link LINK-LIBS <link-staticlib> ;
+
+feature.feature "link-sharedlib" : : free dependency path incidental ;
+flags vmsdecc.link LINK-SHAREDLIBS <link-sharedlib> ;
+
+feature.feature "link-optfile" : : free dependency path incidental ;
+flags vmsdecc.link LINK-OPTS <link-optfile> ;
+
+
+local rule export-target-var-contents ( var-name : values * )
+{
+ local result ;
+ local nl = "
+" ;
+ local locate ;
+
+ if $(var-name)
+ {
+ result +=
+ "$(nl)$(var-name) =" ;
+ for local f in $(values)
+ {
+ locate = [ on $(f) return $(LOCATE) ] ;
+ result +=
+ "$(nl)\"$(f:TG=:R=$(locate))\"" ;
+ }
+ result += "$(nl) ;" ;
+ }
+
+ return $(result) ;
+}
+
+# The VMS linker usually expects an explicit object module that contains main().
+# Yet on *NIX, the main module can be resolved automatically from a library --
+# this may arguably be convenient with dynamic linking, and is also used with
+# Boost.Test.
+# To handle such cases on VMS, one first needs to locate the library module
+# containing main(), then include it in the sources for the link command.
+# The GLOB_ARCHIVE built-in can locate the module name (globbing by the symbol MAIN).
+# To be able to use its result during the jam-parsing stage, we need to execute it
+# from a separate jam file that produces a pre-defined option file for the link.
+#
+
+actions write-jam-file-contents
+{
+ SET FILE /VER=1 @($(<:W):E= $(>) )
+}
+
+
+local rule mainmod-link-opt.generate ( jam-file : opt-file : objs * : libs * : sharedlibs * )
+{
+ local nl = "
+" ;
+ local $ = $ ;
+ local @ = @ ;
+
+ if $(jam-file) && $(opt-file)
+ {
+ local .contents on $(jam-file) =
+ "# This file was auto-generated by <toolset>$(__name__)." ;
+
+ .contents on $(jam-file) +=
+ "$(nl)OPT-FILE = $(opt-file) ;" ;
+
+ .contents on $(jam-file) += [ on $(jam-file)
+ export-target-var-contents "OBJS" : $(objs) ] ;
+
+ .contents on $(jam-file) += [ on $(jam-file)
+ export-target-var-contents "LIBS" : $(libs) ] ;
+
+ .contents on $(jam-file) += [ on $(jam-file)
+ export-target-var-contents "SHAREDLIBS" : $(sharedlibs) ] ;
+
+ .contents on $(jam-file) +=
+ "$(nl).nl = \"$(nl)\" ;"
+ ;
+ .contents on $(jam-file) +=
+ "$(nl)local rule get-main-members ( libs * : symbol-main ? )"
+ "$(nl){"
+ "$(nl) local result ;"
+ "$(nl) symbol-main ?= \"MAIN\" ;"
+ "$(nl) for local libfile in $($)(libs)"
+ "$(nl) {"
+ "$(nl) local main = [ GLOB_ARCHIVE $($)(libfile) : : : $($)(symbol-main) ] ;"
+ "$(nl) if $($)(main)"
+ "$(nl) {"
+ "$(nl) result += $($)(main) ;"
+ "$(nl) }"
+ "$(nl) }"
+ "$(nl) return $($)(result) ;"
+ "$(nl)}"
+ ;
+ .contents on $(jam-file) +=
+ "$(nl)local rule get-libmods ( members * )"
+ "$(nl){"
+ "$(nl) local result ;"
+ "$(nl) for local m in $($)(members)"
+ "$(nl) {"
+ "$(nl) local lib = $($)(m:WDBS) ;"
+ "$(nl) local mem = $($)(m:M) ;"
+ "$(nl) if $($)(mem)"
+ "$(nl) {"
+ "$(nl) local mod = [ SPLIT_BY_CHARACTERS $($)(mem) : \"()\" ] ;"
+ "$(nl) result += $($)(lib)/INC=($($)(mod:B))/LIB ;"
+ "$(nl) }"
+ "$(nl) }"
+ "$(nl) return $($)(result) ;"
+ "$(nl)}"
+ ;
+ .contents on $(jam-file) +=
+ "$(nl)rule mainmod-link-opt ( opt-file : libs * : objs * )"
+ "$(nl){"
+ "$(nl) local main-members = [ on $($)(opt-file[1]) get-main-members $($)(libs) ] ;"
+ "$(nl) LIBMODULES on $($)(opt-file[1]) = [ on $($)(opt-file[1]) get-libmods $($)(main-members[1]) ] ;"
+ "$(nl)}"
+ ;
+ .contents on $(jam-file) +=
+ "$(nl)actions mainmod-link-opt bind OBJS LIBMODULES"
+ "$(nl){"
+ "$(nl) SET FILE /VER=1 $(@)($($)(<:W):E= $($)(LIBMODULES:J=,-$($)(.nl))-$($)(.nl) )"
+ "$(nl)}"
+ ;
+ .contents on $(jam-file) +=
+ "$(nl)local rule make"
+ "$(nl){"
+ "$(nl) if $($)(OPT-FILE)"
+ "$(nl) {"
+ "$(nl) DEPENDS all : $($)(OPT-FILE) ;"
+ "$(nl) DEPENDS $($)(OPT-FILE) : $($)(LIBS) $($)(OBJS) ;"
+ "$(nl) mainmod-link-opt $($)(OPT-FILE) : $($)(LIBS) : $($)(OBJS) ;"
+ "$(nl) }"
+ "$(nl)}"
+ "$(nl)make all ;"
+ ;
+
+ write-jam-file-contents $(jam-file) : [ on $(jam-file) return $(.contents) ] ;
+
+ }
+}
+
+
+rule link ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-OBJS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-LIBS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-SHAREDLIBS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-OPTS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LIBRARIES) ] ;
+
+
+ for local s in $(sources)
+ {
+ local r = [ on $(s) return $(TARGET-CXX-REPO) ] ;
+
+ if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ]
+ {
+ CXX-REPOS on $(targets[1]) += $(r) ;
+ }
+ }
+
+ local locate = [ on $(targets[1]) return $(LOCATE) ] ;
+ LINK-MAINMOD-OPT on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.opt) ;
+ LINK-MAINMOD-JAM on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.jam) ;
+ #on $(targets[1]) TEMPORARY $(LINK-MAINMOD-JAM) ;
+
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-OPT) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-JAM) ] ;
+ on $(targets[1]) DEPENDS $(LINK-MAINMOD-OPT) : $(LINK-MAINMOD-JAM) ;
+
+ on $(targets[1]) mainmod-link-opt.generate $(LINK-MAINMOD-JAM)
+ : $(LINK-MAINMOD-OPT) : $(LINK-OBJS) : $(LINK-LIBS) $(LIBRARIES) : $(LINK-SHAREDLIBS) ;
+
+
+}
+
+actions link bind LINK-OBJS LINK-MAINMOD-JAM LINK-MAINMOD-OPT LINK-LIBS LIBRARIES LINK-SHAREDLIBS LINK-OPTS CXX-REPOS
+{
+ CXX_REPOS = "" +"$(CXX-REPOS:WJ=,)"
+ IF (CXX_REPOS .EQS. "") THEN CXX_REPOS = "NL:"
+ DEF /NOLOG REPOS 'CXX_REPOS'
+ SET FILE /VER=1 @($(<:WS=$INPUT.opt):E= $(LINK-OBJS:WJ=,-$(.nl))-$(.nl) ,$(LINK-LIBS:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LIBRARIES:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LINK-SHAREDLIBS:WJ=/SHARE,-$(.nl))/SHARE-$(.nl) )
+ MC $(.B2) -f $(LINK-MAINMOD-JAM:W)
+ $(.LD) $(OPTIONS) /REPO=(REPOS:) /EXE=$(<:W) $(LINK-MAINMOD-OPT:W)/OPT, $(<:WS=$INPUT.opt)/OPT ,$(LINK-OPTS:WJ=/OPT,)/OPT
+}
+
+# Slight modifications for DLLs
+rule link.dll ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-OBJS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-LIBS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-SHAREDLIBS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-OPTS) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LIBRARIES) ] ;
+
+ for local s in $(sources)
+ {
+ local r = [ on $(s) return $(TARGET-CXX-REPO) ] ;
+
+ if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ]
+ {
+ CXX-REPOS on $(targets[1]) += $(r) ;
+ }
+ }
+
+
+ local locate = [ on $(targets[1]) return $(LOCATE) ] ;
+ LINK-MAINMOD-OPT on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.opt) ;
+ LINK-MAINMOD-JAM on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.jam) ;
+ #on $(targets[1]) TEMPORARY $(LINK-MAINMOD-JAM) ;
+
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-OPT) ] ;
+ DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-JAM) ] ;
+ on $(targets[1]) DEPENDS $(LINK-MAINMOD-OPT) : $(LINK-MAINMOD-JAM) ;
+
+ on $(targets[1]) mainmod-link-opt.generate $(LINK-MAINMOD-JAM)
+ : $(LINK-MAINMOD-OPT) : $(LINK-OBJS) : $(LINK-LIBS) $(LIBRARIES) : $(LINK-SHAREDLIBS) ;
+
+}
+
+actions link.dll bind LINK-OBJS LINK-MAINMOD-JAM LINK-MAINMOD-OPT LINK-LIB LINK-LIBS LIBRARIES LINK-SHAREDLIBS LINK-OPTS CXX-REPOS
+{
+ CXX_REPOS = "" +"$(CXX-REPOS:WJ=,)"
+ IF (CXX_REPOS .EQS. "") THEN CXX_REPOS = "NL:"
+ DEF /NOLOG REPOS 'CXX_REPOS'
+ SET FILE /VER=1 @($(<:WS=$INPUT.opt):E= $(LINK-OBJS:WJ=,-$(.nl))-$(.nl) ,$(LINK-LIBS:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LIBRARIES:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LINK-SHAREDLIBS:WJ=/SHARE,-$(.nl))/SHARE-$(.nl) )
+ MC $(.B2) -f $(LINK-MAINMOD-JAM:W)
+ $(.LD) $(OPTIONS) /REPO=(REPOS:) /SHARE=$(<:W) $(LINK-MAINMOD-OPT:W)/OPT, $(<:WS=$INPUT.opt)/OPT ,$(LINK-OPTS:WJ=/OPT,)/OPT
+}
+
+
+
+flags vmsdecc.archive AROPTIONS <archiveflags> ;
+
+
+local rule vms-join-wildcard-name ( path * : name )
+{
+ local files ;
+
+ if $(name)
+ {
+ for local d in $(path)
+ {
+ files += $(d)$(name) ;
+ }
+
+ files ?= $(name) ;
+
+ }
+
+ return $(files) ;
+}
+
+
+rule archive ( targets + : sources * : properties * )
+{
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+
+
+ #CXX-REPOS on $(targets[1]) = null ; ## reset
+
+ for local s in $(sources)
+ {
+ local r = [ on $(s) return $(TARGET-CXX-REPO) ] ;
+
+ if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ]
+ {
+ CXX-REPOS on $(targets[1]) += $(r) ;
+ }
+ }
+
+ if [ on $(targets[1]) return $(CXX-REPOS) ]
+ {
+ CXX-REPO-OBJS on $(targets[1]) = [ on $(targets[1]) return [ vms-join-wildcard-name $(CXX-REPOS:W) : *$(.OBJ) ] ] ;
+
+ #DEPENDS $(targets) : [ on $(targets[1]) return $(CXX-REPO-OBJS) ] ;
+ }
+}
+
+# Declare action for creating static libraries
+actions piecemeal archive
+{
+ HAVE_REPO_OBJS = "F"
+ IF ("" +"$(CXX-REPO-OBJS[1])" .NES. "")
+ THEN
+ IF ( "" +F$SEARCH("$(CXX-REPO-OBJS[1])") .NES. "")
+ THEN
+ HAVE_REPO_OBJS = "T"
+ ENDIF
+ ENDIF
+ $(.AR) /CREATE /REPL $(AROPTIONS) $(<:W) $(>:WJ=,)
+ IF (HAVE_REPO_OBJS)
+ THEN
+ $(.AR) /REPL $(AROPTIONS) $(<:W) $(CXX-REPO-OBJS:J=,)
+ PIPE DEL /NOLOG /NOCONF $(CXX-REPO-OBJS:J=;*,);* 2>NL: >NL:
+ ENDIF
+}
+
diff --git a/src/boost/tools/build/src/tools/whale.jam b/src/boost/tools/build/src/tools/whale.jam
new file mode 100644
index 000000000..9335ff0c0
--- /dev/null
+++ b/src/boost/tools/build/src/tools/whale.jam
@@ -0,0 +1,116 @@
+# Copyright (C) Vladimir Prus 2002-2005.
+
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements support for Whale/Dolphin/WD parser/lexer tools.
+# See http://www.cs.queensu.ca/home/okhotin/whale/ for details.
+#
+# There are three interesting target types:
+# - WHL (the parser sources), that are converted to CPP and H
+# - DLP (the lexer sources), that are converted to CPP and H
+# - WD (combined parser/lexer sources), that are converted to WHL + DLP
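+#
+# A typical configuration, as a sketch (the install path below is an
+# assumption, not part of this module):
+#
+#   using whale : /usr/local/whale/bin ;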
+
+import type ;
+import generators ;
+import path ;
+import "class" : new ;
+import errors ;
+
+rule init ( path # path to the Whale/Dolphin/WD binaries
+ )
+{
+ if $(.configured) && $(.path) != $(path)
+ {
+ errors.user-error "Attempt to reconfigure Whale support" :
+ "Previously configured with path \"$(.path:E=<empty>)\"" :
+ "Now configuring with path \"$(path:E=<empty>)\"" ;
+
+ }
+ .configured = true ;
+ .path = $(path) ;
+
+ .whale = [ path.join $(path) whale ] ;
+ .dolphin = [ path.join $(path) dolphin ] ;
+ .wd = [ path.join $(path) wd ] ;
+}
+
+
+# Declare the types.
+type.register WHL : whl ;
+type.register DLP : dlp ;
+type.register WHL_LR0 : lr0 ;
+type.register WD : wd ;
+
+# Declare standard generators.
+generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ;
+generators.register-standard whale.dolphin : DLP : CPP H ;
+generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ;
+
+# The conversions defined above are ambiguous when we generate CPP from WD:
+# we can go either via the WHL type or via the DLP type.
+# The following custom generator handles this by running both conversions.
+
+class wd-to-cpp : generator
+{
+ rule __init__ ( * : * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) ;
+ }
+
+ rule run ( project name ? : property-set : source * )
+ {
+ if ! $(source[2])
+ {
+ local new-sources ;
+ if ! [ $(source).type ] in WHL DLP
+ {
+ local r1 = [ generators.construct $(project) $(name)
+ : WHL : $(property-set) : $(source) ] ;
+ local r2 = [ generators.construct $(project) $(name)
+ : DLP : $(property-set) : $(source) ] ;
+
+ new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
+ }
+ else
+ {
+ new-sources = $(source) ;
+ }
+
+ local result ;
+ for local i in $(new-sources)
+ {
+ local t = [ generators.construct $(project) $(name) : CPP
+ : $(property-set) : $(i) ] ;
+ result += $(t[2-]) ;
+ }
+ return $(result) ;
+ }
+ }
+
+}
+
+
+generators.override whale.wd-to-cpp : whale.whale ;
+generators.override whale.wd-to-cpp : whale.dolphin ;
+
+
+generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ;
+
+
+actions whale
+{
+ $(.whale) -d $(<[1]:D) $(>)
+}
+
+actions dolphin
+{
+ $(.dolphin) -d $(<[1]:D) $(>)
+}
+
+actions wd
+{
+ $(.wd) -d $(<[1]:D) -g $(>)
+}
+
diff --git a/src/boost/tools/build/src/tools/xlcpp.jam b/src/boost/tools/build/src/tools/xlcpp.jam
new file mode 100644
index 000000000..1b66301cf
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xlcpp.jam
@@ -0,0 +1,164 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Toon Knapen 2004.
+# Copyright Catherine Morton 2015.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# B2 V2 toolset for the IBM XL C++ compiler
+#
+
+import toolset : flags ;
+import feature ;
+import common ;
+import generators ;
+import os ;
+
+feature.extend toolset : xlcpp ;
+toolset.inherit xlcpp : unix ;
+generators.override xlcpp.prebuilt : builtin.prebuilt ;
+generators.override xlcpp.searched-lib-generator : searched-lib-generator ;
+
+# Configure the xlcpp toolset
+rule init ( version ? : command * : options * )
+{
+ local condition = [
+ common.check-init-parameters xlcpp : version $(version) ] ;
+
+ command = [ common.get-invocation-command xlcpp : xlC
+ : $(command) : "/usr/xlcpp/bin/xlC" ] ;
+
+ common.handle-options xlcpp : $(condition) : $(command) : $(options) ;
+}
+
+# Declare generators
+generators.register-c-compiler xlcpp.compile.c : C : OBJ : <toolset>xlcpp ;
+generators.register-c-compiler xlcpp.compile.c++ : CPP : OBJ : <toolset>xlcpp ;
+
+# Allow C++ style comments in C files
+flags xlcpp CFLAGS : -qnoxlcompatmacros ;
+
+# Declare flags
+flags xlcpp CFLAGS <optimization>off : -qNOOPTimize ;
+flags xlcpp CFLAGS <optimization>speed : ;
+flags xlcpp CFLAGS <optimization>space : -O2 -qcompact ;
+
+# Discretionary inlining (not recommended)
+flags xlcpp CFLAGS <inlining>off : -qnoinline ;
+flags xlcpp CFLAGS <inlining>on : -qinline ;
+#flags xlcpp CFLAGS <inlining>full : -qinline ;
+flags xlcpp CFLAGS <inlining>full : ;
+
+# Exception handling
+flags xlcpp C++FLAGS <exception-handling>off : -qnoeh ;
+flags xlcpp C++FLAGS <exception-handling>on : -qeh ;
+
+# Run-time Type Identification
+flags xlcpp C++FLAGS <rtti>off : -qnortti ;
+flags xlcpp C++FLAGS <rtti>on : -qrtti ;
+
+# Enable 64-bit memory addressing model
+flags xlcpp CFLAGS <address-model>64 : -q64 ;
+flags xlcpp LINKFLAGS <address-model>64 : -q64 ;
+flags xlcpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ;
+
+# Use absolute path when generating debug information
+flags xlcpp CFLAGS <debug-symbols>on : -g -qfullpath ;
+flags xlcpp LINKFLAGS <debug-symbols>on : -g -qfullpath ;
+flags xlcpp LINKFLAGS <debug-symbols>off : -s ;
+
+if [ os.name ] = AIX
+{
+ flags xlcpp.compile C++FLAGS : -qfuncsect ;
+
+    # The -bnoipath option strips the prepended (relative) path of libraries from
+    # the loader section in the target library or executable. Hence, at
+    # load time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+    # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+    # this option, the prepended (relative) path + library name is
+    # hard-coded in the loader section, causing *only* this path to be
+    # searched at load time. Note that the AIX linker does not have an
+    # -soname equivalent; this is as close as it gets.
+ #
+ # The above options are definitely for AIX 5.x, and most likely also for
+ # AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+ flags xlcpp.link LINKFLAGS <link>shared : -bnoipath ;
+
+ # Run-time linking
+ flags xlcpp.link EXE-LINKFLAGS <link>shared : -brtl ;
+}
+else
+{
+ # Linux PPC
+ flags xlcpp.compile CFLAGS <link>shared : -qpic=large ;
+ flags xlcpp FINDLIBS : rt ;
+
+ flags xlcpp.compile OPTIONS <local-visibility>hidden : -qvisibility=hidden ;
+ flags xlcpp.compile OPTIONS <local-visibility>protected : -qvisibility=protected ;
+ flags xlcpp.compile OPTIONS <local-visibility>global : -qvisibility=default ;
+}
+
+# Profiling
+flags xlcpp CFLAGS <profiling>on : -pg ;
+flags xlcpp LINKFLAGS <profiling>on : -pg ;
+
+# Declare flags and actions for compilation
+flags xlcpp.compile.c++ OPTIONS <cxxstd>98 : -std=c++03 ;
+flags xlcpp.compile.c++ OPTIONS <cxxstd>03 : -std=c++03 ;
+flags xlcpp.compile.c++ OPTIONS <cxxstd>0x : -std=c++11 ;
+flags xlcpp.compile.c++ OPTIONS <cxxstd>11 : -std=c++11 ;
+flags xlcpp.compile.c++ OPTIONS <cxxstd>1y : -std=c++1y ;
+flags xlcpp.compile.c++ OPTIONS <cxxstd>14 : -std=c++1y ;
+flags xlcpp.compile.c++ OPTIONS <cxxstd>latest : -std=c++1y ;
+
+flags xlcpp.compile OPTIONS <cflags> ;
+flags xlcpp.compile.c++ OPTIONS <cxxflags> ;
+flags xlcpp DEFINES <define> ;
+flags xlcpp UNDEFS <undef> ;
+flags xlcpp HDRS <include> ;
+flags xlcpp STDHDRS <sysinclude> ;
+flags xlcpp.link OPTIONS <linkflags> ;
+flags xlcpp ARFLAGS <arflags> ;
+
+flags xlcpp LIBPATH <library-path> ;
+flags xlcpp NEEDLIBS <library-file> ;
+flags xlcpp FINDLIBS <find-shared-library> ;
+flags xlcpp FINDLIBS <find-static-library> ;
+
+# Select the compiler name according to the threading model.
+flags xlcpp VA_C_COMPILER <threading>single : xlc ;
+flags xlcpp VA_C_COMPILER <threading>multi : xlc ;
+flags xlcpp VA_CXX_COMPILER <threading>single : xlC ;
+flags xlcpp VA_CXX_COMPILER <threading>multi : xlC ;
+
+SPACE = " " ;
+
+flags xlcpp.link.dll HAVE_SONAME <target-os>linux : "" ;
+
+actions xlcpp.link bind NEEDLIBS
+{
+ $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions xlcpp.link.dll bind NEEDLIBS
+{
+ xlC -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS)
+}
+
+actions xlcpp.compile.c
+{
+ $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions xlcpp.compile.c++
+{
+ $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)"
+}
+
+actions updated together piecemeal xlcpp.archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/src/boost/tools/build/src/tools/xlf.jam b/src/boost/tools/build/src/tools/xlf.jam
new file mode 100644
index 000000000..e7fcc6086
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xlf.jam
@@ -0,0 +1,39 @@
+# Copyright (C) 2004 Toon Knapen
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# toolset configuration for the IBM Fortran compiler (xlf)
+#
+
+import toolset : flags ;
+import feature ;
+import fortran ;
+
+rule init ( version ? : command * : options * )
+{
+}
+
+# Declare flags and action for compilation
+flags xlf OPTIONS <optimization>off : -O0 ;
+flags xlf OPTIONS <optimization>speed : -O3 ;
+flags xlf OPTIONS <optimization>space : -Os ;
+
+flags xlf OPTIONS <debug-symbols>on : -g ;
+flags xlf OPTIONS <profiling>on : -pg ;
+
+flags xlf DEFINES <define> ;
+flags xlf INCLUDES <include> ;
+
+rule compile-fortran
+{
+}
+
+actions compile-fortran
+{
+ xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)"
+}
+
+generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ;
diff --git a/src/boost/tools/build/src/tools/xsltproc-config.jam b/src/boost/tools/build/src/tools/xsltproc-config.jam
new file mode 100644
index 000000000..b240fe3af
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xsltproc-config.jam
@@ -0,0 +1,36 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the xsltproc toolset. To use, just import this
+# module.
+
+import os ;
+import toolset : using ;
+
+
+local rule locate-executable ( name )
+{
+ local path = [ modules.peek : PATH ] ;
+ local exe ;
+ if [ os.name ] = NT
+ {
+ exe = [ GLOB $(path) "C:\\Boost\\bin" : $(name)\.exe ] ;
+ }
+ else
+ {
+ exe = [ GLOB $(path) : $(name) ] ;
+ }
+ return $(exe[1]) ;
+}
+
+
+local xsltproc-exe = [ locate-executable xsltproc ] ;
+if $(xsltproc-exe)
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO notice\: using xsltproc ":" $(xsltproc-exe) ;
+ }
+ using xsltproc : $(xsltproc-exe) ;
+}
diff --git a/src/boost/tools/build/src/tools/xsltproc.jam b/src/boost/tools/build/src/tools/xsltproc.jam
new file mode 100644
index 000000000..6baf0491a
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xsltproc.jam
@@ -0,0 +1,232 @@
+# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# This module defines rules to apply an XSLT stylesheet to an XML file using the
+# xsltproc driver, part of libxslt.
+
+import "class" : new ;
+import common ;
+import feature ;
+import generators ;
+import modules ;
+import os ;
+import path ;
+import regex ;
+import sequence ;
+import toolset ;
+import virtual-target ;
+
+feature.feature "xsl:param" : : free ;
+feature.feature "xsl:path" : : free ;
+feature.feature catalog : : free ;
+
+
+# Initialize xsltproc support. The parameters are:
+# xsltproc: The xsltproc executable
+#
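+# A typical configuration, e.g. in user-config.jam (the path below is an
+# assumption; see also xsltproc-config.jam for automatic configuration):
+#
+#   using xsltproc : "/usr/bin/xsltproc" ;
+#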
+rule init ( xsltproc ? )
+{
+ if $(xsltproc)
+ {
+ modify-config ;
+ .xsltproc = $(xsltproc) ;
+ check-xsltproc ;
+ }
+}
+
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+ .xsltproc ?= [ modules.peek : XSLTPROC ] ;
+ .xsltproc ?= xsltproc ;
+ check-xsltproc ;
+ .is-cygwin = [ .is-cygwin $(.xsltproc) ] ;
+ }
+}
+
+
+rule modify-config ( )
+{
+ if $(.config-frozen)
+ {
+ import errors ;
+ errors.user-error
+ "xsltproc: Cannot change xsltproc command after it has been used." ;
+ }
+}
+
+
+rule check-xsltproc ( )
+{
+ if $(.xsltproc)
+ {
+ local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ]
+ ;
+ if $(status[2]) != 0
+ {
+ import errors ;
+ errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ;
+ }
+ }
+}
+
+rule name ( )
+{
+ freeze-config ;
+ return $(.xsltproc) ;
+}
+
+# Returns a non-empty string if a cygwin xsltproc binary was specified.
+#
+rule is-cygwin ( )
+{
+ freeze-config ;
+ return $(.is-cygwin) ;
+}
+
+
+rule .is-cygwin ( xsltproc )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native [ path.join [ path.parent $(file) ] xsltproc ]
+ ] ;
+ if [ os.name ] = CYGWIN
+ {
+ dir = $(dir:W) ;
+ }
+ local command =
+ "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ;
+ local status = [ SHELL $(command) : no-output : exit-status ] ;
+ if $(status[2]) != "0"
+ {
+ return true ;
+ }
+ }
+}
+
+class xsltproc-action : action
+{
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ if $(s)
+ {
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes "xsl:path" : XML ] ] ;
+ }
+ else
+ {
+ return $(property-set) ;
+ }
+ }
+}
+
+class xsltproc-generator : generator
+{
+ rule action-class ( )
+ {
+ return xsltproc-action ;
+ }
+}
+
+rule register-generator ( id : source-types + : target-types + : requirements * )
+{
+ if ! $(id) in $(.known-rules)
+ {
+ .known-rules += $(id) ;
+ flags $(id) ;
+ }
+ generators.register [ new xsltproc-generator $(id) :
+ $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+IMPORT xsltproc : register-generator : : generators.register-xslt ;
+
+rule flags ( rulename )
+{
+ toolset.uses-features $(rulename) : <xsl:param> <catalog> : unchecked ;
+ toolset.flags $(rulename) XSL-PATH : <xsl:path> : unchecked ;
+ toolset.flags $(rulename) FLAGS : <flags> : unchecked ;
+}
+
+rule compute-xslt-flags ( target : properties * )
+{
+ local flags ;
+ # Translate <xsl:param> into command line flags.
+ for local param in [ feature.get-values <xsl:param> : $(properties) ]
+ {
+ local namevalue = [ regex.split $(param) "=" ] ;
+ flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
+ }
+
+ return $(flags) ;
+}
+
+
+local rule .xsltproc ( target : source stylesheet : properties * : dirname ? :
+ action )
+{
+ freeze-config ;
+ STYLESHEET on $(target) = $(stylesheet) ;
+ FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ;
+ NAME on $(target) = $(.xsltproc) ;
+
+ for local catalog in [ feature.get-values <catalog> : $(properties) ]
+ {
+ CATALOG = [ common.variable-setting-command XML_CATALOG_FILES :
+ $(catalog:T) ] ;
+ }
+
+ if [ os.on-windows ] && ! [ is-cygwin ]
+ {
+ action = $(action).windows ;
+ }
+
+ $(action) $(target) : $(source) ;
+}
+
+
+rule xslt ( target : source stylesheet : properties * )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : :
+ xslt-xsltproc ] ;
+}
+
+
+rule xslt-dir ( target : source stylesheet : properties * : dirname )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) :
+ $(dirname) : xslt-xsltproc-dir ] ;
+}
+
+_ = " " ;
+
+actions xslt-xsltproc.windows
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:W)" --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:T)" --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
+}
+
+
+actions xslt-xsltproc-dir.windows bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:W)" --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc-dir bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:T)" --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
+}
diff --git a/src/boost/tools/build/src/tools/xsltproc/included.xsl b/src/boost/tools/build/src/tools/xsltproc/included.xsl
new file mode 100644
index 000000000..ef86394a9
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xsltproc/included.xsl
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2010 Steven Watanabe
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+</xsl:stylesheet>
diff --git a/src/boost/tools/build/src/tools/xsltproc/test.xml b/src/boost/tools/build/src/tools/xsltproc/test.xml
new file mode 100644
index 000000000..57c8ba187
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xsltproc/test.xml
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root/>
diff --git a/src/boost/tools/build/src/tools/xsltproc/test.xsl b/src/boost/tools/build/src/tools/xsltproc/test.xsl
new file mode 100644
index 000000000..a142c91dd
--- /dev/null
+++ b/src/boost/tools/build/src/tools/xsltproc/test.xsl
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright (c) 2010 Steven Watanabe
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+ -->
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ version="1.0">
+ <xsl:include href="included.xsl"/>
+</xsl:stylesheet>
diff --git a/src/boost/tools/build/src/tools/zlib.jam b/src/boost/tools/build/src/tools/zlib.jam
new file mode 100644
index 000000000..7ac81047d
--- /dev/null
+++ b/src/boost/tools/build/src/tools/zlib.jam
@@ -0,0 +1,235 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the zlib library
+#
+# After 'using zlib', the following targets are available:
+#
+# /zlib//zlib -- The zlib library
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import os ;
+import property ;
+import property-set ;
+
+header = zlib.h ;
+names = z zlib zll zdll ;
+
+sources = adler32.c compress.c
+ crc32.c deflate.c gzclose.c gzio.c gzlib.c gzread.c gzwrite.c
+ infback.c inffast.c inflate.c inftrees.c trees.c uncompr.c zutil.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the zlib library.
+#
+# zlib can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt zlib::
+#
+# <search>
+# The directory containing the zlib binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the zlib headers.
+#
+# If none of these options is specified, then the environment
+# variables ZLIB_LIBRARY_PATH, ZLIB_NAME, and ZLIB_INCLUDE will
+# be used instead.
+#
+# Options for building zlib from source::
+#
+# <source>
+#     The zlib source directory. Defaults to the environment variable
+# ZLIB_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find zlib in the default system location
+# using zlib ;
+# # Build zlib from source
+# using zlib : 1.2.7 : <source>/home/steven/zlib-1.2.7 ;
+# # Find zlib in /usr/local
+# using zlib : 1.2.7
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build zlib from source for msvc and find
+# # prebuilt binaries for gcc.
+# using zlib : 1.2.7 : <source>C:/Devel/src/zlib-1.2.7 : <toolset>msvc ;
+# using zlib : 1.2.7 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The zlib version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the zlib target
+
+ : is-default ?
+ # Default configurations are only used when zlib
+ # has not yet been configured. This option is
+ # deprecated. A configuration will be treated
+ # as a default when none of <include>, <search>,
+ # <name>, and <source> are present.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project zlib ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local source-path = [ feature.get-values <source> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+ local tag = [ feature.get-values <tag> : $(options) ] ;
+ local build-name = [ feature.get-values <build-name> : $(options) ] ;
+
+ if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
+ {
+ is-default = true ;
+ }
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+    # Ignore the ZLIB_SOURCE environment variable if this initialization
+    # requested a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for zlib:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ }
+ else
+ {
+ source-path ?= [ os.environ ZLIB_SOURCE ] ;
+ if $(source-path)
+ {
+ source-path = [ path.root [ path.make $(source-path) ]
+ [ path.pwd ] ] ;
+ }
+ }
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [zlib] zlib is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "zlib is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path)
+ {
+ build-name ?= z ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag)
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [zlib] Building zlib from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [zlib] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [zlib] found zlib source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [zlib] could not find zlib source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources)
+ {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<define>ZLIB_DLL
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library zlib : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ }
+ else
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [zlib] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [zlib] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library zlib : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/tools/zstd.jam b/src/boost/tools/build/src/tools/zstd.jam
new file mode 100644
index 000000000..2cba82d6b
--- /dev/null
+++ b/src/boost/tools/build/src/tools/zstd.jam
@@ -0,0 +1,100 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the zstd library
+#
+# After 'using zstd', the following targets are available:
+#
+# /zstd//zstd -- The zstd library
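+#
+# Typical configurations, as a sketch (the paths below are assumptions):
+#
+#   using zstd ;
+#   using zstd : : <include>/usr/local/include <search>/usr/local/lib ;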
+
+import project ;
+import ac ;
+import errors ;
+import feature ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = zstd.h ;
+# libzstd is only needed for Visual C++ builds.
+# The *_static variants are for prebuilt Windows static libraries.
+names = zstd zstd_static libzstd libzstd_static ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+rule init (
+ version ?
+ # (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the target
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project zstd ;
+ }
+
+ local library-path = [ feature.get-values <search> : $(options) ] ;
+ local include-path = [ feature.get-values <include> : $(options) ] ;
+ local library-name = [ feature.get-values <name> : $(options) ] ;
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ if $(.configured.$(condition))
+ {
+ if ! $(options)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [zstd] zstd is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "zstd is already configured" ;
+ }
+ return ;
+ }
+ else
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [zstd] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [zstd] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library zstd : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/util/__init__.py b/src/boost/tools/build/src/util/__init__.py
new file mode 100644
index 000000000..7c847cb57
--- /dev/null
+++ b/src/boost/tools/build/src/util/__init__.py
@@ -0,0 +1,321 @@
+
+import bjam
+import re
+import types
+
+from itertools import groupby
+
+
+def safe_isinstance(value, types=None, class_names=None):
+ """To prevent circular imports, this extends isinstance()
+ by checking also if `value` has a particular class name (or inherits from a
+ particular class name). This check is safe in that an AttributeError is not
+ raised in case `value` doesn't have a __class__ attribute.
+ """
+ # inspect is being imported here because I seriously doubt
+ # that this function will be used outside of the type
+ # checking below.
+ import inspect
+ result = False
+ if types is not None:
+ result = result or isinstance(value, types)
+ if class_names is not None and not result:
+ # this doesn't work with inheritance, but normally
+ # either the class will already be imported within the module,
+ # or the class doesn't have any subclasses. For example: PropertySet
+ if isinstance(class_names, basestring):
+ class_names = [class_names]
+ # this is the part that makes it "safe".
+ try:
+ base_names = [class_.__name__ for class_ in inspect.getmro(value.__class__)]
+ for name in class_names:
+ if name in base_names:
+ return True
+ except AttributeError:
+ pass
+ return result
+
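+# For example (an illustrative sketch, not part of the original module):
+# safe_isinstance(ps, class_names='PropertySet') returns True for a
+# PropertySet instance without this module having to import that class.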
+
+def is_iterable_typed(values, type_):
+ return is_iterable(values) and all(isinstance(v, type_) for v in values)
+
+
+def is_iterable(value):
+ """Returns whether value is iterable and not a string."""
+ return not isinstance(value, basestring) and hasattr(value, '__iter__')
+
+
+def is_iterable_or_none(value):
+ return is_iterable(value) or value is None
+
+
+def is_single_value(value):
+ # some functions may specify a bjam signature
+ # that is a string type, but still allow a
+ # PropertySet to be passed in
+ return safe_isinstance(value, (basestring, type(None)), 'PropertySet')
+
+
+if __debug__:
+
+ from textwrap import dedent
+ message = dedent(
+ """The parameter "{}" was passed in a wrong type for the "{}()" function.
+ Actual:
+ \ttype: {}
+ \tvalue: {}
+ Expected:
+ \t{}
+ """
+ )
+
+ bjam_types = {
+ '*': is_iterable_or_none,
+ '+': is_iterable_or_none,
+ '?': is_single_value,
+ '': is_single_value,
+ }
+
+ bjam_to_python = {
+ '*': 'iterable',
+ '+': 'iterable',
+ '?': 'single value',
+ '': 'single value',
+ }
+
+
+ def get_next_var(field):
+ it = iter(field)
+ var = it.next()
+ type_ = None
+ yield_var = False
+ while type_ not in bjam_types:
+ try:
+ # the first value has already
+ # been consumed outside of the loop
+ type_ = it.next()
+ except StopIteration:
+ # if there are no more values, then
+ # var still needs to be returned
+ yield_var = True
+ break
+ if type_ not in bjam_types:
+ # type_ is not a type and is
+ # another variable in the same field.
+ yield var, ''
+ # type_ is the next var
+ var = type_
+ else:
+ # otherwise, type_ is a type for var
+ yield var, type_
+ try:
+ # the next value should be a var
+ var = it.next()
+ except StopIteration:
+ # if not, then we're done with
+ # this field
+ break
+ if yield_var:
+ yield var, ''
+
+
+# Decorator that specifies the bjam-side prototype for a Python function.
+def bjam_signature(s):
+ if __debug__:
+ from inspect import getcallargs
+ def decorator(fn):
+ function_name = fn.__module__ + '.' + fn.__name__
+ def wrapper(*args, **kwargs):
+ callargs = getcallargs(fn, *args, **kwargs)
+ for field in s:
+ for var, type_ in get_next_var(field):
+ try:
+ value = callargs[var]
+ except KeyError:
+ raise Exception(
+ 'Bjam Signature specifies a variable named "{}"\n'
+ 'but is not found within the python function signature\n'
+ 'for function {}()'.format(var, function_name)
+ )
+ if not bjam_types[type_](value):
+ raise TypeError(
+                            message.format(var, function_name, type(value), repr(value),
+ bjam_to_python[type_])
+ )
+ return fn(*args, **kwargs)
+ wrapper.__name__ = fn.__name__
+ wrapper.bjam_signature = s
+ return wrapper
+ return decorator
+ else:
+ def decorator(f):
+ f.bjam_signature = s
+ return f
+
+ return decorator
+
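+# An illustrative sketch of how the decorator is used (the rule below is a
+# hypothetical example, not part of this module):
+#
+#   @bjam_signature((["name"], ["sources", "*"]))
+#   def declare(name, sources):
+#       return [name] + sources
+#
+# In debug builds, declare("app", ["a.cpp", "b.cpp"]) passes the checks, while
+# declare(["app"], ["a.cpp"]) raises a TypeError because "name" must be a
+# single value.
+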
+def metatarget(f):
+
+ f.bjam_signature = (["name"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"])
+ return f
+
+class cached(object):
+
+ def __init__(self, function):
+ self.function = function
+ self.cache = {}
+
+ def __call__(self, *args):
+ try:
+ return self.cache[args]
+ except KeyError:
+ v = self.function(*args)
+ self.cache[args] = v
+ return v
+
+ def __get__(self, instance, type):
+ return types.MethodType(self, instance, type)
+
+def unquote(s):
+ if s and s[0] == '"' and s[-1] == '"':
+ return s[1:-1]
+ else:
+ return s
+
+_extract_jamfile_and_rule = re.compile("(Jamfile<.*>)%(.*)")
+
+def qualify_jam_action(action_name, context_module):
+
+ if action_name.startswith("###"):
+ # Callable exported from Python. Don't touch
+ return action_name
+ elif _extract_jamfile_and_rule.match(action_name):
+ # Rule is already in indirect format
+ return action_name
+ else:
+ ix = action_name.find('.')
+ if ix != -1 and action_name[:ix] == context_module:
+ return context_module + '%' + action_name[ix+1:]
+
+ return context_module + '%' + action_name
+
+
+def set_jam_action(name, *args):
+
+ m = _extract_jamfile_and_rule.match(name)
+ if m:
+ args = ("set-update-action-in-module", m.group(1), m.group(2)) + args
+ else:
+ args = ("set-update-action", name) + args
+
+ return bjam.call(*args)
+
+
+def call_jam_function(name, *args):
+
+ m = _extract_jamfile_and_rule.match(name)
+ if m:
+ args = ("call-in-module", m.group(1), m.group(2)) + args
+ return bjam.call(*args)
+ else:
+ return bjam.call(*((name,) + args))
+
+__value_id = 0
+__python_to_jam = {}
+__jam_to_python = {}
+
+def value_to_jam(value, methods=False):
+ """Makes a token to refer to a Python value inside Jam language code.
+
+ The token is merely a string that can be passed around in Jam code and
+ eventually passed back. For example, we might want to pass PropertySet
+ instance to a tag function and it might eventually call back
+ to virtual_target.add_suffix_and_prefix, passing the same instance.
+
+ For values that are classes, we'll also make class methods callable
+ from Jam.
+
+    Note that this is necessary to make a bit more of the existing Jamfiles work.
+    This trick should not be used too much, or else the performance benefits of
+    the Python port will be eaten away.
+ """
+
+ global __value_id
+
+ r = __python_to_jam.get(value, None)
+ if r:
+ return r
+
+ exported_name = '###_' + str(__value_id)
+ __value_id = __value_id + 1
+ __python_to_jam[value] = exported_name
+ __jam_to_python[exported_name] = value
+
+ if methods and type(value) == types.InstanceType:
+ for field_name in dir(value):
+ field = getattr(value, field_name)
+ if callable(field) and not field_name.startswith("__"):
+ bjam.import_rule("", exported_name + "." + field_name, field)
+
+ return exported_name
+
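+# Illustrative usage sketch (not part of the original module): the first call,
+# e.g. token = value_to_jam(some_property_set), yields a token such as "###_0";
+# Jam code can later hand that token back, and jam_to_value_maybe(token) will
+# return the original Python object.
+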
+def record_jam_to_value_mapping(jam_value, python_value):
+ __jam_to_python[jam_value] = python_value
+
+def jam_to_value_maybe(jam_value):
+
+ if type(jam_value) == type(""):
+ return __jam_to_python.get(jam_value, jam_value)
+ else:
+ return jam_value
+
+def stem(filename):
+ i = filename.find('.')
+ if i != -1:
+ return filename[0:i]
+ else:
+ return filename
+
+
+def abbreviate_dashed(s):
+ """Abbreviates each part of string that is delimited by a '-'."""
+ r = []
+ for part in s.split('-'):
+ r.append(abbreviate(part))
+ return '-'.join(r)
+
+
+def abbreviate(s):
+ """Apply a set of standard transformations to string to produce an
+ abbreviation no more than 4 characters long.
+ """
+ if not s:
+ return ''
+ # check the cache
+ if s in abbreviate.abbreviations:
+ return abbreviate.abbreviations[s]
+ # anything less than 4 characters doesn't need
+ # an abbreviation
+ if len(s) < 4:
+ # update cache
+ abbreviate.abbreviations[s] = s
+ return s
+ # save the first character in case it's a vowel
+ s1 = s[0]
+ s2 = s[1:]
+ if s.endswith('ing'):
+ # strip off the 'ing'
+ s2 = s2[:-3]
+ # reduce all doubled characters to one
+ s2 = ''.join(c for c, _ in groupby(s2))
+ # remove all vowels
+ s2 = s2.translate(None, "AEIOUaeiou")
+ # shorten remaining consonants to 4 characters
+ # and add the first char back to the front
+ s2 = s1 + s2[:4]
+ # update cache
+ abbreviate.abbreviations[s] = s2
+ return s2
+# maps key to its abbreviated form
+abbreviate.abbreviations = {}
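+# Illustrative results of the transformation above (worked out by hand):
+#   abbreviate("on")        -> "on"    (shorter than four characters, unchanged)
+#   abbreviate("debugging") -> "dbg"   ('ing' stripped, vowels removed)
+#   abbreviate("threading") -> "thrd"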
diff --git a/src/boost/tools/build/src/util/assert.jam b/src/boost/tools/build/src/util/assert.jam
new file mode 100644
index 000000000..f6afcdecf
--- /dev/null
+++ b/src/boost/tools/build/src/util/assert.jam
@@ -0,0 +1,346 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import modules ;
+
+
+################################################################################
+#
+# Private implementation details.
+#
+################################################################################
+
+# Rule added as a replacement for the regular Jam = operator but which does not
+# ignore trailing empty string elements.
+#
+local rule exact-equal-test ( lhs * : rhs * )
+{
+ local lhs_extended = $(lhs) xxx ;
+ local rhs_extended = $(rhs) xxx ;
+ if $(lhs_extended) = $(rhs_extended)
+ {
+ return true ;
+ }
+}
+
+
+# Two lists are considered set-equal if they contain the same elements, ignoring
+# duplicates and ordering.
+#
+local rule set-equal-test ( set1 * : set2 * )
+{
+ if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
+
+
+################################################################################
+#
+# Public interface.
+#
+################################################################################
+
+# Assert the equality of A and B, ignoring trailing empty string elements.
+#
+rule equal ( a * : b * )
+{
+ if $(a) != $(b)
+ {
+ errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "==" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a false
+# logical value (is either an empty list or all empty strings).
+#
+rule false ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15)
+ : $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if $(result)
+ {
+ errors.error-skip-frames 3 assertion "failure:" Expected false result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) :
+ $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] "]" : "Got:" "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert that ELEMENT is present in LIST.
+#
+rule "in" ( element : list * )
+{
+ if ! $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion "failure:" Expected \"$(element)\" in
+ "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B, ignoring trailing empty string elements.
+#
+rule not-equal ( a * : b * )
+{
+ if $(a) = $(b)
+ {
+ errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "!=" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that ELEMENT is not present in LIST.
+#
+rule not-in ( element : list * )
+{
+ if $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion "failure:" Did not expect
+ \"$(element)\" in "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B as sets.
+#
+rule not-set-equal ( a * : b * )
+{
+ if [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion "failure:" Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to not be equal as sets ;
+ }
+}
+
+
+# Assert that A and B are not exactly equal, not ignoring trailing empty string
+# elements.
+#
+rule not-exact-equal ( a * : b * )
+{
+ if [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "!=" \"$(b)\" ;
+ }
+}
+
+
+# Assert that EXPECTED is the result of calling RULE-NAME with the given
+# arguments.
+#
+rule result ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
+ $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if ! [ exact-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion "failure:" "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ] "]" : "Expected:" "[" \"$(expected)\" "]" : "Got:" "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored)
+# to the result of calling RULE-NAME with the given arguments. Note that rules
+# called this way may accept at most 18 parameters.
+#
+rule result-set-equal ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
+ $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if ! [ set-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion "failure:" "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ] "]" : "Expected:" "[" \"$(expected)\" "]" : "Got:" "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert the equality of A and B as sets.
+#
+rule set-equal ( a * : b * )
+{
+ if ! [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion "failure:" Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to be equal as sets ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a true
+# logical value (is neither an empty list nor all empty strings).
+#
+rule true ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15)
+ : $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if ! $(result)
+ {
+ errors.error-skip-frames 3 assertion "failure:" Expected true result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) :
+ $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] "]" ;
+ }
+}
+
+
+# Assert the exact equality of A and B, not ignoring trailing empty string
+# elements.
+#
+rule exact-equal ( a * : b * )
+{
+ if ! [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "==" \"$(b)\" ;
+ }
+}
+
+
+# Assert that the given variable is not an empty list.
+#
+rule variable-not-empty ( name )
+{
+ local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ;
+ if ! $(value)-is-not-empty
+ {
+ errors.error-skip-frames 3 assertion "failure:" Expected variable
+ \"$(name)\" not to be an empty list ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Helper rule used to avoid test duplication related to different list
+ # equality test rules.
+ #
+ local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? )
+ {
+ local not-equality-assert = not-$(equality-assert) ;
+
+ # When the given equality test is expected to ignore trailing empty
+ # strings some of the test results should be inverted.
+ local not-equality-assert-i = not-$(equality-assert) ;
+ if $(ignore-trailing-empty-strings)
+ {
+ not-equality-assert-i = $(equality-assert) ;
+ }
+
+ $(equality-assert) : ;
+ $(equality-assert) "" "" : "" "" ;
+ $(not-equality-assert-i) : "" "" ;
+ $(equality-assert) x : x ;
+ $(not-equality-assert) : x ;
+ $(not-equality-assert) "" : x ;
+ $(not-equality-assert) "" "" : x ;
+ $(not-equality-assert-i) x : x "" ;
+ $(equality-assert) x "" : x "" ;
+ $(not-equality-assert) x : "" x ;
+ $(equality-assert) "" x : "" x ;
+
+ $(equality-assert) 1 2 3 : 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 3 2 1 ;
+ $(not-equality-assert) 1 2 3 : 1 5 3 ;
+ $(not-equality-assert) 1 2 3 : 1 "" 3 ;
+ $(not-equality-assert) 1 2 3 : 1 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 1 2 2 3 ;
+ $(not-equality-assert) 1 2 3 : 5 6 7 ;
+
+ # Extra variables used here just to make sure Boost Jam or Boost Build
+ # do not handle lists with empty strings differently depending on
+ # whether they are literals or stored in variables.
+
+ local empty = ;
+ local empty-strings = "" "" ;
+ local x-empty-strings = x "" "" ;
+ local empty-strings-x = "" "" x ;
+
+ $(equality-assert) : $(empty) ;
+ $(not-equality-assert-i) "" : $(empty) ;
+ $(not-equality-assert-i) "" "" : $(empty) ;
+ $(not-equality-assert-i) : $(empty-strings) ;
+ $(not-equality-assert-i) "" : $(empty-strings) ;
+ $(equality-assert) "" "" : $(empty-strings) ;
+ $(equality-assert) $(empty) : $(empty) ;
+ $(equality-assert) $(empty-strings) : $(empty-strings) ;
+ $(not-equality-assert-i) $(empty) : $(empty-strings) ;
+ $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ;
+ $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ;
+ $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert-i) x "" : $(x-empty-strings) ;
+ $(equality-assert) x "" "" : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert) "" x : $(empty-strings-x) ;
+ $(equality-assert) "" "" x : $(empty-strings-x) ;
+ }
+
+
+ # ---------------
+ # Equality tests.
+ # ---------------
+
+ run-equality-test equal : ignore-trailing-empty-strings ;
+ run-equality-test exact-equal ;
+
+
+ # -------------------------
+ # assert.set-equal() tests.
+ # -------------------------
+
+ set-equal : ;
+ not-set-equal "" "" : ;
+ set-equal "" "" : "" ;
+ set-equal "" "" : "" "" ;
+ set-equal a b c : a b c ;
+ set-equal a b c : b c a ;
+ set-equal a b c a : a b c ;
+ set-equal a b c : a b c a ;
+ not-set-equal a b c : a b c d ;
+ not-set-equal a b c d : a b c ;
+}
diff --git a/src/boost/tools/build/src/util/container.jam b/src/boost/tools/build/src/util/container.jam
new file mode 100644
index 000000000..dd4963938
--- /dev/null
+++ b/src/boost/tools/build/src/util/container.jam
@@ -0,0 +1,339 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Various container classes.
+
+# Base for container objects. This lets us construct recursive structures, that
+# is, containers with containers in them, specifically so we can tell literal
+# values from node values.
+#
+class node
+{
+ rule __init__ (
+ value ? # Optional value to set node to initially.
+ )
+ {
+ self.value = $(value) ;
+ }
+
+ # Set the value of this node, passing nothing will clear it.
+ #
+ rule set ( value * )
+ {
+ self.value = $(value) ;
+ }
+
+ # Get the value of this node.
+ #
+ rule get ( )
+ {
+ return $(self.value) ;
+ }
+}
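+
+# Usage sketch (illustrative; creating instances requires 'import "class" : new ;'):
+#
+#   local n = [ new node 42 ] ;
+#   ECHO [ $(n).get ] ;   # prints: 42
+#   $(n).set ;            # passing nothing clears the stored value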
+
+
+# A simple vector. Interface mimics the C++ std::vector and std::list, with the
+# exception that indices are one (1) based, following the Jam convention.
+#
+# TODO: Possibly add assertion checks.
+#
+class vector : node
+{
+ import numbers ;
+ import utility ;
+ import sequence ;
+
+ rule __init__ (
+ values * # Initial contents of vector.
+ )
+ {
+ node.__init__ ;
+ self.value = $(values) ;
+ }
+
+ # Get the value of the first element.
+ #
+ rule front ( )
+ {
+ return $(self.value[1]) ;
+ }
+
+ # Get the value of the last element.
+ #
+ rule back ( )
+ {
+ return $(self.value[-1]) ;
+ }
+
+ # Get the value of the element at the given index, one based. Access to
+ # elements of recursive structures is supported directly. Specifying
+ # additional index values recursively accesses the elements as containers.
+ # For example: [ $(v).at 1 : 2 ] would retrieve the second element of our
+ # first element, assuming the first element is a container.
+ #
+ rule at (
+ index # The element index, one based.
+ : * # Additional indices to access recursively.
+ )
+ {
+ local r = $(self.value[$(index)]) ;
+ if $(2)
+ {
+ r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ return $(r) ;
+ }
+
+ # Get the value contained in the given element. This has the same
+ # functionality and interface as "at" but in addition gets the value of the
+ # referenced element, assuming it is a "node".
+ #
+ rule get-at (
+ index # The element index, one based.
+ : * # Additional indices to access recursively.
+ )
+ {
+ local r = $(self.value[$(index)]) ;
+ if $(2)
+ {
+ r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
+ }
+ return [ $(r).get ] ;
+ }
+
+ # Insert the given value into the front of the vector pushing the rest of
+ # the elements back.
+ #
+ rule push-front (
+ value # Value to become first element.
+ )
+ {
+ self.value = $(value) $(self.value) ;
+ }
+
+ # Remove the front element from the vector. Does not return the value. No
+ # effect if vector is empty.
+ #
+ rule pop-front ( )
+ {
+ self.value = $(self.value[2-]) ;
+ }
+
+ # Add the given value at the end of the vector.
+ #
+ rule push-back (
+ value # Value to become back element.
+ )
+ {
+ self.value += $(value) ;
+ }
+
+ # Remove the back element from the vector. Does not return the value. No
+ # effect if vector is empty.
+ #
+ rule pop-back ( )
+ {
+ self.value = $(self.value[1--2]) ;
+ }
+
+ # Insert the given value at the given index, one based. The values at and to
+ # the right of the index are pushed back to make room for the new value.
+    # If the index is past the end of the vector, the element is added to the
+ # end.
+ #
+ rule insert (
+ index # The index to insert at, one based.
+ : value # The value to insert.
+ )
+ {
+ local left = $(self.value[1-$(index)]) ;
+ local right = $(self.value[$(index)-]) ;
+ if $(right)-is-not-empty
+ {
+ left = $(left[1--2]) ;
+ }
+ self.value = $(left) $(value) $(right) ;
+ }
+
+ # Remove one or more elements from the vector. The range is inclusive, and
+ # not specifying an end is equivalent to the [start, start] range.
+ #
+ rule erase (
+ start # Index of first element to remove.
+ end ? # Optional, index of last element to remove.
+ )
+ {
+ end ?= $(start) ;
+ local left = $(self.value[1-$(start)]) ;
+ left = $(left[1--2]) ;
+ local right = $(self.value[$(end)-]) ;
+ right = $(right[2-]) ;
+ self.value = $(left) $(right) ;
+ }
+
+ # Remove all elements from the vector.
+ #
+ rule clear ( )
+ {
+ self.value = ;
+ }
+
+ # The number of elements in the vector.
+ #
+ rule size ( )
+ {
+ return [ sequence.length $(self.value) ] ;
+ }
+
+ # Returns "true" if there are NO elements in the vector, empty otherwise.
+ #
+ rule empty ( )
+ {
+ if ! $(self.value)-is-not-empty
+ {
+ return true ;
+ }
+ }
+
+ # Returns the textual representation of content.
+ #
+ rule str ( )
+ {
+ return "[" [ sequence.transform utility.str : $(self.value) ] "]" ;
+ }
+
+ # Sorts the vector inplace, calling 'utility.less' for comparisons.
+ #
+ rule sort ( )
+ {
+ self.value = [ sequence.insertion-sort $(self.value) : utility.less ] ;
+ }
+
+ # Returns true if content is equal to the content of other vector. Uses
+ # 'utility.equal' for comparison.
+ #
+ rule equal ( another )
+ {
+ local mismatch ;
+ local size = [ size ] ;
+ if $(size) = [ $(another).size ]
+ {
+ for local i in [ numbers.range 1 $(size) ]
+ {
+ if ! [ utility.equal [ at $(i) ] [ $(another).at $(i) ] ]
+ {
+ mismatch = true ;
+ }
+ }
+ }
+ else
+ {
+ mismatch = true ;
+ }
+
+ if ! $(mismatch)
+ {
+ return true ;
+ }
+ }
+}
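+
+# Usage sketch (illustrative; see the __test__ rule below for a fuller tour):
+#
+#   import "class" : new ;
+#   local v = [ new vector a b c ] ;
+#   $(v).push-back d ;
+#   ECHO [ $(v).str ] ;    # prints: [ a b c d ]
+#   ECHO [ $(v).size ] ;   # prints: 4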
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+
+ local v1 = [ new vector ] ;
+ assert.true $(v1).equal $(v1) ;
+ assert.true $(v1).empty ;
+ assert.result 0 : $(v1).size ;
+ assert.result "[" "]" : $(v1).str ;
+ $(v1).push-back b ;
+ $(v1).push-front a ;
+ assert.result "[" a b "]" : $(v1).str ;
+ assert.result a : $(v1).front ;
+ assert.result b : $(v1).back ;
+ $(v1).insert 2 : d ;
+ $(v1).insert 2 : c ;
+ $(v1).insert 4 : f ;
+ $(v1).insert 4 : e ;
+ $(v1).pop-back ;
+ assert.result 5 : $(v1).size ;
+ assert.result d : $(v1).at 3 ;
+ $(v1).pop-front ;
+ assert.result c : $(v1).front ;
+ assert.false $(v1).empty ;
+ $(v1).erase 3 4 ;
+ assert.result 2 : $(v1).size ;
+
+ local v2 = [ new vector q w e r t y ] ;
+ assert.result 6 : $(v2).size ;
+ $(v1).push-back $(v2) ;
+ assert.result 3 : $(v1).size ;
+ local v2-alias = [ $(v1).back ] ;
+ assert.result e : $(v2-alias).at 3 ;
+ $(v1).clear ;
+ assert.true $(v1).empty ;
+ assert.false $(v2-alias).empty ;
+ $(v2).pop-back ;
+ assert.result t : $(v2-alias).back ;
+
+ local v3 = [ new vector ] ;
+ $(v3).push-back [ new vector 1 2 3 4 5 ] ;
+ $(v3).push-back [ new vector a b c ] ;
+ assert.result "[" "[" 1 2 3 4 5 "]" "[" a b c "]" "]" : $(v3).str ;
+ $(v3).push-back [ new vector [ new vector x y z ] [ new vector 7 8 9 ] ] ;
+ assert.result 1 : $(v3).at 1 : 1 ;
+ assert.result b : $(v3).at 2 : 2 ;
+ assert.result a b c : $(v3).get-at 2 ;
+ assert.result 7 8 9 : $(v3).get-at 3 : 2 ;
+
+ local v4 = [ new vector 4 3 6 ] ;
+ $(v4).sort ;
+ assert.result 3 4 6 : $(v4).get ;
+ assert.false $(v4).equal $(v3) ;
+
+ local v5 = [ new vector 3 4 6 ] ;
+ assert.true $(v4).equal $(v5) ;
+ # Check that vectors of different sizes are considered non-equal.
+ $(v5).pop-back ;
+ assert.false $(v4).equal $(v5) ;
+
+ local v6 = [ new vector [ new vector 1 2 3 ] ] ;
+ assert.true $(v6).equal [ new vector [ new vector 1 2 3 ] ] ;
+
+ local v7 = [ new vector 111 222 333 ] ;
+ assert.true $(v7).equal $(v7) ;
+ $(v7).insert 4 : 444 ;
+ assert.result 111 222 333 444 : $(v7).get ;
+ $(v7).insert 999 : xxx ;
+ assert.result 111 222 333 444 xxx : $(v7).get ;
+
+ local v8 = [ new vector "" "" "" ] ;
+ assert.true $(v8).equal $(v8) ;
+ assert.false $(v8).empty ;
+ assert.result 3 : $(v8).size ;
+ assert.result "" : $(v8).at 1 ;
+ assert.result "" : $(v8).at 2 ;
+ assert.result "" : $(v8).at 3 ;
+ assert.result : $(v8).at 4 ;
+ $(v8).insert 2 : 222 ;
+ assert.result 4 : $(v8).size ;
+ assert.result "" 222 "" "" : $(v8).get ;
+ $(v8).insert 999 : "" ;
+ assert.result 5 : $(v8).size ;
+ assert.result "" 222 "" "" "" : $(v8).get ;
+ $(v8).insert 999 : xxx ;
+ assert.result 6 : $(v8).size ;
+ assert.result "" 222 "" "" "" xxx : $(v8).get ;
+
+ # Regression test for a bug causing vector.equal to compare only the first
+ # and the last element in the given vectors.
+ local v9 = [ new vector 111 xxx 222 ] ;
+ local v10 = [ new vector 111 yyy 222 ] ;
+ assert.false $(v9).equal $(v10) ;
+}
diff --git a/src/boost/tools/build/src/util/doc.jam b/src/boost/tools/build/src/util/doc.jam
new file mode 100644
index 000000000..8d03b7ee5
--- /dev/null
+++ b/src/boost/tools/build/src/util/doc.jam
@@ -0,0 +1,1076 @@
+# Copyright 2002, 2005 Dave Abrahams
+# Copyright 2002, 2003, 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Documentation system, handles --help requests.
+# It defines rules that attach documentation to modules, rules, and variables.
+# Collects and generates documentation for the various parts of the build
+# system. The documentation is collected from comments integrated into the code.
+
+import modules ;
+import print ;
+import set ;
+import container ;
+import "class" ;
+import sequence ;
+import path ;
+
+
+# The type of output to generate.
+# "console" is formatted text echoed to the console (the default);
+# "text" is formatted text appended to the output file;
+# "html" is HTML output to the file.
+#
+help-output = console ;
+
+
+# The file to output documentation to when generating "text" or "html" help.
+# This is without extension as the extension is determined by the type of
+# output.
+#
+help-output-file = help ;
+
+# Whether to include local rules in help output.
+#
+.option.show-locals ?= ;
+
+# When showing documentation for a module, whether to also generate
+# automatically the detailed docs for each item in the module.
+#
+.option.detailed ?= ;
+
+# Generate debug output as the help is generated and modules are parsed.
+#
+.option.debug ?= ;
+
+# These are all the options that can be enabled or disabled to control the
+# help system in various ways. Options are enabled with
+# '--help-enable-<option>' and disabled with '--help-disable-<option>'.
+#
+.option-description = Help Options ;
+
+# Enable or disable a documentation option.
+#
+local rule set-option (
+ option # The option name.
+ : value ? # Enabled (non-empty), or disabled (empty)
+)
+{
+ .option.$(option) = $(value) ;
+}
+
+
+# Set the type of output.
+#
+local rule set-output ( type )
+{
+ help-output = $(type) ;
+}
+
+
+# Set the output to a file.
+#
+local rule set-output-file ( file )
+{
+ help-output-file = $(file) ;
+}
+
+
+# Extracts the brief comment from a complete comment. The brief comment is the
+# first sentence.
+#
+local rule brief-comment (
+ docs * # The comment documentation.
+)
+{
+ local d = $(docs:J=" ") ;
+ local p = [ MATCH ".*([.])$" : $(d) ] ;
+ if ! $(p) { d = $(d)"." ; }
+ d = $(d)" " ;
+ local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ;
+ local brief = $(m[1]) ;
+ while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ]
+ {
+ m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ;
+ brief += $(m[1]) ;
+ }
+ return $(brief:J="") ;
+}
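+
+# For example (illustrative input): given the comment lines "Does X." and
+# "Then does Y.", the returned brief comment is just "Does X.".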
+
+
+# Specifies the documentation for the current module.
+#
+local rule set-module-doc (
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).docs = $(docs) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the copyright notice for the current module.
+#
+local rule set-module-copyright (
+ module-name ? # The name of the module to document.
+ : copyright * # The copyright for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).copy-brief = [ brief-comment $(copyright) ] ;
+    $(module-name).copy-docs = $(copyright) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the documentation for a rule in the current module. If called in the
+# global module, this documents a global rule.
+#
+local rule set-rule-doc (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ is-local ? # Whether the rule is local to the module.
+ : docs * # The documentation for the rule.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).docs = $(docs) ;
+ $(module-name).$(name).is-local = $(is-local) ;
+
+ if ! $(name) in $($(module-name).rules)
+ {
+ $(module-name).rules += $(name) ;
+ }
+}
+
+
+# Specifies a class; this turns an already documented rule into a class.
+#
+local rule set-class-doc (
+ name # The name of the class.
+ module-name ? # The name of the module to document.
+ : super-name ? # The super class name.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).is-class = true ;
+ if $(super-name)
+ {
+ $(module-name).$(name).super-name = $(super-name) ;
+ }
+ $(module-name).$(name).class-rules =
+ [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ;
+ $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ;
+
+ $(module-name).classes += $(name) ;
+ $(module-name).class-rules += $($(module-name).$(name).class-rules) ;
+ $(module-name).rules =
+ [ set.difference $($(module-name).rules) :
+ $(name) $($(module-name).$(name).class-rules) ] ;
+}
+
+
+# Set the argument call signature of a rule.
+#
+local rule set-rule-arguments-signature (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ : signature * # The arguments signature.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).signature = $(signature) ;
+}
+
+
+# Specifies the documentation for an argument of a rule.
+#
+local rule set-argument-doc (
+ name # The name of the argument.
+ qualifier # Argument syntax qualifier, "*", "+", etc.
+ rule-name # The name of the rule.
+    module-name ?  # The optional name of the module.
+ : docs * # The documentation.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ;
+ $(module-name).$(rule-name).args.$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).$(rule-name).args)
+ {
+ $(module-name).$(rule-name).args += $(name) ;
+ }
+}
+
+
+# Specifies the documentation for a variable in the current module. If called in
+# the global module, the global variable is documented.
+#
+local rule set-variable-doc (
+ name # The name of the variable.
+ default # The default value.
+ initial # The initial value.
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the variable.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).default = $(default) ;
+ $(module-name).$(name).initial = $(initial) ;
+ $(module-name).$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).variables)
+ {
+ $(module-name).variables += $(name) ;
+ }
+}
+
+
+# Generates a general description of the documentation and help system.
+#
+local rule print-help-top ( )
+{
+ print.section "General command line usage" ;
+
+ print.text " b2 [options] [properties] [targets]
+
+ Options, properties and targets can be specified in any order.
+ " ;
+
+ print.section "Important Options" ;
+
+ print.list-start ;
+ print.list-item "--clean Remove targets instead of building" ;
+ print.list-item "-a Rebuild everything" ;
+ print.list-item "-n Don't execute the commands, only print them" ;
+ print.list-item "-d+2 Show commands as they are executed" ;
+ print.list-item "-d0 Suppress all informational messages" ;
+ print.list-item "-q Stop at first error" ;
+ print.list-item "--reconfigure Rerun all configuration checks" ;
+ print.list-item "--debug-configuration Diagnose configuration" ;
+ print.list-item "--debug-building Report which targets are built with what properties" ;
+ print.list-item "--debug-generator Diagnose generator search/execution" ;
+ print.list-end ;
+
+ print.section "Further Help"
+ The following options can be used to obtain additional documentation.
+ ;
+
+ print.list-start ;
+ print.list-item "--help-options Print more obscure command line options." ;
+ print.list-item "--help-internal B2 implementation details." ;
+ print.list-item "--help-doc-options Implementation details doc formatting." ;
+ print.list-end ;
+}
+
+
+# Generate Jam/Boost.Jam command usage information.
+#
+local rule print-help-usage ( )
+{
+ print.section "B2 Usage"
+ "b2 [ options... ] targets..."
+ ;
+ print.list-start ;
+ print.list-item -a\;
+ Build all targets, even if they are current. ;
+ print.list-item -fx\;
+ Read '"x"' as the Jamfile for building instead of searching for the
+ B2 system. ;
+ print.list-item -jx\;
+ Run up to '"x"' commands concurrently. ;
+ print.list-item -n\;
+ Do not execute build commands. Instead print out the commands as they
+ would be executed if building. ;
+ print.list-item -ox\;
+ Output the used build commands to file '"x"'. ;
+ print.list-item -q\;
+ Quit as soon as a build failure is encountered. Without this option
+ Boost.Jam will continue building as many targets as it can. ;
+ print.list-item -sx=y\;
+ Sets a Jam variable '"x"' to the value '"y"', overriding any value that
+ variable would have from the environment. ;
+ print.list-item -tx\;
+ Rebuild the target '"x"', even if it is up-to-date. ;
+ print.list-item -v\;
+ Display the version of b2. ;
+ print.list-item --x\;
+ Any option not explicitly handled by B2 remains available to
+ build scripts using the '"ARGV"' variable. ;
+ print.list-item --abbreviate-paths\;
+ Use abbreviated paths for targets. ;
+ print.list-item --hash\;
+ Shorten target paths by using an MD5 hash. ;
+ print.list-item -dconsole\;
+ Run the interactive debugger. Cannot be used with any other option. ;
+ print.list-item -dn\;
+ Enables output of diagnostic messages. The debug level '"n"' and all
+ below it are enabled by this option. ;
+ print.list-item -d+n\;
+ Enables output of diagnostic messages. Only the output for debug level
+ '"n"' is enabled. ;
+ print.list-end ;
+ print.section "Debug Levels"
+        Each debug level shows a different set of information, with higher
+        levels usually producing more verbose information. The following levels
+        are supported\: ;
+ print.list-start ;
+ print.list-item 0\;
+ Turn off all diagnostic output. Only errors are reported. ;
+ print.list-item 1\;
+ Show the actions taken for building targets, as they are executed. ;
+ print.list-item 2\;
+ Show "quiet" actions and display all action text, as they are executed. ;
+ print.list-item 3\;
+ Show dependency analysis, and target/source timestamps/paths. ;
+ print.list-item 4\;
+ Show arguments of shell invocations. ;
+ print.list-item 5\;
+ Show rule invocations and variable expansions. ;
+ print.list-item 6\;
+ Show directory/header file/archive scans, and attempts at binding to targets. ;
+ print.list-item 7\;
+ Show variable settings. ;
+ print.list-item 8\;
+ Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ;
+ print.list-item 9\;
+ Show variable manipulation, scanner tokens, and memory usage. ;
+ print.list-item 10\;
+ Show execution times for rules. ;
+ print.list-item 11\;
+ Show parsing progress of Jamfiles. ;
+ print.list-item 12\;
+ Show graph for target dependencies. ;
+ print.list-item 13\;
+ Show changes in target status (fate). ;
+ print.list-end ;
+}
+
+# Generates a description of the options controlling the help system. The
+# options are read automatically from all variables of the form ".option.*" in
+# the module named by `module-name`.
+#
+local rule print-help-options (
+ module-name
+)
+{
+ local options-to-list = [ MATCH "^[.]option[.](.*)" : $($(module-name).variables) ] ;
+ if $(options-to-list)
+ {
+ local option-title = $($(module-name)..option-description.initial) ;
+ if ! $(option-title) || $(option-title) = "(empty)"
+ {
+ option-title = "$(module-name) Options" ;
+ }
+ local option-description = $(option-title)
+ $($(module-name)..option-description.docs) ;
+ print.section $(option-description) ;
+ print.list-start ;
+ for local option in [ sequence.insertion-sort $(options-to-list) ]
+ {
+ local def = disabled ;
+ if $($(module-name)..option.$(option).default) != "(empty)"
+ {
+ def = $($(module-name)..option.$(option).default) ;
+ }
+ print.list-item $(option)\: $($(module-name)..option.$(option).docs)
+ Default is $(def). ;
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate brief documentation for all the known items in the section for a
+# module. Possible sections are "rules" and "variables".
+#
+local rule print-help-module-section (
+ module # The module name.
+ section # rules or variables.
+ : section-head # The title of the section.
+ section-description * # The detailed description of the section.
+)
+{
+ if $($(module).$(section))
+ {
+ print.section $(section-head) $(section-description) ;
+ print.list-start ;
+ for local item in [ sequence.insertion-sort $($(module).$(section)) ]
+ {
+ local show = ;
+ if ! $($(module).$(item).is-local)
+ {
+ show = yes ;
+ }
+ if $(.option.show-locals)
+ {
+ show = yes ;
+ }
+ if $(show)
+ {
+ print.list-item $(item)\: $($(module).$(item).brief) ;
+ }
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate documentation for all possible modules. We attempt to list all known
+# modules together with a brief description of each.
+#
+local rule print-help-all (
+ ignored # Usually the module name, but is ignored here.
+)
+{
+ print.section "Modules"
+ "These are all the known modules. Use --help <module> to get more"
+ "detailed information."
+ ;
+ if $(documented-modules)
+ {
+ print.list-start ;
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print.list-item $(module-name)\: $($(module-name).brief) ;
+ }
+ print.list-end ;
+ }
+ # The documentation for each module when details are requested.
+ if $(documented-modules) && $(.option.detailed)
+ {
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print-help-module $(module-name) ;
+ }
+ }
+}
+
+
+# Generate documentation for a module. Basic information about the module is
+# generated.
+#
+local rule print-help-module (
+ module-name # The module to generate docs for.
+)
+{
+ # Print the docs.
+ print.section "Module '$(module-name)'" $($(module-name).docs) ;
+
+ # Print out the documented classes.
+ print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
+ Use --help $(module-name).<class-name> to get more information. ;
+
+ # Print out the documented rules.
+ print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out the documented variables.
+ print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
+ Use --help $(module-name).<variable-name> to get more information. ;
+
+    # Print out all the same information, but in detailed form.
+ if $(.option.detailed)
+ {
+ print-help-classes $(module-name) ;
+ print-help-rules $(module-name) ;
+ print-help-variables $(module-name) ;
+ }
+}
+
+
+# Generate documentation for a set of rules in a module.
+#
+local rule print-help-rules (
+ module-name # Module of the rules.
+ : name * # Optional list of rules to describe.
+)
+{
+ name ?= $($(module-name).rules) ;
+ if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ]
+ {
+ # Print out the given rules.
+ for local rule-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local)
+ {
+ local signature = $($(module-name).$(rule-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'"
+ $($(module-name).$(rule-name).docs) ;
+ if $($(module-name).$(rule-name).args) &&
+ $($(module-name).$(rule-name).args.$($(module-name).$(rule-name).args).docs)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(rule-name).args)
+ {
+ if $($(module-name).$(rule-name).args.$(arg-name).docs)
+ {
+ print.list-item $(arg-name)\: $($(module-name).$(rule-name).args.$(arg-name).docs) ;
+ }
+ }
+ print.list-end ;
+ }
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of classes in a module.
+#
+local rule print-help-classes (
+ module-name # Module of the classes.
+ : name * # Optional list of classes to describe.
+)
+{
+ name ?= $($(module-name).classes) ;
+ if [ set.intersection $(name) : $($(module-name).classes) ]
+ {
+ # Print out the given classes.
+ for local class-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local)
+ {
+ local signature = $($(module-name).$(class-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Class '$(module-name).$(class-name) ( $(signature) )'"
+ $($(module-name).$(class-name).docs)
+ "Inherits from '"$($(module-name).$(class-name).super-name)"'." ;
+ if $($(module-name).$(class-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(class-name).args)
+ {
+ print.list-item $(arg-name)\: $($(module-name).$(class-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+
+ # Print out the documented rules of the class.
+ print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out all the rules if details are requested.
+ if $(.option.detailed)
+ {
+ print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of variables in a module.
+#
+local rule print-help-variables (
+ module-name ? # Module of the variables.
+ : name * # Optional list of variables to describe.
+)
+{
+ name ?= $($(module-name).variables) ;
+ if [ set.intersection $(name) : $($(module-name).variables) ]
+ {
+ # Print out the given variables.
+ for local variable-name in [ sequence.insertion-sort $(name) ]
+ {
+ print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ;
+ if $($(module-name).$(variable-name).default) ||
+ $($(module-name).$(variable-name).initial)
+ {
+ print.list-start ;
+ if $($(module-name).$(variable-name).default)
+ {
+ print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ;
+ }
+ if $($(module-name).$(variable-name).initial)
+ {
+ print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a project.
+#
+local rule print-help-project (
+ unused ?
+ : jamfile * # The project Jamfile.
+)
+{
+ if $(jamfile<$(jamfile)>.docs)
+ {
+ # Print the docs.
+ print.section "Project-specific help"
+ Project has jamfile at $(jamfile) ;
+
+ print.lines $(jamfile<$(jamfile)>.docs) "" ;
+ }
+}
+
+
+# Generate documentation for a config file.
+#
+local rule print-help-config (
+ unused ?
+ : type # The type of configuration file user or site.
+ config-file # The configuration Jamfile.
+)
+{
+ if $(jamfile<$(config-file)>.docs)
+ {
+ # Print the docs.
+ print.section "Configuration help"
+ Configuration file at $(config-file) ;
+
+ print.lines $(jamfile<$(config-file)>.docs) "" ;
+ }
+}
+
+
+ws = "\t " ;
+
+# Extract the text from a single comment
+#
+local rule extract-one-comment (
+ var # The name of the variable to extract from
+ : start # The initial part after the leading '#'
+)
+{
+ local m = [ MATCH ^(\\|)(.*) : $(start) ] ;
+ if $(m)
+ {
+ start = $(m[2]) ;
+ local comment ;
+ while true
+ {
+ local end = [ MATCH "(.*)(\\|#)(.*)" : $(start) ] ;
+ if $(end)
+ {
+ comment += $(end[1]) ;
+ $(var) = $(end[3]) $($(var)[2-]) ;
+ return $(comment) ;
+ }
+ else
+ {
+ comment += $(start) ;
+ $(var) = $($(var)[2-]) ;
+ }
+ start = $($(var)[1]) ;
+ }
+ }
+ else
+ {
+ $(var) = $($(var)[2-]) ;
+ if $(start) { return [ MATCH "^[$(ws)]?(.*)$" : $(start) ] ; }
+ else { return "" ; }
+ }
+}
+
+# Extract the text from a block of comments.
+#
+local rule extract-comment (
+ var # The name of the variable to extract from.
+)
+{
+ local comment = ;
+ local line = $($(var)[1]) ;
+ local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ while $(l[1]) && $($(var))
+ {
+ comment += [ extract-one-comment $(var) : $(l[2]) ] ;
+ line = $($(var)[1]) ;
+ l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ }
+ return $(comment) ;
+}
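+
+# For example (illustrative): if the variable holds the lines "# Does X.",
+# "# and Y." and "rule foo ( )", this rule returns "Does X." "and Y." and
+# leaves "rule foo ( )" as the first remaining line in the variable.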
+
+
+# Extract a single line of Jam syntax, ignoring any comments.
+#
+local rule extract-syntax (
+ var # The name of the variable to extract from.
+)
+{
+ local syntax = ;
+ local line = $($(var)[1]) ;
+ while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var))
+ {
+ local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ;
+ if $(m)
+ {
+ syntax = $(m) ;
+ }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ }
+ return $(syntax) ;
+}
+
+
+# Extract the next token; this is either a single Jam construct or a whole
+# comment treated as a single token.
+#
+local rule extract-token (
+ var # The name of the variable to extract from.
+)
+{
+ local parts = ;
+ while ! $(parts)
+ {
+ parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ;
+ if ! $(parts)
+ {
+ $(var) = $($(var)[2-]) ;
+ }
+ }
+ local token = ;
+ if [ MATCH "^(#)" : $(parts[1]) ]
+ {
+ token = $(parts:J=" ") ;
+ $(var) = $($(var)[2-]) ;
+ }
+ else
+ {
+ token = $(parts[1]) ;
+ $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ;
+ }
+ return $(token) ;
+}
+
+
+# Scan for a rule declaration as the next item in the variable.
+#
+local rule scan-rule (
+ syntax ? # The first part of the text which contains the rule declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ local rule-parts =
+ [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ;
+ if $(rule-parts[1])
+ {
+ # Mark as doc for rule.
+ local rule-name = $(rule-parts[2]) ;
+ if $(scope-name)
+ {
+ rule-name = $(scope-name).$(rule-name) ;
+ }
+ local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ;
+ if $(comment-block)
+ {
+ set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ;
+ }
+ # Parse args of rule.
+ $(var) = $(rule-parts[3-]) $($(var)) ;
+ set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ;
+ # Scan within this rules scope.
+ local scope-level = [ extract-token $(var) ] ;
+ local scope-name = $(rule-name) ;
+ while $(scope-level) && $($(var))
+ {
+ local comment-block = [ extract-comment $(var) ] ;
+ local syntax-block = [ extract-syntax $(var) ] ;
+ if [ scan-rule $(syntax-block) : $(var) ]
+ {
+ }
+ else if [ MATCH "^(\\{)" : $(syntax-block) ]
+ {
+ scope-level += "{" ;
+ }
+ else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
+ {
+ scope-level = $(scope-level[2-]) ;
+ }
+ }
+
+ return true ;
+ }
+}
+
+
+# Scan the arguments of a rule.
+#
+local rule scan-rule-arguments (
+ var # The name of the variable to extract from.
+)
+{
+ local arg-syntax = ;
+ local token = [ extract-token $(var) ] ;
+ while $(token) != "(" && $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ local arg-signature = ;
+ while $(token) != ")" && $(token) != "{"
+ {
+ local arg-name = ;
+ local arg-qualifier = " " ;
+ local arg-doc = ;
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ arg-name = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ if [ MATCH "^([\\*\\+\\?])" : $(token) ]
+ {
+ arg-qualifier = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if [ MATCH "^(#)" : $(token) ]
+ {
+ $(var) = $(token) $($(var)) ;
+ arg-doc = [ extract-comment $(var) ] ;
+ token = [ extract-token $(var) ] ;
+ }
+ set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ;
+ }
+ while $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ $(var) = "{" $($(var)) ;
+ arg-signature ?= "" ;
+ return $(arg-signature) ;
+}
+
+
+# Scan for a variable declaration.
+#
+local rule scan-variable (
+ syntax ? # The first part of the text which contains the variable declaration.
+ : var # The name of the variable to extract from.
+)
+{
+    # [1] = name, [2] = assignment operator, [3] = value(s)
+ local var-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ;
+ if $(var-parts)
+ {
+ local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ;
+ local default-value = "" ;
+    local initial-value = "" ;
+ if $(var-parts[2]) = "?="
+ {
+ default-value = $(value) ;
+ default-value ?= "(empty)" ;
+ }
+ else
+ {
+ initial-value = $(value) ;
+ initial-value ?= "(empty)" ;
+ }
+ if $(comment-block)
+ {
+ set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ;
+ }
+ return true ;
+ }
+}
+
+
+# Scan a class declaration.
+#
+local rule scan-class (
+ syntax ? # The syntax text for the class declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ # [1] = class?, [2] = name, [3] = superclass
+ local class-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]*:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ;
+ if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class"
+ {
+ # Scan within this class scope.
+ local scope-level = [ extract-token $(var) ] ;
+ local scope-name = $(class-parts[2]) ;
+ while $(scope-level) && $($(var))
+ {
+ local comment-block = [ extract-comment $(var) ] ;
+ local syntax-block = [ extract-syntax $(var) ] ;
+ if [ scan-rule $(syntax-block) : $(var) ]
+ {
+ }
+ else if [ MATCH "^(\\{)" : $(syntax-block) ]
+ {
+ scope-level += "{" ;
+ }
+ else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
+ {
+ scope-level = $(scope-level[2-]) ;
+ }
+ }
+
+ # This has to come after parsing the rules, because
+ # it looks up the rules for the class from the global list.
+ set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ;
+
+ return true ;
+ }
+}
+
+
+# Scan a module file for documentation comments. This also invokes any actions
+# assigned to the module. The actions are the rules that do the actual output of
+# the documentation. This rule is invoked as the header scan rule for the module
+# file.
+#
+rule scan-module (
+ target # The module file.
+ : text * # The text in the file, one item per line.
+ : action * # Rule to call to output docs for the module.
+)
+{
+ if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; }
+ local module-name = $(target:B) ;
+ local module-documented = ;
+ local comment-block = ;
+ local syntax-block = ;
+    # This is a hack because we cannot get the last line of a file if it
+    # happens to not have a new-line termination.
+ text += "}" ;
+ while $(text)
+ {
+ comment-block = [ extract-comment text ] ;
+ syntax-block = [ extract-syntax text ] ;
+ if $(.option.debug)
+ {
+ ECHO "HELP:" comment block\; '$(comment-block)' ;
+ ECHO "HELP:" syntax block\; '$(syntax-block)' ;
+ }
+ if [ scan-rule $(syntax-block) : text ] { }
+ else if [ scan-variable $(syntax-block) : text ] { }
+ else if [ scan-class $(syntax-block) : text ] { }
+ else if [ MATCH ".*([cC]opyright).*" : $(comment-block:J=" ") ]
+ {
+ # mark as the copy for the module.
+ set-module-copyright $(module-name) : $(comment-block) ;
+ }
+ else if $(action[1]) in "print-help-project" "print-help-config"
+ && ! $(jamfile<$(target)>.docs)
+ {
+ # special module docs for the project jamfile.
+ jamfile<$(target)>.docs = $(comment-block) ;
+ }
+ else if ! $(module-documented)
+ {
+ # document the module.
+ set-module-doc $(module-name) : $(comment-block) ;
+ module-documented = true ;
+ }
+ }
+ if $(action)
+ {
+ $(action[1]) $(module-name) : $(action[2-]) ;
+ }
+}
+
+
+# Import scan-module to global scope, so that it is available during header
+# scanning phase.
+#
+IMPORT $(__name__) : scan-module : : doc.scan-module ;
+
+
+# Read in a file using the SHELL builtin and return the individual lines as
+# would be done for header scanning.
+#
+local rule read-file (
+ file # The file to read in.
+)
+{
+ file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ;
+ if ! $(.file<$(file)>.lines)
+ {
+ local content ;
+ switch [ modules.peek : OS ]
+ {
+ case NT :
+ content = [ SHELL "TYPE \"$(file)\"" ] ;
+
+ case * :
+ content = [ SHELL "cat \"$(file)\"" ] ;
+ }
+ local lines ;
+ local << = "([^\r\n]*)[\r]?[\n](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ .file<$(file)>.lines = $(lines) ;
+ }
+ return $(.file<$(file)>.lines) ;
+}
+
+
+# Add a scan action to perform to generate the help documentation. The action
+# rule is passed the name of the module as the first argument. The second
+# argument(s) are optional and passed directly as specified here.
+#
+local rule do-scan (
+ modules + # The modules to scan and perform the action on.
+ : action * # The action rule, plus the secondary arguments to pass to the action rule.
+)
+{
+ if $(help-output) = text
+ {
+ print.output $(help-output-file).txt plain ;
+ ALWAYS $(help-output-file).txt ;
+ DEPENDS all : $(help-output-file).txt ;
+ }
+ if $(help-output) = html
+ {
+ print.output $(help-output-file).html html ;
+ ALWAYS $(help-output-file).html ;
+ DEPENDS all : $(help-output-file).html ;
+ }
+ for local module-file in $(modules[1--2])
+ {
+ scan-module $(module-file) : [ read-file $(module-file) ] ;
+ }
+ scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ;
+}
diff --git a/src/boost/tools/build/src/util/indirect.jam b/src/boost/tools/build/src/util/indirect.jam
new file mode 100644
index 000000000..095fea95f
--- /dev/null
+++ b/src/boost/tools/build/src/util/indirect.jam
@@ -0,0 +1,124 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import numbers ;
+
+
+# The pattern that indirect rules must match: module%rule
+.pattern = "^([^%]*)%([^%]+)$" ;
+
+
+#
+# Type checking rules.
+#
+local rule indirect-rule ( x )
+{
+ if ! [ MATCH $(.pattern) : $(x) ]
+ {
+ return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+# Make an indirect rule which calls the given rule. If context is supplied it is
+# expected to be the module in which to invoke the rule by the 'call' rule
+# below. Otherwise, the rule will be invoked in the module of this rule's
+# caller.
+#
+rule make ( rulename bound-args * : context ? )
+{
+ if [ MATCH $(.pattern) : $(rulename) ]
+ {
+ return $(rulename) $(bound-args) ;
+ }
+ else
+ {
+ context ?= [ CALLER_MODULE ] ;
+ context ?= "" ;
+ return $(context)%$(rulename) $(bound-args) ;
+ }
+}
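+
+# For example (illustrative; assume the caller is a module named 'mymod'):
+#
+#   [ indirect.make do-it ]      # yields: mymod%do-it
+#   [ indirect.make set%get ]    # already qualified, returned unchanged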
+
+
+# Make an indirect rule which calls the given rule. 'rulename' may be a
+# qualified rule; if so it is returned unchanged. Otherwise, if frames is not
+# supplied, the result will be invoked (by 'call', below) in the module of the
+# caller. Otherwise, frames > 1 specifies additional call frames to back up in
+# order to find the module context.
+#
+rule make-qualified ( rulename bound-args * : frames ? )
+{
+ if [ MATCH $(.pattern) : $(rulename) ]
+ {
+ return $(rulename) $(bound-args) ;
+ }
+ else
+ {
+ frames ?= 1 ;
+ # If the rule name includes a Jamfile module, grab it.
+ local module-context = [ MATCH "^(Jamfile<[^>]*>)\\..*" : $(rulename) ] ;
+
+ if ! $(module-context)
+ {
+ # Take the first dot-separated element as module name. This disallows
+ # module names with dots, but allows rule names with dots.
+ module-context = [ MATCH "^([^.]*)\\..*" : $(rulename) ] ;
+ }
+ module-context ?= [ CALLER_MODULE $(frames) ] ;
+ return [ make $(rulename) $(bound-args) : $(module-context) ] ;
+ }
+}
+
+
+# Returns the module name in which the given indirect rule will be invoked.
+#
+rule get-module ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ if ! $(m[1])
+ {
+ m = ;
+ }
+ return $(m[1]) ;
+}
+
+
+# Returns the rulename that will be called when x is invoked.
+#
+rule get-rule ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ return $(m[2]) ;
+}
+
+
+# Invoke the given indirect-rule.
+#
+rule call ( [indirect-rule] r args * : * )
+{
+ return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args) :
+ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) :
+ $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] ;
+}
+
+
+rule __test__
+{
+ import assert ;
+
+ rule foo-barr! ( x )
+ {
+ assert.equal $(x) : x ;
+ }
+
+ assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
+ assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
+
+ call [ make foo-barr! ] x ;
+ call [ make foo-barr! x ] ;
+ call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
+}
diff --git a/src/boost/tools/build/src/util/indirect.py b/src/boost/tools/build/src/util/indirect.py
new file mode 100644
index 000000000..01c2e77c2
--- /dev/null
+++ b/src/boost/tools/build/src/util/indirect.py
@@ -0,0 +1,15 @@
+# Status: minimally ported. This module is not supposed to be used much
+# with Boost.Build/Python.
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+from b2.util import call_jam_function, bjam_signature
+
+def call(*args):
+ a1 = args[0]
+ name = a1[0]
+ a1tail = a1[1:]
+ call_jam_function(name, *((a1tail,) + args[1:]))
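+
+# For example (illustrative rule name):
+#
+#   call(["mymod.report", "a", "b"], ["c"])
+#
+# invokes the Jam rule "mymod.report" with the argument lists [a b] and [c].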
diff --git a/src/boost/tools/build/src/util/logger.py b/src/boost/tools/build/src/util/logger.py
new file mode 100644
index 000000000..8da0434ae
--- /dev/null
+++ b/src/boost/tools/build/src/util/logger.py
@@ -0,0 +1,46 @@
+# Copyright Pedro Ferreira 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import sys
+
+class NullLogger:
+ def __init__ (self):
+ self.indent_ = ''
+
+ def log (self, source_name, *args):
+ if self.on () and self.interesting (source_name):
+ self.do_log (self.indent_)
+ for i in args:
+ self.do_log (i)
+ self.do_log ('\n')
+
+ def increase_indent (self):
+ if self.on ():
+ self.indent_ += ' '
+
+ def decrease_indent (self):
+        # Remove one level (four spaces) of indentation.
+        if self.on () and len (self.indent_) >= 4:
+            self.indent_ = self.indent_ [:-4]
+
+ def do_log (self, *args):
+ pass
+
+ def interesting (self, source_name):
+ return False
+
+ def on (self):
+ return True
+
+class TextLogger (NullLogger):
+ def __init__ (self):
+ NullLogger.__init__ (self)
+
+ def do_log (self, arg):
+ sys.stdout.write (str (arg))
+
+ def interesting (self, source_name):
+ return True
+
+ def on (self):
+ return True
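+
+# Minimal usage sketch (illustrative; in practice the build system chooses the
+# logger instance):
+#
+#   logger = TextLogger()
+#   logger.log('build.engine', 'updating ', 'foo.o')   # prints "updating foo.o"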
diff --git a/src/boost/tools/build/src/util/numbers.jam b/src/boost/tools/build/src/util/numbers.jam
new file mode 100644
index 000000000..665347d31
--- /dev/null
+++ b/src/boost/tools/build/src/util/numbers.jam
@@ -0,0 +1,218 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+
+
+rule trim-leading-zeroes ( value )
+{
+ return [ CALC $(value) + 0 ] ;
+}
+
+
+rule check ( numbers * )
+{
+ for local n in $(numbers)
+ {
+ switch $(n)
+ {
+ case *[^0-9]* :
+ errors.error $(n) "in" $(numbers) : is not a number ;
+ }
+ }
+}
+
+
+rule increment ( number )
+{
+ return [ CALC $(number) + 1 ] ;
+}
+
+
+rule decrement ( number )
+{
+ return [ CALC $(number) - 1 ] ;
+}
+
+
+rule range ( start finish ? : step ? )
+{
+ if ! $(finish)
+ {
+ finish = $(start) ;
+ start = 1 ;
+ }
+ step ?= 1 ;
+
+ check $(start) $(finish) $(step) ;
+
+ if $(finish) != 0
+ {
+ local result ;
+ while [ less $(start) $(finish) ] || $(start) = $(finish)
+ {
+ result += $(start) ;
+ start = [ CALC $(start) + $(step) ] ;
+ }
+ return $(result) ;
+ }
+}
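+
+# For example (illustrative values):
+#
+#   [ numbers.range 3 ]          # yields: 1 2 3
+#   [ numbers.range 2 10 : 2 ]   # yields: 2 4 6 8 10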
+
+
+rule less ( n1 n2 )
+{
+ switch [ CALC $(n2) - $(n1) ]
+ {
+ case [1-9]* : return true ;
+ }
+}
+
+
+rule log10 ( number )
+{
+ switch $(number)
+ {
+ case *[^0-9]* : errors.error $(number) is not a number ;
+ case 0 : errors.error can't take log of zero ;
+ case [1-9] : return 0 ;
+ case [1-9]? : return 1 ;
+ case [1-9]?? : return 2 ;
+ case [1-9]??? : return 3 ;
+ case [1-9]???? : return 4 ;
+ case [1-9]????? : return 5 ;
+ case [1-9]?????? : return 6 ;
+ case [1-9]??????? : return 7 ;
+ case [1-9]???????? : return 8 ;
+ case [1-9]????????? : return 9 ;
+ case * :
+ {
+ import sequence ;
+ import string ;
+ local chars = [ string.chars $(number) ] ;
+ while $(chars[1]) = 0
+ {
+ chars = $(chars[2-]) ;
+ }
+ if ! $(chars)
+ {
+ errors.error can't take log of zero ;
+ }
+ else
+ {
+ return [ decrement [ sequence.length $(chars) ] ] ;
+ }
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 1 : increment 0 ;
+ assert.result 2 : increment 1 ;
+ assert.result 1 : decrement 2 ;
+ assert.result 0 : decrement 1 ;
+ assert.result 50 : increment 49 ;
+ assert.result 49 : decrement 50 ;
+ assert.result 99 : increment 98 ;
+ assert.result 99 : decrement 100 ;
+ assert.result 100 : increment 99 ;
+ assert.result 999 : decrement 1000 ;
+ assert.result 1000 : increment 999 ;
+
+ assert.result 1 2 3 : range 3 ;
+ assert.result 1 2 3 4 5 6 7 8 9 10 11 12 : range 12 ;
+ assert.result 3 4 5 6 7 8 9 10 11 : range 3 11 ;
+ assert.result : range 0 ;
+ assert.result 1 4 7 10 : range 10 : 3 ;
+ assert.result 2 4 6 8 10 : range 2 10 : 2 ;
+ assert.result 25 50 75 100 : range 25 100 : 25 ;
+
+ assert.result 0 : trim-leading-zeroes 0 ;
+ assert.result 1234 : trim-leading-zeroes 1234 ;
+ assert.result 123456 : trim-leading-zeroes 0000123456 ;
+ assert.result 1000123456 : trim-leading-zeroes 1000123456 ;
+ assert.result 10000 : trim-leading-zeroes 10000 ;
+ assert.result 10000 : trim-leading-zeroes 00010000 ;
+
+ assert.true less 1 2 ;
+ assert.true less 1 12 ;
+ assert.true less 1 21 ;
+ assert.true less 005 217 ;
+ assert.false less 0 0 ;
+ assert.false less 03 3 ;
+ assert.false less 3 03 ;
+ assert.true less 005 217 ;
+ assert.true less 0005 217 ;
+ assert.true less 5 00217 ;
+
+    # TEMPORARILY disabled, because nested "try"/"catch" do not work and I do
+    # not have the time to fix that right now.
+ if $(0)
+ {
+ try ;
+ {
+ decrement 0 ;
+ }
+ catch can't decrement zero! ;
+
+ try ;
+ {
+ check foo ;
+ }
+ catch : not a number ;
+
+ try ;
+ {
+ increment foo ;
+ }
+ catch : not a number ;
+
+ try ;
+ {
+ log10 0 ;
+ }
+ catch can't take log of zero ;
+
+ try ;
+ {
+ log10 000 ;
+ }
+ catch can't take log of zero ;
+
+ }
+
+ assert.result 0 : log10 1 ;
+ assert.result 0 : log10 9 ;
+ assert.result 1 : log10 10 ;
+ assert.result 1 : log10 99 ;
+ assert.result 2 : log10 100 ;
+ assert.result 2 : log10 101 ;
+ assert.result 2 : log10 125 ;
+ assert.result 2 : log10 999 ;
+ assert.result 3 : log10 1000 ;
+ assert.result 10 : log10 12345678901 ;
+
+ for local x in [ range 75 110 : 5 ]
+ {
+ for local y in [ range $(x) 111 : 3 ]
+ {
+ if $(x) != $(y)
+ {
+ assert.true less $(x) $(y) ;
+ }
+ }
+ }
+
+ for local x in [ range 90 110 : 2 ]
+ {
+ for local y in [ range 80 $(x) : 4 ]
+ {
+ assert.false less $(x) $(y) ;
+ }
+ }
+}
diff --git a/src/boost/tools/build/src/util/option.jam b/src/boost/tools/build/src/util/option.jam
new file mode 100644
index 000000000..4c837f4e1
--- /dev/null
+++ b/src/boost/tools/build/src/util/option.jam
@@ -0,0 +1,109 @@
+# Copyright (c) 2005 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+
+# Set a value for a named option, to be used when not overridden on the command
+# line.
+rule set ( name : value ? )
+{
+ .option.$(name) = $(value) ;
+}
+
+rule get ( name : default-value ? : implied-value ? )
+{
+ local m = [ MATCH --$(name)=(.*) : [ modules.peek : ARGV ] ] ;
+ if $(m)
+ {
+ return $(m[1]) ;
+ }
+ else
+ {
+ m = [ MATCH (--$(name)) : [ modules.peek : ARGV ] ] ;
+ if $(m) && $(implied-value)
+ {
+ return $(implied-value) ;
+ }
+ else if $(.option.$(name))
+ {
+ return $(.option.$(name)) ;
+ }
+ else
+ {
+ return $(default-value) ;
+ }
+ }
+}
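+
+# For example (illustrative): with "--layout=versioned" on the command line,
+# [ option.get layout : system ] yields "versioned"; without any --layout
+# argument it yields the default, "system".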
+
+
+# Check command-line args as soon as possible. For each option, try to load a
+# module named after the option. If that succeeds, invoke the 'process' rule in
+# the module. The rule may return "true" to indicate that the regular build
+# process should not be attempted.
+#
+# Options take the general form of: --<name>[=<value>] [<value>]
+#
+rule process ( )
+{
+ local ARGV = [ modules.peek : ARGV ] ;
+ local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ;
+
+ local dont-build ;
+ local args = $(ARGV) ;
+ while $(args)
+ {
+ local arg = [ MATCH ^--(.*) : $(args[1]) ] ;
+ while $(args[2-]) && ! $(arg)
+ {
+ args = $(args[2-]) ;
+ arg = [ MATCH ^--(.*) : $(args[1]) ] ;
+ }
+ args = $(args[2-]) ;
+
+ if $(arg)
+ {
+ local split = [ MATCH "^(([^-=]+)[^=]*)(=?)(.*)$" : $(arg) ] ;
+ local full-name = $(split[1]) ;
+ local prefix = $(split[2]) ;
+ local values ;
+
+ if $(split[3])
+ {
+ values = $(split[4]) ;
+ }
+ if $(args) && ! [ MATCH ^(--).* : $(args[1]) ]
+ {
+ values += $(args[1]) ;
+ args = $(args[2-]) ;
+ }
+
+            # Look in the 'options' subdirectories of BOOST_BUILD_PATH for modules
+ # matching the full option name and then its prefix.
+ local plugin-dir = options ;
+ local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) :
+ $(full-name).jam $(prefix).jam ] ;
+
+ if $(option-files)
+ {
+ # Load the file into a module named for the option.
+ local f = $(option-files[1]) ;
+ local module-name = --$(f:D=:S=) ;
+ modules.load $(module-name) : $(f:D=) : $(f:D) ;
+
+ # If there is a process rule, call it with the full option name
+ # and its value (if any). If there was no "=" in the option, the
+ # value will be empty.
+ if process in [ RULENAMES $(module-name) ]
+ {
+ dont-build += [ modules.call-in $(module-name) : process
+ --$(full-name) : $(values) ] ;
+ }
+ }
+ }
+ }
+
+ return $(dont-build) ;
+}
diff --git a/src/boost/tools/build/src/util/option.py b/src/boost/tools/build/src/util/option.py
new file mode 100644
index 000000000..b23a7257c
--- /dev/null
+++ b/src/boost/tools/build/src/util/option.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2005-2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import sys
+import re
+import b2.util.regex
+
+options = {}
+
+# Set a value for a named option, to be used when not overridden on the command
+# line.
+def set(name, value=None):
+
+ global options
+
+ options[name] = value
+
+def get(name, default_value=None, implied_value=None):
+
+ global options
+
+ matches = b2.util.regex.transform(sys.argv, "--" + re.escape(name) + "=(.*)")
+ if matches:
+ return matches[-1]
+ else:
+ m = b2.util.regex.transform(sys.argv, "--(" + re.escape(name) + ")")
+ if m and implied_value:
+ return implied_value
+ elif options.get(name) is not None:
+ return options[name]
+ else:
+ return default_value
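
The lookup order implemented by get() is: an explicit --name=value on the command line wins, then a bare --name flag (returning implied_value), then a value stored earlier with set(), and finally default_value. A small usage sketch, assuming the b2 package is importable as the file path above suggests:

    import sys
    from b2.util import option

    option.set('toolset', 'gcc')                        # programmatic default

    sys.argv = ['b2', '--toolset=clang']
    print(option.get('toolset'))                        # 'clang' -- command line wins

    sys.argv = ['b2', '--debug-building']
    print(option.get('debug-building', None, 'on'))     # 'on' -- bare flag yields the implied value

    sys.argv = ['b2']
    print(option.get('toolset'))                        # 'gcc' -- value from set()
    print(option.get('layout', 'system'))               # 'system' -- default value
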
diff --git a/src/boost/tools/build/src/util/order.jam b/src/boost/tools/build/src/util/order.jam
new file mode 100644
index 000000000..943d24889
--- /dev/null
+++ b/src/boost/tools/build/src/util/order.jam
@@ -0,0 +1,173 @@
+# Copyright (C) 2003 Vladimir Prus
+# Use, modification, and distribution is subject to the Boost Software
+# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
+# at http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines a class which allows ordering arbitrary objects with
+# regard to an arbitrary binary relation.
+#
+# The primary use case is the gcc toolset, which is sensitive to library order:
+# if library 'a' uses symbols from library 'b', then 'a' must be present before
+# 'b' on the linker's command line.
+#
+# This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris
+# LD (and for Solaris toolset as well), the order always matters.
+#
+# So, we need to store order requirements and then order libraries according to
+# them. It is not possible to use the dependency graph as order requirements.
+# What we need is a "use symbols" relationship while dependency graph provides
+# the "needs to be updated" relationship.
+#
+# For example::
+# lib a : a.cpp b;
+# lib b ;
+#
+# For static linking, library 'a' need not depend on 'b'. However, it should
+# still come before 'b' on the command line.
+
+class order
+{
+ rule __init__ ( )
+ {
+ }
+
+    # Adds the constraint that 'first' should precede 'second'.
+ rule add-pair ( first second )
+ {
+        .constraints += $(first)--$(second) ;
+ }
+ NATIVE_RULE class@order : add-pair ;
+
+ # Given a list of objects, reorder them so that the constraints specified by
+ # 'add-pair' are satisfied.
+ #
+    # The algorithm was adapted from an awk script by Nikita Youshchenko
+ # (yoush at cs dot msu dot su)
+ rule order ( objects * )
+ {
+        # The algorithm used is the same as standard transitive closure, except
+ # that we're not keeping in-degree for all vertices, but rather removing
+ # edges.
+ local result ;
+ if $(objects)
+ {
+ local constraints = [ eliminate-unused-constraits $(objects) ] ;
+
+ # Find some library that nobody depends upon and add it to the
+ # 'result' array.
+ local obj ;
+ while $(objects)
+ {
+ local new_objects ;
+ while $(objects)
+ {
+ obj = $(objects[1]) ;
+ if [ has-no-dependents $(obj) : $(constraints) ]
+ {
+ # Emulate break ;
+ new_objects += $(objects[2-]) ;
+ objects = ;
+ }
+ else
+ {
+ new_objects += $(obj) ;
+ obj = ;
+ objects = $(objects[2-]) ;
+ }
+ }
+
+ if ! $(obj)
+ {
+ errors.error "Circular order dependencies" ;
+ }
+ # No problem with placing first.
+ result += $(obj) ;
+ # Remove all constraints where 'obj' comes first, since they are
+ # already satisfied.
+ constraints = [ remove-satisfied $(constraints) : $(obj) ] ;
+
+ # Add the remaining objects for further processing on the next
+ # iteration
+ objects = $(new_objects) ;
+ }
+
+ }
+ return $(result) ;
+ }
+ NATIVE_RULE class@order : order ;
+
+ # Eliminate constraints which mention objects not in 'objects'. In
+ # graph-theory terms, this is finding a subgraph induced by ordered
+ # vertices.
+ rule eliminate-unused-constraits ( objects * )
+ {
+ local result ;
+ for local c in $(.constraints)
+ {
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[1]) in $(objects) && $(m[2]) in $(objects)
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+ }
+
+    # Returns true if there's no constraint in 'constraints' where 'obj' comes
+ # second.
+ rule has-no-dependents ( obj : constraints * )
+ {
+ local failed ;
+ while $(constraints) && ! $(failed)
+ {
+ local c = $(constraints[1]) ;
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[2]) = $(obj)
+ {
+ failed = true ;
+ }
+ constraints = $(constraints[2-]) ;
+ }
+ if ! $(failed)
+ {
+ return true ;
+ }
+ }
+
+ rule remove-satisfied ( constraints * : obj )
+ {
+ local result ;
+ for local c in $(constraints)
+ {
+ local m = [ MATCH (.*)--(.*) : $(c) ] ;
+ if $(m[1]) != $(obj)
+ {
+ result += $(c) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import "class" : new ;
+ import assert ;
+
+ c1 = [ new order ] ;
+ $(c1).add-pair l1 l2 ;
+
+ assert.result l1 l2 : $(c1).order l1 l2 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+
+ $(c1).add-pair l2 l3 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+ $(c1).add-pair x l2 ;
+ assert.result l1 l2 : $(c1).order l2 l1 ;
+ assert.result l1 l2 l3 : $(c1).order l2 l3 l1 ;
+
+ # The output should be stable for unconstrained
+ # elements.
+ assert.result l4 l5 : $(c1).order l4 l5 ;
+}
diff --git a/src/boost/tools/build/src/util/order.py b/src/boost/tools/build/src/util/order.py
new file mode 100644
index 000000000..1cd57dad2
--- /dev/null
+++ b/src/boost/tools/build/src/util/order.py
@@ -0,0 +1,121 @@
+# Copyright (C) 2003 Vladimir Prus
+# Use, modification, and distribution is subject to the Boost Software
+# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
+# at http://www.boost.org/LICENSE_1_0.txt)
+
+class Order:
+    """Allows ordering arbitrary objects with regard to an arbitrary binary relation.
+
+ The primary use case is the gcc toolset, which is sensitive to
+ library order: if library 'a' uses symbols from library 'b',
+ then 'a' must be present before 'b' on the linker's command line.
+
+ This requirement can be lifted for gcc with GNU ld, but for gcc with
+ Solaris LD (and for Solaris toolset as well), the order always matters.
+
+ So, we need to store order requirements and then order libraries
+    according to them. It is not possible to use the dependency graph as
+    order requirements. What we need is a "use symbols" relationship,
+    while the dependency graph provides a "needs to be updated" relationship.
+
+ For example::
+ lib a : a.cpp b;
+ lib b ;
+
+ For static linking, the 'a' library need not depend on 'b'. However, it
+ still should come before 'b' on the command line.
+ """
+
+ def __init__ (self):
+ self.constraints_ = []
+
+ def add_pair (self, first, second):
+ """ Adds the constraint that 'first' should precede 'second'.
+ """
+ self.constraints_.append ((first, second))
+
+ def order (self, objects):
+        """ Given a list of objects, reorder them so that the constraints specified
+ by 'add_pair' are satisfied.
+
+        The algorithm was adapted from an awk script by Nikita Youshchenko
+ (yoush at cs dot msu dot su)
+ """
+        # The algorithm used is the same as standard transitive closure,
+ # except that we're not keeping in-degree for all vertices, but
+ # rather removing edges.
+ result = []
+
+ if not objects:
+ return result
+
+ constraints = self.__eliminate_unused_constraits (objects)
+
+ # Find some library that nobody depends upon and add it to
+ # the 'result' array.
+ obj = None
+ while objects:
+ new_objects = []
+ while objects:
+ obj = objects [0]
+
+ if self.__has_no_dependents (obj, constraints):
+ # Emulate break ;
+ new_objects.extend (objects [1:])
+ objects = []
+
+ else:
+ new_objects.append (obj)
+ obj = None
+ objects = objects [1:]
+
+ if not obj:
+ raise BaseException ("Circular order dependencies")
+
+ # No problem with placing first.
+ result.append (obj)
+
+            # Remove all constraints where 'obj' comes first,
+ # since they are already satisfied.
+ constraints = self.__remove_satisfied (constraints, obj)
+
+ # Add the remaining objects for further processing
+ # on the next iteration
+ objects = new_objects
+
+ return result
+
+ def __eliminate_unused_constraits (self, objects):
+ """ Eliminate constraints which mention objects not in 'objects'.
+ In graph-theory terms, this is finding subgraph induced by
+ ordered vertices.
+ """
+ result = []
+ for c in self.constraints_:
+ if c [0] in objects and c [1] in objects:
+ result.append (c)
+
+ return result
+
+ def __has_no_dependents (self, obj, constraints):
+ """ Returns true if there's no constraint in 'constraints' where
+ 'obj' comes second.
+ """
+ failed = False
+ while constraints and not failed:
+ c = constraints [0]
+
+ if c [1] == obj:
+ failed = True
+
+ constraints = constraints [1:]
+
+ return not failed
+
+ def __remove_satisfied (self, constraints, obj):
+ result = []
+ for c in constraints:
+ if c [0] != obj:
+ result.append (c)
+
+ return result
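
For comparison with the Jam __test__ shown earlier, the Python class can be exercised the same way; this sketch assumes the b2 package is on the Python path so that b2.util.order resolves to the file added here:

    from b2.util.order import Order

    o = Order()
    o.add_pair('l1', 'l2')
    o.add_pair('l2', 'l3')

    print(o.order(['l2', 'l1']))          # ['l1', 'l2']
    print(o.order(['l2', 'l3', 'l1']))    # ['l1', 'l2', 'l3']
    print(o.order(['l4', 'l5']))          # ['l4', 'l5'] -- unconstrained order is preserved
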
diff --git a/src/boost/tools/build/src/util/os.jam b/src/boost/tools/build/src/util/os.jam
new file mode 100644
index 000000000..21dd28f03
--- /dev/null
+++ b/src/boost/tools/build/src/util/os.jam
@@ -0,0 +1,208 @@
+# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import string ;
+
+
+# Return the value(s) of the given environment variable(s) at the time bjam was
+# invoked.
+rule environ ( variable-names + )
+{
+ local result ;
+ for local var-name in $(variable-names)
+ {
+ # We check the various cases of the var name for a value to account
+ # for programs that change the casing of env vars. One such program
+        # is Python, which upper-cases env var names on import, and reports
+        # them as upper-case instead of keeping the original case.
+ local value ;
+ value ?= [ modules.peek .ENVIRON : $(var-name) ] ;
+ value ?= [ modules.peek .ENVIRON : $(var-name:U) ] ;
+ value ?= [ modules.peek .ENVIRON : $(var-name:L) ] ;
+ result += $(value) ;
+ }
+ return $(result) ;
+}
+
+.name = [ modules.peek : OS ] ;
+.platform = [ modules.peek : OSPLAT ] ;
+.version = [ modules.peek : OSVER ] ;
+
+
+local rule constant ( c : os ? )
+{
+ os ?= $(.name) ;
+ # First look for a platform-specific name, then the general value.
+ local variables = .$(c)-$(os) .$(c) ;
+ local result = $($(variables)) ;
+ return $(result[1]) ;
+}
+
+rule get-constant ( os ? )
+{
+ # Find the name of the constant being accessed, which is equal to the name
+ # used to invoke us.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = [ MATCH "([^.]*)$" : $(bt[4]) ] ;
+ return [ constant $(rulename) : $(os) ] ;
+}
+
+
+# export all the common constants
+.constants = name platform version shared-library-path-variable path-separator executable-path-variable executable-suffix ;
+for local constant in $(.constants)
+{
+ IMPORT $(__name__) : get-constant : $(__name__) : $(constant) ;
+}
+EXPORT $(__name__) : $(.constants) ;
+
+.executable-path-variable-NT = PATH ;
+# On Windows the case and capitalization of PATH is not always predictable, so
+# let's find out what variable name was really set.
+if $(.name) = NT
+{
+ for local n in [ VARNAMES .ENVIRON ]
+ {
+ if $(n:L) = path
+ {
+ .executable-path-variable-NT = $(n) ;
+ }
+ }
+}
+
+# Specific constants for various platforms. There's no need to define any
+# constant whose value would be the same as the default, below.
+.shared-library-path-variable-NT = $(.executable-path-variable-NT) ;
+.path-separator-NT = ";" ;
+.path-separator-VXWORKS = ";" ;
+.expand-variable-prefix-NT = % ;
+.expand-variable-suffix-NT = % ;
+.executable-suffix-NT = .exe ;
+
+.shared-library-path-variable-CYGWIN = PATH ;
+
+.shared-library-path-variable-MACOSX = DYLD_LIBRARY_PATH ;
+
+.shared-library-path-variable-AIX = LIBPATH ;
+
+.shared-library-path-variable-HAIKU = LIBRARY_PATH ;
+
+.shared-library-path-variable-VMS = PATH ;
+.path-separator-VMS = "," ;
+.expand-variable-prefix-VMS = '' ;
+.expand-variable-suffix-VMS = ' ;
+.executable-suffix-VMS = .exe ;
+
+# VxWorks uses the default LD_LIBRARY_PATH, but we need an alternate
+# name on the cross build host to propagate to the target system
+.shared-library-path-variable-VXWORKS = VSB_LD_LIBRARY_PATH ;
+
+# Default constants
+.shared-library-path-variable = LD_LIBRARY_PATH ;
+.path-separator = ":" ;
+.expand-variable-prefix = $ ;
+.expand-variable-suffix = "" ;
+.executable-path-variable = PATH ;
+.executable-suffix = "" ;
+
+
+# Return a list of the directories in the PATH. Yes, that information is (sort
+# of) available in the global module, but jam code can change those values, and
+# it isn't always clear what case/capitalization to use when looking. This rule
+# is a more reliable way to get there.
+rule executable-path ( )
+{
+ return [ string.words [ environ [ constant executable-path-variable ] ]
+ : [ constant path-separator ] ] ;
+}
+
+
+# Initialize the list of home directories for the current user depending on the
+# OS.
+if $(.name) = NT
+{
+ local home = [ environ HOMEDRIVE HOMEPATH ] ;
+ .home-directories = $(home[1])$(home[2]) [ environ HOME ] [ environ USERPROFILE ] ;
+}
+else
+{
+ .home-directories = [ environ HOME ] ;
+}
+
+
+# Can't use 'constant' mechanism because it only returns 1-element values.
+rule home-directories ( )
+{
+ return $(.home-directories) ;
+}
+
+
+# Return the string needed to represent the expansion of the named shell
+# variable.
+rule expand-variable ( variable )
+{
+ local prefix = [ constant expand-variable-prefix ] ;
+ local suffix = [ constant expand-variable-suffix ] ;
+ return $(prefix)$(variable)$(suffix) ;
+}
+
+
+# Returns true if running on windows, whether in cygwin or not.
+rule on-windows ( )
+{
+ local result ;
+ if [ modules.peek : NT ]
+ {
+ result = true ;
+ }
+ else if [ modules.peek : UNIX ]
+ {
+ switch [ modules.peek : JAMUNAME ]
+ {
+ case CYGWIN* :
+ {
+ result = true ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+rule on-vms ( )
+{
+ local result ;
+ if [ modules.peek : VMS ]
+ {
+ result = true ;
+ }
+ return $(result) ;
+}
+
+
+if ! [ on-windows ] && ! [ on-vms ]
+{
+ .on-unix = 1 ;
+}
+
+
+rule on-unix
+{
+ return $(.on-unix) ;
+}
+
+
+rule __test__
+{
+ import assert ;
+ if ! ( --quiet in [ modules.peek : ARGV ] )
+ {
+ ECHO "os:" name= [ name ] ;
+ ECHO "os:" version= [ version ] ;
+ }
+ assert.true name ;
+}
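
The environ rule above tries the variable name as given, then upper-cased, then lower-cased, to cope with tools that re-case environment variables. A rough standalone Python equivalent of that lookup order (the helper name environ_any_case is made up for this illustration):

    import os

    def environ_any_case(names):
        # For each name, return the first value found when trying the name as
        # given, then upper-case, then lower-case -- the same order as os.jam.
        result = []
        for name in names:
            for candidate in (name, name.upper(), name.lower()):
                if candidate in os.environ:
                    result.append(os.environ[candidate])
                    break
        return result

    # environ_any_case(['Path', 'HOME']) -> values of Path/PATH/path and HOME, if set
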
diff --git a/src/boost/tools/build/src/util/os_j.py b/src/boost/tools/build/src/util/os_j.py
new file mode 100644
index 000000000..f5dff1f90
--- /dev/null
+++ b/src/boost/tools/build/src/util/os_j.py
@@ -0,0 +1,24 @@
+# Status: stub, just enough to make tests work.
+#
+# Named os_j to avoid conflicts with standard 'os'. See
+# project.py:import for special-casing.
+#
+# Copyright 2001, 2002, 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+import os
+
+import bjam
+
+__OS = bjam.call("peek", [], "OS")[0]
+
+# Return Jam's name of the OS so that existing code does not break
+# when faced with Python naming.
+def name():
+ return __OS
+
+
+def environ(keys):
+ return [os.environ[key] for key in keys if key in os.environ]
diff --git a/src/boost/tools/build/src/util/param.jam b/src/boost/tools/build/src/util/param.jam
new file mode 100644
index 000000000..9db2e5855
--- /dev/null
+++ b/src/boost/tools/build/src/util/param.jam
@@ -0,0 +1,54 @@
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Named parameters are represented as a list which has the
+# argument name as the first element and the value as the
+# remaining elements. This function sorts the parameters
+# into the correct variables and removes the parameter names.
+#
+# Example::
+#
+# rule exe ( name : sources * : requirements * )
+# {
+# param.handle-named-params sources requirements ;
+# # At this point $(sources) is test.cpp
+# }
+# exe test : requirements <link>shared : sources test.cpp ;
+#
+rule handle-named-params ( parameter-names * )
+{
+ module [ CALLER_MODULE ]
+ {
+ # Uglify the variable names, because we're executing in an unknown module.
+ local found-8bef5c096d06a1b0 ;
+ local tmp-8bef5c096d06a1b0.$(1) ;
+ for local v-8bef5c096d06a1b0 in $(1)
+ {
+ if $($(v-8bef5c096d06a1b0)[1]) && $($(v-8bef5c096d06a1b0)[1]) in $(1)
+ {
+ if $(tmp-8bef5c096d06a1b0.$($(v-8bef5c096d06a1b0)[1]))
+ {
+ import errors ;
+ errors.error Parameter '$($(v-8bef5c096d06a1b0)[1])' passed more than once. ;
+ }
+ found-8bef5c096d06a1b0 = true ;
+ tmp-8bef5c096d06a1b0.$($(v-8bef5c096d06a1b0)[1]) = $($(v-8bef5c096d06a1b0)[2-]) ;
+ }
+ else if $($(v-8bef5c096d06a1b0))-is-defined
+ {
+ if $(found-8bef5c096d06a1b0)
+ {
+ import errors ;
+ errors.error "Positional arguments must appear first." ;
+ }
+ tmp-8bef5c096d06a1b0.$(v-8bef5c096d06a1b0) = $($(v-8bef5c096d06a1b0)) ;
+ }
+ }
+ for local v-8bef5c096d06a1b0 in $(1)
+ {
+ $(v-8bef5c096d06a1b0) = $(tmp-8bef5c096d06a1b0.$(v-8bef5c096d06a1b0)) ;
+ }
+ }
+}
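
The effect of handle-named-params is easier to see without Jam's variable indirection. A loose Python analogue of the same sorting idea (the function name and calling convention are invented for the illustration; this helper is not part of the b2 Python port):

    def handle_named_params(param_names, args):
        # param_names: the declared parameter slots, e.g. ['sources', 'requirements']
        # args: the argument lists actually passed to the rule, one list per slot
        values = {}
        seen_named = False
        for slot, arg in zip(param_names, args):
            if arg and arg[0] in param_names:          # named form: first element names the slot
                if arg[0] in values:
                    raise ValueError("Parameter '%s' passed more than once." % arg[0])
                values[arg[0]] = arg[1:]
                seen_named = True
            elif arg:                                   # positional form: keep it in its own slot
                if seen_named:
                    raise ValueError("Positional arguments must appear first.")
                values[slot] = arg
        return [values.get(name, []) for name in param_names]

    # handle_named_params(['sources', 'requirements'],
    #                     [['requirements', '<link>shared'], ['sources', 'test.cpp']])
    # -> [['test.cpp'], ['<link>shared']]
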
diff --git a/src/boost/tools/build/src/util/path.jam b/src/boost/tools/build/src/util/path.jam
new file mode 100644
index 000000000..02abd0706
--- /dev/null
+++ b/src/boost/tools/build/src/util/path.jam
@@ -0,0 +1,1015 @@
+# Copyright 2002-2006. Vladimir Prus
+# Copyright 2003-2004. Dave Abrahams
+# Copyright 2003-2006. Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Performs various path manipulations. Paths are always in a 'normalized'
+# representation. In it, a path may be either:
+#
+# - '.', or
+#
+# - ['/'] [ ( '..' '/' )* (token '/')* token ]
+#
+# In plain English, a path can be rooted, '..' elements are allowed only at the
+# beginning, and it never ends in a slash, except for a path consisting of a
+# slash only.
+
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+
+
+os = [ modules.peek : OS ] ;
+if [ modules.peek : UNIX ]
+{
+ local uname = [ modules.peek : JAMUNAME ] ;
+ switch $(uname)
+ {
+ case CYGWIN* : os = CYGWIN ;
+ case * : os = UNIX ;
+ }
+}
+
+
+# Converts the native path into normalized form.
+#
+rule make ( native )
+{
+ return [ make-$(os) $(native) ] ;
+}
+
+
+# Builds native representation of the path.
+#
+rule native ( path )
+{
+ return [ native-$(os) $(path) ] ;
+}
+
+
+# Tests if a path is rooted.
+#
+rule is-rooted ( path )
+{
+ return [ MATCH "^(/)" : $(path) ] ;
+}
+
+
+# Tests if a path has a parent.
+#
+rule has-parent ( path )
+{
+ if $(path) != /
+ {
+ return 1 ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Returns the path without any directory components.
+#
+rule basename ( path )
+{
+ return [ MATCH "([^/]+)$" : $(path) ] ;
+}
+
+
+# Returns parent directory of the path. If no parent exists, error is issued.
+#
+rule parent ( path )
+{
+ if [ has-parent $(path) ]
+ {
+ if $(path) = .
+ {
+ return .. ;
+ }
+ else
+ {
+ # Strip everything at the end of path up to and including the last
+ # slash.
+ local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
+
+ # Did we strip what we shouldn't?
+ if $(result[2]) = ".."
+ {
+ return $(path)/.. ;
+ }
+ else
+ {
+ if ! $(result[1])
+ {
+ if [ is-rooted $(path) ]
+ {
+ result = / ;
+ }
+ else
+ {
+ result = . ;
+ }
+ }
+ return $(result[1]) ;
+ }
+ }
+ }
+ else
+ {
+ import errors ;
+ errors.error "Path '$(path)' has no parent" ;
+ }
+}
+
+
+# Returns path2 such that "[ join path path2 ] = .". The path may not contain
+# ".." element or be rooted.
+#
+rule reverse ( path )
+{
+ if $(path) = .
+ {
+ return $(path) ;
+ }
+ else
+ {
+ local tokens = [ regex.split $(path) / ] ;
+ local tokens2 ;
+ for local i in $(tokens)
+ {
+ tokens2 += .. ;
+ }
+ return [ sequence.join $(tokens2) : / ] ;
+ }
+}
+
+
+# Concatenates the passed path elements. Generates an error if any element other
+# than the first one is rooted. Skips any empty or undefined path elements.
+#
+rule join ( elements + )
+{
+ if ! $(elements[2-])
+ {
+ return $(elements[1]) ;
+ }
+ else
+ {
+ for local e in $(elements[2-])
+ {
+ if [ is-rooted $(e) ]
+ {
+ import errors ;
+ errors.error only the first element may be rooted ;
+ }
+ }
+ return [ NORMALIZE_PATH "$(elements)" ] ;
+ }
+}
+
+
+# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged.
+#
+rule root ( path root )
+{
+ if [ is-rooted $(path) ]
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ join $(root) $(path) ] ;
+ }
+}
+
+
+# Returns the current working directory.
+#
+rule pwd ( )
+{
+ if ! $(.pwd)
+ {
+ .pwd = [ make [ PWD ] ] ;
+ }
+ return $(.pwd) ;
+}
+
+
+# Returns the list of files matching the given pattern in the specified
+# directory. Both directories and patterns are supplied as portable paths. Each
+# pattern should be a non-absolute path, and can't contain "." or ".." elements.
+# Each slash separated element of pattern can contain the following special
+# characters:
+# - '?', which matches any character
+# - '*', which matches arbitrary number of characters.
+# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and
+# only if e1 matches p1, e2 matches p2 and so on.
+#
+# For example:
+# [ glob . : *.cpp ]
+# [ glob . : */build/Jamfile ]
+#
+rule glob ( dirs * : patterns + : exclude-patterns * )
+{
+ local result ;
+ local real-patterns ;
+ local real-exclude-patterns ;
+ for local d in $(dirs)
+ {
+ for local p in $(patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-patterns += [ path.native $(pattern) ] ;
+ }
+
+ for local p in $(exclude-patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-exclude-patterns += [ path.native $(pattern) ] ;
+ }
+ }
+
+ local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
+ inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
+ local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
+ exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
+
+ return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ]
+ ;
+}
+
+
+# Recursive version of GLOB. Builds the glob of files while also searching in
+# the subdirectories of the given roots. An optional set of exclusion patterns
+# will filter out the matching entries from the result. The exclusions also
+# apply to the subdirectory scanning, such that directories that match the
+# exclusion patterns will not be searched.
+#
+rule glob-tree ( roots * : patterns + : exclude-patterns * )
+{
+ return [ sequence.transform path.make : [ .glob-tree [ sequence.transform
+ path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ;
+}
+
+
+local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
+{
+ local excluded ;
+ if $(exclude-patterns)
+ {
+ excluded = [ GLOB $(roots) : $(exclude-patterns) ] ;
+ }
+ local result = [ set.difference [ GLOB $(roots) : $(patterns) ] :
+ $(excluded) ] ;
+ local subdirs ;
+ for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ]
+ {
+ if ! ( $(d:D=) in . .. ) && ! [ CHECK_IF_FILE $(d) ]
+ {
+ subdirs += $(d) ;
+ }
+ }
+ if $(subdirs)
+ {
+ result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ]
+ ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if the specified file exists.
+#
+rule exists ( file )
+{
+ return [ path.glob $(file:D) : $(file:D=) ] ;
+}
+NATIVE_RULE path : exists ;
+
+
+# Finds the absolute name of 'path' and returns the list of all its parents,
+# starting with the immediate one. Parents are returned as relative names. If
+# 'upper_limit' is specified, directories above it will be pruned.
+#
+rule all-parents ( path : upper_limit ? : cwd ? )
+{
+ cwd ?= [ pwd ] ;
+ local path_ele = [ regex.split [ root $(path) $(cwd) ] / ] ;
+
+ if ! $(upper_limit)
+ {
+ upper_limit = / ;
+ }
+ local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] / ] ;
+
+ # Leave only elements in 'path_ele' below 'upper_ele'.
+ while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) )
+ {
+ upper_ele = $(upper_ele[2-]) ;
+ path_ele = $(path_ele[2-]) ;
+ }
+
+ # Have all upper elements been removed ?
+ if $(upper_ele)
+ {
+ import errors ;
+ errors.error "$(upper_limit) is not prefix of $(path)" ;
+ }
+
+    # Create the relative paths to the parents, one for each element in 'path_ele'.
+ local result ;
+ for local i in $(path_ele)
+ {
+ path = [ parent $(path) ] ;
+ result += $(path) ;
+ }
+ return $(result) ;
+}
+
+
+# Search for 'pattern' in parent directories of 'dir', up to and including
+# 'upper_limit', if it is specified, or up to the filesystem root otherwise.
+#
+rule glob-in-parents ( dir : patterns + : upper-limit ? )
+{
+ local result ;
+ local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
+
+ while $(parent-dirs) && ! $(result)
+ {
+ result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
+ parent-dirs = $(parent-dirs[2-]) ;
+ }
+ return $(result) ;
+}
+
+
+# Assuming 'child' is a subdirectory of 'parent', return the relative path from
+# 'parent' to 'child'.
+#
+rule relative ( child parent : no-error ? )
+{
+ local not-a-child ;
+ if $(parent) = "."
+ {
+ return $(child) ;
+ }
+ else
+ {
+ local split1 = [ regex.split $(parent) / ] ;
+ local split2 = [ regex.split $(child) / ] ;
+
+ while $(split1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ not-a-child = true ;
+ split1 = ;
+ }
+ }
+ if $(split2)
+ {
+ if $(not-a-child)
+ {
+ if $(no-error)
+ {
+ return not-a-child ;
+ }
+ else
+ {
+ import errors ;
+ errors.error $(child) is not a subdir of $(parent) ;
+ }
+ }
+ else
+ {
+ return [ join $(split2) ] ;
+ }
+ }
+ else
+ {
+ return "." ;
+ }
+ }
+}
+
+
+# Returns the minimal path to path2 that is relative to path1.
+# If no such path exists and path2 is rooted, return it unchanged.
+#
+rule relative-to ( path1 path2 )
+{
+ local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
+ local split1 = [ regex.split $(path1) / ] ;
+ local split2 = [ regex.split $(path2) / ] ;
+ local is-rooted ;
+
+ if $(split1[1]) = "" && $(split2[1]) = ""
+ {
+ is-rooted = true ;
+ }
+ else if $(split1[1]) != "" && $(split2[1]) = ""
+ {
+ # Second path is rooted
+ return $(path2) ;
+ }
+ else if $(split1[1]) = "" && $(split2[1]) != ""
+ {
+ import errors ;
+ errors.error Cannot find relative path from $(path1) to $(path2) ;
+ }
+
+ # For windows paths on different drives, return an
+ # absolute path
+ if $(os) = NT && $(split1[1]) = "" &&
+ [ MATCH "^(.:)$" : $(split1[2]) ] &&
+ $(split1[2]) != $(split2[2])
+ {
+ return $(path2) ;
+ }
+
+ while $(split1) && $(root_1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ root_1 = $(root_1[2-]) ;
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else if $(split1[1]) = ..
+ {
+ if $(is-rooted)
+ {
+ return $(path2) ;
+ }
+ else
+ {
+ import errors ;
+ errors.error Cannot find relative path from $(path1) to $(path2) ;
+ return ;
+ }
+ }
+ else
+ {
+ split1 = ;
+ }
+ }
+ return [ join . $(root_1) $(split2) ] ;
+}
+
+
+# Returns the list of paths used by the operating system for looking up
+# programs.
+#
+rule programs-path ( )
+{
+ local result ;
+ local raw = [ modules.peek : PATH Path path ] ;
+ for local p in $(raw)
+ {
+ if $(p)
+ {
+ result += [ path.make $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+rule makedirs ( path )
+{
+ local result = true ;
+ local native = [ native $(path) ] ;
+ if ! [ exists $(native) ]
+ {
+ if [ makedirs [ parent $(path) ] ]
+ {
+ if ! [ MAKEDIR $(native) ]
+ {
+ import errors ;
+ errors.error "Could not create directory '$(path)'" ;
+ result = ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Converts native Windows paths into our internal canonical path representation.
+# Supports 'invalid' paths containing multiple successive path separator
+# characters.
+#
+# TODO: Check and if needed add support for Windows 'X:file' path format where
+# the file is located in the current folder on drive X.
+#
+rule make-NT ( native )
+{
+ local result = [ NORMALIZE_PATH $(native) ] ;
+
+ # We need to add an extra '/' in front in case this is a rooted Windows path
+ # starting with a drive letter and not a path separator character since the
+ # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter
+ # and treats it as a regular folder name.
+ if [ regex.match "(^.:)" : $(native) ]
+ {
+ result = /$(result) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule native-NT ( path )
+{
+ local remove-slash = [ MATCH "^/(.:.*)" : $(path) ] ;
+ if $(remove-slash)
+ {
+ path = $(remove-slash) ;
+ }
+ return [ regex.replace $(path) / \\ ] ;
+}
+
+
+rule make-UNIX ( native )
+{
+    # VP: I have no idea how 'native' can be empty here! But it can!
+ if ! $(native)
+ {
+ import errors ;
+ errors.error "Empty path passed to 'make-UNIX'" ;
+ }
+ else
+ {
+ return [ NORMALIZE_PATH $(native:T) ] ;
+ }
+}
+
+
+rule native-UNIX ( path )
+{
+ return $(path) ;
+}
+
+
+rule make-CYGWIN ( path )
+{
+ return [ make-NT $(path) ] ;
+}
+
+
+rule native-CYGWIN ( path )
+{
+ local result = $(path) ;
+ if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path.
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'.
+ }
+ return [ native-UNIX $(result) ] ;
+}
+
+
+# split-path-VMS: splits input native path into device dir file (each part is
+# optional).
+#
+# example:
+#
+# dev:[dir]file.c => dev: [dir] file.c
+#
+rule split-path-VMS ( native )
+{
+ local matches = [ MATCH "([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$" : $(native)
+ ] ;
+ local device = $(matches[1]) ;
+ local dir = $(matches[2]) ;
+ local file = $(matches[3]) ;
+
+ return $(device) $(dir) $(file) ;
+}
+
+
+# Converts a native VMS path into a portable path spec.
+#
+# Does not handle current-device absolute paths such as "[dir]File.c" as it is
+# not clear how to represent them in the portable path notation.
+#
+# Adds a trailing dot (".") to the file part if no extension is present (helps
+# when converting it back into native path).
+#
+rule make-VMS ( native )
+{
+ ## Use POSIX-style path (keep previous code commented out - real magic!).
+ ## VMS CRTL supports POSIX path, JAM is retrofitted to pass it to VMS CRTL.
+
+ local portable = [ make-UNIX $(native) ] ;
+
+ #if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
+ #{
+ # import errors ;
+ # errors.error "Can't handle default-device absolute paths: " $(native) ;
+ #}
+ #
+ #local parts = [ split-path-VMS $(native) ] ;
+ #local device = $(parts[1]) ;
+ #local dir = $(parts[2]) ;
+ #local file = $(parts[3]) ;
+ #local elems ;
+ #
+ #if $(device)
+ #{
+ # #
+ # # rooted
+ # #
+ # elems = /$(device) ;
+ #}
+ #
+ #if $(dir) = "[]"
+ #{
+ # #
+ # # Special case: current directory
+ # #
+ # elems = $(elems) "." ;
+ #}
+ #else if $(dir)
+ #{
+ # dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
+ # local dir_parts = [ regex.split $(dir) \\. ] ;
+ #
+ # if $(dir_parts[1]) = ""
+ # {
+ # #
+ # # Relative path
+ # #
+ # dir_parts = $(dir_parts[2--1]) ;
+ # }
+ #
+ # #
+ # # replace "parent-directory" parts (- => ..)
+ # #
+ # dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
+ #
+ # elems = $(elems) $(dir_parts) ;
+ #}
+ #
+ #if $(file)
+ #{
+ # if ! [ MATCH (\\.) : $(file) ]
+ # {
+ # #
+ # # Always add "." to end of non-extension file.
+ # #
+ # file = $(file). ;
+ # }
+ # elems = $(elems) $(file) ;
+ #}
+ #
+ #portable = [ path.join $(elems) ] ;
+
+ return $(portable) ;
+}
+
+
+# Converts a portable path spec into a native VMS path.
+#
+# Relies on having at least one dot (".") included in the file name to be able
+# to differentiate it from the directory part.
+#
+rule native-VMS ( path )
+{
+ ## Use POSIX-style path (keep previous code commented out - real magic!).
+ ## VMS CRTL supports POSIX path, JAM is retrofitted to pass it to VMS CRTL.
+ ## NOTE: While translation to VMS-style is implemented with $(:W) modifier,
+ ## Here we retain POSIX-style path, so it can be portably manipulated
+ ## in B2 rules, and only in actions it's translated with $(:W).
+
+ local native = [ native-UNIX $(path) ] ;
+
+ #local device = "" ;
+ #local dir = $(path) ;
+ #local file = "" ;
+ #local split ;
+ #
+ ##
+ ## Has device ?
+ ##
+ #if [ is-rooted $(dir) ]
+ #{
+ # split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
+ # device = $(split[1]) ;
+ # dir = $(split[2]) ;
+ #}
+ #
+ ##
+ ## Has file ?
+ ##
+ ## This is no exact science, just guess work:
+ ##
+ ## If the last part of the current path spec includes some chars, followed by
+ ## a dot, optionally followed by more chars - then it is a file (keep your
+ ## fingers crossed).
+ ##
+ #split = [ regex.split $(dir) / ] ;
+ #local maybe_file = $(split[-1]) ;
+ #
+ #if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
+ #{
+ # file = $(maybe_file) ;
+ # dir = [ sequence.join $(split[1--2]) : / ] ;
+ #}
+ #
+ ##
+ ## Has dir spec ?
+ ##
+ #if $(dir) = "."
+ #{
+ # dir = "[]" ;
+ #}
+ #else if $(dir)
+ #{
+ # dir = [ regex.replace $(dir) \\.\\. - ] ;
+ # dir = [ regex.replace $(dir) / . ] ;
+ #
+ # if $(device) = ""
+ # {
+ # #
+ # # Relative directory
+ # #
+ # dir = "."$(dir) ;
+ # }
+ # dir = "["$(dir)"]" ;
+ #}
+ #
+ #native = [ sequence.join $(device) $(dir) $(file) ] ;
+
+ return $(native) ;
+}
+
+
+if $(os) = VMS
+{
+ # Translates POSIX-style path to VMS-style path
+ #
+ # This results in actual VMS path, unlike 'native-VMS' rule which is meant
+ # to return POSIX-style in order to mask VMS specificity and help portability.
+
+ rule to-VMS ( path )
+ {
+ return $(path:W) ;
+ }
+
+ EXPORT $(__name__) : to-$(os) ;
+}
+
+# Remove one level of indirection
+IMPORT $(__name__) : make-$(os) native-$(os) : $(__name__) : make native ;
+EXPORT $(__name__) : make native ;
+
+rule __test__ ( )
+{
+ import assert ;
+ import errors : try catch ;
+
+ assert.true is-rooted "/" ;
+ assert.true is-rooted "/foo" ;
+ assert.true is-rooted "/foo/bar" ;
+ assert.result : is-rooted "." ;
+ assert.result : is-rooted "foo" ;
+ assert.result : is-rooted "foo/bar" ;
+
+ assert.true has-parent "foo" ;
+ assert.true has-parent "foo/bar" ;
+ assert.true has-parent "." ;
+ assert.result : has-parent "/" ;
+
+ assert.result "." : basename "." ;
+ assert.result ".." : basename ".." ;
+ assert.result "foo" : basename "foo" ;
+ assert.result "foo" : basename "bar/foo" ;
+ assert.result "foo" : basename "gaz/bar/foo" ;
+ assert.result "foo" : basename "/gaz/bar/foo" ;
+
+ assert.result "." : parent "foo" ;
+ assert.result "/" : parent "/foo" ;
+ assert.result "foo/bar" : parent "foo/bar/giz" ;
+ assert.result ".." : parent "." ;
+ assert.result ".." : parent "../foo" ;
+ assert.result "../../foo" : parent "../../foo/bar" ;
+
+ assert.result "." : reverse "." ;
+ assert.result ".." : reverse "foo" ;
+ assert.result "../../.." : reverse "foo/bar/giz" ;
+
+ assert.result "foo" : join "foo" ;
+ assert.result "/foo" : join "/" "foo" ;
+ assert.result "foo/bar" : join "foo" "bar" ;
+ assert.result "foo/bar" : join "foo/giz" "../bar" ;
+ assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
+ assert.result ".." : join "." ".." ;
+ assert.result ".." : join "foo" "../.." ;
+ assert.result "../.." : join "../foo" "../.." ;
+ assert.result "/foo" : join "/bar" "../foo" ;
+ assert.result "foo/giz" : join "foo/giz" "." ;
+ assert.result "." : join lib2 ".." ;
+ assert.result "/" : join "/a" ".." ;
+
+ assert.result /a/b : join /a/b/c .. ;
+
+ assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
+ assert.result "giz" : join "foo" ".." "giz" ;
+ assert.result "foo/giz" : join "foo" "." "giz" ;
+
+ try ;
+ {
+ join "a" "/b" ;
+ }
+ catch only first element may be rooted ;
+
+ local CWD = "/home/ghost/build" ;
+ assert.result : all-parents . : . : $(CWD) ;
+ assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
+ assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" :
+ $(CWD) ;
+ assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : ""
+ : $(CWD) ;
+
+ local CWD = "/home/ghost" ;
+ assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
+ assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
+
+ assert.result "c/d" : relative "a/b/c/d" "a/b" ;
+ assert.result "foo" : relative "foo" "." ;
+
+ assert.result "c/d" : relative-to "a/b" "a/b/c/d" ;
+ assert.result "foo" : relative-to "." "foo" ;
+ assert.result "../d" : relative-to "/a/b" "/a/d" ;
+ assert.result "x" : relative-to .. ../x ;
+ assert.result "/x" : relative-to x /x ;
+ try ;
+ {
+ relative-to "../x" "a" ;
+ }
+ catch Cannot find relative path from ../x to a ;
+ try ;
+ {
+ relative-to "../../x" "../a" ;
+ }
+ catch Cannot find relative path from ../../x to ../a ;
+ try ;
+ {
+ relative-to "/x/y" "a/b" ;
+ }
+ catch Cannot find relative path from /x/y to a/b ;
+
+ local save-os = [ modules.peek path : os ] ;
+ modules.poke path : os : NT ;
+
+ assert.result "foo/bar/giz" : make-NT "foo/bar/giz" ;
+ assert.result "foo/bar/giz" : make-NT "foo\\bar\\giz" ;
+ assert.result "foo" : make-NT "foo/" ;
+ assert.result "foo" : make-NT "foo\\" ;
+ assert.result "foo" : make-NT "foo/." ;
+ assert.result "foo" : make-NT "foo/bar/.." ;
+ assert.result "foo" : make-NT "foo/bar/../" ;
+ assert.result "foo" : make-NT "foo/bar/..\\" ;
+ assert.result "foo/bar" : make-NT "foo/././././bar" ;
+ assert.result "/foo" : make-NT "\\foo" ;
+ assert.result "/D:/My Documents" : make-NT "D:\\My Documents" ;
+ assert.result "/c:/boost/tools/build/new/project.jam" : make-NT
+ "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
+
+ # Test processing 'invalid' paths containing multiple successive path
+ # separators.
+ assert.result "foo" : make-NT "foo//" ;
+ assert.result "foo" : make-NT "foo///" ;
+ assert.result "foo" : make-NT "foo\\\\" ;
+ assert.result "foo" : make-NT "foo\\\\\\" ;
+ assert.result "/foo" : make-NT "//foo" ;
+ assert.result "/foo" : make-NT "///foo" ;
+ assert.result "/foo" : make-NT "\\\\foo" ;
+ assert.result "/foo" : make-NT "\\\\\\foo" ;
+ assert.result "/foo" : make-NT "\\/\\/foo" ;
+ assert.result "foo/bar" : make-NT "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ;
+ assert.result "foo" : make-NT "foo/bar//.." ;
+ assert.result "foo/bar" : make-NT "foo/bar/giz//.." ;
+ assert.result "foo/giz" : make-NT
+ "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ;
+ assert.result "../../../foo" : make-NT "..///.//..///.//..////foo///" ;
+
+ # Test processing 'invalid' rooted paths with too many '..' path elements
+ # that would place them before the root.
+ assert.result : make-NT "/.." ;
+ assert.result : make-NT "/../" ;
+ assert.result : make-NT "/../." ;
+ assert.result : make-NT "/.././" ;
+ assert.result : make-NT "/foo/../bar/giz/.././././../../." ;
+ assert.result : make-NT "/foo/../bar/giz/.././././../.././" ;
+ assert.result : make-NT "//foo/../bar/giz/.././././../../." ;
+ assert.result : make-NT "//foo/../bar/giz/.././././../.././" ;
+ assert.result : make-NT "\\\\foo/../bar/giz/.././././../../." ;
+ assert.result : make-NT "\\\\foo/../bar/giz/.././././../.././" ;
+ assert.result : make-NT "/..///.//..///.//..////foo///" ;
+
+ assert.result "foo\\bar\\giz" : native-NT "foo/bar/giz" ;
+ assert.result "foo" : native-NT "foo" ;
+ assert.result "\\foo" : native-NT "/foo" ;
+ assert.result "D:\\My Documents\\Work" : native-NT "/D:/My Documents/Work" ;
+
+ assert.result "../y" : relative-to "/C:/x" "/C:/y" ;
+ assert.result "/D:/test" : relative-to "/C:/test" "/D:/test" ;
+ try ;
+ {
+ relative-to "/C:/y" "a/b" ;
+ }
+ catch Cannot find relative path from "/C:/y" to a/b ;
+
+ modules.poke path : os : UNIX ;
+
+ assert.result "foo/bar/giz" : make-UNIX "foo/bar/giz" ;
+ assert.result "/sub1" : make-UNIX "/sub1/." ;
+ assert.result "/sub1" : make-UNIX "/sub1/sub2/.." ;
+ assert.result "sub1" : make-UNIX "sub1/." ;
+ assert.result "sub1" : make-UNIX "sub1/sub2/.." ;
+ assert.result "/foo/bar" : native-UNIX "/foo/bar" ;
+
+ modules.poke path : os : VMS ;
+
+ ## On VMS use POSIX-style path (keep previous tests commented out).
+
+ assert.result "foo/bar/giz" : make-VMS "foo/bar/giz" ;
+ assert.result "/sub1" : make-VMS "/sub1/." ;
+ assert.result "/sub1" : make-VMS "/sub1/sub2/.." ;
+ assert.result "sub1" : make-VMS "sub1/." ;
+ assert.result "sub1" : make-VMS "sub1/sub2/.." ;
+ assert.result "/foo/bar" : native-VMS "/foo/bar" ;
+
+ ##
+ ## Do not really need to poke os before these
+ ##
+ #assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
+ #assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
+ #assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+ #assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+ #assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
+ #assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
+ #assert.result "" "" "file" : split-path-VMS "file" ;
+ #assert.result "" "" "" : split-path-VMS "" ;
+ #
+ ##
+ ## Special case: current directory
+ ##
+ #assert.result "" "[]" "" : split-path-VMS "[]" ;
+ #assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
+ #assert.result "" "[]" "file" : split-path-VMS "[]file" ;
+ #assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
+ #
+ ##
+ ## Make portable paths
+ ##
+ #assert.result "/disk:" : make-VMS "disk:" ;
+ #assert.result "foo/bar/giz" : make-VMS "[.foo.bar.giz]" ;
+ #assert.result "foo" : make-VMS "[.foo]" ;
+ #assert.result "foo" : make-VMS "[.foo.bar.-]" ;
+ #assert.result ".." : make-VMS "[.-]" ;
+ #assert.result ".." : make-VMS "[-]" ;
+ #assert.result "." : make-VMS "[]" ;
+ #assert.result "giz.h" : make-VMS "giz.h" ;
+ #assert.result "foo/bar/giz.h" : make-VMS "[.foo.bar]giz.h" ;
+ #assert.result "/disk:/my_docs" : make-VMS "disk:[my_docs]" ;
+ #assert.result "/disk:/boost/tools/build/new/project.jam" : make-VMS
+ # "disk:[boost.tools.build.test.-.new]project.jam" ;
+ #
+ ##
+ ## Special case (adds '.' to end of file w/o extension to disambiguate from
+ ## directory in portable path spec)
+ ##
+ #assert.result "Jamfile." : make-VMS "Jamfile" ;
+ #assert.result "dir/Jamfile." : make-VMS "[.dir]Jamfile" ;
+ #assert.result "/disk:/dir/Jamfile." : make-VMS "disk:[dir]Jamfile" ;
+ #
+ ##
+ ## Make native paths
+ ##
+ #assert.result "disk:" : native-VMS "/disk:" ;
+ #assert.result "[.foo.bar.giz]" : native-VMS "foo/bar/giz" ;
+ #assert.result "[.foo]" : native-VMS "foo" ;
+ #assert.result "[.-]" : native-VMS ".." ;
+ #assert.result "[.foo.-]" : native-VMS "foo/.." ;
+ #assert.result "[]" : native-VMS "." ;
+ #assert.result "disk:[my_docs.work]" : native-VMS "/disk:/my_docs/work" ;
+ #assert.result "giz.h" : native-VMS "giz.h" ;
+ #assert.result "disk:Jamfile." : native-VMS "/disk:Jamfile." ;
+ #assert.result "disk:[my_docs.work]Jamfile." : native-VMS
+ # "/disk:/my_docs/work/Jamfile." ;
+
+ modules.poke path : os : $(save-os) ;
+}
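
Of the rules above, relative-to is the least obvious: it emits one '..' for every element of path1 that is not shared with path2 and then appends the unshared tail of path2. A simplified Python sketch of that idea, leaving out the error reporting and the Windows drive-letter special case (the name relative_to is only for the illustration):

    import posixpath

    def relative_to(path1, path2):
        # A rooted path2 reached from a relative path1 can only be returned unchanged.
        if path2.startswith('/') and not path1.startswith('/'):
            return path2
        p1 = [e for e in path1.split('/') if e not in ('', '.')]
        p2 = [e for e in path2.split('/') if e not in ('', '.')]
        while p1 and p2 and p1[0] == p2[0]:            # drop the common prefix
            p1.pop(0)
            p2.pop(0)
        # Climb out of what is left of path1, then descend into what is left of path2.
        return posixpath.normpath(posixpath.join('.', *(['..'] * len(p1) + p2)))

    # relative_to('a/b', 'a/b/c/d') -> 'c/d'
    # relative_to('/a/b', '/a/d')   -> '../d'
    # relative_to('.', 'foo')       -> 'foo'
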
diff --git a/src/boost/tools/build/src/util/path.py b/src/boost/tools/build/src/util/path.py
new file mode 100644
index 000000000..c535a7d56
--- /dev/null
+++ b/src/boost/tools/build/src/util/path.py
@@ -0,0 +1,937 @@
+# Status: this module is ported on demand by whoever needs something
+# from it. Functionality that is not needed by the Python port will
+# be dropped.
+
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# Performs various path manipulations. Paths are always in a 'normalized'
+# representation. In it, a path may be either:
+#
+# - '.', or
+#
+# - ['/'] [ ( '..' '/' )* (token '/')* token ]
+#
+# In plain English, a path can be rooted, '..' elements are allowed only
+# at the beginning, and it never ends in a slash, except for a path consisting
+# of a slash only.
+
+import os.path
+from utility import to_seq
+from glob import glob as builtin_glob
+
+from b2.util import bjam_signature
+
+@bjam_signature((["path", "root"],))
+def root (path, root):
+ """ If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
+ """
+ if os.path.isabs (path):
+ return path
+ else:
+ return os.path.join (root, path)
+
+@bjam_signature((["native"],))
+def make (native):
+ """ Converts the native path into normalized form.
+ """
+ # TODO: make os selection here.
+ return make_UNIX (native)
+
+@bjam_signature([['native']])
+def make_UNIX (native):
+
+    # VP: I have no idea how 'native' can be empty here! But it can!
+ assert (native)
+
+ return os.path.normpath (native)
+
+@bjam_signature((["path"],))
+def native (path):
+ """ Builds a native representation of the path.
+ """
+ # TODO: make os selection here.
+ return native_UNIX (path)
+
+def native_UNIX (path):
+ return path
+
+
+def pwd ():
+ """ Returns the current working directory.
+ # TODO: is it a good idea to use the current dir? Some use-cases
+ may not allow us to depend on the current dir.
+ """
+ return make (os.getcwd ())
+
+def is_rooted (path):
+ """ Tests if a path is rooted.
+ """
+ return path and path [0] == '/'
+
+
+###################################################################
+# Still to port.
+# Original lines are prefixed with "# "
+#
+# # Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# # distribute this software is granted provided this copyright notice appears in
+# # all copies. This software is provided "as is" without express or implied
+# # warranty, and with no claim as to its suitability for any purpose.
+#
+# # Performs various path manipulations. Path are always in a 'normilized'
+# # representation. In it, a path may be either:
+# #
+# # - '.', or
+# #
+# # - ['/'] [ ( '..' '/' )* (token '/')* token ]
+# #
+# # In plain english, path can be rooted, '..' elements are allowed only
+# # at the beginning, and it never ends in slash, except for path consisting
+# # of slash only.
+#
+# import modules ;
+# import sequence ;
+# import regex ;
+# import errors : error ;
+#
+#
+# os = [ modules.peek : OS ] ;
+# if [ modules.peek : UNIX ]
+# {
+# local uname = [ modules.peek : JAMUNAME ] ;
+# switch $(uname)
+# {
+# case CYGWIN* :
+# os = CYGWIN ;
+#
+# case * :
+# os = UNIX ;
+# }
+# }
+#
+# #
+# # Tests if a path is rooted.
+# #
+# rule is-rooted ( path )
+# {
+# return [ MATCH "^(/)" : $(path) ] ;
+# }
+#
+# #
+# # Tests if a path has a parent.
+# #
+# rule has-parent ( path )
+# {
+# if $(path) != / {
+# return 1 ;
+# } else {
+# return ;
+# }
+# }
+#
+# #
+# # Returns the path without any directory components.
+# #
+# rule basename ( path )
+# {
+# return [ MATCH "([^/]+)$" : $(path) ] ;
+# }
+#
+# #
+# # Returns parent directory of the path. If no parent exists, error is issued.
+# #
+# rule parent ( path )
+# {
+# if [ has-parent $(path) ] {
+#
+# if $(path) = . {
+# return .. ;
+# } else {
+#
+# # Strip everything at the end of path up to and including
+# # the last slash
+# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
+#
+# # Did we strip what we shouldn't?
+# if $(result[2]) = ".." {
+# return $(path)/.. ;
+# } else {
+# if ! $(result[1]) {
+# if [ is-rooted $(path) ] {
+# result = / ;
+# } else {
+# result = . ;
+# }
+# }
+# return $(result[1]) ;
+# }
+# }
+# } else {
+# error "Path '$(path)' has no parent" ;
+# }
+# }
+#
+# #
+# # Returns path2 such that "[ join path path2 ] = .".
+# # The path may not contain ".." element or be rooted.
+# #
+# rule reverse ( path )
+# {
+# if $(path) = .
+# {
+# return $(path) ;
+# }
+# else
+# {
+# local tokens = [ regex.split $(path) "/" ] ;
+# local tokens2 ;
+# for local i in $(tokens) {
+# tokens2 += .. ;
+# }
+# return [ sequence.join $(tokens2) : "/" ] ;
+# }
+# }
+def reverse(path):
+ """Returns path2 such that `os.path.join(path, path2) == '.'`.
+ `path` may not contain '..' or be rooted.
+
+ Args:
+ path (str): the path to reverse
+
+ Returns:
+ the string of the reversed path
+
+ Example:
+
+ >>> p1 = 'path/to/somewhere'
+ >>> p2 = reverse('path/to/somewhere')
+ >>> p2
+ '../../..'
+ >>> os.path.normpath(os.path.join(p1, p2))
+ '.'
+ """
+ if is_rooted(path) or '..' in path:
+ from b2.manager import get_manager
+ get_manager().errors()(
+ 'reverse(path): path is either rooted or contains ".." in the path')
+ if path == '.':
+ return path
+ path = os.path.normpath(path)
+ # os.sep.join() is being used over os.path.join() due
+ # to an extra '..' that is created by os.path.join()
+ return os.sep.join('..' for t in path.split(os.sep))
+# #
+# # Auxiliary rule: does all the semantic of 'join', except for error checking.
+# # The error checking is separated because this rule is recursive, and I don't
+# # like the idea of checking the same input over and over.
+# #
+# local rule join-imp ( elements + )
+# {
+# return [ NORMALIZE_PATH $(elements:J="/") ] ;
+# }
+#
+# #
+# # Contanenates the passed path elements. Generates an error if
+# # any element other than the first one is rooted.
+# #
+# rule join ( elements + )
+# {
+# if ! $(elements[2])
+# {
+# return $(elements[1]) ;
+# }
+# else
+# {
+# for local e in $(elements[2-])
+# {
+# if [ is-rooted $(e) ]
+# {
+# error only first element may be rooted ;
+# }
+# }
+# return [ join-imp $(elements) ] ;
+# }
+# }
+
+
+def glob (dirs, patterns):
+ """ Returns the list of files matching the given pattern in the
+ specified directory. Both directories and patterns are
+        supplied as portable paths. Each pattern should be a non-absolute
+        path, and can't contain "." or ".." elements. Each slash separated
+        element of pattern can contain the following special characters:
+        - '?', which matches any character
+ - '*', which matches arbitrary number of characters.
+ A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
+ if and only if e1 matches p1, e2 matches p2 and so on.
+
+ For example:
+ [ glob . : *.cpp ]
+ [ glob . : */build/Jamfile ]
+ """
+# {
+# local result ;
+# if $(patterns:D)
+# {
+# # When a pattern has a directory element, we first glob for
+# # directory, and then glob for file name is the found directories.
+# for local p in $(patterns)
+# {
+# # First glob for directory part.
+# local globbed-dirs = [ glob $(dirs) : $(p:D) ] ;
+# result += [ glob $(globbed-dirs) : $(p:D="") ] ;
+# }
+# }
+# else
+# {
+# # When a pattern has not directory, we glob directly.
+# # Take care of special ".." value. The "GLOB" rule simply ignores
+# # the ".." element (and ".") element in directory listings. This is
+# # needed so that
+# #
+# # [ glob libs/*/Jamfile ]
+# #
+# # don't return
+# #
+# # libs/../Jamfile (which is the same as ./Jamfile)
+# #
+# # On the other hand, when ".." is explicitly present in the pattern
+# # we need to return it.
+# #
+# for local dir in $(dirs)
+# {
+# for local p in $(patterns)
+# {
+# if $(p) != ".."
+# {
+# result += [ sequence.transform make
+# : [ GLOB [ native $(dir) ] : $(p) ] ] ;
+# }
+# else
+# {
+# result += [ path.join $(dir) .. ] ;
+# }
+# }
+# }
+# }
+# return $(result) ;
+# }
+#
+
+# TODO: (PF) I replaced the code above by this. I think it should work but needs to be tested.
+ result = []
+ dirs = to_seq (dirs)
+ patterns = to_seq (patterns)
+
+ splitdirs = []
+ for dir in dirs:
+ splitdirs += dir.split (os.pathsep)
+
+ for dir in splitdirs:
+ for pattern in patterns:
+ p = os.path.join (dir, pattern)
+ import glob
+ result.extend (glob.glob (p))
+ return result
+
+#
+# Finds the absolute name of 'path' and returns the list of all its parents,
+# starting with the immediate one. Parents are returned as relative names.
+# If 'upper_limit' is specified, directories above it will be pruned.
+#
+def all_parents(path, upper_limit=None, cwd=None):
+
+ if not cwd:
+ cwd = os.getcwd()
+
+ path_abs = os.path.join(cwd, path)
+
+ if upper_limit:
+ upper_limit = os.path.join(cwd, upper_limit)
+
+ result = []
+ while path_abs and path_abs != upper_limit:
+ (head, tail) = os.path.split(path)
+ path = os.path.join(path, "..")
+ result.append(path)
+ path_abs = head
+
+ if upper_limit and path_abs != upper_limit:
+ raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path))
+
+ return result
+
+# Searches for 'patterns' in parent directories of 'dir', up to and including
+# 'upper_limit', if it is specified, or up to the filesystem root otherwise.
+#
+def glob_in_parents(dir, patterns, upper_limit=None):
+
+ result = []
+ parent_dirs = all_parents(dir, upper_limit)
+
+ for p in parent_dirs:
+ result = glob(p, patterns)
+ if result: break
+
+ return result
+
+#
+# #
+# # Assuming 'child' is a subdirectory of 'parent', return the relative
+# # path from 'parent' to 'child'
+# #
+# rule relative ( child parent )
+# {
+# if $(parent) = "."
+# {
+# return $(child) ;
+# }
+# else
+# {
+# local split1 = [ regex.split $(parent) / ] ;
+# local split2 = [ regex.split $(child) / ] ;
+#
+# while $(split1)
+# {
+# if $(split1[1]) = $(split2[1])
+# {
+# split1 = $(split1[2-]) ;
+# split2 = $(split2[2-]) ;
+# }
+# else
+# {
+# errors.error $(child) is not a subdir of $(parent) ;
+# }
+# }
+# return [ join $(split2) ] ;
+# }
+# }
+#
+# # Returns the minimal path to path2 that is relative path1.
+# #
+# rule relative-to ( path1 path2 )
+# {
+# local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
+# local split1 = [ regex.split $(path1) / ] ;
+# local split2 = [ regex.split $(path2) / ] ;
+#
+# while $(split1) && $(root_1)
+# {
+# if $(split1[1]) = $(split2[1])
+# {
+# root_1 = $(root_1[2-]) ;
+# split1 = $(split1[2-]) ;
+# split2 = $(split2[2-]) ;
+# }
+# else
+# {
+# split1 = ;
+# }
+# }
+# return [ join . $(root_1) $(split2) ] ;
+# }
+
+# Returns the list of paths which are used by the operating system
+# for looking up programs
+def programs_path ():
+ raw = []
+ names = ['PATH', 'Path', 'path']
+
+ for name in names:
+ raw.append(os.environ.get (name, ''))
+
+ result = []
+ for elem in raw:
+ if elem:
+ for p in elem.split(os.path.pathsep):
+ # it's possible that the user's Path has
+ # double path separators, thus it is possible
+ # for p to be an empty string.
+ if p:
+ result.append(make(p))
+
+ return result
+
+# rule make-NT ( native )
+# {
+# local tokens = [ regex.split $(native) "[/\\]" ] ;
+# local result ;
+#
+# # Handle paths ending with slashes
+# if $(tokens[-1]) = ""
+# {
+# tokens = $(tokens[1--2]) ; # discard the empty element
+# }
+#
+# result = [ path.join $(tokens) ] ;
+#
+# if [ regex.match "(^.:)" : $(native) ]
+# {
+# result = /$(result) ;
+# }
+#
+# if $(native) = ""
+# {
+# result = "." ;
+# }
+#
+# return $(result) ;
+# }
+#
+# rule native-NT ( path )
+# {
+# local result = [ MATCH "^/?(.*)" : $(path) ] ;
+# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
+# return $(result) ;
+# }
+#
+# rule make-CYGWIN ( path )
+# {
+# return [ make-NT $(path) ] ;
+# }
+#
+# rule native-CYGWIN ( path )
+# {
+# local result = $(path) ;
+# if [ regex.match "(^/.:)" : $(path) ] # win absolute
+# {
+# result = [ MATCH "^/?(.*)" : $(path) ] ; # remove leading '/'
+# }
+# return [ native-UNIX $(result) ] ;
+# }
+#
+# #
+# # split-VMS: splits input native path into
+# # device dir file (each part is optional),
+# # example:
+# #
+# # dev:[dir]file.c => dev: [dir] file.c
+# #
+# rule split-path-VMS ( native )
+# {
+# local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
+# local device = $(matches[1]) ;
+# local dir = $(matches[2]) ;
+# local file = $(matches[3]) ;
+#
+# return $(device) $(dir) $(file) ;
+# }
+#
+# #
+# # Converts a native VMS path into a portable path spec.
+# #
+# # Does not handle current-device absolute paths such
+# # as "[dir]File.c" as it is not clear how to represent
+# # them in the portable path notation.
+# #
+# # Adds a trailing dot (".") to the file part if no extension
+# # is present (helps when converting it back into native path).
+# #
+# rule make-VMS ( native )
+# {
+# if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
+# {
+# errors.error "Can't handle default-device absolute paths: " $(native) ;
+# }
+#
+# local parts = [ split-path-VMS $(native) ] ;
+# local device = $(parts[1]) ;
+# local dir = $(parts[2]) ;
+# local file = $(parts[3]) ;
+# local elems ;
+#
+# if $(device)
+# {
+# #
+# # rooted
+# #
+# elems = /$(device) ;
+# }
+#
+# if $(dir) = "[]"
+# {
+# #
+# # Special case: current directory
+# #
+# elems = $(elems) "." ;
+# }
+# else if $(dir)
+# {
+# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
+# local dir_parts = [ regex.split $(dir) \\. ] ;
+#
+# if $(dir_parts[1]) = ""
+# {
+# #
+# # Relative path
+# #
+# dir_parts = $(dir_parts[2--1]) ;
+# }
+#
+# #
+# # replace "parent-directory" parts (- => ..)
+# #
+# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
+#
+# elems = $(elems) $(dir_parts) ;
+# }
+#
+# if $(file)
+# {
+# if ! [ MATCH (\\.) : $(file) ]
+# {
+# #
+# # Always add "." to end of non-extension file
+# #
+# file = $(file). ;
+# }
+# elems = $(elems) $(file) ;
+# }
+#
+# local portable = [ path.join $(elems) ] ;
+#
+# return $(portable) ;
+# }
+#
+# #
+# # Converts a portable path spec into a native VMS path.
+# #
+# # Relies on having at least one dot (".") included in the file
+# # name to be able to differentiate it from the directory part.
+# #
+# rule native-VMS ( path )
+# {
+# local device = "" ;
+# local dir = $(path) ;
+# local file = "" ;
+# local native ;
+# local split ;
+#
+# #
+# # Has device ?
+# #
+# if [ is-rooted $(dir) ]
+# {
+# split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
+# device = $(split[1]) ;
+# dir = $(split[2]) ;
+# }
+#
+# #
+# # Has file ?
+# #
+# # This is no exact science, just guess work:
+# #
+# # If the last part of the current path spec
+# # includes some chars, followed by a dot,
+# # optionally followed by more chars -
+# # then it is a file (keep your fingers crossed).
+# #
+# split = [ regex.split $(dir) / ] ;
+# local maybe_file = $(split[-1]) ;
+#
+# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
+# {
+# file = $(maybe_file) ;
+# dir = [ sequence.join $(split[1--2]) : / ] ;
+# }
+#
+# #
+# # Has dir spec ?
+# #
+# if $(dir) = "."
+# {
+# dir = "[]" ;
+# }
+# else if $(dir)
+# {
+# dir = [ regex.replace $(dir) \\.\\. - ] ;
+# dir = [ regex.replace $(dir) / . ] ;
+#
+# if $(device) = ""
+# {
+# #
+# # Relative directory
+# #
+# dir = "."$(dir) ;
+# }
+# dir = "["$(dir)"]" ;
+# }
+#
+# native = [ sequence.join $(device) $(dir) $(file) ] ;
+#
+# return $(native) ;
+# }
+#
+#
+# rule __test__ ( ) {
+#
+# import assert ;
+# import errors : try catch ;
+#
+# assert.true is-rooted "/" ;
+# assert.true is-rooted "/foo" ;
+# assert.true is-rooted "/foo/bar" ;
+# assert.result : is-rooted "." ;
+# assert.result : is-rooted "foo" ;
+# assert.result : is-rooted "foo/bar" ;
+#
+# assert.true has-parent "foo" ;
+# assert.true has-parent "foo/bar" ;
+# assert.true has-parent "." ;
+# assert.result : has-parent "/" ;
+#
+# assert.result "." : basename "." ;
+# assert.result ".." : basename ".." ;
+# assert.result "foo" : basename "foo" ;
+# assert.result "foo" : basename "bar/foo" ;
+# assert.result "foo" : basename "gaz/bar/foo" ;
+# assert.result "foo" : basename "/gaz/bar/foo" ;
+#
+# assert.result "." : parent "foo" ;
+# assert.result "/" : parent "/foo" ;
+# assert.result "foo/bar" : parent "foo/bar/giz" ;
+# assert.result ".." : parent "." ;
+# assert.result ".." : parent "../foo" ;
+# assert.result "../../foo" : parent "../../foo/bar" ;
+#
+#
+# assert.result "." : reverse "." ;
+# assert.result ".." : reverse "foo" ;
+# assert.result "../../.." : reverse "foo/bar/giz" ;
+#
+# assert.result "foo" : join "foo" ;
+# assert.result "/foo" : join "/" "foo" ;
+# assert.result "foo/bar" : join "foo" "bar" ;
+# assert.result "foo/bar" : join "foo/giz" "../bar" ;
+# assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
+# assert.result ".." : join "." ".." ;
+# assert.result ".." : join "foo" "../.." ;
+# assert.result "../.." : join "../foo" "../.." ;
+# assert.result "/foo" : join "/bar" "../foo" ;
+# assert.result "foo/giz" : join "foo/giz" "." ;
+# assert.result "." : join lib2 ".." ;
+# assert.result "/" : join "/a" ".." ;
+#
+# assert.result /a/b : join /a/b/c .. ;
+#
+# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
+# assert.result "giz" : join "foo" ".." "giz" ;
+# assert.result "foo/giz" : join "foo" "." "giz" ;
+#
+# try ;
+# {
+# join "a" "/b" ;
+# }
+# catch only first element may be rooted ;
+#
+# local CWD = "/home/ghost/build" ;
+# assert.result : all-parents . : . : $(CWD) ;
+# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
+# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
+# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
+#
+# local CWD = "/home/ghost" ;
+# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
+# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
+#
+# assert.result "c/d" : relative "a/b/c/d" "a/b" ;
+# assert.result "foo" : relative "foo" "." ;
+#
+# local save-os = [ modules.peek path : os ] ;
+# modules.poke path : os : NT ;
+#
+# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
+# assert.result "foo" : make "foo/." ;
+# assert.result "foo" : make "foo/bar/.." ;
+# assert.result "/D:/My Documents" : make "D:\\My Documents" ;
+# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
+#
+# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
+# assert.result "foo" : native "foo" ;
+# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
+#
+# modules.poke path : os : UNIX ;
+#
+# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+# assert.result "/sub1" : make "/sub1/." ;
+# assert.result "/sub1" : make "/sub1/sub2/.." ;
+# assert.result "sub1" : make "sub1/." ;
+# assert.result "sub1" : make "sub1/sub2/.." ;
+# assert.result "/foo/bar" : native "/foo/bar" ;
+#
+# modules.poke path : os : VMS ;
+#
+# #
+# # Don't really need to poke os before these
+# #
+# assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
+# assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
+# assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+# assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+# assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
+# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
+# assert.result "" "" "file" : split-path-VMS "file" ;
+# assert.result "" "" "" : split-path-VMS "" ;
+#
+# #
+# # Special case: current directory
+# #
+# assert.result "" "[]" "" : split-path-VMS "[]" ;
+# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
+# assert.result "" "[]" "file" : split-path-VMS "[]file" ;
+# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
+#
+# #
+# # Make portable paths
+# #
+# assert.result "/disk:" : make "disk:" ;
+# assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
+# assert.result "foo" : make "[.foo]" ;
+# assert.result "foo" : make "[.foo.bar.-]" ;
+# assert.result ".." : make "[.-]" ;
+# assert.result ".." : make "[-]" ;
+# assert.result "." : make "[]" ;
+# assert.result "giz.h" : make "giz.h" ;
+# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
+# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
+# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
+#
+# #
+# # Special case (adds '.' to end of file w/o extension to
+# # disambiguate from directory in portable path spec).
+# #
+# assert.result "Jamfile." : make "Jamfile" ;
+# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
+# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
+#
+# #
+# # Make native paths
+# #
+# assert.result "disk:" : native "/disk:" ;
+# assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
+# assert.result "[.foo]" : native "foo" ;
+# assert.result "[.-]" : native ".." ;
+# assert.result "[.foo.-]" : native "foo/.." ;
+# assert.result "[]" : native "." ;
+# assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
+# assert.result "giz.h" : native "giz.h" ;
+# assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
+# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
+#
+# modules.poke path : os : $(save-os) ;
+#
+# }
+
+#
+
+
+#def glob(dir, patterns):
+# result = []
+# for pattern in patterns:
+# result.extend(builtin_glob(os.path.join(dir, pattern)))
+# return result
+
+def glob(dirs, patterns, exclude_patterns=None):
+ """Returns the list of files matching the given pattern in the
+ specified directory. Both directories and patterns are
+ supplied as portable paths. Each pattern should be non-absolute
+ path, and can't contain '.' or '..' elements. Each slash separated
+ element of pattern can contain the following special characters:
+ - '?', which match any character
+ - '*', which matches arbitrary number of characters.
+ A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
+ if and only if e1 matches p1, e2 matches p2 and so on.
+ For example:
+ [ glob . : *.cpp ]
+ [ glob . : */build/Jamfile ]
+ """
+
+ assert(isinstance(patterns, list))
+ assert(isinstance(dirs, list))
+
+ if not exclude_patterns:
+ exclude_patterns = []
+ else:
+ assert(isinstance(exclude_patterns, list))
+
+ real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
+ real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns
+ for d in dirs]
+
+ inc = [os.path.normpath(name) for p in real_patterns
+ for name in builtin_glob(p)]
+ exc = [os.path.normpath(name) for p in real_exclude_patterns
+ for name in builtin_glob(p)]
+ return [x for x in inc if x not in exc]
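A minimal usage sketch of the glob helper above (assuming the module is importable as b2.util.path; the directory and pattern names below are purely illustrative):

    from b2.util import path

    # Collect every .cpp source directly under src/ and lib/,
    # skipping any file that matches the exclude pattern.
    sources = path.glob(["src", "lib"], ["*.cpp"],
                        exclude_patterns=["*_test.cpp"])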
+
+def glob_tree(roots, patterns, exclude_patterns=None):
+ """Recursive version of GLOB. Builds the glob of files while
+ also searching in the subdirectories of the given roots. An
+ optional set of exclusion patterns will filter out the
+ matching entries from the result. The exclusions also apply
+ to the subdirectory scanning, such that directories that
+ match the exclusion patterns will not be searched."""
+
+ if not exclude_patterns:
+ exclude_patterns = []
+
+ result = glob(roots, patterns, exclude_patterns)
+ subdirs = [s for s in glob(roots, ["*"], exclude_patterns) if s != "." and s != ".." and os.path.isdir(s)]
+ if subdirs:
+ result.extend(glob_tree(subdirs, patterns, exclude_patterns))
+
+ return result
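For glob_tree, a hedged sketch of typical use: the exclusion patterns prune both the matched files and the subdirectories that get walked (paths here are invented for the example):

    # Recursively find every Jamfile below the current directory,
    # without descending into bin output directories.
    jamfiles = glob_tree(["."], ["Jamfile"], exclude_patterns=["bin"])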
+
+def glob_in_parents(dir, patterns, upper_limit=None):
+ """Recursive version of GLOB which glob sall parent directories
+ of dir until the first match is found. Returns an empty result if no match
+ is found"""
+
+ assert(isinstance(dir, str))
+ assert(isinstance(patterns, list))
+
+ result = []
+
+ absolute_dir = os.path.join(os.getcwd(), dir)
+ absolute_dir = os.path.normpath(absolute_dir)
+ while absolute_dir:
+ new_dir = os.path.split(absolute_dir)[0]
+ if new_dir == absolute_dir:
+ break
+ result = glob([new_dir], patterns)
+ if result:
+ break
+ absolute_dir = new_dir
+
+ return result
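As an illustration, glob_in_parents is the kind of helper used to find a project marker by walking upwards from a starting directory (the file names are only an example):

    # Look in the parent directories of src/util for the nearest Jamroot.
    roots = glob_in_parents("src/util", ["Jamroot", "Jamroot.jam"])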
+
+
+# The relpath functionality is written by
+# Cimarron Taylor
+def split(p, rest=[]):
+ (h,t) = os.path.split(p)
+ if len(h) < 1: return [t]+rest
+ if len(t) < 1: return [h]+rest
+ return split(h,[t]+rest)
+
+def commonpath(l1, l2, common=[]):
+ if len(l1) < 1: return (common, l1, l2)
+ if len(l2) < 1: return (common, l1, l2)
+ if l1[0] != l2[0]: return (common, l1, l2)
+ return commonpath(l1[1:], l2[1:], common+[l1[0]])
+
+def relpath(p1, p2):
+ (common,l1,l2) = commonpath(split(p1), split(p2))
+ p = []
+ if len(l1) > 0:
+ p = [ '../' * len(l1) ]
+ p = p + l2
+ if p:
+ return os.path.join( *p )
+ else:
+ return "."
diff --git a/src/boost/tools/build/src/util/print.jam b/src/boost/tools/build/src/util/print.jam
new file mode 100644
index 000000000..814547f39
--- /dev/null
+++ b/src/boost/tools/build/src/util/print.jam
@@ -0,0 +1,508 @@
+# Copyright 2003 Douglas Gregor
+# Copyright 2002, 2003, 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Utilities for generating format-independent output. Using these
+# will help in generating documentation for, at minimum, plain/console
+# and html output.
+
+import modules ;
+import numbers ;
+import string ;
+import regex ;
+import "class" ;
+import scanner ;
+import path ;
+import os ;
+
+# The current output target. Defaults to console.
+output-target = console ;
+
+# The current output type. Defaults to plain. Other possible values are "html".
+output-type = plain ;
+
+# Whitespace.
+.whitespace = [ string.whitespace ] ;
+
+# Redirect
+.redirect-out = ">" ;
+.redirect-append = ">>" ;
+if [ os.name ] = VMS
+{
+ .redirect-out = "| TYPE SYS$INPUT /OUT=" ;
+ .redirect-append = "| APPEND/NEW SYS$INPUT " ;
+}
+
+# Set the target and type of output to generate. This sets both the destination
+# output and the type of docs to generate to that output. The target can be
+# either a file or "console" for echoing to the console. If the type of output
+# is not specified it defaults to plain text.
+#
+rule output (
+ target # The target file or device; file or "console".
+ type ? # The type of output; "plain" or "html".
+)
+{
+ type ?= plain ;
+ if $(output-target) != $(target)
+ {
+ output-target = $(target) ;
+ output-type = $(type) ;
+ if $(output-type) = html
+ {
+ text
+ "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">"
+ "<html>"
+ "<head>"
+ "</head>"
+ "<body link=\"#0000ff\" vlink=\"#800080\">"
+ : true
+ : prefix ;
+ text
+ "</body>"
+ "</html>"
+ :
+ : suffix ;
+ }
+ }
+}
+
+
+# Generate a section with a description. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule section (
+ name # The name of the section.
+ description * # A number of description lines.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words "$(name):" ] ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ name = [ escape-html $(name) ] ;
+ text <h3>$(name)</h3> <p> ;
+ }
+ local pre = ;
+ while $(description)
+ {
+ local paragraph = ;
+ while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; }
+ if $(pre)
+ {
+ while $(description) && (
+ $(pre) = " $(description[1])" ||
+ ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] )
+ )
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; }
+ pre = ;
+ if $(output-type) = plain
+ {
+ lines $(paragraph) "" : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ while $(description) && ! [ string.is-whitespace $(description[1]) ]
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ if $(paragraph[1]) = "::" && ! $(paragraph[2])
+ {
+ pre = " " ;
+ }
+ if $(paragraph[1]) = "::"
+ {
+ if $(output-type) = plain
+ {
+ lines $(paragraph[2-]) "" : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph[2-]) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ;
+ local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ;
+ p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ;
+ if $(p[3]) = "::"
+ {
+ pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ;
+ if ! $(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) "$(p[1]):" ; }
+ else { paragraph = $(paragraph[1--2]) $(p[1]) ; }
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ else
+ {
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ }
+ }
+ }
+ if $(output-type) = html
+ {
+ text </p> ;
+ }
+}
+
+
+# Generate the start of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-start ( )
+{
+ if $(output-type) = plain
+ {
+ }
+ else if $(output-type) = html
+ {
+ text <ul> ;
+ }
+}
+
+
+# Generate an item in a list. The type of output can be controlled by the value
+# of the 'output-type' variable.
+#
+rule list-item (
+ item + # The item to list.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words "*" $(item) ] : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <li> [ escape-html $(item) ] </li> ;
+ }
+}
+
+
+# Generate the end of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-end ( )
+{
+ if $(output-type) = plain
+ {
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </ul> ;
+ }
+}
+
+
+# Split the given text into separate lines, word-wrapping to a margin. The
+# default margin is 78 characters.
+#
+rule split-at-words (
+ text + # The text to split.
+ : margin ? # An optional margin, default is 78.
+)
+{
+ local lines = ;
+ text = [ string.words $(text:J=" ") ] ;
+ text = $(text:J=" ") ;
+ margin ?= 78 ;
+ local char-match-1 = ".?" ;
+ local char-match = "" ;
+ while $(margin) != 0
+ {
+ char-match = $(char-match)$(char-match-1) ;
+ margin = [ numbers.decrement $(margin) ] ;
+ }
+ while $(text)
+ {
+ local s = "" ;
+ local t = "" ;
+ # divide s into the first X characters and the rest
+ s = [ MATCH "^($(char-match))(.*)" : $(text) ] ;
+
+ if $(s[2])
+ {
+ # split the first half at a space
+ t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ;
+ }
+ else
+ {
+ t = $(s) ;
+ }
+
+ if ! $(t[2])
+ {
+ t += "" ;
+ }
+
+ text = $(t[2])$(s[2]) ;
+ lines += $(t[1]) ;
+ }
+ return $(lines) ;
+}
+
+
+# Generate a set of fixed lines. Each single item passed in is output on a
+# separate line. For console this just echoes each line, but for html this will
+# split them with <br>.
+#
+rule lines (
+ text * # The lines of text.
+ : indent ? # Optional indentation prepended to each line after the first.
+ outdent ? # Optional indentation to prepend to the first line.
+)
+{
+ text ?= "" ;
+ indent ?= "" ;
+ outdent ?= "" ;
+ if $(output-type) = plain
+ {
+ text $(outdent)$(text[1]) $(indent)$(text[2-]) ;
+ }
+ else if $(output-type) = html
+ {
+ local indent-chars = [ string.chars $(indent) ] ;
+ indent = "" ;
+ for local c in $(indent-chars)
+ {
+ if $(c) = " " { c = "&nbsp;" ; }
+ else if $(c) = " " { c = "&nbsp;&nbsp;&nbsp;&nbsp;" ; }
+ indent = $(indent)$(c) ;
+ }
+ local html-text = [ escape-html $(text) : "&nbsp;" ] ;
+ text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ;
+ }
+}
+
+
+# Output text directly to the current target. When doing output to a file, one
+# can indicate whether the text should be output as the "prefix", the "body"
+# (default), or the "suffix" of the file. This is independent of the actual
+# execution order of the text rule. This rule invokes a single action, executed
+# only once, which builds the file. Therefore actions on the target outside of
+# this rule will happen entirely before and/or after all output using this
+# rule.
+#
+rule text (
+ strings * # The strings of text to output.
+ : overwrite ? # True to overwrite the output (if it is a file).
+ : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for
+ # a file).
+)
+{
+ prefix-body-suffix ?= body ;
+ if $(output-target) = console
+ {
+ if ! $(strings)
+ {
+ ECHO ;
+ }
+ else
+ {
+ for local s in $(strings)
+ {
+ ECHO $(s) ;
+ }
+ }
+ }
+ if ! $($(output-target).did-action)
+ {
+ $(output-target).did-action = yes ;
+ $(output-target).text-prefix = ;
+ $(output-target).text-body = ;
+ $(output-target).text-suffix = ;
+
+ nl on $(output-target) = "
+" ;
+ text-redirect on $(output-target) = $(.redirect-append) ;
+ if $(overwrite)
+ {
+ text-redirect on $(output-target) = $(.redirect-out) ;
+ }
+ text-content on $(output-target) = ;
+
+ text-action $(output-target) ;
+
+ if $(overwrite) && $(output-target) != console
+ {
+ check-for-update $(output-target) ;
+ }
+ }
+ $(output-target).text-$(prefix-body-suffix) += $(strings) ;
+ text-content on $(output-target) =
+ $($(output-target).text-prefix)
+ $($(output-target).text-body)
+ $($(output-target).text-suffix) ;
+}
+
+
+# Outputs the text to the current targets, after word-wrapping it.
+#
+rule wrapped-text ( text + )
+{
+ local lines = [ split-at-words $(text) ] ;
+ text $(lines) ;
+}
+
+
+# Escapes text into html/xml printable equivalents. Does not know about tags,
+# so any tags fed into this will also be escaped. Currently escapes "&", "<",
+# ">", and (optionally) spaces.
+#
+rule escape-html (
+ text + # The text to escape.
+ : space ? # What to replace spaces with, defaults to " ".
+)
+{
+ local html-text = ;
+ while $(text)
+ {
+ local html = $(text[1]) ;
+ text = $(text[2-]) ;
+ html = [ regex.replace $(html) "&" "&amp;" ] ;
+ html = [ regex.replace $(html) "<" "&lt;" ] ;
+ html = [ regex.replace $(html) ">" "&gt;" ] ;
+ if $(space)
+ {
+ html = [ regex.replace $(html) " " "$(space)" ] ;
+ }
+ html-text += $(html) ;
+ }
+ return $(html-text) ;
+}
+
+
+# Outputs the text strings collected by the text rule to the output file.
+#
+actions quietly text-action
+{
+ @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)"
+}
+
+if [ os.name ] = VMS
+{
+ actions quietly text-action
+ {
+ @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) $(<:W)
+ }
+}
+
+rule get-scanner ( )
+{
+ if ! $(.scanner)
+ {
+ .scanner = [ class.new print-scanner ] ;
+ }
+ return $(.scanner) ;
+}
+
+
+# The following code to update print targets when their contents change is a
+# horrible hack. It basically creates a target which binds to this file
+# (print.jam) and installs a scanner on it which reads the target and compares
+# its contents to the new contents that we are writing.
+#
+rule check-for-update ( target )
+{
+ local scanner = [ get-scanner ] ;
+ local file = [ path.native [ modules.binding $(__name__) ] ] ;
+ local g = [ MATCH <(.*)> : $(target:G) ] ;
+ local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ;
+ DEPENDS $(target) : $(dependency-target) ;
+ SEARCH on $(dependency-target) = $(file:D) ;
+ ISFILE $(dependency-target) ;
+ NOUPDATE $(dependency-target) ;
+ base on $(dependency-target) = $(target) ;
+ scanner.install $(scanner) : $(dependency-target) ;
+ return $(dependency-target) ;
+}
+
+
+class print-scanner : scanner
+{
+ import path ;
+ import os ;
+
+ rule pattern ( )
+ {
+ return "(One match...)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local base = [ on $(target) return $(base) ] ;
+ local nl = [ on $(base) return $(nl) ] ;
+ local text-content = [ on $(base) return $(text-content) ] ;
+ local dir = [ on $(base) return $(LOCATE) ] ;
+ if $(dir)
+ {
+ dir = [ path.make $(dir) ] ;
+ }
+ local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ;
+ local actual-content ;
+ if [ os.name ] = NT
+ {
+ actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ;
+ }
+ else if [ os.name ] = VMS
+ {
+ actual-content = [ SHELL "PIPE TYPE $(file:W) 2>NL:" ] ;
+ }
+ else
+ {
+ actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ;
+ }
+ if $(text-content:J=$(nl)) != $(actual-content)
+ {
+ ALWAYS $(base) ;
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result one two three : split-at-words one two three : 5 ;
+ assert.result "one two" three : split-at-words one two three : 8 ;
+ assert.result "one two" three : split-at-words one two three : 9 ;
+ assert.result "one two three" : split-at-words one two three ;
+
+ # VP, 2004-12-03 The following test fails for some reason, so commenting it
+ # out.
+ #assert.result "one&nbsp;two&nbsp;three" "&amp;&lt;&gt;" :
+ # escape-html "one two three" "&<>" ;
+}
diff --git a/src/boost/tools/build/src/util/regex.jam b/src/boost/tools/build/src/util/regex.jam
new file mode 100644
index 000000000..6da5f1e2c
--- /dev/null
+++ b/src/boost/tools/build/src/util/regex.jam
@@ -0,0 +1,208 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Returns a list of the following substrings:
+# 1) from beginning till the first occurrence of 'separator' or till the end,
+# 2) between each occurrence of 'separator' and the next occurrence,
+# 3) from the last occurrence of 'separator' till the end.
+# If no separator is present, the result will contain only one element.
+#
+
+rule split ( string separator )
+{
+ local result ;
+ local s = $(string) ;
+
+    # Break pieces off 's' until it has no separators left.
+ local match = 1 ;
+ while $(match)
+ {
+ match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
+ if $(match)
+ {
+ match += "" ; # in case 3rd item was empty - works around MATCH bug
+ result = $(match[3]) $(result) ;
+ s = $(match[1]) ;
+ }
+ }
+ # Combine the remaining part at the beginning, which does not have
+ # separators, with the pieces broken off. Note that the rule's signature
+ # does not allow the initial s to be empty.
+ return $(s) $(result) ;
+}
+
+if [ HAS_NATIVE_RULE regex : split : 1 ]
+{
+ NATIVE_RULE regex : split ;
+}
+
+# Returns the concatenated results of applying regex.split to every element of
+# the list using the separator pattern.
+#
+rule split-list ( list * : separator )
+{
+ local result ;
+ for s in $(list)
+ {
+ result += [ split $(s) $(separator) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Match string against pattern, and return the elements indicated by indices.
+#
+rule match ( pattern : string : indices * )
+{
+ indices ?= 1 2 3 4 5 6 7 8 9 ;
+ local x = [ MATCH $(pattern) : $(string) ] ;
+ return $(x[$(indices)]) ;
+}
+
+
+# Matches all elements of 'list' against the 'pattern' and returns a list of
+# elements indicated by indices of all successful matches. If 'indices' is
+# omitted returns a list of first parenthesised groups of all successful
+# matches.
+#
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE regex : transform ;
+
+
+# Escapes all of the characters in symbols using the escape symbol escape-symbol
+# for the given string, and returns the escaped string.
+#
+rule escape ( string : symbols : escape-symbol )
+{
+ local result = "" ;
+ local m = 1 ;
+ while $(m)
+ {
+ m = [ MATCH "^([^$(symbols)]*)([$(symbols)])(.*)" : $(string) ] ;
+ if $(m)
+ {
+ m += "" ; # Supposedly a bug fix; borrowed from regex.split
+ result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ;
+ string = $(m[3]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(result)$(string)" ;
+ return $(result) ;
+}
+
+
+# Replaces occurrences of a match string in a given string and returns the new
+# string. The match string can be a regular expression.
+#
+rule replace (
+ string # The string to modify.
+ match # The characters to replace.
+ replacement # The string to replace with.
+ )
+{
+ local result = "" ;
+ local parts = 1 ;
+ while $(parts)
+ {
+ parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
+ if $(parts)
+ {
+ parts += "" ;
+ result = "$(replacement)$(parts[3])$(result)" ;
+ string = $(parts[1]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(string)$(result)" ;
+ return $(result) ;
+}
+
+if [ HAS_NATIVE_RULE regex : replace : 1 ]
+{
+ NATIVE_RULE regex : replace ;
+}
+
+
+# Replaces occurrences of a match string in a given list of strings and returns
+# a list of new strings. The match string can be a regular expression.
+#
+# list - the list of strings to modify.
+# match - the search expression.
+# replacement - the string to replace with.
+#
+rule replace-list ( list * : match : replacement )
+{
+ local result ;
+ for local e in $(list)
+ {
+ result += [ replace $(e) $(match) $(replacement) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result a b c : split "a/b/c" / ;
+ assert.result "" a b c : split "/a/b/c" / ;
+ assert.result "" "" a b c : split "//a/b/c" / ;
+ assert.result "" a "" b c : split "/a//b/c" / ;
+ assert.result "" a "" b c "" : split "/a//b/c/" / ;
+ assert.result "" a "" b c "" "" : split "/a//b/c//" / ;
+ assert.result "" a b c "" : split "abc" "" ;
+ assert.result "" "" : split "" "" ;
+
+ assert.result a c b d
+ : match (.)(.)(.)(.) : abcd : 1 3 2 4 ;
+
+ assert.result a b c d
+ : match (.)(.)(.)(.) : abcd ;
+
+ assert.result ababab cddc
+ : match "((ab)*)([cd]+)" : abababcddc : 1 3 ;
+
+ assert.result a.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <(.*)> ;
+
+ assert.result a.h b.h c.h
+ : transform <a.h> \"b.h\" <c.h> : "<([^>]*)>|\"([^\"]*)\"" : 1 2 ;
+
+ assert.result "^<?xml version=\"1.0\"^>"
+ : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ;
+
+ assert.result "<?xml version=\\\"1.0\\\">"
+ : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ;
+
+ assert.result "string&nbsp;string&nbsp;" : replace "string string " " " "&nbsp;" ;
+ assert.result "&nbsp;string&nbsp;string" : replace " string string" " " "&nbsp;" ;
+ assert.result "string&nbsp;&nbsp;string" : replace "string string" " " "&nbsp;" ;
+ assert.result "-" : replace "&" "&" "-" ;
+ assert.result "x" : replace "" "" "x" ;
+ assert.result "xax" : replace "a" "" "x" ;
+ assert.result "xaxbx" : replace "ab" "" "x" ;
+
+ assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ;
+}
diff --git a/src/boost/tools/build/src/util/regex.py b/src/boost/tools/build/src/util/regex.py
new file mode 100644
index 000000000..053c645f5
--- /dev/null
+++ b/src/boost/tools/build/src/util/regex.py
@@ -0,0 +1,63 @@
+# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import re
+
+from b2.util import bjam_signature
+
+
+def transform (list, pattern, indices = [1]):
+ """ Matches all elements of 'list' against the 'pattern'
+ and returns a list of the elements indicated by indices of
+ all successful matches. If 'indices' is omitted returns
+        a list of the first parenthesised groups of all successful
+ matches.
+ """
+ result = []
+
+ for e in list:
+ m = re.match (pattern, e)
+
+ if m:
+ for i in indices:
+ result.append (m.group (i))
+
+ return result
+
+
+@bjam_signature([['s', 'pattern', 'replacement']])
+def replace(s, pattern, replacement):
+ """Replaces occurrences of a match string in a given
+ string and returns the new string. The match string
+    can be a regular expression.
+
+ Args:
+ s (str): the string to modify
+ pattern (str): the search expression
+ replacement (str): the string to replace each match with
+ """
+ # the replacement string may contain invalid backreferences (like \1 or \g)
+ # which will cause python's regex to blow up. Since this should emulate
+ # the jam version exactly and the jam version didn't support
+ # backreferences, this version shouldn't either. re.sub
+ # allows replacement to be a callable; this is being used
+ # to simply return the replacement string and avoid the hassle
+ # of worrying about backreferences within the string.
+ def _replacement(matchobj):
+ return replacement
+ return re.sub(pattern, _replacement, s)
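Because the replacement is supplied as a callable, backslashes in it are never treated as backreferences; a small sketch of the intended behaviour (values are invented):

    replace("a+b", r"\+", r"\plus")            # -> 'a\plusb' (the backslash stays literal, no "bad escape" error)
    replace("colour color", "colou?r", "hue")  # -> 'hue hue'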
+
+
+@bjam_signature((['items', '*'], ['match'], ['replacement']))
+def replace_list(items, match, replacement):
+ """Replaces occurrences of a match string in a given list of strings and returns
+    a list of new strings. The match string can be a regular expression.
+
+ Args:
+ items (list): the list of strings to modify.
+ match (str): the search expression.
+ replacement (str): the string to replace with.
+ """
+ return [replace(item, match, replacement) for item in items]
diff --git a/src/boost/tools/build/src/util/sequence.jam b/src/boost/tools/build/src/util/sequence.jam
new file mode 100644
index 000000000..ddfd069c7
--- /dev/null
+++ b/src/boost/tools/build/src/util/sequence.jam
@@ -0,0 +1,378 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import assert ;
+import numbers ;
+import modules ;
+
+
+# Note that algorithms in this module execute largely in the caller's module
+# namespace, so that local rules can be used as function objects. Also note that
+# most predicates can be multi-element lists. In that case, all but the first
+# element are prepended to the first argument which is passed to the rule named
+# by the first element.
+
+
+# Return the elements e of $(sequence) for which [ $(predicate) e ] has a
+# non-null value.
+#
+rule filter ( predicate + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ if [ modules.call-in $(caller) : $(predicate) $(e) ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Return a new sequence consisting of [ $(function) $(e) ] for each element e of
+# $(sequence).
+#
+rule transform ( function + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ result += [ modules.call-in $(caller) : $(function) $(e) ] ;
+ }
+ return $(result) ;
+}
+
+if [ HAS_NATIVE_RULE sequence : transform : 1 ]
+{
+ NATIVE_RULE sequence : transform ;
+}
+
+# Returns the elements of 's' in reverse order
+rule reverse ( s * )
+{
+ local r ;
+ for local x in $(s)
+ {
+ r = $(x) $(r) ;
+ }
+ return $(r) ;
+}
+
+
+rule less ( a b )
+{
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+}
+
+
+# Insertion-sort s using the BinaryPredicate ordered.
+#
+rule insertion-sort ( s * : ordered * )
+{
+ if ! $(ordered)
+ {
+ return [ SORT $(s) ] ;
+ }
+ else
+ {
+ local caller = [ CALLER_MODULE ] ;
+ ordered ?= sequence.less ;
+ local result = $(s[1]) ;
+ if $(ordered) = sequence.less
+ {
+ local head tail ;
+ for local x in $(s[2-])
+ {
+ head = ;
+ tail = $(result) ;
+ while $(tail) && ( $(tail[1]) < $(x) )
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+ else
+ {
+ for local x in $(s[2-])
+ {
+ local head tail ;
+ tail = $(result) ;
+ while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ]
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+
+ return $(result) ;
+ }
+}
+
+
+# Merge two ordered sequences using the BinaryPredicate ordered.
+#
+rule merge ( s1 * : s2 * : ordered * )
+{
+ ordered ?= sequence.less ;
+ local result__ ;
+ local caller = [ CALLER_MODULE ] ;
+
+ while $(s1) && $(s2)
+ {
+ if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ]
+ {
+ result__ += $(s1[1]) ;
+ s1 = $(s1[2-]) ;
+ }
+ else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ]
+ {
+ result__ += $(s2[1]) ;
+ s2 = $(s2[2-]) ;
+ }
+ else
+ {
+ s2 = $(s2[2-]) ;
+ }
+
+ }
+ result__ += $(s1) ;
+ result__ += $(s2) ;
+
+ return $(result__) ;
+}
+
+# Compares two sequences lexicographically.
+#
+rule compare ( s1 * : s2 * : ordered * )
+{
+ if ! $(ordered)
+ {
+ if $(s1) < $(s2)
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ while true
+ {
+ if ! $(s2[1])-is-defined
+ {
+ return ;
+ }
+ else if ! $(s1[1])-is-defined
+ {
+ return true ;
+ }
+ else if [ $(ordered) $(s1[1]) $(s2[1]) ]
+ {
+ return true ;
+ }
+ else if [ $(ordered) $(s2[1]) $(s1[1]) ]
+ {
+ return ;
+ }
+ s1 = $(s1[2-]) ;
+ s2 = $(s2[2-]) ;
+ }
+ }
+}
+
+# Join the elements of s into one long string. If joint is supplied, it is used
+# as a separator.
+#
+rule join ( s * : joint ? )
+{
+ joint ?= "" ;
+ return $(s:J=$(joint)) ;
+}
+
+
+# Find the length of any sequence.
+#
+rule length ( s * )
+{
+ local result = 0 ;
+ for local i in $(s)
+ {
+ result = [ CALC $(result) + 1 ] ;
+ }
+ return $(result) ;
+}
+
+# Removes duplicates from 'list'. If 'stable' is
+# passed, then the order of the elements will
+# be unchanged.
+rule unique ( list * : stable ? )
+{
+ local result ;
+ local prev ;
+ if $(stable)
+ {
+ for local f in $(list)
+ {
+ if ! $(f) in $(result)
+ {
+ result += $(f) ;
+ }
+ }
+ }
+ else
+ {
+ for local i in [ SORT $(list) ]
+ {
+ if $(i) != $(prev)
+ {
+ result += $(i) ;
+ }
+ prev = $(i) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns the maximum number in 'elements'. Uses 'ordered' for comparisons or
+# 'numbers.less' if none is provided.
+#
+rule max-element ( elements + : ordered ? )
+{
+ ordered ?= numbers.less ;
+
+ local max = $(elements[1]) ;
+ for local e in $(elements[2-])
+ {
+ if [ $(ordered) $(max) $(e) ]
+ {
+ max = $(e) ;
+ }
+ }
+ return $(max) ;
+}
+
+
+# Returns all of 'elements' for which corresponding element in parallel list
+# 'rank' is equal to the maximum value in 'rank'.
+#
+rule select-highest-ranked ( elements * : ranks * )
+{
+ if $(elements)
+ {
+ local max-rank = [ max-element $(ranks) ] ;
+ local result ;
+ while $(elements)
+ {
+ if $(ranks[1]) = $(max-rank)
+ {
+ result += $(elements[1]) ;
+ }
+ elements = $(elements[2-]) ;
+ ranks = $(ranks[2-]) ;
+ }
+ return $(result) ;
+ }
+}
+NATIVE_RULE sequence : select-highest-ranked ;
+
+
+rule __test__ ( )
+{
+ # Use a unique module so we can test the use of local rules.
+ module sequence.__test__
+ {
+ import assert ;
+ import sequence ;
+
+ local rule is-even ( n )
+ {
+ if $(n) in 0 2 4 6 8
+ {
+ return true ;
+ }
+ }
+
+ assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ;
+
+ # Test that argument binding works.
+ local rule is-equal-test ( x y )
+ {
+ if $(x) = $(y)
+ {
+ return true ;
+ }
+ }
+
+ assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ;
+
+ local rule append-x ( n )
+ {
+ return $(n)x ;
+ }
+
+ assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ;
+
+ local rule repeat2 ( x )
+ {
+ return $(x) $(x) ;
+ }
+
+ assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ;
+
+ local rule test-greater ( a b )
+ {
+ if $(a) > $(b)
+ {
+ return true ;
+ }
+ }
+ assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ;
+ assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ;
+ assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ;
+ assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ;
+ assert.result 1 2 3 : sequence.merge 1 2 3 : ;
+ assert.result 1 : sequence.merge 1 : 1 ;
+
+ assert.result foo-bar-baz : sequence.join foo bar baz : - ;
+ assert.result substandard : sequence.join sub stan dard ;
+ assert.result 3.0.1 : sequence.join 3.0.1 : - ;
+
+ assert.result 0 : sequence.length ;
+ assert.result 3 : sequence.length a b c ;
+ assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ;
+
+ assert.result 1 : sequence.length a ;
+ assert.result 10 : sequence.length a b c d e f g h i j ;
+ assert.result 11 : sequence.length a b c d e f g h i j k ;
+ assert.result 12 : sequence.length a b c d e f g h i j k l ;
+
+ local p2 = x ;
+ for local i in 1 2 3 4 5 6 7 8
+ {
+ p2 = $(p2) $(p2) ;
+ }
+ assert.result 256 : sequence.length $(p2) ;
+
+ assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ;
+
+ assert.result 5 : sequence.max-element 1 3 5 0 4 ;
+
+ assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ;
+
+ assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ;
+ }
+}
diff --git a/src/boost/tools/build/src/util/sequence.py b/src/boost/tools/build/src/util/sequence.py
new file mode 100644
index 000000000..b5dddbade
--- /dev/null
+++ b/src/boost/tools/build/src/util/sequence.py
@@ -0,0 +1,58 @@
+# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import operator
+
+from b2.util import is_iterable
+
+
+def unique (values, stable=False):
+ assert is_iterable(values)
+ if stable:
+ s = set()
+ r = []
+ for v in values:
+ if not v in s:
+ r.append(v)
+ s.add(v)
+ return r
+ else:
+ return list(set(values))
+
+def max_element (elements, ordered = None):
+ """ Returns the maximum number in 'elements'. Uses 'ordered' for comparisons,
+        or '<' if none is provided.
+ """
+ assert is_iterable(elements)
+ assert callable(ordered) or ordered is None
+ if not ordered: ordered = operator.lt
+
+ max = elements [0]
+ for e in elements [1:]:
+ if ordered (max, e):
+ max = e
+
+ return max
+
+def select_highest_ranked (elements, ranks):
+ """ Returns all of 'elements' for which corresponding element in parallel
+ list 'rank' is equal to the maximum value in 'rank'.
+ """
+ assert is_iterable(elements)
+ assert is_iterable(ranks)
+ if not elements:
+ return []
+
+ max_rank = max_element (ranks)
+
+ result = []
+ while elements:
+ if ranks [0] == max_rank:
+ result.append (elements [0])
+
+ elements = elements [1:]
+ ranks = ranks [1:]
+
+ return result
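The same behaviour the jam __test__ above checks can be sketched for the Python version (values taken from that test):

    # Keep only the elements whose parallel rank equals the maximum rank (3).
    select_highest_ranked(["e-1", "e-3", "h-3", "m-2"], [1, 3, 3, 2])
    # -> ['e-3', 'h-3']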
diff --git a/src/boost/tools/build/src/util/set.jam b/src/boost/tools/build/src/util/set.jam
new file mode 100644
index 000000000..fc179134f
--- /dev/null
+++ b/src/boost/tools/build/src/util/set.jam
@@ -0,0 +1,93 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+class set
+{
+ rule __init__ ( )
+ {
+ }
+
+ rule add ( elements * )
+ {
+ for local e in $(elements)
+ {
+ if ! $($(e))
+ {
+ $(e) = 1 ;
+ self.result += $(e) ;
+ }
+ }
+ }
+
+ rule contains ( element )
+ {
+ return $($(element)) ;
+ }
+
+ rule list ( )
+ {
+ return $(self.result) ;
+ }
+}
+
+
+
+# Returns the elements of set1 that are not in set2.
+#
+rule difference ( set1 * : set2 * )
+{
+ local result = ;
+ for local element in $(set1)
+ {
+ if ! ( $(element) in $(set2) )
+ {
+ result += $(element) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE set : difference ;
+
+
+# Returns all the items appearing in both set1 & set2.
+#
+rule intersection ( set1 * : set2 * )
+{
+ local result ;
+ for local v in $(set1)
+ {
+ if $(v) in $(set2)
+ {
+ result += $(v) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns whether set1 & set2 contain the same elements. Note that this ignores
+# any element ordering differences as well as any element duplication.
+#
+rule equal ( set1 * : set2 * )
+{
+ if $(set1) in $(set2) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result 0 1 4 6 8 9 : difference 0 1 2 3 4 5 6 7 8 9 : 2 3 5 7 ;
+ assert.result 2 5 7 : intersection 0 1 2 4 5 6 7 8 9 : 2 3 5 7 ;
+
+ assert.true equal : ;
+ assert.true equal 1 1 2 3 : 3 2 2 1 ;
+ assert.false equal 2 3 : 3 2 2 1 ;
+}
diff --git a/src/boost/tools/build/src/util/set.py b/src/boost/tools/build/src/util/set.py
new file mode 100644
index 000000000..98b1d17f5
--- /dev/null
+++ b/src/boost/tools/build/src/util/set.py
@@ -0,0 +1,48 @@
+# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+from b2.util import is_iterable
+from .utility import to_seq
+
+
+def difference (b, a):
+ """ Returns the elements of B that are not in A.
+ """
+ a = set(a)
+ result = []
+ for item in b:
+ if item not in a:
+ result.append(item)
+ return result
+
+def intersection (set1, set2):
+ """ Removes from set1 any items which don't appear in set2 and returns the result.
+ """
+ assert is_iterable(set1)
+ assert is_iterable(set2)
+ result = []
+ for v in set1:
+ if v in set2:
+ result.append (v)
+ return result
+
+def contains (small, large):
+ """ Returns true iff all elements of 'small' exist in 'large'.
+ """
+ small = to_seq (small)
+ large = to_seq (large)
+
+ for s in small:
+ if not s in large:
+ return False
+ return True
+
+def equal (a, b):
+ """ Returns True iff 'a' contains the same elements as 'b', irrespective of their order.
+ # TODO: Python 2.4 has a proper set class.
+ """
+ assert is_iterable(a)
+ assert is_iterable(b)
+ return contains (a, b) and contains (b, a)
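Mirroring the jam __test__ cases earlier, a short sketch of how these helpers behave on plain lists:

    difference([0, 1, 2, 3, 4], [2, 3])        # -> [0, 1, 4]
    intersection([0, 1, 2, 4, 5], [2, 3, 5])   # -> [2, 5]
    equal([1, 1, 2, 3], [3, 2, 2, 1])          # -> True (order and duplicates ignored)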
diff --git a/src/boost/tools/build/src/util/string.jam b/src/boost/tools/build/src/util/string.jam
new file mode 100644
index 000000000..b7af7c2e4
--- /dev/null
+++ b/src/boost/tools/build/src/util/string.jam
@@ -0,0 +1,189 @@
+# Copyright 2002 Dave Abrahams
+# Copyright 2002, 2003 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import regex ;
+
+
+# Characters considered whitespace, as a list.
+.whitespace-chars = " " " " "
+" ;
+
+# Characters considered whitespace, as a single string.
+.whitespace = $(.whitespace-chars:J="") ;
+
+
+# Returns the canonical set of whitespace characters, as a list.
+#
+rule whitespace-chars ( )
+{
+ return $(.whitespace-chars) ;
+}
+
+
+# Returns the canonical set of whitespace characters, as a single string.
+#
+rule whitespace ( )
+{
+ return $(.whitespace) ;
+}
+
+
+# Splits the given string into a list of strings composed of each character of
+# the string in sequence.
+#
+rule chars (
+ string # The string to split.
+ )
+{
+ local result ;
+ while $(string)
+ {
+ local s = [ MATCH (.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.*) : $(string) ] ;
+ string = $(s[9]) ;
+ result += $(s[1-8]) ;
+ }
+
+ # Trim off empty strings.
+ while $(result[1]) && ! $(result[-1])
+ {
+ result = $(result[1--2]) ;
+ }
+
+ return $(result) ;
+}
+
+
+# Apply a set of standard transformations to string to produce an abbreviation
+# no more than 5 characters long.
+#
+rule abbreviate ( string )
+{
+ local r = $(.abbreviated-$(string)) ;
+ if $(r)
+ {
+ return $(r) ;
+ }
+ # Anything less than 4 characters gets no abbreviation.
+ else if ! [ MATCH (....) : $(string) ]
+ {
+ .abbreviated-$(string) = $(string) ;
+ return $(string) ;
+ }
+ else
+ {
+ # Separate the initial letter in case it's a vowel.
+ local s1 = [ MATCH ^(.)(.*) : $(string) ] ;
+
+ # Drop trailing "ing".
+ local s2 = [ MATCH ^(.*)ing$ : $(s1[2]) ] ;
+ s2 ?= $(s1[2]) ;
+
+ # Reduce all doubled characters to one.
+ local last = "" ;
+ for local c in [ chars $(s2) ]
+ {
+ if $(c) != $(last)
+ {
+ r += $(c) ;
+ last = $(c) ;
+ }
+ }
+ s2 = $(r:J="") ;
+
+ # Chop all vowels out of the remainder.
+ s2 = [ regex.replace $(s2) "[AEIOUaeiou]" "" ] ;
+
+ # Shorten remaining consonants to 4 characters.
+ s2 = [ MATCH ^(.?.?.?.?) : $(s2) ] ;
+
+ # Glue the initial character back on to the front.
+ s2 = $(s1[1])$(s2) ;
+
+ .abbreviated-$(string) = $(s2) ;
+ return $(s2) ;
+ }
+}
+
+
+# Concatenates the given strings, inserting the given separator between each
+# string.
+#
+rule join (
+ strings * # The strings to join.
+ : separator ? # The optional separator.
+ )
+{
+ separator ?= "" ;
+ return $(strings:J=$(separator)) ;
+}
+
+
+# Split a string into whitespace separated words.
+#
+rule words (
+ string # The string to split.
+ : whitespace * # Optional, characters to consider as whitespace.
+ )
+{
+ whitespace = $(whitespace:J="") ;
+ whitespace ?= $(.whitespace) ;
+ local w = ;
+ while $(string)
+ {
+ string = [ MATCH "^[$(whitespace)]*([^$(whitespace)]*)(.*)" : $(string) ] ;
+ if $(string[1]) && $(string[1]) != ""
+ {
+ w += $(string[1]) ;
+ }
+ string = $(string[2]) ;
+ }
+ return $(w) ;
+}
+
+
+# Check that the given string is composed entirely of whitespace.
+#
+rule is-whitespace (
+ string ? # The string to test.
+ )
+{
+ if ! $(string) { return true ; }
+ else if $(string) = "" { return true ; }
+ else if [ MATCH "^([$(.whitespace)]+)$" : $(string) ] { return true ; }
+ else { return ; }
+}
+
+rule __test__ ( )
+{
+ import assert ;
+ assert.result a b c : chars abc ;
+
+ assert.result rntm : abbreviate runtime ;
+ assert.result ovrld : abbreviate overload ;
+ assert.result dbg : abbreviate debugging ;
+ assert.result async : abbreviate asynchronous ;
+ assert.result pop : abbreviate pop ;
+ assert.result aaa : abbreviate aaa ;
+ assert.result qck : abbreviate quack ;
+ assert.result sttc : abbreviate static ;
+
+ # Check boundary cases.
+ assert.result a : chars a ;
+ assert.result : chars "" ;
+ assert.result a b c d e f g h : chars abcdefgh ;
+ assert.result a b c d e f g h i : chars abcdefghi ;
+ assert.result a b c d e f g h i j : chars abcdefghij ;
+ assert.result a b c d e f g h i j k : chars abcdefghijk ;
+
+ assert.result a//b/c/d : join a "" b c d : / ;
+ assert.result abcd : join a "" b c d ;
+
+ assert.result a b c : words "a b c" ;
+
+ assert.true is-whitespace " " ;
+ assert.false is-whitespace " a b c " ;
+ assert.true is-whitespace "" ;
+ assert.true is-whitespace ;
+}
diff --git a/src/boost/tools/build/src/util/utility.jam b/src/boost/tools/build/src/util/utility.jam
new file mode 100644
index 000000000..d2cdb004f
--- /dev/null
+++ b/src/boost/tools/build/src/util/utility.jam
@@ -0,0 +1,235 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : is-instance ;
+
+
+# For all elements of 'list' which do not already have 'suffix', add 'suffix'.
+#
+rule apply-default-suffix ( suffix : list * )
+{
+ local result ;
+ for local i in $(list)
+ {
+ if $(i:S) = $(suffix)
+ {
+ result += $(i) ;
+ }
+ else
+ {
+ result += $(i)$(suffix) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'name' contains a dot, returns the part before the last dot. If 'name'
+# contains no dot, returns it unmodified.
+#
+rule basename ( name )
+{
+ if $(name:S)
+ {
+ name = $(name:B) ;
+ }
+ return $(name) ;
+}
+
+
+# Return the file of the caller of the rule that called caller-file.
+#
+rule caller-file ( )
+{
+ local bt = [ BACKTRACE ] ;
+ return $(bt[9]) ;
+}
+
+
+# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal'
+# method. Uses ordinary jam's comparison otherwise.
+#
+rule equal ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).equal $(b) ] ;
+ }
+ else
+ {
+ if $(a) = $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less'
+# method. Uses ordinary jam's comparison otherwise.
+#
+rule less ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).less $(b) ] ;
+ }
+ else
+ {
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Returns the textual representation of argument. If it is a class instance,
+# calls its 'str' method. Otherwise, returns the argument.
+#
+rule str ( value )
+{
+ if [ is-instance $(value) ]
+ {
+ return [ $(value).str ] ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+# Accepts a list of gristed values and returns them ungristed. Reports an error
+# in case any of the passed parameters is not gristed, i.e. surrounded in angle
+# brackets < and >.
+#
+rule ungrist ( names * )
+{
+ local result ;
+ for local name in $(names)
+ {
+ local stripped = [ MATCH ^<(.*)>$ : $(name) ] ;
+ if ! $(stripped)-defined
+ {
+ import errors ;
+ local quoted-names = \"$(names)\" ;
+ errors.error "in" ungrist "$(quoted-names:J= ):" \"$(name)\" is not
+ of the form <.*> ;
+ }
+ result += $(stripped) ;
+ }
+ return $(result) ;
+}
+
+
+# If the passed value is quoted, unquotes it. Otherwise returns the value
+# unchanged.
+#
+rule unquote ( value ? )
+{
+ local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ;
+ if $(match-result)
+ {
+ return $(match-result[2]) ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+
+ assert.result 123 : str 123 ;
+
+ class test-class__
+ {
+ rule __init__ ( ) { }
+ rule str ( ) { return "str-test-class" ; }
+ rule less ( a ) { return "yes, of course!" ; }
+ rule equal ( a ) { return "not sure" ; }
+ }
+
+ assert.result "str-test-class" : str [ new test-class__ ] ;
+ assert.true less 1 2 ;
+ assert.false less 2 1 ;
+ assert.result "yes, of course!" : less [ new test-class__ ] 1 ;
+ assert.true equal 1 1 ;
+ assert.false equal 1 2 ;
+ assert.result "not sure" : equal [ new test-class__ ] 1 ;
+
+ assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib
+ ;
+
+ assert.result foo : basename foo ;
+ assert.result foo : basename foo.so ;
+ assert.result foo.so : basename foo.so.1 ;
+
+ assert.result : unquote ;
+ assert.result "" : unquote "" ;
+ assert.result "" : unquote \"\" ;
+ assert.result \" : unquote \"\"\" ;
+ assert.result \"\" : unquote \"\"\"\" ;
+ assert.result foo : unquote foo ;
+ assert.result \"foo : unquote \"foo ;
+ assert.result foo\" : unquote foo\" ;
+ assert.result foo : unquote \"foo\" ;
+ assert.result \"foo\" : unquote \"\"foo\"\" ;
+
+ assert.result : ungrist ;
+ assert.result "" : ungrist <> ;
+ assert.result foo : ungrist <foo> ;
+ assert.result <foo> : ungrist <<foo>> ;
+ assert.result foo bar : ungrist <foo> <bar> ;
+
+ try ;
+ {
+ ungrist "" ;
+ }
+ catch "in" ungrist "\"\":" \"\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo ;
+ }
+ catch "in" ungrist "\"foo\":" \"foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist <foo ;
+ }
+ catch "in" ungrist "\"<foo\":" \"<foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo> ;
+ }
+ catch "in" ungrist "\"foo>\":" \"foo>\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo bar ;
+ }
+ catch "in" ungrist "\"foo\" "\"bar\"":" \"foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo <bar> ;
+ }
+ catch "in" ungrist "\"foo\" "\"<bar>\"":" \"foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist <foo> bar ;
+ }
+ catch "in" ungrist "\"<foo>\" "\"bar\"":" \"bar\" is not of the form <.*> ;
+}
diff --git a/src/boost/tools/build/src/util/utility.py b/src/boost/tools/build/src/util/utility.py
new file mode 100644
index 000000000..ded3e5bcd
--- /dev/null
+++ b/src/boost/tools/build/src/util/utility.py
@@ -0,0 +1,176 @@
+# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+""" Utility functions to add/remove/get grists.
+    Grists are strings enclosed in angle brackets (<>) that are used as prefixes. See Jam for more information.
+"""
+
+import re
+import os
+import bjam
+from b2.exceptions import *
+from b2.util import is_iterable_typed
+
+__re_grist_and_value = re.compile (r'(<[^>]*>)(.*)')
+__re_grist_content = re.compile ('^<(.*)>$')
+__re_backslash = re.compile (r'\\')
+
+def to_seq (value):
+ """ If value is a sequence, returns it.
+ If it is a string, returns a sequence with value as its sole element.
+ """
+ if not value:
+ return []
+
+ if isinstance (value, str):
+ return [value]
+
+ else:
+ return value
+
+def replace_references_by_objects (manager, refs):
+ objs = []
+ for r in refs:
+ objs.append (manager.get_object (r))
+ return objs
+
+def add_grist (features):
+ """ Transform a string by bracketing it with "<>". If already bracketed, does nothing.
+ features: one string or a sequence of strings
+ return: the gristed string, if features is a string, or a sequence of gristed strings, if features is a sequence
+ """
+ assert is_iterable_typed(features, basestring) or isinstance(features, basestring)
+ def grist_one (feature):
+ if feature [0] != '<' and feature [len (feature) - 1] != '>':
+ return '<' + feature + '>'
+ else:
+ return feature
+
+ if isinstance (features, str):
+ return grist_one (features)
+ else:
+ return [ grist_one (feature) for feature in features ]
+
+def replace_grist (features, new_grist):
+ """ Replaces the grist of a string by a new one.
+ Returns the string with the new grist.
+ """
+ assert is_iterable_typed(features, basestring) or isinstance(features, basestring)
+ assert isinstance(new_grist, basestring)
+ # this function is used a lot in the build phase and the original implementation
+ # was extremely slow; thus some of the weird-looking optimizations for this function.
+ single_item = False
+ if isinstance(features, str):
+ features = [features]
+ single_item = True
+
+ result = []
+ for feature in features:
+ # '<feature>value' -> ('<feature', '>', 'value')
+ # 'something' -> ('something', '', '')
+ # '<toolset>msvc/<feature>value' -> ('<toolset', '>', 'msvc/<feature>value')
+ grist, split, value = feature.partition('>')
+ # if a partition didn't occur, then grist is just 'something'
+ # set the value to be the grist
+ if not value and not split:
+ value = grist
+ result.append(new_grist + value)
+
+ if single_item:
+ return result[0]
+ return result
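The partition-based fast path above can be illustrated with a couple of invented property strings:

    replace_grist("<toolset>msvc", "<variant>")   # -> '<variant>msvc'
    replace_grist("debug", "<variant>")           # -> '<variant>debug'
    replace_grist(["<a>1", "<b>2"], "<c>")        # -> ['<c>1', '<c>2']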
+
+def get_value (property):
+ """ Gets the value of a property, that is, the part following the grist, if any.
+ """
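+    # Example: get_value('<toolset>gcc') -> 'gcc'.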
+ assert is_iterable_typed(property, basestring) or isinstance(property, basestring)
+ return replace_grist (property, '')
+
+def get_grist (value):
+ """ Returns the grist of a string.
+ If value is a sequence, does it for every value and returns the result as a sequence.
+ """
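+    # Example: get_grist('<toolset>gcc') -> '<toolset>'; get_grist('plain') -> ''.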
+ assert is_iterable_typed(value, basestring) or isinstance(value, basestring)
+ def get_grist_one (name):
+ split = __re_grist_and_value.match (name)
+ if not split:
+ return ''
+ else:
+ return split.group (1)
+
+ if isinstance (value, str):
+ return get_grist_one (value)
+ else:
+ return [ get_grist_one (v) for v in value ]
+
+def ungrist (value):
+ """ Returns the value without grist.
+ If value is a sequence, does it for every value and returns the result as a sequence.
+ """
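+    # Example: ungrist('<toolset>') -> 'toolset'; a value without enclosing
+    # angle brackets raises an error.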
+ assert is_iterable_typed(value, basestring) or isinstance(value, basestring)
+ def ungrist_one (value):
+ stripped = __re_grist_content.match (value)
+ if not stripped:
+ raise BaseException ("in ungrist: '%s' is not of the form <.*>" % value)
+
+ return stripped.group (1)
+
+ if isinstance (value, str):
+ return ungrist_one (value)
+ else:
+ return [ ungrist_one (v) for v in value ]
+
+def replace_suffix (name, new_suffix):
+ """ Replaces the suffix of name by new_suffix.
+ If no suffix exists, the new one is added.
+ """
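+    # Example: replace_suffix('hello.cpp', '.obj') -> 'hello.obj'.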
+ assert isinstance(name, basestring)
+ assert isinstance(new_suffix, basestring)
+ split = os.path.splitext (name)
+ return split [0] + new_suffix
+
+def forward_slashes (s):
+ """ Converts all backslashes to forward slashes.
+ """
+ assert isinstance(s, basestring)
+ return s.replace('\\', '/')
+
+
+def split_action_id (id):
+ """ Splits an id in the toolset and specific rule parts. E.g.
+ 'gcc.compile.c++' returns ('gcc', 'compile.c++')
+ """
+ assert isinstance(id, basestring)
+ split = id.split ('.', 1)
+ toolset = split [0]
+ name = ''
+ if len (split) > 1:
+ name = split [1]
+ return (toolset, name)
+
+def os_name ():
+ result = bjam.variable("OS")
+ assert(len(result) == 1)
+ return result[0]
+
+def platform ():
+ return bjam.variable("OSPLAT")
+
+def os_version ():
+ return bjam.variable("OSVER")
+
+def on_windows ():
+    """ Returns true if running on Windows, whether in Cygwin or not.
+ """
+ if bjam.variable("NT"):
+ return True
+
+ elif bjam.variable("UNIX"):
+
+ uname = bjam.variable("JAMUNAME")
+ if uname and uname[0].startswith("CYGWIN"):
+ return True
+
+ return False
diff --git a/src/boost/tools/build/test/BoostBuild.py b/src/boost/tools/build/test/BoostBuild.py
new file mode 100644
index 000000000..209fba3bd
--- /dev/null
+++ b/src/boost/tools/build/test/BoostBuild.py
@@ -0,0 +1,1353 @@
+# Copyright 2002-2005 Vladimir Prus.
+# Copyright 2002-2003 Dave Abrahams.
+# Copyright 2006 Rene Rivera.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from __future__ import print_function
+
+import TestCmd
+
+import copy
+import fnmatch
+import glob
+import math
+import os
+import os.path
+import re
+import shutil
+try:
+ from StringIO import StringIO
+except:
+ from io import StringIO
+import subprocess
+import sys
+import tempfile
+import time
+import traceback
+import tree
+import types
+
+from xml.sax.saxutils import escape
+
+try:
+ from functools import reduce
+except:
+ pass
+
+
+def isstr(data):
+ return isinstance(data, (type(''), type(u'')))
+
+
+class TestEnvironmentError(Exception):
+ pass
+
+
+annotations = []
+
+
+def print_annotation(name, value, xml):
+ """Writes some named bits of information about the current test run."""
+ if xml:
+ print(escape(name) + " {{{")
+ print(escape(value))
+ print("}}}")
+ else:
+ print(name + " {{{")
+ print(value)
+ print("}}}")
+
+
+def flush_annotations(xml=0):
+ global annotations
+ for ann in annotations:
+ print_annotation(ann[0], ann[1], xml)
+ annotations = []
+
+
+def clear_annotations():
+ global annotations
+ annotations = []
+
+
+defer_annotations = 0
+
+def set_defer_annotations(n):
+ global defer_annotations
+ defer_annotations = n
+
+
+def annotate_stack_trace(tb=None):
+ if tb:
+ trace = TestCmd.caller(traceback.extract_tb(tb), 0)
+ else:
+ trace = TestCmd.caller(traceback.extract_stack(), 1)
+ annotation("stacktrace", trace)
+
+
+def annotation(name, value):
+ """Records an annotation about the test run."""
+ annotations.append((name, value))
+ if not defer_annotations:
+ flush_annotations()
+
+
+def get_toolset():
+ toolset = None
+ for arg in sys.argv[1:]:
+ if not arg.startswith("-"):
+ toolset = arg
+ return toolset or "gcc"
+
+
+# Detect the host OS.
+cygwin = hasattr(os, "uname") and os.uname()[0].lower().startswith("cygwin")
+windows = cygwin or os.environ.get("OS", "").lower().startswith("windows")
+
+if cygwin:
+ default_os = "cygwin"
+elif windows:
+ default_os = "windows"
+elif hasattr(os, "uname"):
+ default_os = os.uname()[0].lower()
+
+def prepare_prefixes_and_suffixes(toolset, target_os=default_os):
+ ind = toolset.find('-')
+ if ind == -1:
+ rtoolset = toolset
+ else:
+ rtoolset = toolset[:ind]
+ prepare_suffix_map(rtoolset, target_os)
+ prepare_library_prefix(rtoolset, target_os)
+
+
+def prepare_suffix_map(toolset, target_os=default_os):
+ """
+ Set up suffix translation performed by the Boost Build testing framework
+ to accommodate different toolsets generating targets of the same type using
+ different filename extensions (suffixes).
+
+ """
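+    # For example, with the default (non-Windows, non-Cygwin) mapping below,
+    # a test may refer to "hello.obj" and "hello.exe" while the toolset
+    # actually produces "hello.o" and "hello".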
+ global suffixes
+ suffixes = {}
+ if target_os == "cygwin":
+ suffixes[".lib"] = ".a"
+ suffixes[".obj"] = ".o"
+ suffixes[".implib"] = ".lib.a"
+ elif target_os == "windows":
+ if toolset == "gcc":
+ # MinGW
+ suffixes[".lib"] = ".a"
+ suffixes[".obj"] = ".o"
+ suffixes[".implib"] = ".dll.a"
+ else:
+ # Everything else Windows
+ suffixes[".implib"] = ".lib"
+ else:
+ suffixes[".exe"] = ""
+ suffixes[".dll"] = ".so"
+ suffixes[".lib"] = ".a"
+ suffixes[".obj"] = ".o"
+ suffixes[".implib"] = ".no_implib_files_on_this_platform"
+
+ if target_os == "darwin":
+ suffixes[".dll"] = ".dylib"
+
+
+def prepare_library_prefix(toolset, target_os=default_os):
+ """
+    Set up whether Boost Build is expected to automatically prepend prefixes
+ to its built library targets.
+
+ """
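+    # For example, a shared library target "foo" is expected as "libfoo..."
+    # on most platforms, as "cygfoo..." on Cygwin, and with no added prefix
+    # for non-gcc Windows toolsets.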
+ global lib_prefix
+ lib_prefix = "lib"
+
+ global dll_prefix
+ if target_os == "cygwin":
+ dll_prefix = "cyg"
+ elif target_os == "windows" and toolset != "gcc":
+ dll_prefix = None
+ else:
+ dll_prefix = "lib"
+
+
+def re_remove(sequence, regex):
+ me = re.compile(regex)
+ result = list(filter(lambda x: me.match(x), sequence))
+ if not result:
+ raise ValueError()
+ for r in result:
+ sequence.remove(r)
+
+
+def glob_remove(sequence, pattern):
+ result = list(fnmatch.filter(sequence, pattern))
+ if not result:
+ raise ValueError()
+ for r in result:
+ sequence.remove(r)
+
+
+class Tester(TestCmd.TestCmd):
+ """Main tester class for Boost Build.
+
+ Optional arguments:
+
+ `arguments` - Arguments passed to the run executable.
+ `executable` - Name of the executable to invoke.
+      `match`                      - Function to use for comparing actual and
+                                     expected file contents.
+ `boost_build_path` - Boost build path to be passed to the run
+ executable.
+      `translate_suffixes`         - Whether to update suffixes on the file
+ names passed from the test script so they
+ match those actually created by the current
+ toolset. For example, static library files
+ are specified by using the .lib suffix but
+ when the "gcc" toolset is used it actually
+ creates them using the .a suffix.
+ `pass_toolset` - Whether the test system should pass the
+ specified toolset to the run executable.
+ `use_test_config` - Whether the test system should tell the run
+ executable to read in the test_config.jam
+ configuration file.
+ `ignore_toolset_requirements` - Whether the test system should tell the run
+ executable to ignore toolset requirements.
+ `workdir` - Absolute directory where the test will be
+ run from.
+ `pass_d0` - If set, when tests are not explicitly run
+ in verbose mode, they are run as silent
+ (-d0 & --quiet Boost Jam options).
+
+ Optional arguments inherited from the base class:
+
+ `description` - Test description string displayed in case
+ of a failed test.
+ `subdir` - List of subdirectories to automatically
+ create under the working directory. Each
+ subdirectory needs to be specified
+ separately, parent coming before its child.
+ `verbose` - Flag that may be used to enable more
+ verbose test system output. Note that it
+ does not also enable more verbose build
+ system output like the --verbose command
+ line option does.
+ """
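+    # A minimal usage sketch from a test script (hypothetical file and target
+    # names; assumes a configured toolset):
+    #
+    #   t = BoostBuild.Tester(use_test_config=False)
+    #   t.write("jamroot.jam", "exe hello : hello.cpp ;")
+    #   t.write("hello.cpp", "int main() {}\n")
+    #   t.run_build_system()
+    #   t.expect_addition("bin/$toolset/debug*/hello.exe")
+    #   t.cleanup()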
+ def __init__(self, arguments=None, executable="b2",
+ match=TestCmd.match_exact, boost_build_path=None,
+ translate_suffixes=True, pass_toolset=True, use_test_config=True,
+ ignore_toolset_requirements=False, workdir="", pass_d0=False,
+ **keywords):
+
+ assert arguments.__class__ is not str
+ self.original_workdir = os.path.dirname(__file__)
+ if workdir and not os.path.isabs(workdir):
+            raise ValueError("Parameter workdir <%s> must point to an "
+                "absolute directory." % workdir)
+
+ self.last_build_timestamp = 0
+ self.translate_suffixes = translate_suffixes
+ self.use_test_config = use_test_config
+
+ self.toolset = get_toolset()
+ self.pass_toolset = pass_toolset
+ self.ignore_toolset_requirements = ignore_toolset_requirements
+
+ prepare_prefixes_and_suffixes(pass_toolset and self.toolset or "gcc")
+
+ use_default_bjam = "--default-bjam" in sys.argv
+
+ if not use_default_bjam:
+ jam_build_dir = ""
+
+ # Find where jam_src is located. Try for the debug version if it is
+ # lying around.
+ srcdir = os.path.join(os.path.dirname(__file__), "..", "src")
+ dirs = [os.path.join(srcdir, "engine", jam_build_dir + ".debug"),
+ os.path.join(srcdir, "engine", jam_build_dir)]
+ for d in dirs:
+ if os.path.exists(d):
+ jam_build_dir = d
+ break
+ else:
+ print("Cannot find built Boost.Jam")
+ sys.exit(1)
+
+ verbosity = ["-d0", "--quiet"]
+ if not pass_d0:
+ verbosity = []
+ if "--verbose" in sys.argv:
+ keywords["verbose"] = True
+ verbosity = ["-d2"]
+ self.verbosity = verbosity
+
+ if boost_build_path is None:
+ boost_build_path = self.original_workdir + "/.."
+
+ program_list = []
+ if use_default_bjam:
+ program_list.append(executable)
+ else:
+ program_list.append(os.path.join(jam_build_dir, executable))
+ program_list.append('-sBOOST_BUILD_PATH="' + boost_build_path + '"')
+ if arguments:
+ program_list += arguments
+
+ TestCmd.TestCmd.__init__(self, program=program_list, match=match,
+ workdir=workdir, inpath=use_default_bjam, **keywords)
+
+ os.chdir(self.workdir)
+
+ def cleanup(self):
+ try:
+ TestCmd.TestCmd.cleanup(self)
+ os.chdir(self.original_workdir)
+ except AttributeError:
+ # When this is called during TestCmd.TestCmd.__del__ we can have
+ # both 'TestCmd' and 'os' unavailable in our scope. Do nothing in
+ # this case.
+ pass
+
+ def set_toolset(self, toolset, target_os=default_os):
+ self.toolset = toolset
+ self.pass_toolset = True
+ prepare_prefixes_and_suffixes(toolset, target_os)
+
+
+ #
+ # Methods that change the working directory's content.
+ #
+ def set_tree(self, tree_location):
+ # It is not possible to remove the current directory.
+ d = os.getcwd()
+ os.chdir(os.path.dirname(self.workdir))
+ shutil.rmtree(self.workdir, ignore_errors=False)
+
+ if not os.path.isabs(tree_location):
+ tree_location = os.path.join(self.original_workdir, tree_location)
+ shutil.copytree(tree_location, self.workdir)
+
+ os.chdir(d)
+ def make_writable(unused, dir, entries):
+ for e in entries:
+ name = os.path.join(dir, e)
+ os.chmod(name, os.stat(name).st_mode | 0o222)
+ for root, _, files in os.walk("."):
+ make_writable(None, root, files)
+
+ def write(self, file, content, wait=True):
+ nfile = self.native_file_name(file)
+ self.__makedirs(os.path.dirname(nfile), wait)
+ if not type(content) == bytes:
+ content = content.encode()
+ f = open(nfile, "wb")
+ try:
+ f.write(content)
+ finally:
+ f.close()
+ self.__ensure_newer_than_last_build(nfile)
+
+ def copy(self, src, dst):
+ try:
+ self.write(dst, self.read(src, binary=True))
+ except:
+ self.fail_test(1)
+
+ def copy_preserving_timestamp(self, src, dst):
+ src_name = self.native_file_name(src)
+ dst_name = self.native_file_name(dst)
+ stats = os.stat(src_name)
+ self.write(dst, self.__read(src, binary=True))
+ os.utime(dst_name, (stats.st_atime, stats.st_mtime))
+
+ def touch(self, names, wait=True):
+ if isstr(names):
+ names = [names]
+ for name in names:
+ path = self.native_file_name(name)
+ if wait:
+ self.__ensure_newer_than_last_build(path)
+ else:
+ os.utime(path, None)
+
+ def rm(self, names):
+ if not type(names) == list:
+ names = [names]
+
+ if names == ["."]:
+ # If we are deleting the entire workspace, there is no need to wait
+ # for a clock tick.
+ self.last_build_timestamp = 0
+
+ # Avoid attempts to remove the current directory.
+ os.chdir(self.original_workdir)
+ for name in names:
+ n = glob.glob(self.native_file_name(name))
+ if n: n = n[0]
+ if not n:
+ n = self.glob_file(name.replace("$toolset", self.toolset + "*")
+ )
+ if n:
+ if os.path.isdir(n):
+ shutil.rmtree(n, ignore_errors=False)
+ else:
+ os.unlink(n)
+
+ # Create working dir root again in case we removed it.
+ if not os.path.exists(self.workdir):
+ os.mkdir(self.workdir)
+ os.chdir(self.workdir)
+
+ def expand_toolset(self, name):
+ """
+ Expands $toolset placeholder in the given file to the name of the
+ toolset currently being tested.
+
+ """
+ self.write(name, self.read(name).replace("$toolset", self.toolset))
+
+ def dump_stdio(self):
+ annotation("STDOUT", self.stdout())
+ annotation("STDERR", self.stderr())
+
+ def run_build_system(self, extra_args=None, subdir="", stdout=None,
+ stderr="", status=0, match=None, pass_toolset=None,
+ use_test_config=None, ignore_toolset_requirements=None,
+ expected_duration=None, **kw):
+
+ assert extra_args.__class__ is not str
+
+ if os.path.isabs(subdir):
+ print("You must pass a relative directory to subdir <%s>." % subdir
+ )
+ return
+
+ self.previous_tree, dummy = tree.build_tree(self.workdir)
+ self.wait_for_time_change_since_last_build()
+
+ if match is None:
+ match = self.match
+
+ if pass_toolset is None:
+ pass_toolset = self.pass_toolset
+
+ if use_test_config is None:
+ use_test_config = self.use_test_config
+
+ if ignore_toolset_requirements is None:
+ ignore_toolset_requirements = self.ignore_toolset_requirements
+
+ try:
+ kw["program"] = []
+ kw["program"] += self.program
+ if extra_args:
+ kw["program"] += extra_args
+ if not extra_args or not any(a.startswith("-j") for a in extra_args):
+ kw["program"] += ["-j1"]
+ if stdout is None and not any(a.startswith("-d") for a in kw["program"]):
+ kw["program"] += self.verbosity
+ if pass_toolset:
+ kw["program"].append("toolset=" + self.toolset)
+ if use_test_config:
+ kw["program"].append('--test-config="%s"' % os.path.join(
+ self.original_workdir, "test-config.jam"))
+ if ignore_toolset_requirements:
+ kw["program"].append("--ignore-toolset-requirements")
+ if "--python" in sys.argv:
+ # -z disables Python optimization mode.
+ # this enables type checking (all assert
+ # and if __debug__ statements).
+ kw["program"].extend(["--python", "-z"])
+ if "--stacktrace" in sys.argv:
+ kw["program"].append("--stacktrace")
+ kw["chdir"] = subdir
+ self.last_program_invocation = kw["program"]
+ build_time_start = time.time()
+ TestCmd.TestCmd.run(self, **kw)
+ build_time_finish = time.time()
+ except:
+ self.dump_stdio()
+ raise
+
+ old_last_build_timestamp = self.last_build_timestamp
+ self.tree, self.last_build_timestamp = tree.build_tree(self.workdir)
+ self.difference = tree.tree_difference(self.previous_tree, self.tree)
+ if self.difference.empty():
+ # If nothing has been changed by this build and sufficient time has
+ # passed since the last build that actually changed something,
+ # there is no need to wait for touched or newly created files to
+ # start getting newer timestamps than the currently existing ones.
+ self.last_build_timestamp = old_last_build_timestamp
+
+ self.difference.ignore_directories()
+ self.unexpected_difference = copy.deepcopy(self.difference)
+
+ if (status and self.status) is not None and self.status != status:
+ expect = ""
+ if status != 0:
+ expect = " (expected %d)" % status
+
+ annotation("failure", '"%s" returned %d%s' % (kw["program"],
+ self.status, expect))
+
+ annotation("reason", "unexpected status returned by bjam")
+ self.fail_test(1)
+
+ if stdout is not None and not match(self.stdout(), stdout):
+ stdout_test = match(self.stdout(), stdout)
+ annotation("failure", "Unexpected stdout")
+ annotation("Expected STDOUT", stdout)
+ annotation("Actual STDOUT", self.stdout())
+ stderr = self.stderr()
+ if stderr:
+ annotation("STDERR", stderr)
+ self.maybe_do_diff(self.stdout(), stdout, stdout_test)
+ self.fail_test(1, dump_stdio=False)
+
+ # Intel tends to produce some messages to stderr which make tests fail.
+ intel_workaround = re.compile("^xi(link|lib): executing.*\n", re.M)
+ actual_stderr = re.sub(intel_workaround, "", self.stderr())
+
+ if stderr is not None and not match(actual_stderr, stderr):
+ stderr_test = match(actual_stderr, stderr)
+ annotation("failure", "Unexpected stderr")
+ annotation("Expected STDERR", stderr)
+ annotation("Actual STDERR", self.stderr())
+ annotation("STDOUT", self.stdout())
+ self.maybe_do_diff(actual_stderr, stderr, stderr_test)
+ self.fail_test(1, dump_stdio=False)
+
+ if expected_duration is not None:
+ actual_duration = build_time_finish - build_time_start
+ if actual_duration > expected_duration:
+ print("Test run lasted %f seconds while it was expected to "
+ "finish in under %f seconds." % (actual_duration,
+ expected_duration))
+ self.fail_test(1, dump_stdio=False)
+
+ self.__ignore_junk()
+
+ def glob_file(self, name):
+ name = self.adjust_name(name)
+ result = None
+ if hasattr(self, "difference"):
+ for f in (self.difference.added_files +
+ self.difference.modified_files +
+ self.difference.touched_files):
+ if fnmatch.fnmatch(f, name):
+ result = self.__native_file_name(f)
+ break
+ if not result:
+ result = glob.glob(self.__native_file_name(name))
+ if result:
+ result = result[0]
+ return result
+
+ def __read(self, name, binary=False):
+ try:
+ openMode = "r"
+ if binary:
+ openMode += "b"
+ else:
+ openMode += "U"
+ f = open(name, openMode)
+ result = f.read()
+ f.close()
+ return result
+ except:
+ annotation("failure", "Could not open '%s'" % name)
+ self.fail_test(1)
+ return ""
+
+ def read(self, name, binary=False):
+ name = self.glob_file(name)
+ return self.__read(name, binary=binary)
+
+ def read_and_strip(self, name):
+ if not self.glob_file(name):
+ return ""
+ f = open(self.glob_file(name), "rb")
+ lines = f.readlines()
+ f.close()
+ result = "\n".join(x.decode().rstrip() for x in lines)
+ if lines and lines[-1][-1] != "\n":
+ return result + "\n"
+ return result
+
+ def fail_test(self, condition, dump_difference=True, dump_stdio=True,
+ dump_stack=True):
+ if not condition:
+ return
+
+ if dump_difference and hasattr(self, "difference"):
+ f = StringIO()
+ self.difference.pprint(f)
+ annotation("changes caused by the last build command",
+ f.getvalue())
+
+ if dump_stdio:
+ self.dump_stdio()
+
+ if "--preserve" in sys.argv:
+ print()
+ print("*** Copying the state of working dir into 'failed_test' ***")
+ print()
+ path = os.path.join(self.original_workdir, "failed_test")
+ if os.path.isdir(path):
+ shutil.rmtree(path, ignore_errors=False)
+ elif os.path.exists(path):
+                raise TestEnvironmentError("Path " + path +
+                    " already exists and is not a directory")
+ shutil.copytree(self.workdir, path)
+ print("The failed command was:")
+ print(" ".join(self.last_program_invocation))
+
+ if dump_stack:
+ annotate_stack_trace()
+ sys.exit(1)
+
+ # A number of methods below check expectations with actual difference
+ # between directory trees before and after a build. All the 'expect*'
+ # methods require exact names to be passed. All the 'ignore*' methods allow
+ # wildcards.
+
+ # All names can be either a string or a list of strings.
+ def expect_addition(self, names):
+ for name in self.adjust_names(names):
+ try:
+ glob_remove(self.unexpected_difference.added_files, name)
+ except:
+ annotation("failure", "File %s not added as expected" % name)
+ self.fail_test(1)
+
+ def ignore_addition(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.added_files,
+ wildcard)
+
+ def expect_removal(self, names):
+ for name in self.adjust_names(names):
+ try:
+ glob_remove(self.unexpected_difference.removed_files, name)
+ except:
+ annotation("failure", "File %s not removed as expected" % name)
+ self.fail_test(1)
+
+ def ignore_removal(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.removed_files,
+ wildcard)
+
+ def expect_modification(self, names):
+ for name in self.adjust_names(names):
+ try:
+ glob_remove(self.unexpected_difference.modified_files, name)
+ except:
+ annotation("failure", "File %s not modified as expected" %
+ name)
+ self.fail_test(1)
+
+ def ignore_modification(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.modified_files,
+ wildcard)
+
+ def expect_touch(self, names):
+ d = self.unexpected_difference
+ for name in self.adjust_names(names):
+ # We need to check both touched and modified files. The reason is
+ # that:
+ # (1) Windows binaries such as obj, exe or dll files have slight
+ # differences even with identical inputs due to Windows PE
+ # format headers containing an internal timestamp.
+ # (2) Intel's compiler for Linux has the same behaviour.
+ filesets = [d.modified_files, d.touched_files]
+
+ while filesets:
+ try:
+ glob_remove(filesets[-1], name)
+ break
+ except ValueError:
+ filesets.pop()
+
+ if not filesets:
+ annotation("failure", "File %s not touched as expected" % name)
+ self.fail_test(1)
+
+ def ignore_touch(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.touched_files,
+ wildcard)
+
+ def ignore(self, wildcard):
+ self.ignore_addition(wildcard)
+ self.ignore_removal(wildcard)
+ self.ignore_modification(wildcard)
+ self.ignore_touch(wildcard)
+
+ def expect_nothing(self, names):
+ for name in self.adjust_names(names):
+ if name in self.difference.added_files:
+ annotation("failure",
+ "File %s added, but no action was expected" % name)
+ self.fail_test(1)
+ if name in self.difference.removed_files:
+ annotation("failure",
+ "File %s removed, but no action was expected" % name)
+ self.fail_test(1)
+ pass
+ if name in self.difference.modified_files:
+ annotation("failure",
+ "File %s modified, but no action was expected" % name)
+ self.fail_test(1)
+ if name in self.difference.touched_files:
+ annotation("failure",
+ "File %s touched, but no action was expected" % name)
+ self.fail_test(1)
+
+ def __ignore_junk(self):
+ # Not totally sure about this change, but I do not see a good
+ # alternative.
+ if windows:
+ self.ignore("*.ilk") # MSVC incremental linking files.
+ self.ignore("*.pdb") # MSVC program database files.
+ self.ignore("*.rsp") # Response files.
+ self.ignore("*.tds") # Borland debug symbols.
+ self.ignore("*.manifest") # MSVC DLL manifests.
+ self.ignore("bin/standalone/msvc/*/msvc-setup.bat")
+
+ # Debug builds of bjam built with gcc produce this profiling data.
+ self.ignore("gmon.out")
+ self.ignore("*/gmon.out")
+
+ # Boost Build's 'configure' functionality (unfinished at the time)
+ # produces this file.
+ self.ignore("bin/config.log")
+ self.ignore("bin/project-cache.jam")
+
+ # Compiled Python files created when running Python based Boost Build.
+ self.ignore("*.pyc")
+
+ # OSX/Darwin files and dirs.
+ self.ignore("*.dSYM/*")
+
+ def expect_nothing_more(self):
+ if not self.unexpected_difference.empty():
+ annotation("failure", "Unexpected changes found")
+ output = StringIO()
+ self.unexpected_difference.pprint(output)
+ annotation("unexpected changes", output.getvalue())
+ self.fail_test(1)
+
+ def expect_output_lines(self, lines, expected=True):
+ self.__expect_lines(self.stdout(), lines, expected)
+
+ def expect_content_lines(self, filename, line, expected=True):
+ self.__expect_lines(self.read_and_strip(filename), line, expected)
+
+ def expect_content(self, name, content, exact=False):
+ actual = self.read(name)
+ content = content.replace("$toolset", self.toolset + "*")
+
+ matched = False
+ if exact:
+ matched = fnmatch.fnmatch(actual, content)
+ else:
+ def sorted_(z):
+ z.sort(key=lambda x: x.lower().replace("\\", "/"))
+ return z
+ actual_ = list(map(lambda x: sorted_(x.split()), actual.splitlines()))
+ content_ = list(map(lambda x: sorted_(x.split()), content.splitlines()))
+ if len(actual_) == len(content_):
+ matched = map(
+ lambda x, y: map(lambda n, p: fnmatch.fnmatch(n, p), x, y),
+ actual_, content_)
+ matched = reduce(
+ lambda x, y: x and reduce(
+ lambda a, b: a and b,
+ y, True),
+ matched, True)
+
+ if not matched:
+ print("Expected:\n")
+ print(content)
+ print("Got:\n")
+ print(actual)
+ self.fail_test(1)
+
+ def maybe_do_diff(self, actual, expected, result=None):
+ if os.environ.get("DO_DIFF"):
+ e = tempfile.mktemp("expected")
+ a = tempfile.mktemp("actual")
+ f = open(e, "w")
+ f.write(expected)
+ f.close()
+ f = open(a, "w")
+ f.write(actual)
+ f.close()
+ print("DIFFERENCE")
+ # Current diff should return 1 to indicate 'different input files'
+ # but some older diff versions may return 0 and depending on the
+ # exact Python/OS platform version, os.system() call may gobble up
+ # the external process's return code and return 0 itself.
+ if os.system('diff -u "%s" "%s"' % (e, a)) not in [0, 1]:
+ print('Unable to compute difference: diff -u "%s" "%s"' % (e, a
+ ))
+ os.unlink(e)
+ os.unlink(a)
+ elif type(result) is TestCmd.MatchError:
+ print(result.message)
+ else:
+ print("Set environmental variable 'DO_DIFF' to examine the "
+ "difference.")
+
+ # Internal methods.
+ def adjust_lib_name(self, name):
+ global lib_prefix
+ global dll_prefix
+ result = name
+
+ pos = name.rfind(".")
+ if pos != -1:
+ suffix = name[pos:]
+ if suffix == ".lib":
+ (head, tail) = os.path.split(name)
+ if lib_prefix:
+ tail = lib_prefix + tail
+ result = os.path.join(head, tail)
+ elif suffix == ".dll" or suffix == ".implib":
+ (head, tail) = os.path.split(name)
+ if dll_prefix:
+ tail = dll_prefix + tail
+ result = os.path.join(head, tail)
+ # If we want to use this name in a Jamfile, we better convert \ to /,
+ # as otherwise we would have to quote \.
+ result = result.replace("\\", "/")
+ return result
+
+ def adjust_suffix(self, name):
+ if not self.translate_suffixes:
+ return name
+ pos = name.rfind(".")
+ if pos == -1:
+ return name
+ suffix = name[pos:]
+ return name[:pos] + suffixes.get(suffix, suffix)
+
+    # Accepts either a string or a list of strings and returns a list of
+ # strings. Adjusts suffixes on all names.
+ def adjust_names(self, names):
+ if isstr(names):
+ names = [names]
+ r = map(self.adjust_lib_name, names)
+ r = map(self.adjust_suffix, r)
+ r = map(lambda x, t=self.toolset: x.replace("$toolset", t + "*"), r)
+ return list(r)
+
+ def adjust_name(self, name):
+ return self.adjust_names(name)[0]
+
+ def __native_file_name(self, name):
+ return os.path.normpath(os.path.join(self.workdir, *name.split("/")))
+
+ def native_file_name(self, name):
+ return self.__native_file_name(self.adjust_name(name))
+
+ def wait_for_time_change(self, path, touch):
+ """
+ Wait for newly assigned file system modification timestamps for the
+ given path to become large enough for the timestamp difference to be
+ correctly recognized by both this Python based testing framework and
+ the Boost Jam executable being tested. May optionally touch the given
+ path to set its modification timestamp to the new value.
+
+ """
+ self.__wait_for_time_change(path, touch, last_build_time=False)
+
+ def wait_for_time_change_since_last_build(self):
+ """
+ Wait for newly assigned file system modification timestamps to
+ become large enough for the timestamp difference to be
+ correctly recognized by the Python based testing framework.
+ Does not care about Jam's timestamp resolution, since we
+ only need this to detect touched files.
+ """
+ if self.last_build_timestamp:
+ timestamp_file = "timestamp-3df2f2317e15e4a9"
+ open(timestamp_file, "wb").close()
+ self.__wait_for_time_change_impl(timestamp_file,
+ self.last_build_timestamp,
+ self.__python_timestamp_resolution(timestamp_file, 0), 0)
+ os.unlink(timestamp_file)
+
+ def __build_timestamp_resolution(self):
+ """
+ Returns the minimum path modification timestamp resolution supported
+ by the used Boost Jam executable.
+
+ """
+ dir = tempfile.mkdtemp("bjam_version_info")
+ try:
+ jam_script = "timestamp_resolution.jam"
+ f = open(os.path.join(dir, jam_script), "w")
+ try:
+ f.write("EXIT $(JAM_TIMESTAMP_RESOLUTION) : 0 ;")
+ finally:
+ f.close()
+ p = subprocess.Popen([self.program[0], "-d0", "-f%s" % jam_script],
+ stdout=subprocess.PIPE, cwd=dir, universal_newlines=True)
+ out, err = p.communicate()
+ finally:
+ shutil.rmtree(dir, ignore_errors=False)
+
+ if p.returncode != 0:
+ raise TestEnvironmentError("Unexpected return code (%s) when "
+ "detecting Boost Jam's minimum supported path modification "
+ "timestamp resolution version information." % p.returncode)
+ if err:
+ raise TestEnvironmentError("Unexpected error output (%s) when "
+ "detecting Boost Jam's minimum supported path modification "
+ "timestamp resolution version information." % err)
+
+ r = re.match("([0-9]{2}):([0-9]{2}):([0-9]{2}\\.[0-9]{9})$", out)
+ if not r:
+ # Older Boost Jam versions did not report their minimum supported
+ # path modification timestamp resolution and did not actually
+ # support path modification timestamp resolutions finer than 1
+ # second.
+ # TODO: Phase this support out to avoid such fallback code from
+ # possibly covering up other problems.
+ return 1
+ if r.group(1) != "00" or r.group(2) != "00": # hours, minutes
+ raise TestEnvironmentError("Boost Jam with too coarse minimum "
+ "supported path modification timestamp resolution (%s:%s:%s)."
+ % (r.group(1), r.group(2), r.group(3)))
+ return float(r.group(3)) # seconds.nanoseconds
+
+ def __ensure_newer_than_last_build(self, path):
+ """
+ Updates the given path's modification timestamp after waiting for the
+ newly assigned file system modification timestamp to become large
+ enough for the timestamp difference between it and the last build
+ timestamp to be correctly recognized by both this Python based testing
+ framework and the Boost Jam executable being tested. Does nothing if
+ there is no 'last build' information available.
+
+ """
+ if self.last_build_timestamp:
+ self.__wait_for_time_change(path, touch=True, last_build_time=True)
+
+ def __expect_lines(self, data, lines, expected):
+ """
+ Checks whether the given data contains the given lines.
+
+ Data may be specified as a single string containing text lines
+ separated by newline characters.
+
+ Lines may be specified in any of the following forms:
+ * Single string containing text lines separated by newlines - the
+ given lines are searched for in the given data without any extra
+ data lines between them.
+ * Container of strings containing text lines separated by newlines
+ - the given lines are searched for in the given data with extra
+ data lines allowed between lines belonging to different strings.
+ * Container of strings containing text lines separated by newlines
+ and containers containing strings - the same as above with the
+ internal containers containing strings being interpreted as if
+ all their content was joined together into a single string
+ separated by newlines.
+
+ A newline at the end of any multi-line lines string is interpreted as
+        an expected extra trailing empty line.
+ """
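+        # For example, lines="a*\nb" requires a line matching "a*" to be
+        # followed immediately by a line "b", while lines=["a*", "b"] allows
+        # other output lines to appear between the two.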
+ # str.splitlines() trims at most one trailing newline while we want the
+ # trailing newline to indicate that there should be an extra empty line
+ # at the end.
+ def splitlines(x):
+ return (x + "\n").splitlines()
+
+ if data is None:
+ data = []
+ elif isstr(data):
+ data = splitlines(data)
+
+ if isstr(lines):
+ lines = [splitlines(lines)]
+ else:
+ expanded = []
+ for x in lines:
+ if isstr(x):
+ x = splitlines(x)
+ expanded.append(x)
+ lines = expanded
+
+ if _contains_lines(data, lines) != bool(expected):
+ output = []
+ if expected:
+ output = ["Did not find expected lines:"]
+ else:
+ output = ["Found unexpected lines:"]
+ first = True
+ for line_sequence in lines:
+ if line_sequence:
+ if first:
+ first = False
+ else:
+ output.append("...")
+ output.extend(" > " + line for line in line_sequence)
+ output.append("in output:")
+ output.extend(" > " + line for line in data)
+ annotation("failure", "\n".join(output))
+ self.fail_test(1)
+
+ def __ignore_elements(self, things, wildcard):
+ """Removes in-place 'things' elements matching the given 'wildcard'."""
+ things[:] = list(filter(lambda x: not fnmatch.fnmatch(x, wildcard), things))
+
+ def __makedirs(self, path, wait):
+ """
+ Creates a folder with the given path, together with any missing
+ parent folders. If WAIT is set, makes sure any newly created folders
+ have modification timestamps newer than the ones left behind by the
+ last build run.
+
+ """
+ try:
+ if wait:
+ stack = []
+ while path and path not in stack and not os.path.isdir(path):
+ stack.append(path)
+ path = os.path.dirname(path)
+ while stack:
+ path = stack.pop()
+ os.mkdir(path)
+ self.__ensure_newer_than_last_build(path)
+ else:
+ os.makedirs(path)
+ except Exception:
+ pass
+
+ def __python_timestamp_resolution(self, path, minimum_resolution):
+ """
+ Returns the modification timestamp resolution for the given path
+ supported by the used Python interpreter/OS/filesystem combination.
+ Will not check for resolutions less than the given minimum value. Will
+ change the path's modification timestamp in the process.
+
+ Return values:
+ 0 - nanosecond resolution supported
+ positive decimal - timestamp resolution in seconds
+
+ """
+ # Note on Python's floating point timestamp support:
+ # Python interpreter versions prior to Python 2.3 did not support
+ # floating point timestamps. Versions 2.3 through 3.3 may or may not
+ # support it depending on the configuration (may be toggled by calling
+ # os.stat_float_times(True/False) at program startup, disabled by
+ # default prior to Python 2.5 and enabled by default since). Python 3.3
+ # deprecated this configuration and 3.4 removed support for it after
+ # which floating point timestamps are always supported.
+ ver = sys.version_info[0:2]
+ python_nanosecond_support = ver >= (3, 4) or (ver >= (2, 3) and
+ os.stat_float_times())
+
+ # Minimal expected floating point difference used to account for
+ # possible imprecise floating point number representations. We want
+ # this number to be small (at least smaller than 0.0001) but still
+ # large enough that we can be sure that increasing a floating point
+ # value by 2 * eta guarantees the value read back will be increased by
+ # at least eta.
+ eta = 0.00005
+
+ stats_orig = os.stat(path)
+ def test_time(diff):
+ """Returns whether a timestamp difference is detectable."""
+ os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime + diff))
+ return os.stat(path).st_mtime > stats_orig.st_mtime + eta
+
+ # Test for nanosecond timestamp resolution support.
+ if not minimum_resolution and python_nanosecond_support:
+ if test_time(2 * eta):
+ return 0
+
+ # Detect the filesystem timestamp resolution. Note that there is no
+        # need to make this code 'as fast as possible', as this function gets
+ # called before having to sleep until the next detectable modification
+ # timestamp value and that, since we already know nanosecond resolution
+ # is not supported, will surely take longer than whatever we do here to
+ # detect this minimal detectable modification timestamp resolution.
+ step = 0.1
+ if not python_nanosecond_support:
+ # If Python does not support nanosecond timestamp resolution we
+ # know the minimum possible supported timestamp resolution is 1
+ # second.
+ minimum_resolution = max(1, minimum_resolution)
+ index = max(1, int(minimum_resolution / step))
+ while step * index < minimum_resolution:
+ # Floating point number representation errors may cause our
+ # initially calculated start index to be too small if calculated
+ # directly.
+ index += 1
+ while True:
+ # Do not simply add up the steps to avoid cumulative floating point
+ # number representation errors.
+ next = step * index
+ if next > 10:
+ raise TestEnvironmentError("File systems with too coarse "
+ "modification timestamp resolutions not supported.")
+ if test_time(next):
+ return next
+ index += 1
+
+ def __wait_for_time_change(self, path, touch, last_build_time):
+ """
+ Wait until a newly assigned file system modification timestamp for
+ the given path is large enough for the timestamp difference between it
+ and the last build timestamp or the path's original file system
+ modification timestamp (depending on the last_build_time flag) to be
+ correctly recognized by both this Python based testing framework and
+ the Boost Jam executable being tested. May optionally touch the given
+ path to set its modification timestamp to the new value.
+
+ """
+ assert self.last_build_timestamp or not last_build_time
+ stats_orig = os.stat(path)
+
+ if last_build_time:
+ start_time = self.last_build_timestamp
+ else:
+ start_time = stats_orig.st_mtime
+
+ build_resolution = self.__build_timestamp_resolution()
+ assert build_resolution >= 0
+
+ # Check whether the current timestamp is already new enough.
+ if stats_orig.st_mtime > start_time and (not build_resolution or
+ stats_orig.st_mtime >= start_time + build_resolution):
+ return
+
+ resolution = self.__python_timestamp_resolution(path, build_resolution)
+ assert resolution >= build_resolution
+ self.__wait_for_time_change_impl(path, start_time, resolution, build_resolution)
+
+ if not touch:
+ os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime))
+
+ def __wait_for_time_change_impl(self, path, start_time, resolution, build_resolution):
+ # Implementation notes:
+ # * Theoretically time.sleep() API might get interrupted too soon
+ # (never actually encountered).
+ # * We encountered cases where we sleep just long enough for the
+        #   filesystem's modification timestamp to change to the desired value,
+ # but after waking up, the read timestamp is still just a tiny bit
+ # too small (encountered on Windows). This is most likely caused by
+ # imprecise floating point timestamp & sleep interval representation
+ # used by Python. Note though that we never encountered a case where
+ # more than one additional tiny sleep() call was needed to remedy
+ # the situation.
+ # * We try to wait long enough for the timestamp to change, but do not
+ # want to waste processing time by waiting too long. The main
+ # problem is that when we have a coarse resolution, the actual times
+ # get rounded and we do not know the exact sleep time needed for the
+ # difference between two such times to pass. E.g. if we have a 1
+ # second resolution and the original and the current file timestamps
+ # are both 10 seconds then it could be that the current time is
+ # 10.99 seconds and that we can wait for just one hundredth of a
+ # second for the current file timestamp to reach its next value, and
+ # using a longer sleep interval than that would just be wasting
+ # time.
+ while True:
+ os.utime(path, None)
+ c = os.stat(path).st_mtime
+ if resolution:
+ if c > start_time and (not build_resolution or c >= start_time
+ + build_resolution):
+ break
+ if c <= start_time - resolution:
+ # Move close to the desired timestamp in one sleep, but not
+ # close enough for timestamp rounding to potentially cause
+ # us to wait too long.
+ if start_time - c > 5:
+                        raise TestEnvironmentError("Last build time or "
+                            "original path modification timestamp recorded "
+                            "as being far in the future, causing a too long "
+                            "wait period. Something must have played around "
+                            "with the system clock.\nPath: '%s'" % path)
+ _sleep(start_time - c)
+ else:
+ # We are close to the desired timestamp so take baby sleeps
+ # to avoid sleeping too long.
+ _sleep(max(0.01, resolution / 10))
+ else:
+ if c > start_time:
+ break
+ _sleep(max(0.01, start_time - c))
+
+
+class List:
+ def __init__(self, s=""):
+ elements = []
+ if isstr(s):
+ # Have to handle escaped spaces correctly.
+ elements = s.replace("\ ", "\001").split()
+ else:
+ elements = s
+ self.l = [e.replace("\001", " ") for e in elements]
+
+ def __len__(self):
+ return len(self.l)
+
+ def __getitem__(self, key):
+ return self.l[key]
+
+ def __setitem__(self, key, value):
+ self.l[key] = value
+
+ def __delitem__(self, key):
+ del self.l[key]
+
+ def __str__(self):
+ return str(self.l)
+
+ def __repr__(self):
+ return "%s.List(%r)" % (self.__module__, " ".join(self.l))
+
+ def __mul__(self, other):
+ result = List()
+ if not isinstance(other, List):
+ other = List(other)
+ for f in self:
+ for s in other:
+ result.l.append(f + s)
+ return result
+
+ def __rmul__(self, other):
+ if not isinstance(other, List):
+ other = List(other)
+ return List.__mul__(other, self)
+
+ def __add__(self, other):
+ result = List()
+ result.l = self.l[:] + other.l[:]
+ return result
+
+
+def _contains_lines(data, lines):
+ data_line_count = len(data)
+ expected_line_count = reduce(lambda x, y: x + len(y), lines, 0)
+ index = 0
+ for expected in lines:
+ if expected_line_count > data_line_count - index:
+ return False
+ expected_line_count -= len(expected)
+ index = _match_line_sequence(data, index, data_line_count -
+ expected_line_count, expected)
+ if index < 0:
+ return False
+ return True
+
+
+def _match_line_sequence(data, start, end, lines):
+ if not lines:
+ return start
+ for index in range(start, end - len(lines) + 1):
+ data_index = index
+ for expected in lines:
+ if not fnmatch.fnmatch(data[data_index], expected):
+ break
+ data_index += 1
+ else:
+ return data_index
+ return -1
+
+
+def _sleep(delay):
+ if delay > 5:
+ raise TestEnvironmentError("Test environment error: sleep period of "
+ "more than 5 seconds requested. Most likely caused by a file with "
+ "its modification timestamp set to sometime in the future.")
+ time.sleep(delay)
+
+
+###############################################################################
+#
+# Initialization.
+#
+###############################################################################
+
+# Make os.stat() return file modification times as floats instead of integers
+# to get the best possible file timestamp resolution available. The exact
+# resolution depends on the underlying file system and the Python os.stat()
+# implementation. The better the resolution we achieve, the shorter we need to
+# wait for files we create to start getting new timestamps.
+#
+# Additional notes:
+# * os.stat_float_times() function first introduced in Python 2.3. and
+# suggested for deprecation in Python 3.3.
+# * On Python versions 2.5+ we do not need to do this as there os.stat()
+# returns floating point file modification times by default.
+# * Windows CPython implementations prior to version 2.5 do not support file
+# modification timestamp resolutions of less than 1 second no matter whether
+# these timestamps are returned as integer or floating point values.
+# * Python documentation states that this should be set in a program's
+# __main__ module to avoid affecting other libraries that might not be ready
+# to support floating point timestamps. Since we use no such external
+# libraries, we ignore this warning to make it easier to enable this feature
+# in both our single & multiple-test scripts.
+if (2, 3) <= sys.version_info < (2, 5) and not os.stat_float_times():
+ os.stat_float_times(True)
+
+
+# Quickie tests. Should use doctest instead.
+if __name__ == "__main__":
+ assert str(List("foo bar") * "/baz") == "['foo/baz', 'bar/baz']"
+ assert repr("foo/" * List("bar baz")) == "__main__.List('foo/bar foo/baz')"
+
+ assert _contains_lines([], [])
+ assert _contains_lines([], [[]])
+ assert _contains_lines([], [[], []])
+ assert _contains_lines([], [[], [], []])
+ assert not _contains_lines([], [[""]])
+ assert not _contains_lines([], [["a"]])
+
+ assert _contains_lines([""], [])
+ assert _contains_lines(["a"], [])
+ assert _contains_lines(["a", "b"], [])
+ assert _contains_lines(["a", "b"], [[], [], []])
+
+ assert _contains_lines([""], [[""]])
+ assert not _contains_lines([""], [["a"]])
+ assert not _contains_lines(["a"], [[""]])
+ assert _contains_lines(["a", "", "b", ""], [["a"]])
+ assert _contains_lines(["a", "", "b", ""], [[""]])
+ assert _contains_lines(["a", "", "b"], [["b"]])
+ assert not _contains_lines(["a", "b"], [[""]])
+ assert not _contains_lines(["a", "", "b", ""], [["c"]])
+ assert _contains_lines(["a", "", "b", "x"], [["x"]])
+
+ data = ["1", "2", "3", "4", "5", "6", "7", "8", "9"]
+ assert _contains_lines(data, [["1", "2"]])
+ assert not _contains_lines(data, [["2", "1"]])
+ assert not _contains_lines(data, [["1", "3"]])
+ assert not _contains_lines(data, [["1", "3"]])
+ assert _contains_lines(data, [["1"], ["2"]])
+ assert _contains_lines(data, [["1"], [], [], [], ["2"]])
+ assert _contains_lines(data, [["1"], ["3"]])
+ assert not _contains_lines(data, [["3"], ["1"]])
+ assert _contains_lines(data, [["3"], ["7"], ["8"]])
+ assert not _contains_lines(data, [["1"], ["3", "5"]])
+ assert not _contains_lines(data, [["1"], [""], ["5"]])
+ assert not _contains_lines(data, [["1"], ["5"], ["3"]])
+ assert not _contains_lines(data, [["1"], ["5", "3"]])
+
+ assert not _contains_lines(data, [[" 3"]])
+ assert not _contains_lines(data, [["3 "]])
+ assert not _contains_lines(data, [["3", ""]])
+ assert not _contains_lines(data, [["", "3"]])
+
+ print("tests passed")
diff --git a/src/boost/tools/build/test/Jamfile.jam b/src/boost/tools/build/test/Jamfile.jam
new file mode 100644
index 000000000..7ec0bf303
--- /dev/null
+++ b/src/boost/tools/build/test/Jamfile.jam
@@ -0,0 +1,29 @@
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import python ;
+import testing ;
+
+if ! [ python.configured ]
+{
+ using python ;
+}
+
+# Not quite perfect, but good enough for most purposes
+local test-files = [ glob *.py ] ;
+
+local boost-build-files = [ glob
+ ../src/tools/*.jam
+ ../src/tools/*/*.jam
+ ../src/build/*.jam
+ ../src/util/*.jam
+ ../src/kernel/*.jam
+ ../src/options/*.jam
+ ../src/*.jam ] ;
+
+testing.make-test run-pyd : test_all.py :
+ <dependency>$(test-files)
+ <dependency>$(boost-build-files)
+ ;
diff --git a/src/boost/tools/build/test/MockToolset.py b/src/boost/tools/build/test/MockToolset.py
new file mode 100755
index 000000000..a692a6861
--- /dev/null
+++ b/src/boost/tools/build/test/MockToolset.py
@@ -0,0 +1,267 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import sys
+
+def create(t):
+ t.write('''mockinfo.py''', '''
+from __future__ import print_function
+import re
+import optparse
+import os
+
+parser = optparse.OptionParser()
+parser.add_option('-o', dest="output_file")
+parser.add_option('-x', dest="language")
+parser.add_option('-c', dest="compile", action="store_true")
+parser.add_option('-I', dest="includes", action="append")
+parser.add_option('-D', dest="defines", action="append")
+parser.add_option('-L', dest="library_path", action="append")
+parser.add_option('--dll', dest="dll", action="store_true")
+parser.add_option('--archive', dest="archive", action="store_true")
+parser.add_option('--static-lib', dest="static_libraries", action="append")
+parser.add_option('--shared-lib', dest="shared_libraries", action="append")
+
+cwd = os.environ["JAM_CWD"]
+
+class MockInfo(object):
+ def __init__(self, verbose=False):
+ self.files = dict()
+ self.commands = list()
+ self.verbose = verbose
+ def source_file(self, name, pattern):
+ self.files[name] = pattern
+ def action(self, command, status=0):
+ if isinstance(command, str):
+ command = command.split()
+ self.commands.append((command, status))
+ def check(self, command):
+ print("Testing command", command)
+ for (raw, status) in self.commands:
+ if self.matches(raw, command):
+ return status
+ def matches(self, raw, command):
+ (expected_options, expected_args) = parser.parse_args(raw)
+ options = command[0]
+ input_files = list(command[1])
+ if self.verbose:
+ print(" - matching against", (expected_options, expected_args))
+ if len(expected_args) != len(input_files):
+ if self.verbose:
+ print(" argument list sizes differ")
+ return False
+ for arg in expected_args:
+ if arg.startswith('$'):
+ fileid = arg[1:]
+ pattern = self.files[fileid] if fileid in self.files else fileid
+ matching_file = None
+ for input_file in input_files:
+ with open(input_file, 'r') as f:
+ contents = f.read()
+ if pattern == contents:
+ matching_file = input_file
+ break
+ if matching_file is not None:
+ input_files.remove(matching_file)
+ else:
+ if self.verbose:
+ print(" Failed to match input file contents: %s" % arg)
+ return False
+ else:
+ if arg in input_files:
+ input_files.remove(arg)
+ else:
+ if self.verbose:
+ print(" Failed to match input file: %s" % arg)
+ return False
+
+ if options.language != expected_options.language:
+ if self.verbose:
+        print("  Failed to match -x")
+ return False
+
+ if options.compile != expected_options.compile:
+ if self.verbose:
+        print("  Failed to match -c")
+ return False
+
+ # Normalize a path for comparison purposes
+ def adjust_path(p):
+ return os.path.normcase(os.path.normpath(os.path.join(cwd, p)))
+
+ # order matters
+ if options.includes is None:
+ options.includes = []
+ if expected_options.includes is None:
+ expected_options.includes = []
+ if list(map(adjust_path, options.includes)) != \
+ list(map(adjust_path, expected_options.includes)):
+ if self.verbose:
+ print(" Failed to match -I ", list(map(adjust_path, options.includes)), \
+ " != ", list(map(adjust_path, expected_options.includes)))
+ return False
+
+ if options.defines is None:
+ options.defines = []
+ if expected_options.defines is None:
+ expected_options.defines = []
+ if options.defines != expected_options.defines:
+ if self.verbose:
+        print("  Failed to match -D ", options.defines, \
+ " != ", expected_options.defines)
+ return False
+
+ if options.library_path is None:
+ options.library_path = []
+ if expected_options.library_path is None:
+ expected_options.library_path = []
+ if list(map(adjust_path, options.library_path)) != \
+ list(map(adjust_path, expected_options.library_path)):
+ if self.verbose:
+ print(" Failed to match -L ", list(map(adjust_path, options.library_path)), \
+ " != ", list(map(adjust_path, expected_options.library_path)))
+ return False
+
+ if options.static_libraries != expected_options.static_libraries:
+ if self.verbose:
+ print(" Failed to match --static-lib")
+ return False
+
+ if options.shared_libraries != expected_options.shared_libraries:
+ if self.verbose:
+ print(" Failed to match --shared-lib")
+ return False
+
+ if options.dll != expected_options.dll:
+ if self.verbose:
+ print(" Failed to match --dll")
+ return False
+
+ if options.archive != expected_options.archive:
+ if self.verbose:
+ print(" Failed to match --archive")
+ return False
+
+ # The output must be handled after everything else
+ # is validated
+ if expected_options.output_file is not None:
+ if options.output_file is not None:
+ if expected_options.output_file.startswith('$'):
+ fileid = expected_options.output_file[1:]
+ if fileid not in self.files:
+ self.files[fileid] = fileid
+ else:
+ assert(self.files[fileid] == fileid)
+ with open(options.output_file, 'w') as output:
+ output.write(fileid)
+ else:
+ if self.verbose:
+ print("Failed to match -o")
+ return False
+ elif options.output_file is not None:
+ if self.verbose:
+ print("Failed to match -o")
+ return False
+
+ # if we've gotten here, then everything matched
+ if self.verbose:
+ print(" Matched")
+ return True
+''')
+
+ t.write('mock.py', '''
+from __future__ import print_function
+import mockinfo
+import markup
+import sys
+
+status = markup.info.check(mockinfo.parser.parse_args())
+if status is not None:
+ exit(status)
+else:
+ print("Unrecognized command: " + ' '.join(sys.argv))
+ exit(1)
+''')
+
+ t.write('mock.jam', '''
+import feature ;
+import toolset ;
+import path ;
+import modules ;
+import common ;
+import type ;
+
+.python-cmd = "\"%s\"" ;
+
+# Behave the same as gcc on Windows, because that's what
+# the test system expects
+type.set-generated-target-prefix SHARED_LIB : <toolset>mock <target-os>windows : lib ;
+type.set-generated-target-suffix STATIC_LIB : <toolset>mock <target-os>windows : a ;
+
+rule init ( )
+{
+ local here = [ path.make [ modules.binding $(__name__) ] ] ;
+ here = [ path.native [ path.root [ path.parent $(here) ] [ path.pwd ] ] ] ;
+ .config-cmd = [ common.variable-setting-command JAM_CWD : $(here) ] $(.python-cmd) -B ;
+}
+
+feature.extend toolset : mock ;
+
+generators.register-c-compiler mock.compile.c++ : CPP : OBJ : <toolset>mock ;
+generators.register-c-compiler mock.compile.c : C : OBJ : <toolset>mock ;
+
+generators.register-linker mock.link : LIB OBJ : EXE : <toolset>mock ;
+generators.register-linker mock.link.dll : LIB OBJ : SHARED_LIB : <toolset>mock ;
+generators.register-archiver mock.archive : OBJ : STATIC_LIB : <toolset>mock ;
+
+toolset.flags mock.compile OPTIONS <link>shared : -fPIC ;
+toolset.flags mock.compile INCLUDES : <include> ;
+toolset.flags mock.compile DEFINES : <define> ;
+
+actions compile.c
+{
+ $(.config-cmd) mock.py -c -x c -I"$(INCLUDES)" -D"$(DEFINES)" "$(>)" -o "$(<)"
+}
+
+actions compile.c++
+{
+ $(.config-cmd) mock.py -c -x c++ -I"$(INCLUDES)" -D"$(DEFINES)" "$(>)" -o "$(<)"
+}
+
+toolset.flags mock.link USER_OPTIONS <linkflags> ;
+toolset.flags mock.link FINDLIBS-STATIC <find-static-library> ;
+toolset.flags mock.link FINDLIBS-SHARED <find-shared-library> ;
+toolset.flags mock.link LINK_PATH <library-path> ;
+toolset.flags mock.link LIBRARIES <library-file> ;
+
+actions link
+{
+ $(.config-cmd) mock.py "$(>)" -o "$(<)" $(USER_OPTIONS) -L"$(LINK_PATH)" --static-lib=$(FINDLIBS-STATIC) --shared-lib=$(FINDLIBS-SHARED)
+}
+
+actions archive
+{
+ $(.config-cmd) mock.py --archive "$(>)" -o "$(<)" $(USER_OPTIONS)
+}
+
+actions link.dll
+{
+ $(.config-cmd) mock.py --dll "$(>)" -o "$(<)" $(USER_OPTIONS) -L"$(LINK_PATH)" --static-lib=$(FINDLIBS-STATIC) --shared-lib=$(FINDLIBS-SHARED)
+}
+
+''' % sys.executable.replace('\\', '\\\\'))
+
+def set_expected(t, markup):
+ verbose = "True" if t.verbose else "False"
+ t.write('markup.py', '''
+import mockinfo
+info = mockinfo.MockInfo(%s)
+def source_file(name, contents):
+ info.source_file(name, contents)
+def action(command, status=0):
+ info.action(command, status)
+''' % (verbose) + markup)
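+
+# Typical use from a test script (a sketch; file names and the expected
+# command line are illustrative):
+#
+#   t = BoostBuild.Tester(arguments=["toolset=mock"], pass_toolset=0)
+#   MockToolset.create(t)
+#   MockToolset.set_expected(t, '''
+#   source_file('foo.cpp', 'foo.cpp')
+#   action('-c -x c++ $foo.cpp -o $foo.o')
+#   ''')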
diff --git a/src/boost/tools/build/test/TestCmd.py b/src/boost/tools/build/test/TestCmd.py
new file mode 100644
index 000000000..3f9c2a3ce
--- /dev/null
+++ b/src/boost/tools/build/test/TestCmd.py
@@ -0,0 +1,609 @@
+"""
+TestCmd.py: a testing framework for commands and scripts.
+
+The TestCmd module provides a framework for portable automated testing of
+executable commands and scripts (in any language, not just Python), especially
+commands and scripts that require file system interaction.
+
+In addition to running tests and evaluating conditions, the TestCmd module
+manages and cleans up one or more temporary workspace directories, and provides
+methods for creating files and directories in those workspace directories from
+in-line data (here-documents), allowing tests to be completely self-contained.
+
+A TestCmd environment object is created via the usual invocation:
+
+ test = TestCmd()
+
+The TestCmd module provides pass_test(), fail_test(), and no_result() unbound
+methods that report test results for use with the Aegis change management
+system. These methods terminate the test immediately, reporting PASSED, FAILED
+or NO RESULT respectively and exiting with status 0 (success), 1 or 2
+respectively. This allows for a distinction between an actual failed test and a
+test that could not be properly evaluated because of an external condition (such
+as a full file system or incorrect permissions).
+
+"""
+
+# Copyright 2000 Steven Knight
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
+#
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+# Copyright 2002-2003 Vladimir Prus.
+# Copyright 2002-2003 Dave Abrahams.
+# Copyright 2006 Rene Rivera.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from __future__ import print_function
+
+__author__ = "Steven Knight <knight@baldmt.com>"
+__revision__ = "TestCmd.py 0.D002 2001/08/31 14:56:12 software"
+__version__ = "0.02"
+
+import atexit
+import os
+import os.path
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import traceback
+
+
+tempfile.template = 'testcmd.'
+
+_Cleanup = []
+
+def _clean():
+ global _Cleanup
+ list = _Cleanup[:]
+ _Cleanup = []
+ list.reverse()
+ for test in list:
+ test.cleanup()
+
+atexit.register(_clean)
+
+
+def caller(tblist, skip):
+ string = ""
+ arr = []
+ for file, line, name, text in tblist:
+ if file[-10:] == "TestCmd.py":
+ break
+ arr = [(file, line, name, text)] + arr
+ atfrom = "at"
+ for file, line, name, text in arr[skip:]:
+ if name == "?":
+ name = ""
+ else:
+ name = " (" + name + ")"
+ string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
+ atfrom = "\tfrom"
+ return string
+
+
+def fail_test(self=None, condition=True, function=None, skip=0):
+ """Cause the test to fail.
+
+ By default, the fail_test() method reports that the test FAILED and exits
+ with a status of 1. If a condition argument is supplied, the test fails
+ only if the condition is true.
+
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+ of = " of " + " ".join(self.program)
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = caller(traceback.extract_stack(), skip)
+
+ sys.stderr.write("FAILED test" + of + desc + sep + at + """
+in directory: """ + os.getcwd() )
+ sys.exit(1)
+
+
+def no_result(self=None, condition=True, function=None, skip=0):
+ """Causes a test to exit with no valid result.
+
+ By default, the no_result() method reports NO RESULT for the test and
+ exits with a status of 2. If a condition argument is supplied, the test
+ fails only if the condition is true.
+
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+            of = " of " + " ".join(self.program)
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = caller(traceback.extract_stack(), skip)
+ sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
+ sys.exit(2)
+
+
+def pass_test(self=None, condition=True, function=None):
+ """Causes a test to pass.
+
+ By default, the pass_test() method reports PASSED for the test and exits
+ with a status of 0. If a condition argument is supplied, the test passes
+ only if the condition is true.
+
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ sys.stderr.write("PASSED\n")
+ sys.exit(0)
+
+class MatchError(object):
+ def __init__(self, message):
+ self.message = message
+ def __nonzero__(self):
+ return False
+ def __bool__(self):
+ return False
+
+def match_exact(lines=None, matches=None):
+ """
+    Returns whether the given lists, or newline-separated strings, of lines
+    contain exactly the same data.
+
+ """
+ if not type(lines) is list:
+ lines = lines.split("\n")
+ if not type(matches) is list:
+ matches = matches.split("\n")
+    # Compare the common prefix first so that a length mismatch still yields
+    # an informative MatchError for the missing/extra lines below.
+    for i in range(min(len(lines), len(matches))):
+        if lines[i] != matches[i]:
+            return MatchError("Mismatch at line %d\n- %s\n+ %s\n" %
+                (i+1, matches[i], lines[i]))
+ if len(lines) < len(matches):
+ return MatchError("Missing lines at line %d\n- %s" %
+ (len(lines), "\n- ".join(matches[len(lines):])))
+ if len(lines) > len(matches):
+ return MatchError("Extra lines at line %d\n+ %s" %
+ (len(matches), "\n+ ".join(lines[len(matches):])))
+ return 1
+
+
+def match_re(lines=None, res=None):
+ """
+    Matches the given lists, or newline-separated strings, of lines one by
+    one, interpreting each line in the res parameter as a regular expression
+    that must match the whole corresponding line.
+
+ """
+ if not type(lines) is list:
+ lines = lines.split("\n")
+ if not type(res) is list:
+ res = res.split("\n")
+ for i in range(min(len(lines), len(res))):
+ if not re.compile("^" + res[i] + "$").search(lines[i]):
+ return MatchError("Mismatch at line %d\n- %s\n+ %s\n" %
+ (i+1, res[i], lines[i]))
+ if len(lines) < len(res):
+ return MatchError("Missing lines at line %d\n- %s" %
+ (len(lines), "\n- ".join(res[len(lines):])))
+ if len(lines) > len(res):
+ return MatchError("Extra lines at line %d\n+ %s" %
+ (len(res), "\n+ ".join(lines[len(res):])))
+ return 1
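+# An illustrative sketch (assumed inputs, not from any particular test) of how
+# the matching helpers behave: each expected line in 'res' is a regular
+# expression that must match the whole corresponding output line.
+#
+#     ok = match_re("warning: foo\nerror: bar", ["warning: .*", "error: .*"])
+#     assert ok                 # truthy on a full match
+#     bad = match_re("error: bar", ["warning: .*"])
+#     assert not bad            # falsy MatchError describing the mismatch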
+
+
+class TestCmd:
+ def __init__(self, description=None, program=None, workdir=None,
+ subdir=None, verbose=False, match=None, inpath=None):
+
+ self._cwd = os.getcwd()
+ self.description_set(description)
+ self.program_set(program, inpath)
+ self.verbose_set(verbose)
+ if match is None:
+ self.match_func = match_re
+ else:
+ self.match_func = match
+ self._dirlist = []
+ self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
+ env = os.environ.get('PRESERVE')
+ if env:
+ self._preserve['pass_test'] = env
+ self._preserve['fail_test'] = env
+ self._preserve['no_result'] = env
+ else:
+ env = os.environ.get('PRESERVE_PASS')
+ if env is not None:
+ self._preserve['pass_test'] = env
+ env = os.environ.get('PRESERVE_FAIL')
+ if env is not None:
+ self._preserve['fail_test'] = env
+            env = os.environ.get('PRESERVE_NO_RESULT')
+            if env is not None:
+                self._preserve['no_result'] = env
+ self._stdout = []
+ self._stderr = []
+ self.status = None
+ self.condition = 'no_result'
+ self.workdir_set(workdir)
+ self.subdir(subdir)
+
+ def __del__(self):
+ self.cleanup()
+
+ def __repr__(self):
+ return "%x" % id(self)
+
+ def cleanup(self, condition=None):
+ """
+ Removes any temporary working directories for the specified TestCmd
+ environment. If the environment variable PRESERVE was set when the
+ TestCmd environment was created, temporary working directories are not
+ removed. If any of the environment variables PRESERVE_PASS,
+ PRESERVE_FAIL or PRESERVE_NO_RESULT were set when the TestCmd
+ environment was created, then temporary working directories are not
+ removed if the test passed, failed or had no result, respectively.
+ Temporary working directories are also preserved for conditions
+ specified via the preserve method.
+
+ Typically, this method is not called directly, but is used when the
+ script exits to clean up temporary working directories as appropriate
+ for the exit status.
+
+ """
+ if not self._dirlist:
+ return
+ if condition is None:
+ condition = self.condition
+ if self._preserve[condition]:
+ for dir in self._dirlist:
+ print("Preserved directory %s" % dir)
+ else:
+ list = self._dirlist[:]
+ list.reverse()
+ for dir in list:
+ self.writable(dir, 1)
+ shutil.rmtree(dir, ignore_errors=1)
+
+ self._dirlist = []
+ self.workdir = None
+ os.chdir(self._cwd)
+ try:
+ global _Cleanup
+ _Cleanup.remove(self)
+ except (AttributeError, ValueError):
+ pass
+
+ def description_set(self, description):
+ """Set the description of the functionality being tested."""
+ self.description = description
+
+ def fail_test(self, condition=True, function=None, skip=0):
+ """Cause the test to fail."""
+ if not condition:
+ return
+ self.condition = 'fail_test'
+ fail_test(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def match(self, lines, matches):
+ """Compare actual and expected file contents."""
+ return self.match_func(lines, matches)
+
+ def match_exact(self, lines, matches):
+ """Compare actual and expected file content exactly."""
+ return match_exact(lines, matches)
+
+ def match_re(self, lines, res):
+ """Compare file content with a regular expression."""
+ return match_re(lines, res)
+
+ def no_result(self, condition=True, function=None, skip=0):
+ """Report that the test could not be run."""
+ if not condition:
+ return
+ self.condition = 'no_result'
+ no_result(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def pass_test(self, condition=True, function=None):
+ """Cause the test to pass."""
+ if not condition:
+ return
+ self.condition = 'pass_test'
+ pass_test(self, condition, function)
+
+ def preserve(self, *conditions):
+ """
+ Arrange for the temporary working directories for the specified
+ TestCmd environment to be preserved for one or more conditions. If no
+ conditions are specified, arranges for the temporary working
+ directories to be preserved for all conditions.
+
+ """
+        if not conditions:
+ conditions = ('pass_test', 'fail_test', 'no_result')
+ for cond in conditions:
+ self._preserve[cond] = 1
+
+ def program_set(self, program, inpath):
+ """Set the executable program or script to be tested."""
+ if not inpath and program and not os.path.isabs(program[0]):
+ program[0] = os.path.join(self._cwd, program[0])
+ self.program = program
+
+ def read(self, file, mode='rb'):
+ """
+ Reads and returns the contents of the specified file name. The file
+ name may be a list, in which case the elements are concatenated with
+ the os.path.join() method. The file is assumed to be under the
+ temporary working directory unless it is an absolute path name. The I/O
+ mode for the file may be specified and must begin with an 'r'. The
+ default is 'rb' (binary read).
+
+ """
+ if type(file) is list:
+ file = os.path.join(*file)
+ if not os.path.isabs(file):
+ file = os.path.join(self.workdir, file)
+ if mode[0] != 'r':
+ raise ValueError("mode must begin with 'r'")
+ return open(file, mode).read()
+
+ def run(self, program=None, arguments=None, chdir=None, stdin=None,
+ universal_newlines=True):
+ """
+ Runs a test of the program or script for the test environment.
+ Standard output and error output are saved for future retrieval via the
+ stdout() and stderr() methods.
+
+        The 'universal_newlines' parameter controls how the child process
+        input/output streams are opened, as defined for the same-named Python
+        subprocess.Popen constructor parameter.
+
+ """
+ if chdir:
+ if not os.path.isabs(chdir):
+                chdir = self.workpath(chdir)
+ if self.verbose:
+ sys.stderr.write("chdir(" + chdir + ")\n")
+ else:
+ chdir = self.workdir
+
+ cmd = []
+ if program and program[0]:
+ if program[0] != self.program[0] and not os.path.isabs(program[0]):
+ program[0] = os.path.join(self._cwd, program[0])
+ cmd += program
+ else:
+ cmd += self.program
+ if arguments:
+ cmd += arguments.split(" ")
+ if self.verbose:
+ sys.stderr.write(" ".join(cmd) + "\n")
+ p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=chdir,
+ universal_newlines=universal_newlines)
+
+ if stdin:
+ if type(stdin) is list:
+ stdin = "".join(stdin)
+ out, err = p.communicate(stdin)
+ if not type(out) is str:
+ out = out.decode()
+ if not type(err) is str:
+ err = err.decode()
+ self._stdout.append(out)
+ self._stderr.append(err)
+ self.status = p.returncode
+
+ if self.verbose:
+ sys.stdout.write(self._stdout[-1])
+ sys.stderr.write(self._stderr[-1])
+
+ def stderr(self, run=None):
+ """
+ Returns the error output from the specified run number. If there is
+ no specified run number, then returns the error output of the last run.
+ If the run number is less than zero, then returns the error output from
+ that many runs back from the current run.
+
+ """
+ if not run:
+ run = len(self._stderr)
+ elif run < 0:
+ run = len(self._stderr) + run
+ run -= 1
+ if run < 0:
+ return ''
+ return self._stderr[run]
+
+ def stdout(self, run=None):
+ """
+ Returns the standard output from the specified run number. If there
+ is no specified run number, then returns the standard output of the
+ last run. If the run number is less than zero, then returns the
+ standard output from that many runs back from the current run.
+
+ """
+ if not run:
+ run = len(self._stdout)
+ elif run < 0:
+ run = len(self._stdout) + run
+ run -= 1
+ if run < 0:
+ return ''
+ return self._stdout[run]
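+
+    # An illustrative sketch (assuming a TestCmd instance 'test' with two
+    # earlier run() calls) of how the run-number argument selects which
+    # captured output is returned:
+    #
+    #     last = test.stdout()        # output of the most recent run
+    #     first = test.stdout(1)      # output of the first run
+    #     previous = test.stdout(-1)  # one run back from the most recent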
+
+ def subdir(self, *subdirs):
+ """
+ Create new subdirectories under the temporary working directory, one
+ for each argument. An argument may be a list, in which case the list
+ elements are concatenated using the os.path.join() method.
+ Subdirectories multiple levels deep must be created using a separate
+ argument for each level:
+
+ test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
+
+ Returns the number of subdirectories actually created.
+
+ """
+ count = 0
+ for sub in subdirs:
+ if sub is None:
+ continue
+ if type(sub) is list:
+ sub = os.path.join(*tuple(sub))
+ new = os.path.join(self.workdir, sub)
+ try:
+ os.mkdir(new)
+ except:
+ pass
+ else:
+ count += 1
+ return count
+
+ def unlink(self, file):
+ """
+ Unlinks the specified file name. The file name may be a list, in
+ which case the elements are concatenated using the os.path.join()
+ method. The file is assumed to be under the temporary working directory
+ unless it is an absolute path name.
+
+ """
+ if type(file) is list:
+ file = os.path.join(*tuple(file))
+ if not os.path.isabs(file):
+ file = os.path.join(self.workdir, file)
+ os.unlink(file)
+
+ def verbose_set(self, verbose):
+ """Set the verbose level."""
+ self.verbose = verbose
+
+ def workdir_set(self, path):
+ """
+ Creates a temporary working directory with the specified path name.
+ If the path is a null string (''), a unique directory name is created.
+
+ """
+        if path is not None and os.path.isabs(path):
+            self.workdir = path
+        else:
+            if path is not None:
+                if path == '':
+                    path = tempfile.mktemp()
+                if path is not None:
+ os.mkdir(path)
+ self._dirlist.append(path)
+ global _Cleanup
+ try:
+ _Cleanup.index(self)
+ except ValueError:
+ _Cleanup.append(self)
+ # We would like to set self.workdir like this:
+ # self.workdir = path
+ # But symlinks in the path will report things differently from
+ # os.getcwd(), so chdir there and back to fetch the canonical
+ # path.
+ cwd = os.getcwd()
+ os.chdir(path)
+ self.workdir = os.getcwd()
+ os.chdir(cwd)
+ else:
+ self.workdir = None
+
+ def workpath(self, *args):
+ """
+ Returns the absolute path name to a subdirectory or file within the
+ current temporary working directory. Concatenates the temporary working
+ directory name with the specified arguments using os.path.join().
+
+ """
+ return os.path.join(self.workdir, *tuple(args))
+
+ def writable(self, top, write):
+ """
+ Make the specified directory tree writable (write == 1) or not
+ (write == None).
+
+ """
+ def _walk_chmod(arg, dirname, names):
+ st = os.stat(dirname)
+ os.chmod(dirname, arg(st[stat.ST_MODE]))
+ for name in names:
+ fullname = os.path.join(dirname, name)
+ st = os.stat(fullname)
+ os.chmod(fullname, arg(st[stat.ST_MODE]))
+
+ _mode_writable = lambda mode: stat.S_IMODE(mode|0o200)
+ _mode_non_writable = lambda mode: stat.S_IMODE(mode&~0o200)
+
+ if write:
+ f = _mode_writable
+ else:
+ f = _mode_non_writable
+ try:
+ for root, _, files in os.walk(top):
+ _walk_chmod(f, root, files)
+ except:
+ pass # Ignore any problems changing modes.
+
+ def write(self, file, content, mode='wb'):
+ """
+ Writes the specified content text (second argument) to the specified
+ file name (first argument). The file name may be a list, in which case
+ the elements are concatenated using the os.path.join() method. The file
+ is created under the temporary working directory. Any subdirectories in
+ the path must already exist. The I/O mode for the file may be specified
+ and must begin with a 'w'. The default is 'wb' (binary write).
+
+ """
+ if type(file) is list:
+ file = os.path.join(*tuple(file))
+ if not os.path.isabs(file):
+ file = os.path.join(self.workdir, file)
+ if mode[0] != 'w':
+ raise ValueError("mode must begin with 'w'")
+ open(file, mode).write(content)
diff --git a/src/boost/tools/build/test/TestToolset.py b/src/boost/tools/build/test/TestToolset.py
new file mode 100644
index 000000000..fefa6c2ea
--- /dev/null
+++ b/src/boost/tools/build/test/TestToolset.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates a toolset using a mock of the compiler
+
+import BoostBuild
+import os
+import re
+import sys
+
+renames = {"debug": "variant=debug", "release": "variant=release"}
+
+def set_default_target_os(os):
+ global removed
+ global default_target_os
+ default_target_os = os
+ removed = set()
+ removed.add("target-os=" + default_target_os)
+
+def adjust_property(property):
+ global renames
+ if property in renames:
+ return renames[property]
+ else:
+ return property
+
+def adjust_properties(properties):
+ global removed
+ return [adjust_property(p) for p in properties if p not in removed]
+
+def has_property(name, properties):
+ return name in [re.sub("=.*", "", p) for p in properties]
+
+def get_property(name, properties):
+ for m in [re.match("(.*)=(.*)", p) for p in properties]:
+ if m and m.group(1) == name:
+ return m.group(2)
+
+def get_target_os(properties):
+ return get_property("target-os", properties) or default_target_os
+
+def expand_properties(properties):
+ result = properties[:]
+ if not has_property("variant", properties):
+ result += ["variant=debug"]
+ if not has_property("threading", properties):
+ result += ["threading=single"]
+ if not has_property("exception-handling", properties):
+ result += ["exception-handling=on"]
+ if not has_property("link", properties):
+ result += ["link=shared"]
+ if not has_property("rtti", properties):
+ result += ["rtti=on"]
+ if not has_property("runtime-link", properties):
+ result += ["runtime-link=shared"]
+ if not has_property("strip", properties):
+ result += ["strip=off"]
+ if not has_property("target-os", properties):
+ result += ["target-os=" + default_target_os]
+ return result
+
+def compute_path(properties, target_type):
+ path = ""
+ if "variant=release" in properties:
+ path += "/release"
+ else:
+ path += "/debug"
+ if has_property("address-model", properties):
+ path += "/address-model-" + get_property("address-model", properties)
+ if has_property("architecture", properties):
+ path += "/architecture-" + get_property("architecture", properties)
+ if "cxxstd=latest" in properties:
+ path += "/cxxstd-latest-iso"
+ if "exception-handling=off" in properties:
+ path += "/exception-handling-off"
+ if "link=static" in properties:
+ path += "/link-static"
+ if "rtti=off" in properties:
+ path += "/rtti-off"
+ if "runtime-link=static" in properties and target_type in ["exe"]:
+ path += "/runtime-link-static"
+ if "strip=on" in properties and target_type in ["dll", "exe", "obj2"]:
+ path += "/strip-on"
+ if get_target_os(properties) != default_target_os:
+ path += "/target-os-" + get_target_os(properties)
+ if "threading=multi" in properties:
+ path += "/threading-multi"
+ return path
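+
+# An illustrative sketch (values assumed, with target-os equal to the detected
+# default) of the mapping compute_path() implements:
+#
+#     compute_path(["variant=release", "link=static"], "exe")
+#         # -> "/release/link-static"
+#     compute_path(["variant=release", "link=static",
+#                   "runtime-link=static"], "exe")
+#         # -> "/release/link-static/runtime-link-static"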
+
+def test_toolset(toolset, version, property_sets):
+ t = BoostBuild.Tester()
+
+ t.set_tree("toolset-mock")
+
+ # Build necessary tools
+ t.run_build_system(["-sPYTHON_CMD=%s" % sys.executable], subdir="src")
+ set_default_target_os(t.read("src/bin/target-os.txt").strip())
+
+ for properties in property_sets:
+ t.set_toolset(toolset + "-" + version, get_target_os(properties))
+ properties = adjust_properties(properties)
+        def path(target_type):
+            return toolset.split("-")[0] + "-*" + version + compute_path(properties, target_type)
+ os.environ["B2_PROPERTIES"] = " ".join(expand_properties(properties))
+ t.run_build_system(["--user-config=", "-sPYTHON_CMD=%s" % sys.executable] + properties)
+ t.expect_addition("bin/%s/lib.obj" % (path("obj")))
+ if "link=static" not in properties:
+ t.expect_addition("bin/%s/l1.dll" % (path("dll")))
+ else:
+ t.expect_addition("bin/%s/l1.lib" % (path("lib")))
+ t.expect_addition("bin/%s/main.obj" % (path("obj2")))
+ t.expect_addition("bin/%s/test.exe" % (path("exe")))
+ t.expect_nothing_more()
+ t.rm("bin")
+
+ t.cleanup()
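+
+# A hypothetical sketch (the toolset name, version and property sets are made
+# up for illustration) of how an individual toolset test script might drive
+# this helper:
+#
+#     import TestToolset
+#     TestToolset.test_toolset("gcc", "4.8.3", [
+#         [],
+#         ["release"],
+#         ["link=static"],
+#         ["threading=multi"]])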
diff --git a/src/boost/tools/build/test/abs_workdir.py b/src/boost/tools/build/test/abs_workdir.py
new file mode 100644
index 000000000..fa6aadc58
--- /dev/null
+++ b/src/boost/tools/build/test/abs_workdir.py
@@ -0,0 +1,26 @@
+# Niklaus Giger, 2005-03-15
+# Testing whether we may run a test in absolute directories. There are no tests
+# for temporary directories as this is implicitly tested in a lot of other cases.
+
+# TODO: Move to a separate testing-system test group.
+# TODO: Make the test not display any output on success.
+# TODO: Make sure implemented path handling is correct under Windows, Cygwin &
+# Unix/Linux.
+
+import BoostBuild
+import os
+import tempfile
+
+t = BoostBuild.Tester(["-ffile.jam"], workdir=os.getcwd(), pass_d0=False,
+ pass_toolset=False)
+
+t.write("file.jam", "EXIT [ PWD ] : 0 ;")
+
+t.run_build_system()
+t.expect_output_lines("*%s*" % tempfile.gettempdir(), False)
+t.expect_output_lines("*build/v2/test*")
+
+t.run_build_system(status=1, subdir="/must/fail/with/absolute/path",
+ stderr=None)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/absolute_sources.py b/src/boost/tools/build/test/absolute_sources.py
new file mode 100644
index 000000000..22ff1d080
--- /dev/null
+++ b/src/boost/tools/build/test/absolute_sources.py
@@ -0,0 +1,73 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that sources with absolute names are handled OK.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "path-constant TOP : . ;")
+t.write("jamfile.jam", """\
+local pwd = [ PWD ] ;
+ECHO $(pwd) XXXXX ;
+exe hello : $(pwd)/hello.cpp $(TOP)/empty.cpp ;
+""")
+t.write("hello.cpp", "int main() {}\n")
+t.write("empty.cpp", "\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+t.rm(".")
+
+# Test a contrived case in which an absolute name is used in a standalone
+# project (not Jamfile). Moreover, the target with an absolute name is returned
+# via an 'alias' and used from another project.
+t.write("a.cpp", "int main() {}\n")
+t.write("jamfile.jam", "exe a : /standalone//a ;")
+t.write("jamroot.jam", "import standalone ;")
+t.write("standalone.jam", """\
+import project ;
+project.initialize $(__name__) ;
+project standalone ;
+local pwd = [ PWD ] ;
+alias a : $(pwd)/a.cpp ;
+""")
+
+t.write("standalone.py", """
+from b2.manager import get_manager
+
+# FIXME: this is ugly as death
+get_manager().projects().initialize(__name__)
+
+import os ;
+
+# This use of list as parameter is also ugly.
+project(['standalone'])
+
+pwd = os.getcwd()
+alias('a', [os.path.join(pwd, 'a.cpp')])
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+
+# Test absolute path in target ids.
+t.rm(".")
+
+t.write("d1/jamroot.jam", "")
+t.write("d1/jamfile.jam", "exe a : a.cpp ;")
+t.write("d1/a.cpp", "int main() {}\n")
+t.write("d2/jamroot.jam", "")
+t.write("d2/jamfile.jam", """\
+local pwd = [ PWD ] ;
+alias x : $(pwd)/../d1//a ;
+""")
+
+t.run_build_system(subdir="d2")
+t.expect_addition("d1/bin/$toolset/debug*/a.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/alias.py b/src/boost/tools/build/test/alias.py
new file mode 100644
index 000000000..132e4c390
--- /dev/null
+++ b/src/boost/tools/build/test/alias.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_alias_rule()
+# -----------------
+#
+###############################################################################
+
+def test_alias_rule(t):
+ """Basic alias rule test."""
+
+ t.write("jamroot.jam", """\
+exe a : a.cpp ;
+exe b : b.cpp ;
+exe c : c.cpp ;
+
+alias bin1 : a ;
+alias bin2 : a b ;
+
+alias src : s.cpp ;
+exe hello : hello.cpp src ;
+""")
+
+ t.write("a.cpp", "int main() {}\n")
+ t.copy("a.cpp", "b.cpp")
+ t.copy("a.cpp", "c.cpp")
+ t.copy("a.cpp", "hello.cpp")
+ t.write("s.cpp", "")
+
+ # Check that targets to which "bin1" refers are updated, and only those.
+ t.run_build_system(["bin1"])
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug*/") * "a.exe a.obj")
+ t.expect_nothing_more()
+
+ # Try again with "bin2"
+ t.run_build_system(["bin2"])
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug*/") * "b.exe b.obj")
+ t.expect_nothing_more()
+
+ # Try building everything, making sure 'hello' target is created.
+ t.run_build_system()
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug*/") * \
+ "hello.exe hello.obj")
+ t.expect_addition("bin/$toolset/debug*/s.obj")
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug*/") * "c.exe c.obj")
+ t.expect_nothing_more()
+
+
+###############################################################################
+#
+# test_alias_source_usage_requirements()
+# --------------------------------------
+#
+###############################################################################
+
+def test_alias_source_usage_requirements(t):
+ """
+ Check whether usage requirements are propagated via "alias". In case they
+ are not, linking will fail as there will be no main() function defined
+ anywhere in the source.
+
+ """
+ t.write("jamroot.jam", """\
+lib l : l.cpp : : : <define>WANT_MAIN ;
+alias la : l ;
+exe main : main.cpp la ;
+""")
+
+ t.write("l.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+ t.write("main.cpp", """\
+#ifdef WANT_MAIN
+int main() {}
+#endif
+""")
+
+ t.run_build_system()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+t = BoostBuild.Tester(use_test_config=False)
+
+test_alias_rule(t)
+test_alias_source_usage_requirements(t)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/alternatives.py b/src/boost/tools/build/test/alternatives.py
new file mode 100644
index 000000000..7a52427d2
--- /dev/null
+++ b/src/boost/tools/build/test/alternatives.py
@@ -0,0 +1,129 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test main target alternatives.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Test that basic alternatives selection works.
+t.write("jamroot.jam", "")
+
+t.write("jamfile.jam", """
+exe a : a_empty.cpp ;
+exe a : a.cpp : <variant>release ;
+""")
+
+t.write("a_empty.cpp", "")
+
+t.write("a.cpp", "int main() {}\n")
+
+t.run_build_system(["release"])
+
+t.expect_addition("bin/$toolset/release*/a.exe")
+
+# Test that alternative selection works for ordinary properties, in particular
+# user-defined.
+t.write("jamroot.jam", "")
+
+t.write("jamfile.jam", """
+import feature ;
+feature.feature X : off on : propagated ;
+exe a : b.cpp ;
+exe a : a.cpp : <X>on ;
+""")
+t.write("b.cpp", "int main() {}\n")
+
+t.rm("bin")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/b.obj")
+
+t.run_build_system(["X=on"])
+t.expect_addition("bin/$toolset/debug/X-on*/a.obj")
+
+t.rm("bin")
+
+# Test that everything works ok even with the default build.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp : <variant>release ;
+exe a : a.cpp : <variant>debug ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+
+# Test that only properties which are in the build request matter for
+# alternative selection. IOW, alternative with <variant>release is better than
+# one with <variant>debug when building the release variant.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp : <variant>debug ;
+exe a : a.cpp : <variant>release ;
+""")
+
+t.run_build_system(["release"])
+t.expect_addition("bin/$toolset/release*/a.exe")
+
+# Test that free properties do not matter. We really do not want <cxxflags>
+# property in build request to affect alternative selection.
+t.write("jamfile.jam", """
+exe a : a_empty.cpp : <variant>debug <define>FOO <include>BAR ;
+exe a : a.cpp : <variant>release ;
+""")
+
+t.rm("bin/$toolset/release/a.exe")
+t.rm("bin/$toolset/release/*/a.exe")
+t.run_build_system(["release", "define=FOO"])
+t.expect_addition("bin/$toolset/release*/a.exe")
+
+# Test that ambiguity is reported correctly.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp ;
+exe a : a.cpp ;
+""")
+t.run_build_system(["--no-error-backtrace"], status=None)
+t.expect_output_lines("error: No best alternative for ./a")
+
+# Another ambiguity test: two matching properties in one alternative are neither
+# better nor worse than a single one in another alternative.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp : <optimization>off <profiling>off ;
+exe a : a.cpp : <debug-symbols>on ;
+""")
+
+t.run_build_system(["--no-error-backtrace"], status=None)
+t.expect_output_lines("error: No best alternative for ./a")
+t.rm("bin")
+
+# Test that we can have alternative without sources.
+t.write("jamfile.jam", """\
+alias specific-sources ;
+import feature ;
+feature.extend os : MAGIC ;
+alias specific-sources : b.cpp : <os>MAGIC ;
+exe a : a.cpp specific-sources ;
+""")
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+t.rm("bin")
+
+# Test that subfeatures are expanded in alternatives
+# and that unknown subfeatures fail to match instead of
+# causing errors.
+t.write("jamfile.jam", """\
+import feature : feature subfeature ;
+feature X : off on : propagated ;
+subfeature X on : version : 1 : propagated ;
+exe a : a.cpp : <X>on-1 ;
+exe a : a_empty.cpp ;
+exe a : a_empty.cpp : <X>on-2 ;
+""")
+t.run_build_system(["X=on-1"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/always.py b/src/boost/tools/build/test/always.py
new file mode 100644
index 000000000..954503467
--- /dev/null
+++ b/src/boost/tools/build/test/always.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+
+# Copyright 2016 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("main.cpp", """\
+int main() {}
+""")
+
+t.write("Jamroot", """\
+exe test : main.cpp ;
+always test ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/main.obj")
+t.expect_addition("bin/$toolset/debug*/test.exe")
+t.expect_nothing_more()
+
+t.run_build_system()
+t.expect_touch("bin/$toolset/debug*/main.obj")
+t.expect_touch("bin/$toolset/debug*/test.exe")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/bad_dirname.py b/src/boost/tools/build/test/bad_dirname.py
new file mode 100644
index 000000000..47e4114b7
--- /dev/null
+++ b/src/boost/tools/build/test/bad_dirname.py
@@ -0,0 +1,22 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test: when the project root directory contained regex
+# metacharacters, B2 failed to work. Bug reported by Michael Stevens.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("bad[abc]dirname/jamfile.jam", """
+""")
+
+t.write("bad[abc]dirname/jamroot.jam", """
+""")
+
+t.run_build_system(subdir="bad[abc]dirname")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/boost-build.jam b/src/boost/tools/build/test/boost-build.jam
new file mode 100644
index 000000000..668452daf
--- /dev/null
+++ b/src/boost/tools/build/test/boost-build.jam
@@ -0,0 +1,14 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Assume BOOST_BUILD_PATH points to the 'test' directory.
+# We need to leave 'test' there, so that 'test-config.jam'
+# can be found, but also add the parent directory, to find
+# all the other modules.
+
+BOOST_BUILD_PATH = $(BOOST_BUILD_PATH)/.. $(BOOST_BUILD_PATH) ;
+
+# Find the boost build system in the ../src/kernel directory.
+boost-build ../src/kernel ;
diff --git a/src/boost/tools/build/test/boostbook.py b/src/boost/tools/build/test/boostbook.py
new file mode 100644
index 000000000..672d63da4
--- /dev/null
+++ b/src/boost/tools/build/test/boostbook.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+
+# Copyright 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.set_tree("boostbook")
+
+# For some reason, the messages are sent to stderr.
+t.run_build_system()
+t.fail_test(t.stdout().find("""Writing boost/A.html for refentry(boost.A)
+Writing library/reference.html for section(library.reference)
+Writing index.html for chapter(library)
+Writing docs_HTML.manifest
+""") == -1)
+t.expect_addition(["html/boost/A.html", "html/index.html"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/boostbook/a.hpp b/src/boost/tools/build/test/boostbook/a.hpp
new file mode 100644
index 000000000..5fab129a9
--- /dev/null
+++ b/src/boost/tools/build/test/boostbook/a.hpp
@@ -0,0 +1,16 @@
+/* Copyright 2004, 2006 Vladimir Prus */
+/* Distributed under the Boost Software License, Version 1.0. */
+/* (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) */
+
+
+// Seems like BoostBook does not like classes outside of namespaces,
+// and won't generate anything for them.
+namespace boost {
+
+/// A class
+class A {
+public:
+ /// A constructor
+ A();
+};
+}
diff --git a/src/boost/tools/build/test/boostbook/docs.xml b/src/boost/tools/build/test/boostbook/docs.xml
new file mode 100644
index 000000000..c2d9b1f88
--- /dev/null
+++ b/src/boost/tools/build/test/boostbook/docs.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE library PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<!-- Copyright 2004 Vladimir Prus -->
+<!-- Distributed under the Boost Software License, Version 1.0. -->
+<!-- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -->
+
+<library
+ name="library"
+ dirname="librarys" id="library"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <libraryinfo>
+ <author>
+ <firstname>Joe</firstname>
+ <surname>Hacker</surname>
+ </author>
+
+ <copyright>
+ <year>7002</year>
+ <holder>Joe Hacker</holder>
+ </copyright>
+
+ </libraryinfo>
+
+ <title>Documentation</title>
+
+ <section>
+ <title>Introduction</title>
+
+ <para>This is introduction</para>
+
+ </section>
+
+ <xi:include href="autodoc.xml"/>
+</library>
diff --git a/src/boost/tools/build/test/boostbook/jamroot.jam b/src/boost/tools/build/test/boostbook/jamroot.jam
new file mode 100644
index 000000000..94564ca29
--- /dev/null
+++ b/src/boost/tools/build/test/boostbook/jamroot.jam
@@ -0,0 +1,3 @@
+
+boostbook docs : docs.xml autodoc ;
+doxygen autodoc : [ glob *.hpp ] ;
diff --git a/src/boost/tools/build/test/build_dir.py b/src/boost/tools/build/test/build_dir.py
new file mode 100644
index 000000000..3d1177bc0
--- /dev/null
+++ b/src/boost/tools/build/test/build_dir.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can change build directory using the 'build-dir' project
+# attribute.
+
+import BoostBuild
+import string
+import os
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# Test that top-level project can affect build dir.
+t.write("jamroot.jam", "import gcc ;")
+t.write("jamfile.jam", """\
+project : build-dir build ;
+exe a : a.cpp ;
+build-project src ;
+""")
+
+t.write("a.cpp", "int main() {}\n")
+
+t.write("src/jamfile.jam", "exe b : b.cpp ; ")
+
+t.write("src/b.cpp", "int main() {}\n")
+
+t.run_build_system()
+
+t.expect_addition(["build/$toolset/debug*/a.exe",
+ "build/src/$toolset/debug*/b.exe"])
+
+# Test that building from child projects work.
+t.run_build_system(subdir='src')
+t.ignore("build/config.log")
+t.ignore("build/project-cache.jam")
+t.expect_nothing_more()
+
+# Test that project can override build dir.
+t.write("jamfile.jam", """\
+exe a : a.cpp ;
+build-project src ;
+""")
+
+t.write("src/jamfile.jam", """\
+project : build-dir build ;
+exe b : b.cpp ;
+""")
+
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug*/a.exe",
+ "src/build/$toolset/debug*/b.exe"])
+
+# Now test the '--build-dir' option.
+t.rm(".")
+t.write("jamroot.jam", "")
+
+# Test that we get a warning when no project id is specified.
+t.run_build_system(["--build-dir=foo"])
+t.fail_test(t.stdout().find(
+ "warning: the --build-dir option will be ignored") == -1)
+
+t.write("jamroot.jam", """\
+project foo ;
+exe a : a.cpp ;
+build-project sub ;
+""")
+t.write("a.cpp", "int main() {}\n")
+t.write("sub/jamfile.jam", "exe b : b.cpp ;\n")
+t.write("sub/b.cpp", "int main() {}\n")
+
+t.run_build_system(["--build-dir=build"])
+t.expect_addition(["build/foo/$toolset/debug*/a.exe",
+ "build/foo/sub/$toolset/debug*/b.exe"])
+
+t.write("jamroot.jam", """\
+project foo : build-dir bin.v2 ;
+exe a : a.cpp ;
+build-project sub ;
+""")
+
+t.run_build_system(["--build-dir=build"])
+t.expect_addition(["build/foo/bin.v2/$toolset/debug*/a.exe",
+ "build/foo/bin.v2/sub/$toolset/debug*/b.exe"])
+
+# Try building in subdir. We expect that the entire build tree will be in
+# 'sub/build'. Today, I am not sure if this is what the user expects, but let
+# it be.
+t.rm('build')
+t.run_build_system(["--build-dir=build"], subdir="sub")
+t.expect_addition(["sub/build/foo/bin.v2/sub/$toolset/debug*/b.exe"])
+
+t.write("jamroot.jam", """\
+project foo : build-dir %s ;
+exe a : a.cpp ;
+build-project sub ;
+""" % os.getcwd().replace('\\', '\\\\'))
+
+t.run_build_system(["--build-dir=build"], status=1)
+t.fail_test(t.stdout().find(
+ "Absolute directory specified via 'build-dir' project attribute") == -1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/build_file.py b/src/boost/tools/build/test/build_file.py
new file mode 100644
index 000000000..1ae860917
--- /dev/null
+++ b/src/boost/tools/build/test/build_file.py
@@ -0,0 +1,170 @@
+#!/usr/bin/python
+
+# Copyright (C) 2006. Vladimir Prus
+# Copyright (C) 2008. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that we can explicitly request a file (not a target) to be built by
+# specifying its name on the command line.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_building_file_from_specific_project()
+# ------------------------------------------
+#
+###############################################################################
+
+def test_building_file_from_specific_project():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+build-project sub ;
+""")
+ t.write("hello.cpp", "int main() {}\n")
+ t.write("sub/jamfile.jam", """
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+exe sub : hello.cpp ;
+""")
+ t.write("sub/hello.cpp", "int main() {}\n")
+
+ t.run_build_system(["sub", t.adjust_suffix("hello.obj")])
+ t.expect_output_lines("*depends on itself*", False)
+ t.expect_addition("sub/bin/$toolset/debug*/hello.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_file_from_specific_target()
+# -----------------------------------------
+#
+###############################################################################
+
+def test_building_file_from_specific_target():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello1 : hello1.cpp ;
+exe hello2 : hello2.cpp ;
+exe hello3 : hello3.cpp ;
+""")
+ t.write("hello1.cpp", "int main() {}\n")
+ t.write("hello2.cpp", "int main() {}\n")
+ t.write("hello3.cpp", "int main() {}\n")
+
+ t.run_build_system(["hello1", t.adjust_suffix("hello1.obj")])
+ t.expect_addition("bin/$toolset/debug*/hello1.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_missing_file_from_specific_target()
+# -------------------------------------------------
+#
+###############################################################################
+
+def test_building_missing_file_from_specific_target():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello1 : hello1.cpp ;
+exe hello2 : hello2.cpp ;
+exe hello3 : hello3.cpp ;
+""")
+ t.write("hello1.cpp", "int main() {}\n")
+ t.write("hello2.cpp", "int main() {}\n")
+ t.write("hello3.cpp", "int main() {}\n")
+
+ obj = t.adjust_suffix("hello2.obj")
+ t.run_build_system(["hello1", obj], status=1)
+ t.expect_output_lines("don't know how to make*" + obj)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_multiple_files_with_different_names()
+# ---------------------------------------------------
+#
+###############################################################################
+
+def test_building_multiple_files_with_different_names():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello1 : hello1.cpp ;
+exe hello2 : hello2.cpp ;
+exe hello3 : hello3.cpp ;
+""")
+ t.write("hello1.cpp", "int main() {}\n")
+ t.write("hello2.cpp", "int main() {}\n")
+ t.write("hello3.cpp", "int main() {}\n")
+
+ t.run_build_system([t.adjust_suffix("hello1.obj"), t.adjust_suffix(
+ "hello2.obj")])
+ t.expect_addition("bin/$toolset/debug*/hello1.obj")
+ t.expect_addition("bin/$toolset/debug*/hello2.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_multiple_files_with_the_same_name()
+# -------------------------------------------------
+#
+###############################################################################
+
+def test_building_multiple_files_with_the_same_name():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+build-project sub ;
+""")
+ t.write("hello.cpp", "int main() {}\n")
+ t.write("sub/jamfile.jam", """
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+exe sub : hello.cpp ;
+""")
+ t.write("sub/hello.cpp", "int main() {}\n")
+
+ t.run_build_system([t.adjust_suffix("hello.obj")])
+ t.expect_output_lines("*depends on itself*", False)
+ t.expect_addition("bin/$toolset/debug*/hello.obj")
+ t.expect_addition("sub/bin/$toolset/debug*/hello.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_building_file_from_specific_project()
+test_building_file_from_specific_target()
+test_building_missing_file_from_specific_target()
+test_building_multiple_files_with_different_names()
+test_building_multiple_files_with_the_same_name()
diff --git a/src/boost/tools/build/test/build_hooks.py b/src/boost/tools/build/test/build_hooks.py
new file mode 100644
index 000000000..9b8d37af1
--- /dev/null
+++ b/src/boost/tools/build/test/build_hooks.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests add-pre-build-hook and add-post-build-hook
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("Jamroot.jam", """
+import build-system ;
+build-system.add-pre-build-hook pre-build ;
+build-system.add-post-build-hook post-build ;
+
+rule pre-build ( )
+{
+ ECHO "in" pre-build hook ;
+}
+
+rule post-build ( okay ? )
+{
+ ECHO "in" post-build hook $(okay) ;
+}
+
+message show : building main targets ;
+""")
+
+t.run_build_system(stdout="""\
+building main targets
+in pre-build hook
+...found 1 target...
+in post-build hook ok
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/build_no.py b/src/boost/tools/build/test/build_no.py
new file mode 100644
index 000000000..771e697a7
--- /dev/null
+++ b/src/boost/tools/build/test/build_no.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that the <build>no property prevents a target from being built.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "exe hello : hello.cpp : <variant>debug:<build>no ;")
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_nothing_more()
+
+t.run_build_system(["release"])
+t.expect_addition("bin/$toolset/release*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/builtin_echo.py b/src/boost/tools/build/test/builtin_echo.py
new file mode 100755
index 000000000..309236173
--- /dev/null
+++ b/src/boost/tools/build/test/builtin_echo.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the ECHO rule.
+
+import BoostBuild
+
+def test_echo(name):
+ t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+ t.write("file.jam", """\
+%s ;
+UPDATE ;
+""" % name)
+ t.run_build_system(stdout="\n")
+
+ t.write("file.jam", """\
+%s a message ;
+UPDATE ;
+""" % name)
+ t.run_build_system(stdout="a message\n")
+
+ t.cleanup()
+
+test_echo("ECHO")
+test_echo("Echo")
+test_echo("echo")
diff --git a/src/boost/tools/build/test/builtin_exit.py b/src/boost/tools/build/test/builtin_exit.py
new file mode 100755
index 000000000..1db869366
--- /dev/null
+++ b/src/boost/tools/build/test/builtin_exit.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the EXIT rule.
+
+import BoostBuild
+
+def test_exit(name):
+ t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+ t.write("file.jam", "%s ;" % name)
+ t.run_build_system(status=1, stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s : 0 ;" % name)
+ t.run_build_system(stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s : 1 ;" % name)
+ t.run_build_system(status=1, stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s : 2 ;" % name)
+ t.run_build_system(status=2, stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s a message ;" % name)
+ t.run_build_system(status=1, stdout="a message\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s a message : 0 ;" % name)
+ t.run_build_system(stdout="a message\n")
+ t.rm(".")
+
+ t.cleanup()
+
+test_exit("EXIT")
+test_exit("Exit")
+test_exit("exit")
diff --git a/src/boost/tools/build/test/builtin_glob.py b/src/boost/tools/build/test/builtin_glob.py
new file mode 100755
index 000000000..b68e7eebe
--- /dev/null
+++ b/src/boost/tools/build/test/builtin_glob.py
@@ -0,0 +1,87 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the GLOB rule.
+
+import os
+import BoostBuild
+
+def test_glob(files, glob, expected, setup=""):
+ t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+ t.write("file.jam", setup + """
+ for local p in [ SORT %s ]
+ {
+ ECHO $(p) ;
+ }
+ UPDATE ;
+ """ % glob)
+ for f in files:
+ t.write(f, "")
+ # convert / into \ on windows
+ expected = [os.path.join(*p.split("/")) for p in expected]
+ expected.sort()
+ t.run_build_system(stdout="\n".join(expected + [""]))
+ t.cleanup()
+
+# one or both arguments empty
+test_glob([], "[ GLOB : ]", [])
+test_glob([], "[ GLOB . : ]", [])
+test_glob([], "[ GLOB : * ]", [])
+
+# a single result
+test_glob([], "[ GLOB . : * ]", ["./file.jam"])
+
+# * can match any number of characters
+test_glob([], "[ GLOB . : file*.jam ]", ["./file.jam"])
+test_glob([], "[ GLOB . : f*am ]", ["./file.jam"])
+# ? should match a single character, but not more than one
+test_glob([], "[ GLOB . : fi?e.?am ]", ["./file.jam"])
+test_glob([], "[ GLOB . : fi?.jam ]", [])
+# [abc-fh-j] matches a set of characters
+test_glob([], '[ GLOB . : "[f][i][l][e].jam" ]', ["./file.jam"])
+test_glob([], '[ GLOB . : "[fghau][^usdrwe][k-o][^f-s].jam" ]', ["./file.jam"])
+# \x matches x
+test_glob([], "[ GLOB . : \\f\\i\\l\\e.jam ]", ["./file.jam"])
+
+# multiple results
+test_glob(["test.txt"], "[ GLOB . : * ]", ["./file.jam", "./test.txt"])
+
+# directories
+test_glob(["dir1/dir2/test.txt"], "[ GLOB dir1 : * ]", ["dir1/dir2"]);
+
+# non-existent directory
+test_glob([], "[ GLOB dir1 : * ] ", [])
+
+# multiple directories and patterns
+test_glob(["dir1/file1.txt", "dir2/file1.txt",
+ "dir2/file2.txt"],
+ "[ GLOB dir1 dir2 : file1* file2* ]",
+ ["dir1/file1.txt", "dir2/file1.txt",
+ "dir2/file2.txt"])
+
+# The directory can contain . and ..
+test_glob(["dir/test.txt"], "[ GLOB dir/. : test.txt ]", ["dir/./test.txt"])
+test_glob(["dir/test.txt"], "[ GLOB dir/.. : file.jam ]", ["dir/../file.jam"])
+
+# On case insensitive filesystems, the result should
+# be normalized. It should NOT be downcased.
+test_glob(["TEST.TXT"], "[ GLOB . : TEST.TXT ]", ["./TEST.TXT"])
+
+case_insensitive = (os.path.normcase("FILE") == "file")
+
+if case_insensitive:
+ test_glob(["TEST.TXT"], "[ GLOB . : test.txt ]", ["./TEST.TXT"])
+ # This used to fail because the caching routines incorrectly
+ # reported that . and .. do not exist.
+ test_glob(["D1/D2/TEST.TXT"], "[ GLOB D1/./D2 : test.txt ]",
+ ["D1/./D2/TEST.TXT"])
+ test_glob(["D1/TEST.TXT", "TEST.TXT"], "[ GLOB D1/../D1 : test.txt ]",
+ ["D1/../D1/TEST.TXT"])
+ # This also failed because directories that were first found
+ # by GLOB were recorded as non-existent.
+ test_glob(["D1/D2/TEST.TXT"], "[ GLOB d1/d2 : test.txt ]",
+ ["D1/D2/TEST.TXT"],
+ "GLOB . : * ;")
diff --git a/src/boost/tools/build/test/builtin_glob_archive.py b/src/boost/tools/build/test/builtin_glob_archive.py
new file mode 100644
index 000000000..8cbc3be58
--- /dev/null
+++ b/src/boost/tools/build/test/builtin_glob_archive.py
@@ -0,0 +1,217 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Copyright 2015 Artur Shepilko
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the GLOB_ARCHIVE rule.
+
+import os
+import sys
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+import BoostBuild
+
+vms = ( os.name == 'posix' and sys.platform == 'OpenVMS')
+
+t = BoostBuild.Tester()
+
+## Setup test archive sources and symbols they contain.
+sources = {
+ "a.cpp" : ["a"],
+ "b.cpp" : ["b"],
+ "b_match.cpp" : ["b_match"],
+ "c/nopath_check.cpp" : ["nopath_check"],
+ "CaseCheck.cpp" : ["CaseCheck"],
+ "seq_check1.cpp" : ["seq_check1"],
+ "seq_check2.cpp" : ["seq_check2"],
+ "seq_check3.cpp" : ["seq_check3"],
+ "symbols_check.c" : ["symbol", "symbol_match"],
+ "members_and_symbols_check.c" : ["member_and_symbol_match"],
+ "symbol_case_check.c" : ["SymbolCaseCheck"],
+ "main_check.cpp" : ["main"]
+}
+
+
+def create_sources(path, sources):
+ for s in sources :
+ f = os.path.join(path, s)
+ t.write(f, "")
+ output = StringIO()
+ for sym in sources[s] :
+ output.write("int %s() { return 0; }\n" % sym)
+ t.write(f, output.getvalue())
+
+
+def setup_archive(name, sources):
+ global archive
+ global obj_suffix
+ archive = t.adjust_names(name)[0]
+ obj_suffix = t.adjust_names(".obj")[0]
+ output = StringIO()
+ t.write("jamroot.jam","")
+ output.write("""\
+static-lib %s :
+""" % name.split(".")[0])
+ ## sort the sources, so we can test order of the globbed members
+ for s in sorted(sources) :
+ output.write("""\
+ %s
+""" % s)
+ output.write("""\
+ ;
+""")
+ t.write("lib/jamfile.jam", output.getvalue())
+ create_sources("lib", sources)
+ t.run_build_system(subdir="lib")
+ built_archive = "lib/bin/$toolset/debug*/%s" % name
+ t.expect_addition(built_archive)
+ t.copy(built_archive, name)
+ t.rm("lib")
+
+
+def test_glob_archive(archives, glob, expected, sort_results = False):
+ output = StringIO()
+ ## replace placeholders
+ glob = glob.replace("$archive1", archives[0]).replace("$obj", obj_suffix)
+ expected = [ m.replace("$archive1",
+ archives[0]).replace("$obj", obj_suffix) for m in expected ]
+ if len(archives) > 1 :
+ glob = glob.replace("$archive2", archives[1]).replace("$obj", obj_suffix)
+ expected = [ m.replace("$archive2",
+ archives[1]).replace("$obj", obj_suffix) for m in expected ]
+ ## create test jamfile
+ if sort_results : glob = "[ SORT %s ]" % glob
+ output.write("""\
+ for local p in %s
+ {
+ ECHO $(p) ;
+ }
+ UPDATE ;
+ """ % glob)
+ t.write("file.jam", output.getvalue())
+ ## run test jamfile and match against expected results
+ if sort_results : expected.sort()
+ t.run_build_system(["-ffile.jam"], stdout="\n".join(expected + [""]))
+ t.rm("file.jam")
+
+
+## RUN TESTS
+setup_archive("auxilliary1.lib", sources)
+archive1 = archive
+setup_archive("auxilliary2.lib", sources)
+archive2 = archive
+
+## all arguments empty
+test_glob_archive([archive1], "[ GLOB_ARCHIVE ]", [])
+
+## empty query
+test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : ]", [])
+
+## no-match
+test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : a ]", [])
+
+## match exact
+test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : a$obj ]",
+ ["$archive1(a$obj)"])
+
+## glob wildcards:1
+test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : b.* ]",
+ ["$archive1(b$obj)"])
+
+## glob wildcards:2
+test_glob_archive([archive1], '[ GLOB_ARCHIVE $archive1 : "\\b?match[\.]*" ]',
+ ["$archive1(b_match$obj)"])
+
+## glob wildcards:3
+test_glob_archive([archive1], "[ SORT [ GLOB_ARCHIVE $archive1 : b* ] ]",
+ ["$archive1(b$obj)", "$archive1(b_match$obj)"])
+
+## glob multiple patterns with multiple results.
+test_glob_archive([archive1], "[ SORT [ GLOB_ARCHIVE $archive1 : b.* b_* ] ]",
+ ["$archive1(b$obj)", "$archive1(b_match$obj)"])
+
+## glob multiple archives and patterns.
+test_glob_archive([archive1, archive2],
+ "[ SORT [ GLOB_ARCHIVE $archive1 $archive2 : b.* b_* ] ]",
+ ["$archive1(b$obj)", "$archive1(b_match$obj)",
+ "$archive2(b$obj)", "$archive2(b_match$obj)"])
+
+## glob same archive multiple times.
+test_glob_archive([archive1, archive1],
+ "[ GLOB_ARCHIVE $archive1 $archive2 $archive1 : b.* ]",
+ ["$archive1(b$obj)", "$archive2(b$obj)", "$archive1(b$obj)"])
+
+## returned archive member has no path, even though its source object-file did.
+## this is rather NT-specific, where members also store their object-file's path.
+test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : nopath_check$obj ]",
+ ["$archive1(nopath_check$obj)"])
+
+## case insensitive matching, when archives support case sensitive member names.
+## VMS implementation forces case-insensitive matching and downcased member names.
+
+case_sensitive_members = ( not vms )
+
+if case_sensitive_members:
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : casecheck$obj : true ]",
+ ["$archive1(CaseCheck$obj)"])
+elif vms:
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : CaseCheck$obj : false ]",
+ ["$archive1(casecheck$obj)"])
+
+
+## test the order of matched members; in general it should match the
+## insertion sequence.
+test_glob_archive([archive1], "[ SORT [ GLOB_ARCHIVE $archive1 : seq_check*$obj ] ]",
+ ["$archive1(seq_check1$obj)", "$archive1(seq_check2$obj)",
+ "$archive1(seq_check3$obj)"])
+
+
+## glob members by symbols they contain.
+## Currently supported only on VMS.
+symbol_glob_supported = ( vms )
+
+if symbol_glob_supported :
+ ## NOTE: generated symbols are compiler-dependent and may be specifically
+ ## mangled (as in C++ case), so globbing by exact symbol is non-trivial.
+ ## However, C-generated symbols are likely to have more portable names,
+ ## so for the glob-by-symbol tests we glob C-generated archive members.
+
+ ## glob members by exact symbol.
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : : : symbol ]",
+ ["$archive1(symbols_check$obj)"])
+
+ ## glob members by symbol wildcard.
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : : : symbol_* ]",
+ ["$archive1(symbols_check$obj)"])
+
+ ## glob members by member pattern AND symbol pattern.
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : *symbol* : : *member* ]",
+ ["$archive1(members_and_symbols_check$obj)"])
+
+ ## case insensitive symbol glob.
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : : true : symbolcasecheck ]",
+ ["$archive1(symbol_case_check$obj)"])
+
+ ## glob member that contains main symbol.
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : : : main _main ]",
+ ["$archive1(main_check$obj)"])
+
+else:
+ test_glob_archive([archive1],
+ "[ GLOB_ARCHIVE $archive1 : : : symbol ]",
+ [])
+
+
+t.cleanup()
+
diff --git a/src/boost/tools/build/test/builtin_readlink.py b/src/boost/tools/build/test/builtin_readlink.py
new file mode 100755
index 000000000..e57d7286a
--- /dev/null
+++ b/src/boost/tools/build/test/builtin_readlink.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("link-target", "")
+os.symlink("link-target", "link")
+
+t.write("file.jam", """
+ECHO [ READLINK link ] ;
+EXIT [ READLINK link-target ] : 0 ;
+""")
+
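+# Expected output: READLINK on the symlink echoes its target ("link-target"),
+# while READLINK on the regular file yields nothing, so the EXIT message is an
+# empty line and the exit status is forced to 0.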
+t.run_build_system(["-ffile.jam"], stdout="""link-target
+
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/builtin_split_by_characters.py b/src/boost/tools/build/test/builtin_split_by_characters.py
new file mode 100755
index 000000000..4a0a0e061
--- /dev/null
+++ b/src/boost/tools/build/test/builtin_split_by_characters.py
@@ -0,0 +1,57 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the SPLIT_BY_CHARACTERS rule.
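+#
+# As the assertions below illustrate, the second argument is treated as a set
+# of delimiter characters rather than a single delimiter string, repeated
+# delimiter characters are redundant, and empty tokens are discarded.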
+
+import BoostBuild
+
+def test_invalid(params, expected_error_line):
+ t = BoostBuild.Tester(pass_toolset=0)
+ t.write("file.jam", "SPLIT_BY_CHARACTERS %s ;" % params)
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("[*] %s" % expected_error_line)
+ t.cleanup()
+
+def test_valid():
+ t = BoostBuild.Tester(pass_toolset=0)
+ t.write("jamroot.jam", """\
+import assert ;
+
+assert.result FooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : "" ;
+assert.result FooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : x ;
+assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : r ;
+assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : rr ;
+assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : rrr ;
+assert.result FooB rB z : SPLIT_BY_CHARACTERS FooBarBaz : a ;
+assert.result FooB B z : SPLIT_BY_CHARACTERS FooBarBaz : ar ;
+assert.result ooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : F ;
+assert.result FooBarBa : SPLIT_BY_CHARACTERS FooBarBaz : z ;
+assert.result ooBarBa : SPLIT_BY_CHARACTERS FooBarBaz : Fz ;
+assert.result F B rB z : SPLIT_BY_CHARACTERS FooBarBaz : oa ;
+assert.result Alib b : SPLIT_BY_CHARACTERS Alibaba : oa ;
+assert.result libaba : SPLIT_BY_CHARACTERS Alibaba : oA ;
+assert.result : SPLIT_BY_CHARACTERS FooBarBaz : FooBarBaz ;
+assert.result : SPLIT_BY_CHARACTERS FooBarBaz : FoBarz ;
+
+# Questionable results - should they return an empty string or an empty list?
+assert.result : SPLIT_BY_CHARACTERS "" : "" ;
+assert.result : SPLIT_BY_CHARACTERS "" : x ;
+assert.result : SPLIT_BY_CHARACTERS "" : r ;
+assert.result : SPLIT_BY_CHARACTERS "" : rr ;
+assert.result : SPLIT_BY_CHARACTERS "" : rrr ;
+assert.result : SPLIT_BY_CHARACTERS "" : oa ;
+""")
+ t.run_build_system()
+ t.cleanup()
+
+test_invalid("", "missing argument string")
+test_invalid("Foo", "missing argument delimiters")
+test_invalid(": Bar", "missing argument string")
+test_invalid("a : b : c", "extra argument c")
+test_invalid("a b : c", "extra argument b")
+test_invalid("a : b c", "extra argument c")
+test_valid()
diff --git a/src/boost/tools/build/test/bzip2.py b/src/boost/tools/build/test/bzip2.py
new file mode 100755
index 000000000..4e74c6023
--- /dev/null
+++ b/src/boost/tools/build/test/bzip2.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
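+# MockToolset stands in for a real toolset here: each action(...) entry passed
+# to MockToolset.set_expected() below describes a command line the build is
+# expected to issue, so no real compiler is invoked.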
+
+# Build from source
+t.write("bzip2/bzlib.h", 'bzip2')
+t.write("bzip2/blocksort.c", 'blocksort')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using bzip2 : : <source>$(here)/bzip2 ;
+alias bzip2 : /bzip2//bzip2 : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('blocksort.c', 'blocksort')
+action('-c -x c -I./bzip2 -o $blocksort.o $blocksort.c')
+action('--dll $blocksort.o -o $bz2.so')
+action('--archive $blocksort.o -o $bz2.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/bzip2/mock/debug/bz2.dll')
+t.expect_addition('bin/standalone/bzip2/mock/debug/link-static/bz2.lib')
+
+t.rm('bzip2')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('bzlib.h.cpp', '#include <bzlib.h>\\n')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using bzip2 ;
+exe test : test.cpp /bzip2//bzip2 : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=bz2 -o $config.exe')
+action('-c -x c++ $bzlib.h.cpp -o $bzlib.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=bz2 -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using bzip2 ;
+exe test : test.cpp /bzip2//bzip2 : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=bz2 -o $config.exe')
+action('-c -x c++ $bzlib.h.cpp -o $bzlib.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=bz2 -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using bzip2 : : <name>mybzlib <include>$(here)/bzip2 <search>$(here)/bzip2 ;
+exe test : test.cpp /bzip2//bzip2 : : <link>static <link>shared ;
+""")
+
+t.write('bzip2/bzlib.h', 'bzip2')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./bzip2 --static-lib=mybzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./bzip2 -o $test.o')
+action('$test.o -L./bzip2 --static-lib=mybzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using bzip2 : : <name>mybzlib <include>$(here)/bzip2 <search>$(here)/bzip2 ;
+exe test : test.cpp /bzip2//bzip2 : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./bzip2 --shared-lib=mybzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./bzip2 -o $test.o')
+action('$test.o -L./bzip2 --shared-lib=mybzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/c_file.py b/src/boost/tools/build/test/c_file.py
new file mode 100644
index 000000000..85407d5f1
--- /dev/null
+++ b/src/boost/tools/build/test/c_file.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that C files are compiled by a C compiler.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+project ;
+exe hello : hello.cpp a.c ;
+""")
+
+t.write("hello.cpp", """
+extern "C" int foo();
+int main() { return foo(); }
+""")
+
+t.write("a.c", """
+// This will not compile unless in C mode.
+int foo()
+{
+ int new = 0;
+ new = (new+1)*7;
+ return new;
+}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/chain.py b/src/boost/tools/build/test/chain.py
new file mode 100644
index 000000000..981e6ad11
--- /dev/null
+++ b/src/boost/tools/build/test/chain.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests that:
+# 1) the 'make' rule correctly assigns types to produced targets, and
+# 2) if 'make' creates targets of type CPP, they are correctly used.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# In order to correctly link this app, 'b.cpp', created by a 'make' rule, should
+# be compiled.
+
+t.write("jamroot.jam", "import gcc ;")
+
+t.write("jamfile.jam", r'''
+import os ;
+if [ os.name ] = NT
+{
+ actions create
+ {
+ echo int main() {} > $(<)
+ }
+}
+else
+{
+ actions create
+ {
+ echo "int main() {}" > $(<)
+ }
+}
+
+IMPORT $(__name__) : create : : create ;
+
+exe a : l dummy.cpp ;
+
+# Needs to be a static lib for Windows - main() cannot appear in a DLL.
+static-lib l : a.cpp b.cpp ;
+
+make b.cpp : : create ;
+''')
+
+t.write("a.cpp", "")
+
+t.write("dummy.cpp", "// msvc needs at least one object file\n")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/a.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/clean.py b/src/boost/tools/build/test/clean.py
new file mode 100644
index 000000000..dc72b924c
--- /dev/null
+++ b/src/boost/tools/build/test/clean.py
@@ -0,0 +1,104 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+t.write("jamroot.jam", "exe a : a.cpp sub1//sub1 sub2//sub2 sub3//sub3 ;")
+t.write("sub1/jamfile.jam", """\
+lib sub1 : sub1.cpp sub1_2 ../sub2//sub2 ;
+lib sub1_2 : sub1_2.cpp ;
+""")
+
+t.write("sub1/sub1.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub1() {}
+""")
+
+t.write("sub1/sub1_2.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub1() {}
+""")
+
+t.write("sub2/jamfile.jam", "lib sub2 : sub2.cpp ;")
+t.write("sub2/sub2.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub2() {}
+""")
+
+t.write("sub3/jamroot.jam", "lib sub3 : sub3.cpp ;")
+t.write("sub3/sub3.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub3() {}
+""")
+
+# 'clean' should not remove files under separate jamroot.jam.
+t.run_build_system()
+t.run_build_system(["--clean"])
+t.expect_removal("bin/$toolset/debug*/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1_2.obj")
+t.expect_removal("sub2/bin/$toolset/debug*/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug*/sub3.obj")
+
+# 'clean-all' removes everything it can reach.
+t.run_build_system()
+t.run_build_system(["--clean-all"])
+t.expect_removal("bin/$toolset/debug*/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1_2.obj")
+t.expect_removal("sub2/bin/$toolset/debug*/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug*/sub3.obj")
+
+# 'clean' together with project target removes only under that project.
+t.run_build_system()
+t.run_build_system(["sub1", "--clean"])
+t.expect_nothing("bin/$toolset/debug*/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1_2.obj")
+t.expect_nothing("sub2/bin/$toolset/debug*/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug*/sub3.obj")
+
+# 'clean-all' removes everything.
+t.run_build_system()
+t.run_build_system(["sub1", "--clean-all"])
+t.expect_nothing("bin/$toolset/debug*/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug*/sub1_2.obj")
+t.expect_removal("sub2/bin/$toolset/debug*/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug*/sub3.obj")
+
+# If main target is explicitly named, we should not remove files from other
+# targets.
+t.run_build_system()
+t.run_build_system(["sub1//sub1", "--clean"])
+t.expect_removal("sub1/bin/$toolset/debug*/sub1.obj")
+t.expect_nothing("sub1/bin/$toolset/debug*/sub1_2.obj")
+t.expect_nothing("sub2/bin/$toolset/debug*/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug*/sub3.obj")
+
+# Regression test: sources of the 'cast' rule were mistakenly deleted.
+t.rm(".")
+t.write("jamroot.jam", """\
+import cast ;
+cast a cpp : a.h ;
+""")
+t.write("a.h", "")
+t.run_build_system(["--clean"])
+t.expect_nothing("a.h")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/cli_property_expansion.py b/src/boost/tools/build/test/cli_property_expansion.py
new file mode 100644
index 000000000..24c821617
--- /dev/null
+++ b/src/boost/tools/build/test/cli_property_expansion.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python
+
+# Copyright 2015 Aaron Boman
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a free feature declared in a parent project can be given a quoted,
+# space-containing value on the command line when building a subproject from
+# the root directory.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "")
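+# Note: 'build.jam' is one of the file names B2 accepts as a project jamfile,
+# so the files written below act as ordinary project files for their
+# directories.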
+t.write(
+ "subdir/build.jam",
+ """
+ import feature ;
+ feature.feature my-feature : : free ;
+ """
+)
+t.write(
+ "subdir/subsubdir/build.jam",
+ """
+ exe hello : hello.c ;
+ """
+)
+t.write(
+ "subdir/subsubdir/hello.c",
+ r"""
+ #include <stdio.h>
+
+ int main(int argc, char **argv){
+ printf("%s\n", "Hello, World!");
+ }
+ """
+)
+
+# run from the root directory
+t.run_build_system(['subdir/subsubdir', 'my-feature="some value"'])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/collect_debug_info.py b/src/boost/tools/build/test/collect_debug_info.py
new file mode 100755
index 000000000..27a664928
--- /dev/null
+++ b/src/boost/tools/build/test/collect_debug_info.py
@@ -0,0 +1,341 @@
+#!/usr/bin/python
+
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Temporarily enabled dummy test that always fails and is used to collect
+# extra debugging information from Boost Build test runner sites.
+
+import BoostBuild
+
+import os
+import re
+import sys
+
+
+###############################################################################
+#
+# Public interface.
+#
+###############################################################################
+
+def collectDebugInfo():
+ t = _init()
+
+ global tag
+
+ tag = "Python version"
+ try:
+ _info(sys.version)
+ except:
+ _info_exc()
+
+ tag = "Python platform"
+ try:
+ _info(sys.platform)
+ except:
+ _info_exc()
+
+ tag = "Boost Jam/Build version"
+ try:
+ _infoX(_getJamVersionInfo(t))
+ except:
+ _info_exc()
+
+ #_collectDebugInfo_environ()
+
+ # Report prepared annotations.
+ t.fail_test(1, dump_difference=False, dump_stdio=False, dump_stack=False)
+
+
+###############################################################################
+#
+# Private interface.
+#
+###############################################################################
+
+varSeparator = "###$^%~~~"
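+# Jam variable values are echoed joined with this (deliberately unlikely)
+# separator so that _getJamVersionInfo() can split them back into lists.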
+
+
+def _collect(results, prefix, name, t):
+ results.append("%s - %s - os.getenv(): %r" % (prefix, name, os.getenv(
+ name)))
+ results.append("%s - %s - os.environ.get(): %r" % (prefix, name,
+ os.environ.get(name)))
+ external_values = _getExternalValues(t, name)
+ results.append("%s - %s - external: %r" % (prefix, name,
+ external_values[name]))
+
+
+def _collectDebugInfo_environ(t):
+ dummyVars = ["WOOF_WOOFIE_%d" % x for x in range(4)]
+ global tag
+
+ tag = "XXX in os.environ"
+ try:
+ def f(name):
+ return "%s: %s" % (name, name in os.environ)
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ tag = "os.environ[XXX]"
+ try:
+ def f(name):
+ try:
+ result = os.environ[name]
+ except:
+ result = _str_exc()
+ return "%s: %r" % (name, result)
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ tag = "os.environ.get(XXX)"
+ try:
+ def f(name):
+ return "%s: %r" % (name, os.environ.get(name))
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ tag = "os.getenv(XXX)"
+ try:
+ def f(name):
+ return "%s: %r" % (name, os.getenv(name))
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ name = dummyVars[0]
+ value = "foo"
+ tag = "os.putenv(%s) to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.putenv(name, value)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[1]
+ value = "bar"
+ tag = "os.environ[%s] to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.environ[name] = value
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[1]
+ value = "baz"
+ tag = "os.putenv(%s) to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.putenv(name, value)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[1]
+ value = ""
+ tag = "os.putenv(%s) to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.putenv(name, value)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value = "foo"
+ tag = "os.unsetenv(%s) from %r" % (name, value)
+ try:
+ results = []
+ os.environ[name] = value
+ _collect(results, "before", name, t)
+ os.unsetenv(name)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value = "foo"
+ tag = "del os.environ[%s] from %r" % (name, value)
+ try:
+ results = []
+ os.environ[name] = value
+ _collect(results, "before", name, t)
+ del os.environ[name]
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value = "foo"
+ tag = "os.environ.pop(%s) from %r" % (name, value)
+ try:
+ results = []
+ os.environ[name] = value
+ _collect(results, "before", name, t)
+ os.environ.pop(name)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value1 = "foo"
+ value2 = ""
+ tag = "os.environ[%s] to %r from %r" % (name, value2, value1)
+ try:
+ results = []
+ os.environ[name] = value1
+ _collect(results, "before", name, t)
+ os.environ[name] = value2
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[3]
+ value = '""'
+ tag = "os.environ[%s] to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.environ[name] = value
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+
+def _getExternalValues(t, *args):
+ t.run_build_system(["---var-name=%s" % x for x in args])
+ result = dict()
+ for x in args:
+ m = re.search(r"^\*\*\*ENV\*\*\* %s: '(.*)' \*\*\*$" % x, t.stdout(),
+ re.MULTILINE)
+ if m:
+ result[x] = m.group(1)
+ else:
+ result[x] = None
+ return result
+
+
+def _getJamVersionInfo(t):
+ result = []
+
+ # JAM version variables.
+ t.run_build_system(["---version"])
+ for m in re.finditer(r"^\*\*\*VAR\*\*\* ([^:]*): (.*)\*\*\*$", t.stdout(),
+ re.MULTILINE):
+ name = m.group(1)
+ value = m.group(2)
+ if not value:
+ value = []
+ elif value[-1] == ' ':
+ value = value[:-1].split(varSeparator)
+ else:
+ value = "!!!INVALID!!! - '%s'" % value
+ result.append("%s = %s" % (name, value))
+ result.append("")
+
+ # bjam -v output.
+ t.run_build_system(["-v"])
+ result.append("--- output for 'bjam -v' ---")
+ result.append(t.stdout())
+
+ # bjam --version output.
+ t.run_build_system(["--version"], status=1)
+ result.append("--- output for 'bjam --version' ---")
+ result.append(t.stdout())
+
+ return result
+
+
+def _init():
+ toolsetName = "__myDummyToolset__"
+
+ t = BoostBuild.Tester(["toolset=%s" % toolsetName], pass_toolset=False,
+ use_test_config=False)
+
+ # Prepare a dummy toolset so we do not get errors in case the default one
+ # is not found.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+feature.extend toolset : %s ;
+rule init ( ) { }
+""" % toolsetName )
+
+ # Python version of the same dummy toolset.
+ t.write(toolsetName + ".py", """\
+from b2.build import feature
+feature.extend('toolset', ['%s'])
+def init(): pass
+""" % toolsetName )
+
+ t.write("jamroot.jam", """\
+import os ;
+.argv = [ modules.peek : ARGV ] ;
+local names = [ MATCH ^---var-name=(.*) : $(.argv) ] ;
+for x in $(names)
+{
+ value = [ os.environ $(x) ] ;
+ ECHO ***ENV*** $(x): '$(value)' *** ;
+}
+if ---version in $(.argv)
+{
+ for x in JAMVERSION JAM_VERSION JAMUNAME JAM_TIMESTAMP_RESOLUTION OS
+ {
+ v = [ modules.peek : $(x) ] ;
+ ECHO ***VAR*** $(x): "$(v:J=%s)" *** ;
+ }
+}
+""" % varSeparator)
+
+ return t
+
+
+def _info(*values):
+ values = list(values) + [""]
+ BoostBuild.annotation(tag, "\n".join(str(x) for x in values))
+
+
+def _infoX(values):
+ _info(*values)
+
+
+def _info_exc():
+ _info(_str_exc())
+
+
+def _str_exc():
+ exc_type, exc_value = sys.exc_info()[0:2]
+ if exc_type is None:
+ exc_type_name = "None"
+ else:
+ exc_type_name = exc_type.__name__
+ return "*** EXCEPTION *** %s - %s ***" % (exc_type_name, exc_value)
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+collectDebugInfo()
diff --git a/src/boost/tools/build/test/command_line_properties.py b/src/boost/tools/build/test/command_line_properties.py
new file mode 100644
index 000000000..518991b6c
--- /dev/null
+++ b/src/boost/tools/build/test/command_line_properties.py
@@ -0,0 +1,166 @@
+#!/usr/bin/python
+
+import BoostBuild
+
+def test_basic():
+ '''Tests that feature=value works'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ feature f1 : 1 2 ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1=2'])
+ t.expect_content("bin/*/output.txt", "2")
+ t.cleanup()
+
+def test_implicit():
+    '''Tests that implicit feature values can be given on the command line without naming the feature'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ feature f1 : v1 v2 : implicit ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['v2'])
+ t.expect_content("bin/*/output.txt", "v2")
+ t.cleanup()
+
+def test_optional():
+ '''Tests that feature= works for optional features'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ feature f1 : 1 2 : optional ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1> ;
+ actions run { echo b $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1='])
+ t.expect_content("bin/*/output.txt", "b")
+ t.cleanup()
+
+def test_free():
+ '''Free features named on the command line apply to all targets
+ everywhere. Free features can contain any characters, even those
+ that have a special meaning.'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ feature f1 : : free ;
+ make output1.txt : : @run : <dependency>output2.txt ;
+ make output2.txt : : @run ;
+ explicit output2.txt ;
+ flags run OPTIONS <f1> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1=x,/:-'])
+ t.expect_content("bin*/output1.txt", "x,/:-")
+ t.expect_content("bin*/output2.txt", "x,/:-")
+ t.cleanup()
+
+def test_subfeature():
+ '''Subfeatures should be expressed as feature=value-subvalue'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature subfeature ;
+ import toolset : flags ;
+ feature f1 : 1 2 ;
+ subfeature f1 2 : sub : x y ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1-2:sub> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1=2-y'])
+ t.expect_content("bin/*/output.txt", "y")
+ t.cleanup()
+
+def test_multiple_values():
+ '''Multiple values of a feature can be given in a comma-separated list'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ feature f1 : 1 2 3 ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1=2,3'])
+ t.expect_content("bin*/f1-2*/output.txt", "2")
+ t.expect_content("bin*/f1-3*/output.txt", "3")
+ t.cleanup()
+
+def test_multiple_properties():
+ '''Multiple properties can be grouped with /'''
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ feature f1 : 1 2 ;
+ feature f2 : 3 4 ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1> ;
+ flags run OPTIONS <f2> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1=2/f2=4'])
+ t.expect_content("bin/*/output.txt", "2 4")
+ t.cleanup()
+
+def test_cross_product():
+ '''If multiple properties are specified on the command line
+ we expand to every possible maximum set of non-conflicting features.
+ This test should be run after testing individual components in
+ isolation.'''
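+    # For instance, the build request below combines f1=12,13/f2=22 with the
+    # implicit values v2 and v3, standalone f1=14 and f2=23, two free f4
+    # values that apply everywhere, and the grouped v4/f1=15/f4=zzz set; the
+    # expect_content() checks then enumerate every non-conflicting build.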
+ t = BoostBuild.Tester()
+ t.write('Jamroot.jam', '''
+ import feature : feature ;
+ import toolset : flags ;
+ # Make features symmetric to make the paths easier to distinguish
+ feature f1 : 11 12 13 14 15 : symmetric ;
+ feature f2 : 21 22 23 : symmetric ;
+ feature f3 : v1 v2 v3 v4 : implicit symmetric ;
+ feature f4 : : free ;
+ make output.txt : : @run ;
+ flags run OPTIONS <f1> ;
+ flags run OPTIONS <f2> ;
+ flags run OPTIONS <f3> ;
+ flags run OPTIONS <f4> ;
+ actions run { echo $(OPTIONS) > $(<) }
+ ''')
+ t.run_build_system(['f1=12,13/f2=22', 'v2', 'v3', 'f1=14', 'f2=23',
+ 'f4=xxx', 'f4=yyy', 'v4/f1=15/f4=zzz'])
+ t.expect_content("bin*/v2*/f1-12/f2-22*/output.txt", "12 22 v2 xxx yyy")
+ t.expect_addition("bin*/v2*/f1-12/f2-22*/output.txt")
+ t.expect_content("bin*/v2*/f1-13/f2-22*/output.txt", "13 22 v2 xxx yyy")
+ t.expect_addition("bin*/v2*/f1-13/f2-22*/output.txt")
+ t.expect_content("bin*/v2*/f1-14/f2-23*/output.txt", "14 23 v2 xxx yyy")
+ t.expect_addition("bin*/v2*/f1-14/f2-23*/output.txt")
+ t.expect_content("bin*/v3*/f1-12/f2-22*/output.txt", "12 22 v3 xxx yyy")
+ t.expect_addition("bin*/v3*/f1-12/f2-22*/output.txt")
+ t.expect_content("bin*/v3*/f1-13/f2-22*/output.txt", "13 22 v3 xxx yyy")
+ t.expect_addition("bin*/v3*/f1-13/f2-22*/output.txt")
+ t.expect_content("bin*/v3*/f1-14/f2-23*/output.txt", "14 23 v3 xxx yyy")
+ t.expect_addition("bin*/v3*/f1-14/f2-23*/output.txt")
+ t.expect_content("bin*/v4*/f1-15/f2-23*/output.txt", "15 23 v4 xxx yyy zzz")
+ t.expect_addition("bin*/v4*/f1-15/f2-23*/output.txt")
+ t.expect_nothing_more()
+ t.cleanup()
+
+test_basic()
+test_implicit()
+test_optional()
+test_free()
+test_subfeature()
+test_multiple_values()
+test_multiple_properties()
+test_cross_product()
diff --git a/src/boost/tools/build/test/composite.py b/src/boost/tools/build/test/composite.py
new file mode 100644
index 000000000..e3a334b4f
--- /dev/null
+++ b/src/boost/tools/build/test/composite.py
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that composite properties are handled correctly.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+exe hello : hello.cpp : <variant>release ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+""")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/release*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/conditionals.py b/src/boost/tools/build/test/conditionals.py
new file mode 100644
index 000000000..3ad36466c
--- /dev/null
+++ b/src/boost/tools/build/test/conditionals.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test conditional properties.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Arrange a project which will build only if 'a.cpp' is compiled with the
+# "STATIC" macro defined.
+t.write("a.cpp", """\
+#ifdef STATIC
+int main() {}
+#endif
+""")
+
+# Test conditionals in target requirements.
+t.write("jamroot.jam", "exe a : a.cpp : <link>static:<define>STATIC ;")
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static*/a.exe")
+t.rm("bin")
+
+# Test conditionals in project requirements.
+t.write("jamroot.jam", """
+project : requirements <link>static:<define>STATIC ;
+exe a : a.cpp ;
+""")
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static*/a.exe")
+t.rm("bin")
+
+# Regression test for a bug found by Ali Azarbayejani. Conditionals inside
+# usage requirement were not being evaluated.
+t.write("jamroot.jam", """
+lib l : l.cpp : : : <link>static:<define>STATIC ;
+exe a : a.cpp l ;
+""")
+t.write("l.cpp", "int i;")
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static*/a.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/conditionals2.py b/src/boost/tools/build/test/conditionals2.py
new file mode 100644
index 000000000..585e5ca77
--- /dev/null
+++ b/src/boost/tools/build/test/conditionals2.py
@@ -0,0 +1,43 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test: evaluation of conditional requirements could cause two
+# different values of a non-free feature to end up in the same property set.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("a.cpp", "")
+
+t.write("jamroot.jam", """
+import feature ;
+import common ;
+
+feature.feature the_feature : false true : propagated ;
+
+rule maker ( targets * : sources * : properties * )
+{
+ if <the_feature>false in $(properties) &&
+ <the_feature>true in $(properties)
+ {
+ EXIT "Oops, two different values of non-free feature" ;
+ }
+ CMD on $(targets) = [ common.file-creation-command ] ;
+}
+
+actions maker
+{
+ $(CMD) $(<) ;
+}
+
+make a : a.cpp : maker : <variant>debug:<the_feature>true ;
+""")
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/conditionals3.py b/src/boost/tools/build/test/conditionals3.py
new file mode 100644
index 000000000..feffe4900
--- /dev/null
+++ b/src/boost/tools/build/test/conditionals3.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that conditional properties work even if the property is free and its
+# value includes a colon.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+exe hello : hello.cpp : <variant>debug:<define>"CLASS=Foo::Bar" ;
+""")
+
+t.write("hello.cpp", """
+namespace Foo { class Bar { } ; }
+int main()
+{
+ CLASS c;
+ c; // Disables the unused variable warning.
+}
+""")
+
+t.run_build_system(stdout=None, stderr=None)
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/conditionals_multiple.py b/src/boost/tools/build/test/conditionals_multiple.py
new file mode 100755
index 000000000..cb0cfa8c5
--- /dev/null
+++ b/src/boost/tools/build/test/conditionals_multiple.py
@@ -0,0 +1,312 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that properties conditioned on more than one other property work as
+# expected.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_multiple_conditions()
+# --------------------------
+#
+###############################################################################
+
+def test_multiple_conditions():
+ """Basic tests for properties conditioned on multiple other properties."""
+
+ t = BoostBuild.Tester(["--user-config=", "--ignore-site-config",
+ "toolset=testToolset"], pass_toolset=False, use_test_config=False)
+
+ t.write("testToolset.jam", """\
+import feature ;
+feature.extend toolset : testToolset ;
+rule init ( ) { }
+""")
+
+ t.write("testToolset.py", """\
+from b2.build import feature
+feature.extend('toolset', ["testToolset"])
+def init ( ): pass
+""")
+
+ t.write("jamroot.jam", """\
+import feature ;
+import notfile ;
+import toolset ;
+
+feature.feature description : : free incidental ;
+feature.feature aaa : 1 0 : incidental ;
+feature.feature bbb : 1 0 : incidental ;
+feature.feature ccc : 1 0 : incidental ;
+
+rule buildRule ( name : targets ? : properties * )
+{
+ for local description in [ feature.get-values description : $(properties) ]
+ {
+ ECHO "description:" /$(description)/ ;
+ }
+}
+
+notfile testTarget1 : @buildRule : :
+ <description>d
+ <aaa>0:<description>a0
+ <aaa>1:<description>a1
+ <aaa>0,<bbb>0:<description>a0-b0
+ <aaa>0,<bbb>1:<description>a0-b1
+ <aaa>1,<bbb>0:<description>a1-b0
+ <aaa>1,<bbb>1:<description>a1-b1
+ <aaa>0,<bbb>0,<ccc>0:<description>a0-b0-c0
+ <aaa>0,<bbb>0,<ccc>1:<description>a0-b0-c1
+ <aaa>0,<bbb>1,<ccc>1:<description>a0-b1-c1
+ <aaa>1,<bbb>0,<ccc>1:<description>a1-b0-c1
+ <aaa>1,<bbb>1,<ccc>0:<description>a1-b1-c0
+ <aaa>1,<bbb>1,<ccc>1:<description>a1-b1-c1 ;
+""")
+
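+    # Only the conditions whose listed properties all match the build request
+    # should contribute their <description> value, as the checks below verify.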
+ t.run_build_system(["aaa=1", "bbb=1", "ccc=1"])
+ t.expect_output_lines("description: /d/" )
+ t.expect_output_lines("description: /a0/" , False)
+ t.expect_output_lines("description: /a1/" )
+ t.expect_output_lines("description: /a0-b0/" , False)
+ t.expect_output_lines("description: /a0-b1/" , False)
+ t.expect_output_lines("description: /a1-b0/" , False)
+ t.expect_output_lines("description: /a1-b1/" )
+ t.expect_output_lines("description: /a0-b0-c0/", False)
+ t.expect_output_lines("description: /a0-b0-c1/", False)
+ t.expect_output_lines("description: /a0-b1-c1/", False)
+ t.expect_output_lines("description: /a1-b0-c1/", False)
+ t.expect_output_lines("description: /a1-b1-c0/", False)
+ t.expect_output_lines("description: /a1-b1-c1/" )
+
+ t.run_build_system(["aaa=0", "bbb=0", "ccc=1"])
+ t.expect_output_lines("description: /d/" )
+ t.expect_output_lines("description: /a0/" )
+ t.expect_output_lines("description: /a1/" , False)
+ t.expect_output_lines("description: /a0-b0/" )
+ t.expect_output_lines("description: /a0-b1/" , False)
+ t.expect_output_lines("description: /a1-b0/" , False)
+ t.expect_output_lines("description: /a1-b1/" , False)
+ t.expect_output_lines("description: /a0-b0-c0/", False)
+ t.expect_output_lines("description: /a0-b0-c1/" )
+ t.expect_output_lines("description: /a0-b1-c1/", False)
+ t.expect_output_lines("description: /a1-b0-c1/", False)
+ t.expect_output_lines("description: /a1-b1-c0/", False)
+ t.expect_output_lines("description: /a1-b1-c1/", False)
+
+ t.run_build_system(["aaa=0", "bbb=0", "ccc=0"])
+ t.expect_output_lines("description: /d/" )
+ t.expect_output_lines("description: /a0/" )
+ t.expect_output_lines("description: /a1/" , False)
+ t.expect_output_lines("description: /a0-b0/" )
+ t.expect_output_lines("description: /a0-b1/" , False)
+ t.expect_output_lines("description: /a1-b0/" , False)
+ t.expect_output_lines("description: /a1-b1/" , False)
+ t.expect_output_lines("description: /a0-b0-c0/" )
+ t.expect_output_lines("description: /a0-b0-c1/", False)
+ t.expect_output_lines("description: /a0-b1-c1/", False)
+ t.expect_output_lines("description: /a1-b0-c1/", False)
+ t.expect_output_lines("description: /a1-b1-c0/", False)
+ t.expect_output_lines("description: /a1-b1-c1/", False)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_multiple_conditions_with_toolset_version()
+# -----------------------------------------------
+#
+###############################################################################
+
+def test_multiple_conditions_with_toolset_version():
+ """
+ Regression tests for properties conditioned on the toolset version
+ subfeature and some additional properties.
+
+ """
+    toolset = "testToolset"
+
+ t = BoostBuild.Tester(["--user-config=", "--ignore-site-config"],
+ pass_toolset=False, use_test_config=False)
+
+ t.write(toolset + ".jam", """\
+import feature ;
+feature.extend toolset : %(toolset)s ;
+feature.subfeature toolset %(toolset)s : version : 0 1 ;
+rule init ( version ? ) { }
+""" % {"toolset": toolset})
+
+ t.write("testToolset.py", """\
+from b2.build import feature
+feature.extend('toolset', ["%(toolset)s"])
+feature.subfeature('toolset', "%(toolset)s", "version", ['0','1'])
+def init (version=''): pass
+""" % {"toolset": toolset})
+
+ t.write("jamroot.jam", """\
+import feature ;
+import notfile ;
+import toolset ;
+
+toolset.using testToolset ;
+
+feature.feature description : : free incidental ;
+feature.feature aaa : 0 1 : incidental ;
+feature.feature bbb : 0 1 : incidental ;
+feature.feature ccc : 0 1 : incidental ;
+
+rule buildRule ( name : targets ? : properties * )
+{
+ local ttt = [ feature.get-values toolset : $(properties) ] ;
+ local vvv = [ feature.get-values "toolset-testToolset:version" : $(properties) ] ;
+ local aaa = [ feature.get-values aaa : $(properties) ] ;
+ local bbb = [ feature.get-values bbb : $(properties) ] ;
+ local ccc = [ feature.get-values ccc : $(properties) ] ;
+ ECHO "toolset:" /$(ttt)/ "version:" /$(vvv)/ "aaa/bbb/ccc:" /$(aaa)/$(bbb)/$(ccc)/ ;
+ for local description in [ feature.get-values description : $(properties) ]
+ {
+ ECHO "description:" /$(description)/ ;
+ }
+}
+
+notfile testTarget1 : @buildRule : :
+ <toolset>testToolset,<aaa>0:<description>t-a0
+ <toolset>testToolset,<aaa>1:<description>t-a1
+
+ <toolset>testToolset-0,<aaa>0:<description>t0-a0
+ <toolset>testToolset-0,<aaa>1:<description>t0-a1
+ <toolset>testToolset-1,<aaa>0:<description>t1-a0
+ <toolset>testToolset-1,<aaa>1:<description>t1-a1
+
+ <toolset>testToolset,<aaa>0,<bbb>0:<description>t-a0-b0
+ <toolset>testToolset,<aaa>0,<bbb>1:<description>t-a0-b1
+ <toolset>testToolset,<aaa>1,<bbb>0:<description>t-a1-b0
+ <toolset>testToolset,<aaa>1,<bbb>1:<description>t-a1-b1
+
+ <aaa>0,<toolset>testToolset,<bbb>0:<description>a0-t-b0
+ <aaa>0,<toolset>testToolset,<bbb>1:<description>a0-t-b1
+ <aaa>1,<toolset>testToolset,<bbb>0:<description>a1-t-b0
+ <aaa>1,<toolset>testToolset,<bbb>1:<description>a1-t-b1
+
+ <aaa>0,<bbb>0,<toolset>testToolset:<description>a0-b0-t
+ <aaa>0,<bbb>1,<toolset>testToolset:<description>a0-b1-t
+ <aaa>1,<bbb>0,<toolset>testToolset:<description>a1-b0-t
+ <aaa>1,<bbb>1,<toolset>testToolset:<description>a1-b1-t
+
+ <toolset>testToolset-0,<aaa>0,<bbb>0:<description>t0-a0-b0
+ <toolset>testToolset-0,<aaa>0,<bbb>1:<description>t0-a0-b1
+ <toolset>testToolset-0,<aaa>1,<bbb>0:<description>t0-a1-b0
+ <toolset>testToolset-0,<aaa>1,<bbb>1:<description>t0-a1-b1
+ <toolset>testToolset-1,<aaa>0,<bbb>0:<description>t1-a0-b0
+ <toolset>testToolset-1,<aaa>0,<bbb>1:<description>t1-a0-b1
+ <toolset>testToolset-1,<aaa>1,<bbb>0:<description>t1-a1-b0
+ <toolset>testToolset-1,<aaa>1,<bbb>1:<description>t1-a1-b1
+
+ <aaa>0,<toolset>testToolset-1,<bbb>0:<description>a0-t1-b0
+ <aaa>0,<toolset>testToolset-1,<bbb>1:<description>a0-t1-b1
+ <aaa>1,<toolset>testToolset-0,<bbb>0:<description>a1-t0-b0
+ <aaa>1,<toolset>testToolset-0,<bbb>1:<description>a1-t0-b1
+
+ <bbb>0,<aaa>1,<toolset>testToolset-0:<description>b0-a1-t0
+ <bbb>0,<aaa>0,<toolset>testToolset-1:<description>b0-a0-t1
+ <bbb>0,<aaa>1,<toolset>testToolset-1:<description>b0-a1-t1
+ <bbb>1,<aaa>0,<toolset>testToolset-1:<description>b1-a0-t1
+ <bbb>1,<aaa>1,<toolset>testToolset-0:<description>b1-a1-t0
+ <bbb>1,<aaa>1,<toolset>testToolset-1:<description>b1-a1-t1 ;
+""")
+
+ t.run_build_system(["aaa=1", "bbb=1", "ccc=1", "toolset=%s-0" % toolset])
+ t.expect_output_lines("description: /t-a0/" , False)
+ t.expect_output_lines("description: /t-a1/" )
+ t.expect_output_lines("description: /t0-a0/" , False)
+ t.expect_output_lines("description: /t0-a1/" )
+ t.expect_output_lines("description: /t1-a0/" , False)
+ t.expect_output_lines("description: /t1-a1/" , False)
+ t.expect_output_lines("description: /t-a0-b0/" , False)
+ t.expect_output_lines("description: /t-a0-b1/" , False)
+ t.expect_output_lines("description: /t-a1-b0/" , False)
+ t.expect_output_lines("description: /t-a1-b1/" )
+ t.expect_output_lines("description: /a0-t-b0/" , False)
+ t.expect_output_lines("description: /a0-t-b1/" , False)
+ t.expect_output_lines("description: /a1-t-b0/" , False)
+ t.expect_output_lines("description: /a1-t-b1/" )
+ t.expect_output_lines("description: /a0-b0-t/" , False)
+ t.expect_output_lines("description: /a0-b1-t/" , False)
+ t.expect_output_lines("description: /a1-b0-t/" , False)
+ t.expect_output_lines("description: /a1-b1-t/" )
+ t.expect_output_lines("description: /t0-a0-b0/", False)
+ t.expect_output_lines("description: /t0-a0-b1/", False)
+ t.expect_output_lines("description: /t0-a1-b0/", False)
+ t.expect_output_lines("description: /t0-a1-b1/" )
+ t.expect_output_lines("description: /t1-a0-b0/", False)
+ t.expect_output_lines("description: /t1-a0-b1/", False)
+ t.expect_output_lines("description: /t1-a1-b0/", False)
+ t.expect_output_lines("description: /t1-a1-b1/", False)
+ t.expect_output_lines("description: /a0-t1-b0/", False)
+ t.expect_output_lines("description: /a0-t1-b1/", False)
+ t.expect_output_lines("description: /a1-t0-b0/", False)
+ t.expect_output_lines("description: /a1-t0-b1/" )
+ t.expect_output_lines("description: /b0-a1-t0/", False)
+ t.expect_output_lines("description: /b0-a0-t1/", False)
+ t.expect_output_lines("description: /b0-a1-t1/", False)
+ t.expect_output_lines("description: /b1-a0-t1/", False)
+ t.expect_output_lines("description: /b1-a1-t0/" )
+ t.expect_output_lines("description: /b1-a1-t1/", False)
+
+ t.run_build_system(["aaa=1", "bbb=1", "ccc=1", "toolset=%s-1" % toolset])
+ t.expect_output_lines("description: /t-a0/" , False)
+ t.expect_output_lines("description: /t-a1/" )
+ t.expect_output_lines("description: /t0-a0/" , False)
+ t.expect_output_lines("description: /t0-a1/" , False)
+ t.expect_output_lines("description: /t1-a0/" , False)
+ t.expect_output_lines("description: /t1-a1/" )
+ t.expect_output_lines("description: /t-a0-b0/" , False)
+ t.expect_output_lines("description: /t-a0-b1/" , False)
+ t.expect_output_lines("description: /t-a1-b0/" , False)
+ t.expect_output_lines("description: /t-a1-b1/" )
+ t.expect_output_lines("description: /a0-t-b0/" , False)
+ t.expect_output_lines("description: /a0-t-b1/" , False)
+ t.expect_output_lines("description: /a1-t-b0/" , False)
+ t.expect_output_lines("description: /a1-t-b1/" )
+ t.expect_output_lines("description: /a0-b0-t/" , False)
+ t.expect_output_lines("description: /a0-b1-t/" , False)
+ t.expect_output_lines("description: /a1-b0-t/" , False)
+ t.expect_output_lines("description: /a1-b1-t/" )
+ t.expect_output_lines("description: /t0-a0-b0/", False)
+ t.expect_output_lines("description: /t0-a0-b1/", False)
+ t.expect_output_lines("description: /t0-a1-b0/", False)
+ t.expect_output_lines("description: /t0-a1-b1/", False)
+ t.expect_output_lines("description: /t1-a0-b0/", False)
+ t.expect_output_lines("description: /t1-a0-b1/", False)
+ t.expect_output_lines("description: /t1-a1-b0/", False)
+ t.expect_output_lines("description: /t1-a1-b1/" )
+ t.expect_output_lines("description: /a0-t1-b0/", False)
+ t.expect_output_lines("description: /a0-t1-b1/", False)
+ t.expect_output_lines("description: /a1-t0-b0/", False)
+ t.expect_output_lines("description: /a1-t0-b1/", False)
+ t.expect_output_lines("description: /b0-a1-t0/", False)
+ t.expect_output_lines("description: /b0-a0-t1/", False)
+ t.expect_output_lines("description: /b0-a1-t1/", False)
+ t.expect_output_lines("description: /b1-a0-t1/", False)
+ t.expect_output_lines("description: /b1-a1-t0/", False)
+ t.expect_output_lines("description: /b1-a1-t1/" )
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_multiple_conditions()
+test_multiple_conditions_with_toolset_version()
diff --git a/src/boost/tools/build/test/configuration.py b/src/boost/tools/build/test/configuration.py
new file mode 100755
index 000000000..fea326320
--- /dev/null
+++ b/src/boost/tools/build/test/configuration.py
@@ -0,0 +1,397 @@
+#!/usr/bin/python
+
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test Boost Build configuration file handling.
+
+import BoostBuild
+import TestCmd
+
+import os
+import os.path
+import re
+
+
+###############################################################################
+#
+# test_user_configuration()
+# -------------------------
+#
+###############################################################################
+
+def test_user_configuration():
+ """
+ Test Boost Build user configuration handling. Both relative and absolute
+ path handling is tested.
+
+ """
+
+ implicitConfigLoadMessage = \
+ "notice: Loading user-config configuration file: *"
+ explicitConfigLoadMessage = \
+ "notice: Loading explicitly specified user configuration file:"
+ disabledConfigLoadMessage = \
+ "notice: User configuration file loading explicitly disabled."
+ testMessage = "_!_!_!_!_!_!_!_!_ %s _!_!_!_!_!_!_!_!_"
+ toolsetName = "__myDummyToolset__"
+ subdirName = "ASubDirectory"
+ configFileNames = ["ups_lala_1.jam", "ups_lala_2.jam",
+ os.path.join(subdirName, "ups_lala_3.jam")]
+
+ t = BoostBuild.Tester(["toolset=%s" % toolsetName,
+ "--debug-configuration"], pass_toolset=False, use_test_config=False)
+
+ for configFileName in configFileNames:
+ message = "ECHO \"%s\" ;" % testMessage % configFileName
+ # We need to double any backslashes in the message or Jam will
+ # interpret them as escape characters.
+ t.write(configFileName, message.replace("\\", "\\\\"))
+
+ # Prepare a dummy toolset so we do not get errors in case the default one
+ # is not found.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+feature.extend toolset : %s ;
+rule init ( ) { }
+""" % toolsetName)
+
+ # Python version of the same dummy toolset.
+ t.write(toolsetName + ".py", """\
+from b2.build import feature
+feature.extend('toolset', ['%s'])
+def init(): pass
+""" % toolsetName)
+
+ t.write("jamroot.jam", """\
+local test-index = [ MATCH ---test-id---=(.*) : [ modules.peek : ARGV ] ] ;
+ECHO test-index: $(test-index:E=(unknown)) ;
+""")
+
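+    # Helper that runs the build once per numbered test case, temporarily
+    # pointing BOOST_BUILD_USER_CONFIG at 'env' (or unsetting it for None) and
+    # restoring the previous value afterwards.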
+ class LocalTester:
+ def __init__(self, tester):
+ self.__tester = tester
+ self.__test_ids = []
+
+ def __assertionFailure(self, message):
+ BoostBuild.annotation("failure", "Internal test assertion failure "
+ "- %s" % message)
+ self.__tester.fail_test(1)
+
+ def __call__(self, test_id, env, extra_args=None, *args, **kwargs):
+ if env == "" and not canSetEmptyEnvironmentVariable:
+ self.__assertionFailure("Can not set empty environment "
+ "variables on this platform.")
+ self.__registerTestId(str(test_id))
+ if extra_args is None:
+ extra_args = []
+ extra_args.append("---test-id---=%s" % test_id)
+ env_name = "BOOST_BUILD_USER_CONFIG"
+ previous_env = os.environ.get(env_name)
+ _env_set(env_name, env)
+ try:
+ self.__tester.run_build_system(extra_args, *args, **kwargs)
+ finally:
+ _env_set(env_name, previous_env)
+
+ def __registerTestId(self, test_id):
+ if test_id in self.__test_ids:
+ self.__assertionFailure("Multiple test cases encountered "
+ "using the same test id '%s'." % test_id)
+ self.__test_ids.append(test_id)
+
+ test = LocalTester(t)
+
+ test(1, None)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(2, None, ["--user-config="])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(3, None, ['--user-config=""'])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(4, None, ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(5, None, ['--user-config="%s"' % configFileNames[2]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2])
+
+ test(6, None, ['--user-config="%s"' % os.path.abspath(configFileNames[1])])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1])
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(7, None, ['--user-config="%s"' % os.path.abspath(configFileNames[2])])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2])
+
+ if canSetEmptyEnvironmentVariable:
+ test(8, "")
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage, True)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(9, '""')
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(10, configFileNames[1])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1])
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(11, configFileNames[1], ['--user-config=""'])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(12, configFileNames[1], ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ if canSetEmptyEnvironmentVariable:
+ test(13, "", ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(14, '""', ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(15, "invalid", ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# Private interface.
+#
+###############################################################################
+
+def _canSetEmptyEnvironmentVariable():
+ """
+ Unfortunately different OSs (and possibly Python implementations as well)
+ have different interpretations of what it means to set an environment
+ variable to an empty string. Some (e.g. Windows) interpret it as unsetting
+ the variable and some (e.g. AIX or Darwin) actually set it to an empty
+ string.
+
+ """
+ dummyName = "UGNABUNGA_FOO_BAR_BAZ_FEE_FAE_FOU_FAM"
+ original = os.environ.get(dummyName)
+ _env_set(dummyName, "")
+ result = _getExternalEnv(dummyName) == ""
+ _env_set(dummyName, original)
+ return result
+
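+# Illustration of the platform difference probed above (a hedged sketch with a
+# made-up variable name, not part of the original test):
+#
+#   os.environ["SOME_VAR"] = ""
+#   # On e.g. AIX or Darwin a child process spawned afterwards sees SOME_VAR
+#   # set to "". On Windows the same assignment typically leaves SOME_VAR
+#   # unset in the environment seen by child processes.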
+
+def _env_del(name):
+ """
+ Unsets the given environment variable if it is currently set.
+
+ Note that we cannot use os.environ.pop() or os.environ.clear() here
+ since, prior to Python 2.6, these functions did not remove the actual
+ environment variable by calling os.unsetenv().
+
+ """
+ try:
+ del os.environ[name]
+ except KeyError:
+ pass
+
+
+def _env_set(name, value):
+ """
+ Sets the given environment variable value or unsets it, if the value is
+ None.
+
+ """
+ if value is None:
+ _env_del(name)
+ else:
+ os.environ[name] = value
+
+
+def _getExternalEnv(name):
+ toolsetName = "__myDummyToolset__"
+
+ t = BoostBuild.Tester(["toolset=%s" % toolsetName], pass_toolset=False,
+ use_test_config=False)
+ try:
+ # Prepare a dummy toolset so we do not get errors in case the default
+ # one is not found.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+feature.extend toolset : %s ;
+rule init ( ) { }
+""" % toolsetName)
+
+ # Python version of the same dummy toolset.
+ t.write(toolsetName + ".py", """\
+from b2.build import feature
+feature.extend('toolset', ['%s'])
+def init(): pass
+""" % toolsetName)
+
+ t.write("jamroot.jam", """\
+import os ;
+local names = [ MATCH ^---var-name---=(.*) : [ modules.peek : ARGV ] ] ;
+for x in $(names)
+{
+ value = [ os.environ $(x) ] ;
+ ECHO "###" $(x): '$(value)' "###" ;
+}
+""")
+
+ t.run_build_system(["---var-name---=%s" % name])
+ m = re.search("^### %s: '(.*)' ###$" % name, t.stdout(), re.MULTILINE)
+ if m:
+ return m.group(1)
+ finally:
+ t.cleanup()
+
+
+def test_site_config():
+ # Ignore user-config, just in case it depends on the user's site-config.jam
+ t = BoostBuild.Tester(["--user-config="], use_test_config=False,
+ pass_toolset=0)
+ # We can immediately exit after we finish loading the config files
+ t.write("Jamroot", "EXIT Done : 0 ;")
+ t.write("my-site-config.jam", "ECHO Loaded my-site-config ;")
+
+ t.run_build_system(["--site-config=my-site-config.jam"],
+ stdout="Loaded my-site-config\nDone\n")
+
+ t.run_build_system(["--ignore-site-config", "--debug-configuration"])
+ t.expect_output_lines("""\
+notice: Site configuration files will be ignored due to the
+notice: --ignore-site-config command-line option.""")
+
+ t.run_build_system(["--site-config=", "--debug-configuration"])
+ t.expect_output_lines("""\
+notice: Site configuration file loading explicitly disabled.""")
+
+ t.cleanup()
+
+def test_global_config():
+ t = BoostBuild.Tester(use_test_config=False, pass_toolset=0)
+ t.write("my-config.jam", "ECHO Loading my-config ;")
+ t.write("Jamroot", "EXIT Done : 0 ;")
+ t.write("project-config.jam", "ECHO bad ;")
+ t.run_build_system(["--config=my-config.jam", "--debug-configuration"],
+ match=TestCmd.match_re, stdout=
+r"""notice: found boost-build\.jam at .*
+notice: loading B2 from .*
+notice: Searching '.*' for all-config configuration file 'my-config\.jam'\.
+notice: Loading all-config configuration file 'my-config\.jam' from '.*'\.
+Loading my-config
+notice: Regular configuration files will be ignored due
+notice: to the global configuration being loaded\.
+Done
+""")
+ t.run_build_system(["--config=", "--debug-configuration"],
+ match=TestCmd.match_re, stdout=
+r"""notice: found boost-build\.jam at .*
+notice: loading B2 from .*
+notice: Configuration file loading explicitly disabled.
+Done
+""")
+ t.cleanup()
+
+def test_project_config():
+ t = BoostBuild.Tester(["--user-config=", "--site-config="],
+ use_test_config=False, pass_toolset=False)
+ t.write("Jamroot", "EXIT Done : 0 ;")
+ t.write("project-config.jam", "ECHO Loading Root ;")
+ t.write("my-project-config.jam", "ECHO Loading explicit ;")
+ t.write("sub/project-config.jam", "ECHO Loading subdir ;")
+ t.write("sub/Jamfile", "")
+
+ t.run_build_system(stdout="Loading Root\nDone\n")
+ t.run_build_system(subdir="sub", stdout="Loading subdir\nDone\n")
+ t.rm("sub/project-config.jam")
+ t.run_build_system(subdir="sub", stdout="Loading Root\nDone\n")
+ t.run_build_system(["--project-config=my-project-config.jam"],
+ stdout="Loading explicit\nDone\n")
+
+ t.cleanup()
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+canSetEmptyEnvironmentVariable = _canSetEmptyEnvironmentVariable()
+
+test_user_configuration()
+test_site_config()
+test_global_config()
+test_project_config()
diff --git a/src/boost/tools/build/test/configure.py b/src/boost/tools/build/test/configure.py
new file mode 100644
index 000000000..9e47af2e5
--- /dev/null
+++ b/src/boost/tools/build/test/configure.py
@@ -0,0 +1,267 @@
+#!/usr/bin/python
+
+# Copyright 2017 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests configure.check-target-builds and friends
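+#
+# Rough summary (my reading of the checks below, not authoritative
+# documentation): check-target-builds tries to build the named metatarget
+# once, caches the yes/no outcome, and contributes either the "ok" or the
+# "fail" property set to the requesting target; configure.choose likewise
+# picks the property set of the first alternative whose target builds.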
+
+import BoostBuild
+
+def test_check_target_builds():
+ t = BoostBuild.Tester(use_test_config=0)
+ t.write("Jamroot", """
+import configure ;
+obj pass : pass.cpp ;
+obj fail : fail.cpp ;
+explicit pass fail ;
+obj foo : foo.cpp :
+ [ configure.check-target-builds pass : <define>PASS : <define>FAIL ] ;
+obj bar : foo.cpp :
+ [ configure.check-target-builds fail : <define>FAIL : <define>PASS ] ;
+""")
+ t.write("pass.cpp", "void f() {}\n")
+ t.write("fail.cpp", "#error fail.cpp\n")
+ t.write("foo.cpp", """
+#ifndef PASS
+#error PASS not defined
+#endif
+#ifdef FAIL
+#error FAIL is defined
+#endif
+""")
+ t.run_build_system()
+ t.expect_output_lines([
+ " - pass builds : yes",
+ " - fail builds : no"])
+ t.expect_addition("bin/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/$toolset/debug*/foo.obj")
+ t.expect_addition("bin/$toolset/debug*/bar.obj")
+ t.expect_nothing_more()
+
+ # An up-to-date build should use the cache
+ t.run_build_system()
+ t.expect_output_lines([
+ " - pass builds : yes (cached)",
+ " - fail builds : no (cached)"])
+ t.expect_nothing_more()
+
+ # -a should re-run everything, including configuration checks
+ t.run_build_system(["-a"])
+ t.expect_output_lines([
+ " - pass builds : yes",
+ " - fail builds : no"])
+ t.expect_touch("bin/$toolset/debug*/pass.obj")
+ t.expect_touch("bin/$toolset/debug*/foo.obj")
+ t.expect_touch("bin/$toolset/debug*/bar.obj")
+ t.expect_nothing_more()
+
+ # --reconfigure should re-run configuration checks only
+ t.run_build_system(["--reconfigure"])
+ t.expect_output_lines([
+ " - pass builds : yes",
+ " - fail builds : no"])
+ t.expect_touch("bin/$toolset/debug*/pass.obj")
+ t.expect_nothing_more()
+
+ # -a -n should not rebuild configuration checks
+ t.run_build_system(["-a", "-n"])
+ t.expect_output_lines([
+ " - pass builds : yes (cached)",
+ " - fail builds : no (cached)"])
+ t.expect_nothing_more()
+
+ # --clean-all should clear all configuration checks
+ t.run_build_system(["--clean-all"])
+ t.expect_output_lines([
+ " - pass builds : yes (cached)",
+ " - fail builds : no (cached)"])
+ t.expect_removal("bin/$toolset/debug*/pass.obj")
+ t.expect_removal("bin/$toolset/debug*/foo.obj")
+ t.expect_removal("bin/$toolset/debug*/bar.obj")
+ t.expect_nothing_more()
+
+ # If configuration checks are absent, then --clean-all
+ # should create them and then delete them again. This
+ # currently fails because clean cannot remove targets
+ # that were created in the same build.
+ #t.run_build_system(["--clean-all"])
+ #t.expect_output_lines([
+ # " - pass builds : yes",
+ # " - fail builds : no"])
+ #t.expect_nothing_more()
+
+ # Just verify that we're actually in the initial
+ # state here.
+ t.run_build_system()
+ t.expect_output_lines([
+ " - pass builds : yes",
+ " - fail builds : no"])
+ t.expect_addition("bin/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/$toolset/debug*/foo.obj")
+ t.expect_addition("bin/$toolset/debug*/bar.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_choose():
+ t = BoostBuild.Tester(use_test_config=0)
+ t.write("Jamroot", """
+import configure ;
+obj pass : pass.cpp ;
+obj fail : fail.cpp ;
+explicit pass fail ;
+obj foo : foo.cpp :
+ [ configure.choose "which one?" : fail <define>FAIL : pass <define>PASS ] ;
+""")
+ t.write("pass.cpp", "void f() {}\n")
+ t.write("fail.cpp", "#error fail.cpp\n")
+ t.write("foo.cpp", """
+#ifndef PASS
+#error PASS not defined
+#endif
+#ifdef FAIL
+#error FAIL is defined
+#endif
+""")
+ t.run_build_system()
+ t.expect_output_lines([
+ " - which one? : pass"])
+ t.expect_addition("bin/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/$toolset/debug*/foo.obj")
+ t.expect_nothing_more()
+
+ # An up-to-date build should use the cache
+ t.run_build_system()
+ t.expect_output_lines([
+ " - which one? : pass (cached)"])
+ t.expect_nothing_more()
+
+ # -a should re-run everything, including configuration checks
+ t.run_build_system(["-a"])
+ t.expect_output_lines([
+ " - which one? : pass"])
+ t.expect_touch("bin/$toolset/debug*/pass.obj")
+ t.expect_touch("bin/$toolset/debug*/foo.obj")
+ t.expect_nothing_more()
+
+ # --reconfigure should re-run configuration checks only
+ t.run_build_system(["--reconfigure"])
+ t.expect_output_lines([
+ " - which one? : pass"])
+ t.expect_touch("bin/$toolset/debug*/pass.obj")
+ t.expect_nothing_more()
+
+ # -a -n should not rebuild configuration checks
+ t.run_build_system(["-a", "-n"])
+ t.expect_output_lines([
+ " - which one? : pass (cached)"])
+ t.expect_nothing_more()
+
+ # --clean-all should clear all configuration checks
+ t.run_build_system(["--clean-all"])
+ t.expect_output_lines([
+ " - which one? : pass (cached)"])
+ t.expect_removal("bin/$toolset/debug*/pass.obj")
+ t.expect_removal("bin/$toolset/debug*/foo.obj")
+ t.expect_nothing_more()
+
+ # If configuration checks are absent, then --clean-all
+ # should create them and then delete them again. This
+ # currently fails because clean cannot remove targets
+ # that were created in the same build.
+ #t.run_build_system(["--clean-all"])
+ #t.expect_output_lines([
+ # " - which one? : pass"])
+ #t.expect_nothing_more()
+
+ # Just verify that we're actually in the initial
+ # state here.
+ t.run_build_system()
+ t.expect_output_lines([
+ " - which one? : pass"])
+ t.expect_addition("bin/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/$toolset/debug*/foo.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_translation():
+ """Tests scoping for targets, paths, and rules within check-target-builds"""
+ t = BoostBuild.Tester(use_test_config=0)
+ t.write("Jamroot", "")
+ t.write("subdir/Jamfile", """
+import configure ;
+obj pass : pass.cpp ;
+obj fail : fail.cpp ;
+explicit pass fail ;
+obj foo : :
+ [ configure.check-target-builds pass
+ : [ configure.check-target-builds fail : <define>FAIL
+ : <define>PASS <include>include1 <conditional>@c1 ]
+ : <define>FAIL ] ;
+obj bar : :
+ [ configure.choose "which one?" : pass
+ [ configure.choose "Try again?" : pass
+ <define>PASS <include>include1 <conditional>@c1 ] ] ;
+rule c1 ( properties * )
+{
+ return <include>include2 <source>foo.cpp ;
+}
+""")
+ t.write("subdir/include1/a.h", "")
+ t.write("subdir/include2/b.h", "")
+ t.write("subdir/pass.cpp", "void f() {}\n")
+ t.write("subdir/fail.cpp", "#error fail.cpp\n")
+ t.write("subdir/foo.cpp", """
+#include <a.h>
+#include <b.h>
+#ifndef PASS
+#error PASS not defined
+#endif
+#ifdef FAIL
+#error FAIL is defined
+#endif
+""")
+ t.run_build_system(["subdir"])
+ t.expect_output_lines([
+ " - pass builds : yes",
+ " - fail builds : no"])
+ t.expect_addition("subdir/bin/$toolset/debug*/pass.obj")
+ t.expect_addition("subdir/bin/$toolset/debug*/foo.obj")
+ t.expect_addition("subdir/bin/$toolset/debug*/bar.obj")
+ t.expect_nothing_more()
+ t.cleanup()
+
+def test_choose_none():
+ """Tests choose when none of the alternatives match."""
+ t = BoostBuild.Tester(use_test_config=0)
+ t.write("Jamroot", """
+import configure ;
+obj fail : fail.cpp ;
+explicit pass fail ;
+obj foo : foo.cpp :
+ [ configure.choose "which one?" : fail <define>FAIL ] ;
+""")
+ t.write("fail.cpp", "#error fail.cpp\n")
+ t.write("foo.cpp", """
+#ifdef FAIL
+#error FAIL is defined
+#endif
+""")
+ t.run_build_system()
+ t.expect_output_lines([
+ " - which one? : none"])
+
+ # An up-to-date build should use the cache
+ t.run_build_system()
+ t.expect_output_lines([
+ " - which one? : none (cached)"])
+ t.expect_nothing_more()
+ t.cleanup()
+
+test_check_target_builds()
+test_choose()
+test_translation()
+test_choose_none()
diff --git a/src/boost/tools/build/test/copy_time.py b/src/boost/tools/build/test/copy_time.py
new file mode 100755
index 000000000..a036fe62b
--- /dev/null
+++ b/src/boost/tools/build/test/copy_time.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the common.copy rule sets the modification date of the new file
+# to the current time.
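+#
+# Sketch of the intent (my reading of the steps below, not authoritative):
+# test1 compiles slowly thanks to the recursive templates, which puts a
+# noticeable gap between the time test2.obj is built and the time the install
+# target copies it (the install depends on test1); the final "-d1" run then
+# asserts that common.copy is not executed again, i.e. the copy is up to date.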
+
+import BoostBuild
+
+tester = BoostBuild.Tester(use_test_config=False)
+
+tester.write("test1.cpp", """\
+template<bool, int M, class Next>
+struct time_waster {
+ typedef typename time_waster<true, M-1, time_waster>::type type1;
+ typedef typename time_waster<false, M-1, time_waster>::type type2;
+ typedef void type;
+};
+template<bool B, class Next>
+struct time_waster<B, 0, Next> {
+ typedef void type;
+};
+typedef time_waster<true, 10, void>::type type;
+int f() { return 0; }
+""")
+
+tester.write("test2.cpp", """\
+template<bool, int M, class Next>
+struct time_waster {
+ typedef typename time_waster<true, M-1, time_waster>::type type1;
+ typedef typename time_waster<false, M-1, time_waster>::type type2;
+ typedef void type;
+};
+template<bool B, class Next>
+struct time_waster<B, 0, Next> {
+ typedef void type;
+};
+typedef time_waster<true, 10, void>::type type;
+int g() { return 0; }
+""")
+
+tester.write("jamroot.jam", """\
+obj test2 : test2.cpp ;
+obj test1 : test1.cpp : <dependency>test2 ;
+install test2i : test2 : <dependency>test1 ;
+""")
+
+tester.run_build_system()
+tester.expect_addition("bin/$toolset/debug*/test2.obj")
+tester.expect_addition("bin/$toolset/debug*/test1.obj")
+tester.expect_addition("test2i/test2.obj")
+tester.expect_nothing_more()
+
+test2src = tester.read("test2i/test2.obj", binary=True)
+test2dest = tester.read("bin/$toolset/debug*/test2.obj", binary=True)
+if test2src != test2dest:
+ BoostBuild.annotation("failure", "The object file was not copied "
+ "correctly")
+ tester.fail_test(1)
+
+tester.run_build_system(["-d1"])
+tester.expect_output_lines("common.copy*", False)
+tester.expect_nothing_more()
+
+tester.cleanup()
diff --git a/src/boost/tools/build/test/core-language/test.jam b/src/boost/tools/build/test/core-language/test.jam
new file mode 100644
index 000000000..778bd5723
--- /dev/null
+++ b/src/boost/tools/build/test/core-language/test.jam
@@ -0,0 +1,1563 @@
+# Copyright 2011 Steven Watanabe.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tools
+
+passed = 0 ;
+failed = 0 ;
+
+rule show-result ( id : test-result )
+{
+ if ! ( --quiet in $(ARGV) )
+ {
+ ECHO $(test-result): $(id) ;
+ }
+ $(test-result) = [ CALC $($(test-result)) + 1 ] ;
+}
+
+rule check-equal ( id : values * : expected * )
+{
+ local test-result ;
+ if x$(values) = x$(expected)
+ {
+ test-result = passed ;
+ }
+ else
+ {
+ ECHO error: "[" $(values) "] != [" $(expected) "]" ;
+ test-result = failed ;
+ }
+ show-result $(id) : $(test-result) ;
+}
+
+rule mark-order ( id : result * )
+{
+ order += $(id) ;
+ return $(result) ;
+}
+
+rule check-order ( id : expected * )
+{
+ check-equal $(id) : $(order) : $(expected) ;
+ order = ;
+}
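+
+# Note on the helpers above (descriptive only): check-equal compares an actual
+# list against an expected list and bumps the passed/failed counters through
+# show-result, while mark-order appends its id to the global 'order' list so
+# that check-order can verify in what order subexpressions were evaluated.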
+
+# Check variable expansion
+
+{
+
+local v1 = 1 2 3 ;
+local v2 = 4 5 6 ;
+local v3 = 0 1 2 3 4 5 6 7 8 9 10 ;
+local g = g1 g2 ;
+local v4 = String/With/Mixed/Case ;
+local v5 = path\\with\\backslashes ;
+local v6 = <grist>generic/path.txt(member.txt) ;
+local v7 = <Grist1>Dir1/File1.cpp(M1.c) <Grist2>Dir2/File2.hpp(M2.c) ;
+local v8 = <Grist3>Dir3/File3.c(M3.c) <Grist4>Dir4/File4.h(M4.c) ;
+local select1 = GU BL DBST ;
+local case1 = L U ;
+local vars = 7 8 ;
+local sub = 2 1 ;
+local p0 = name ;
+local p1 = dir/name ;
+local p2 = dir/sub/name ;
+local j1 = , - ;
+
+check-equal var-product : $(v1)$(v2) : 14 15 16 24 25 26 34 35 36 ;
+
+check-equal var-set-grist : $(v1:G=grist) : <grist>1 <grist>2 <grist>3 ;
+check-equal var-set-grist-multi : $(v1:G=$(g)) : <g1>1 <g1>2 <g1>3 <g2>1 <g2>2 <g2>3 ;
+
+check-equal var-lower : $(v4:L) : string/with/mixed/case ;
+check-equal var-upper : $(v4:U) : STRING/WITH/MIXED/CASE ;
+check-equal var-LU : $(v4:LU) : STRING/WITH/MIXED/CASE ;
+check-equal var-slashes : $(v5:T) : path/with/backslashes ;
+check-equal var-grist : $(v6:G) : <grist> ;
+check-equal var-grist-none : $(v1:G) : "" "" "" ;
+check-equal var-base : $(v6:B) : path ;
+check-equal var-suffix : $(v6:S) : .txt ;
+check-equal var-dir : $(v6:D) : generic ;
+check-equal var-member : $(v6:M) : (member.txt) ;
+check-equal var-multi : $(v6:$(select1)) : <GRIST> path generic/path.txt ;
+
+check-equal var-join-0 : $(:J=,) : ;
+check-equal var-join-1 : $(p0:J=,) : name ;
+check-equal var-join-3 : $(v1:J=,) : 1,2,3 ;
+check-equal var-set-grist-join : $(v1:G=grist:J=,) : <grist>1,<grist>2,<grist>3 ;
+# Behavior change: in the past, a J= modifier caused only the last element of
+# the other modifiers to take effect.
+check-equal var-set-grist-multi-join : $(v1:G=$(g):J=,) : <g1>1,<g1>2,<g1>3 <g2>1,<g2>2,<g2>3 ;
+check-equal var-set-grist-multi-join-multi : $(v1:G=$(g):J=$(j1)) : <g1>1,<g1>2,<g1>3 <g1>1-<g1>2-<g1>3 <g2>1,<g2>2,<g2>3 <g2>1-<g2>2-<g2>3 ;
+
+check-equal var-D=-0 : name : $(p0:D=) ;
+check-equal var-D=-1 : name : $(p1:D=) ;
+check-equal var-D=-2 : name : $(p2:D=) ;
+check-equal var-D-0 : "" : $(p0:D) ;
+check-equal var-D-1 : dir : $(p1:D) ;
+check-equal var-D-2 : dir/sub : $(p2:D) ;
+check-equal var-S-1 : "" : $(p0:S) ;
+check-equal var-no-at-file-0 : ($(p0)) : [ MATCH ^@(.*) : "@($(p0))" ] ;
+check-equal var-no-at-file-1 : ($(p0)) : [ MATCH @(.*) : "--@($(p0))" ] ;
+
+if $(OS) = CYGWIN
+{
+ local cyg-root = $(:WE=/) ;
+ local cyg1 = /cygdrive/c/path1.txt ;
+ check-equal cygwin-to-cygdrive : $(cyg1:W) : C:\\path1.txt ;
+ local cyg2 = /bin/bash ;
+ check-equal cygwin-to-windows : $(cyg2:W) : $(cyg-root)\\bin\\bash ;
+ check-equal cygwin-combine-WT : $(cyg2:WT) : $(cyg-root)\\bin\\bash ;
+
+ local cyg3 = /home/boost/devel/trunk/bin.v2/ ; # exactly 31 characters
+ local win3 = $(cyg-root)\\home\\boost\\devel\\trunk\\bin.v2\\ ;
+ # This is the easiest way to demonstrate a bug
+ # that used to cause undefined behavior. Longer paths
+ # resulted in a use-after-free error, which happened
+ # to work most of the time.
+ check-equal cygwin-long-WU : $(cyg3:WU) : $(win3:U) ;
+
+ local cyg-grist = <grist>$(cyg1) ;
+ check-equal cygwin-grist : $(cyg-grist:W) : <grist>\\cygdrive\\c\\path1.txt ;
+
+ check-equal cygwin-WU : $(cyg2:WU) : $(cyg-root:U)\\BIN\\BASH ;
+ # Behavior change: L is now consistently applied after W; it used to affect
+ # everything except the drive letter.
+ check-equal cygwin-WL : $(cyg2:WL) : $(cyg-root:L)\\bin\\bash ;
+}
+
+# behavior change
+check-equal var-test1 : $(v7[2]:G:L) : <grist2> ;
+
+check-equal var-multi-product-smm : $(v$(vars)[$(sub)]:G=$(g):$(case1)) :
+ <g1>dir2/file2.hpp(m2.c) <G1>DIR2/FILE2.HPP(M2.C)
+ <g2>dir2/file2.hpp(m2.c) <G2>DIR2/FILE2.HPP(M2.C)
+ <g1>dir1/file1.cpp(m1.c) <G1>DIR1/FILE1.CPP(M1.C)
+ <g2>dir1/file1.cpp(m1.c) <G2>DIR1/FILE1.CPP(M1.C)
+ <g1>dir4/file4.h(m4.c) <G1>DIR4/FILE4.H(M4.C)
+ <g2>dir4/file4.h(m4.c) <G2>DIR4/FILE4.H(M4.C)
+ <g1>dir3/file3.c(m3.c) <G1>DIR3/FILE3.C(M3.C)
+ <g2>dir3/file3.c(m3.c) <G2>DIR3/FILE3.C(M3.C)
+;
+check-equal var-nopathmods : $(:E=//) : // ;
+
+# showcases all the idiosyncrasies of indexing
+# key: h = high, l = low, p = positive, m = minus, e = end.
+
+check-equal var-subscript-one-p : $(v3[3]) : 2 ;
+check-equal var-subscript-one-m : $(v3[-3]) : 8 ;
+check-equal var-subscript-one-0 : $(v3[0]) : 0 ;
+check-equal var-subscript-one-h : $(v3[20]) : ;
+check-equal var-subscript-one-l : $(v3[-20]) : 0 ;
+check-equal var-subscript-range-pp : $(v3[2-4]) : 1 2 3 ;
+check-equal var-subscript-range-pm : $(v3[2--3]) : 1 2 3 4 5 6 7 8 ;
+check-equal var-subscript-range-pe : $(v3[2-]) : 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-ph : $(v3[2-20]) : 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-pl : $(v3[2--20]) : ;
+check-equal var-subscript-range-mp : $(v3[-3-10]) : 8 9 ;
+check-equal var-subscript-range-mm : $(v3[-4--2]) : 7 8 9 ;
+check-equal var-subscript-range-me : $(v3[-4-]) : 7 8 9 10 ;
+check-equal var-subscript-range-mh : $(v3[-4-20]) : 7 8 9 10 ;
+check-equal var-subscript-range-ml : $(v3[-4--20]) : ;
+check-equal var-subscript-range-0p : $(v3[0-2]) : 0 1 2 ;
+check-equal var-subscript-range-0m : $(v3[0--4]) : 0 1 2 3 4 5 6 7 8 ;
+check-equal var-subscript-range-0e : $(v3[0-]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-0h : $(v3[0-20]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-0l : $(v3[0--20]) : ;
+check-equal var-subscript-range-hp : $(v3[20-4]) : ;
+check-equal var-subscript-range-hm : $(v3[20--4]) : ;
+check-equal var-subscript-range-he : $(v3[20-]) : ;
+check-equal var-subscript-range-hh : $(v3[20-20]) : ;
+check-equal var-subscript-range-hl : $(v3[20--20]) : ;
+check-equal var-subscript-range-lp : $(v3[-13-4]) : 0 1 2 3 4 5 ;
+check-equal var-subscript-range-lm : $(v3[-13--4]) : 0 1 2 3 4 5 6 7 8 9 ;
+check-equal var-subscript-range-le : $(v3[-13-]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-lh : $(v3[-13-20]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-ll : $(v3[-13--13]) : 0 ;
+check-equal var-subscript-range-empty : $(v3[4-3]) : ;
+
+}
+
+# Check rules
+
+{
+
+rule test-rule
+{
+ return $(<) - $(>) - $(1) - $(2) - $(3) - $(4) - $(5) - $(6) - $(7) - $(8) - $(9) - $(10) - $(11) - $(12) - $(13) - $(14) - $(15) - $(16) - $(17) - $(18) - $(19) ;
+}
+
+check-equal rule-arguments-numbered :
+ [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
+ a1 - a2 - a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
+
+rule test-rule
+{
+ return $(<:L) - $(>:L) - $(1:L) - $(2:L) - $(3:L) - $(4:L) - $(5:L) - $(6:L) - $(7:L) - $(8:L) - $(9:L) - $(10:L) - $(11:L) - $(12:L) - $(13:L) - $(14:L) - $(15:L) - $(16:L) - $(17:L) - $(18:L) - $(19:L) ;
+}
+
+# behavior change
+check-equal rule-arguments-numbered-lower :
+ [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
+ a1 - a2 - a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
+
+
+rule test-rule ( p1 : p2 : p3 : p4 : p5 : p6 : p7 : p8 : p9 :
+ p10 : p11 : p12 : p13 : p14 : p15 : p16 : p17 : p18 : p19 )
+
+
+{
+ return $(p1) - $(p2) - $(p3) - $(p4) - $(p5) - $(p6) - $(p7) - $(p8) - $(p9) - $(p10) - $(p11) - $(p12) - $(p13) - $(p14) - $(p15) - $(p16) - $(p17) - $(p18) - $(p19) ;
+}
+
+check-equal rule-arguments-named :
+ [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
+ a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
+
+#
+# test rule indirection
+#
+rule select ( n list * )
+{
+ return $(list[$(n)]) ;
+}
+
+rule indirect1 ( rule + : args * )
+{
+ return [ $(rule) $(args) ] ;
+}
+
+check-equal rule-indirect-1 : [ indirect1 select 1 : a b c d e ] : a ;
+check-equal rule-indirect-2 : [ indirect1 select 2 : a b c d e ] : b ;
+
+x = reset ;
+rule reset-x ( new-value )
+{
+ x = $(new-value) ;
+}
+$(x)-x bar ; # invokes reset-x...
+check-equal rule-reset : $(x) : bar ; # which changes x
+
+rule bar-x ( new-value )
+{
+ mark-order r3 ;
+}
+
+# The arguments are evaluated in forward order
+# before the rule name
+$(x)-x [ mark-order r1 : [ reset-x reset ] ] : [ mark-order r2 ] ;
+check-order rule-order : r1 r2 ;
+
+# Cases that look like member calls
+rule looks.like-a-member ( args * )
+{
+ return $(args) ;
+}
+
+rule call-non-member ( rule + )
+{
+ return [ $(rule).like-a-member ] ;
+}
+
+rule call-non-member-with-args ( rule + )
+{
+ return [ $(rule).like-a-member a2 ] ;
+}
+
+check-equal rule-non-member : [ call-non-member looks ] : ;
+#check-equal rule-non-member-a1 : [ call-non-member looks a1 ] : looks.a1 ;
+check-equal rule-non-member-args : [ call-non-member-with-args looks ] : a2 ;
+#check-equal rule-non-member-args-a1 : [ call-non-member-with-args looks a1 ] : looks.a1 a2 ;
+
+}
+
+# Check append
+
+{
+
+local value = [ mark-order r1 : v1 v2 ] [ mark-order r2 : v3 v4 ] ;
+check-equal append : $(value) : v1 v2 v3 v4 ;
+check-order append-order : r1 r2 ;
+
+}
+
+# Check foreach
+
+{
+
+local v1 = 1 2 3 ;
+local x = old ;
+local result ;
+
+for local x in $(v1)
+{
+ result += $(x) + ;
+}
+
+check-equal foreach-local-item : $(result) : 1 + 2 + 3 + ;
+check-equal foreach-local : $(x) : old ;
+
+result = ;
+
+for x in $(v1)
+{
+ result += $(x) + ;
+}
+
+check-equal foreach-nonlocal-item : $(result) : 1 + 2 + 3 + ;
+check-equal foreach-nonlocal : $(x) : 3 ;
+
+rule call-foreach ( values * )
+{
+ for local x in $(values)
+ {
+ return $(x) ;
+ }
+}
+
+check-equal foreach-result : [ call-foreach 1 2 3 ] : 1 ;
+
+result = ;
+local varname = x ;
+x = old ;
+
+for local $(varname) in $(v1)
+{
+ result += $(x) + ;
+}
+
+check-equal foreach-no-expand : $(result) : old + old + old + ;
+
+result = ;
+
+for local v1 in $(v1)
+{
+ result += $(v1) + ;
+}
+
+check-equal foreach-order : $(result) : 1 + 2 + 3 + ;
+
+}
+
+# Check if
+
+{
+
+if true
+{
+ mark-order r1 ;
+}
+
+check-order if-true : r1 ;
+
+if $(false)
+{
+ mark-order r1 ;
+}
+
+check-order if-false : ;
+
+if true
+{
+ mark-order r1 ;
+}
+else
+{
+ mark-order r2 ;
+}
+
+check-order if-else-true : r1 ;
+
+if $(false)
+{
+ mark-order r1 ;
+}
+else
+{
+ mark-order r2 ;
+}
+
+check-order if-else-false : r2 ;
+
+rule test-rule
+{
+ if true
+ {
+ return result ;
+ }
+}
+
+check-equal if-true-result : [ test-rule ] : result ;
+
+rule test-rule
+{
+ local idx = 1 2 ;
+ local values = true ;
+ while $(idx)
+ {
+ local v = $(values[$(idx[1])]) ;
+ idx = $(idx[2-]) ;
+ if $(v)
+ {
+ return result ;
+ }
+ }
+}
+
+check-equal if-false-result : [ test-rule ] : result ;
+
+rule test-rule
+{
+ if true
+ {
+ return r1 ;
+ }
+ else
+ {
+ return r2 ;
+ }
+}
+
+check-equal if-else-true-result : [ test-rule ] : r1 ;
+
+rule test-rule
+{
+ if $(false)
+ {
+ return r1 ;
+ }
+ else
+ {
+ return r2 ;
+ }
+}
+
+check-equal if-else-false-result : [ test-rule ] : r2 ;
+
+}
+
+# Check the evaluation of conditions
+
+{
+
+local test-result ;
+local v1 = "" "" "" ;
+local v2 = ;
+local v3 = a b c ;
+local v4 = a b c d ;
+local v5 = a b d ;
+local v6 = "" "" "" d ;
+
+rule test-comparison ( id : equal less greater )
+{
+ check-equal $(id)-empty-1 : [ eval-$(id) $(v1) : $(v2) ] : $(equal) ;
+ check-equal $(id)-empty-2 : [ eval-$(id) $(v1) : $(v2) ] : $(equal) ;
+ check-equal $(id)-equal : [ eval-$(id) $(v3) : $(v3) ] : $(equal) ;
+ check-equal $(id)-less-1 : [ eval-$(id) $(v3) : $(v4) ] : $(less) ;
+ check-equal $(id)-less-2 : [ eval-$(id) $(v3) : $(v5) ] : $(less) ;
+ check-equal $(id)-less-3 : [ eval-$(id) $(v4) : $(v5) ] : $(less) ;
+ check-equal $(id)-greater-1 : [ eval-$(id) $(v4) : $(v3) ] : $(greater) ;
+ check-equal $(id)-greater-2 : [ eval-$(id) $(v5) : $(v3) ] : $(greater) ;
+ check-equal $(id)-greater-3 : [ eval-$(id) $(v5) : $(v4) ] : $(greater) ;
+}
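+
+# Descriptive note (not part of the original test): test-comparison feeds the
+# given eval-<id> rule pairs of empty, equal, lexicographically smaller and
+# larger lists and checks the three expected truth values supplied as the
+# equal/less/greater arguments.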
+
+rule eval-lt ( lhs * : rhs * )
+{
+ if $(lhs) < $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison lt : false true false ;
+
+rule eval-gt ( lhs * : rhs * )
+{
+ if $(lhs) > $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison gt : false false true ;
+
+rule eval-le ( lhs * : rhs * )
+{
+ if $(lhs) <= $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison le : true true false ;
+
+rule eval-ge ( lhs * : rhs * )
+{
+ if $(lhs) >= $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison ge : true false true ;
+
+rule eval-eq ( lhs * : rhs * )
+{
+ if $(lhs) = $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison eq : true false false ;
+
+rule eval-ne ( lhs * : rhs * )
+{
+ if $(lhs) != $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison ne : false true true ;
+
+rule eval-not-lt ( lhs * : rhs * )
+{
+ if ! ( $(lhs) < $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-lt : true false true ;
+
+rule eval-not-gt ( lhs * : rhs * )
+{
+ if ! ( $(lhs) > $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-gt : true true false ;
+
+rule eval-not-le ( lhs * : rhs * )
+{
+ if ! ( $(lhs) <= $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-le : false false true ;
+
+rule eval-not-ge ( lhs * : rhs * )
+{
+ if ! ( $(lhs) >= $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-ge : false true false ;
+
+rule eval-not-eq ( lhs * : rhs * )
+{
+ if ! ( $(lhs) = $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-eq : false true true ;
+
+rule eval-not-ne ( lhs * : rhs * )
+{
+ if ! ( $(lhs) != $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-ne : true false false ;
+
+local v7 = a a a a a a ;
+local v8 = c b ;
+local v9 = c d b ;
+local v10 = c a b c c b a a a ;
+
+rule test-in ( id : subset not-subset )
+{
+ check-equal $(id)-0-0 : [ eval-$(id) $(v2) : $(v2) ] : $(subset) ;
+ check-equal $(id)-0-empty : [ eval-$(id) $(v2) : $(v1) ] : $(subset) ;
+ check-equal $(id)-empty-0 : [ eval-$(id) $(v1) : $(v2) ] : $(not-subset) ;
+ check-equal $(id)-equal : [ eval-$(id) $(v3) : $(v3) ] : $(subset) ;
+ check-equal $(id)-simple : [ eval-$(id) $(v3) : $(v4) ] : $(subset) ;
+ check-equal $(id)-extra : [ eval-$(id) $(v4) : $(v3) ] : $(not-subset) ;
+ check-equal $(id)-multiple : [ eval-$(id) $(v7) : $(v3) ] : $(subset) ;
+ check-equal $(id)-unordered : [ eval-$(id) $(v8) : $(v3) ] : $(subset) ;
+ check-equal $(id)-unordered-extra : [ eval-$(id) $(v9) : $(v3) ] : $(not-subset) ;
+ check-equal $(id)-unordered-multiple : [ eval-$(id) $(v10) : $(v3) ] : $(subset) ;
+}
+
+rule eval-in ( lhs * : rhs * )
+{
+ if $(lhs) in $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-in "in" : true false ;
+
+rule eval-not-in ( lhs * : rhs * )
+{
+ if ! ( $(lhs) in $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-in not-in : false true ;
+
+rule test-truth-table ( id : tt tf ft ff )
+{
+ check-equal $(id)-tt : [ eval-$(id) 1 : 1 ] : $(tt) ;
+ check-equal $(id)-tf : [ eval-$(id) 1 : ] : $(tf) ;
+ check-equal $(id)-ft : [ eval-$(id) : 1 ] : $(ft) ;
+ check-equal $(id)-ff : [ eval-$(id) : ] : $(ff) ;
+}
+
+rule eval-and ( lhs ? : rhs ? )
+{
+ if $(lhs) && $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table and : true false false false ;
+
+rule eval-or ( lhs ? : rhs ? )
+{
+ if $(lhs) || $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table or : true true true false ;
+
+rule eval-not-and ( lhs ? : rhs ? )
+{
+ if ! ( $(lhs) && $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table not-and : false true true true ;
+
+rule eval-not-or ( lhs ? : rhs ? )
+{
+ if ! ( $(lhs) || $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table not-or : false false false true ;
+
+if [ mark-order r1 : test1 ] < [ mark-order r2 : test2 ] { }
+check-order lt-order : r1 r2 ;
+if [ mark-order r1 : test1 ] > [ mark-order r2 : test2 ] { }
+check-order gt-order : r1 r2 ;
+if [ mark-order r1 : test1 ] <= [ mark-order r2 : test2 ] { }
+check-order le-order : r1 r2 ;
+if [ mark-order r1 : test1 ] >= [ mark-order r2 : test2 ] { }
+check-order ge-order : r1 r2 ;
+if [ mark-order r1 : test1 ] = [ mark-order r2 : test2 ] { }
+check-order eq-order : r1 r2 ;
+if [ mark-order r1 : test1 ] != [ mark-order r2 : test2 ] { }
+check-order ne-order : r1 r2 ;
+if [ mark-order r1 : test1 ] in [ mark-order r2 : test2 ] { }
+check-order in-order : r1 r2 ;
+
+if [ mark-order r1 : test1 ] && [ mark-order r2 : test2 ] { }
+check-order and-order : r1 r2 ;
+if [ mark-order r1 ] && [ mark-order r2 : test2 ] { }
+check-order and-order-short-circuit : r1 ;
+
+if [ mark-order r1 ] || [ mark-order r2 : test2 ] { }
+check-order or-order : r1 r2 ;
+if [ mark-order r1 : test1 ] || [ mark-order r2 : test2 ] { }
+check-order or-order-short-circuit : r1 ;
+
+}
+
+# Check include
+
+{
+#FIXME:
+# plain include
+# include in module
+# include returns an empty list
+# rule arguments are available inside include
+}
+
+# Check local
+
+{
+
+local v1 = a b c ;
+local v2 = f g h ;
+
+{
+ local v1 ;
+ check-equal local-no-init : $(v1) : ;
+}
+
+check-equal local-restore : $(v1) : a b c ;
+
+{
+ local v1 = d e f ;
+ check-equal local-init : $(v1) : d e f ;
+}
+
+check-equal local-restore-init : $(v1) : a b c ;
+
+{
+ local v1 v2 ;
+ check-equal local-multiple-no-init : $(v1) - $(v2) : - ;
+}
+
+check-equal local-multiple-restore : $(v1) - $(v2) : a b c - f g h ;
+
+{
+ local v1 v2 = d e f ;
+ check-equal local-multiple-init : $(v1) - $(v2) : d e f - d e f ;
+}
+
+{
+ local v1 v1 = d e f ;
+ check-equal local-duplicate : $(v1) - $(v1) : d e f - d e f ;
+}
+
+check-equal local-duplicate-restore : $(v1) : a b c ;
+
+{
+ local [ mark-order r1 : v1 ] = [ mark-order r2 : d e f ] ;
+ check-order local-order : r1 r2 ;
+}
+
+}
+
+# Check module
+
+{
+ local var1 = root-module-var ;
+ module my_module
+ {
+ var1 = module-var ;
+ rule get ( )
+ {
+ return $(var1) ;
+ }
+ local rule not_really ( ) { return nothing ; }
+ }
+
+ check-equal module-var-not-root : $(var1) : root-module-var ;
+
+ check-equal module-rulenames : [ RULENAMES my_module ] : get ;
+
+ IMPORT_MODULE my_module ;
+ check-equal module-rule-import-module : [ my_module.get ] : module-var ;
+
+ IMPORT my_module : get : : module-get ;
+ check-equal module-rule-import : [ module-get ] : module-var ;
+
+ IMPORT my_module : get : : module-get : LOCALIZE ;
+ check-equal module-rule-import-localize : [ module-get ] : root-module-var ;
+
+}
+
+# Check class
+{
+#FIXME:
+# ...
+}
+
+# Check on
+
+{
+
+local target1 = test-on-target1 ;
+local target2 = test-on-target2 ;
+local targets = $(target1) $(target2) ;
+local v1 v2 v3 ;
+
+VAR on $(target1) = value1 ;
+V2 on $(target2) = value2 ;
+
+check-equal on-return : [ on $(target1) return $(VAR) ] : value1 ;
+
+rule test-rule
+{
+ return $(VAR) ;
+}
+
+check-equal on-rule : [ on $(target1) test-rule ] : value1 ;
+
+check-equal on-multiple : [ on $(targets) return $(V2) ] : ;
+
+rule test-rule
+{
+ on $(target1)
+ {
+ return $(VAR) ;
+ }
+}
+
+check-equal on-block : [ test-rule ] : value1 ;
+
+# FIXME: crazy implementation artifacts:
+
+v1 on test-on-target3 = x1 ;
+on test-on-target3
+{
+ v1 on test-on-target3 += x1 ;
+ v1 = y1 ;
+ v2 on test-on-target3 += x2 ;
+ v2 = y2 ;
+ v3 = y3 ;
+}
+
+check-equal on-swap-old1 : $(v1) : x1 ;
+check-equal on-swap-old2 : [ on test-on-target3 return $(v1) ] : y1 ;
+check-equal on-swap-new1 : $(v2) : x2 ;
+check-equal on-swap-new2 : [ on test-on-target3 return $(v2) ] : y2 ;
+check-equal on-no-swap : $(v3) : y3 ;
+
+}
+
+# Check rule
+
+{
+#FIXME:
+# argument order
+# expand rule name
+}
+
+# Check rules
+
+{
+#FIXME:
+}
+
+# Check set
+
+{
+local v1 ;
+local v2 ;
+local v3 ;
+local vars = v1 v2 v3 ;
+
+v1 = x1 ;
+check-equal set-set-empty : $(v1) : x1 ;
+v2 += x2 ;
+check-equal set-append-empty : $(v2) : x2 ;
+v3 ?= x3 ;
+check-equal set-default-empty : $(v3) : x3 ;
+
+v1 = y1 ;
+check-equal set-set-non-empty : $(v1) : y1 ;
+v2 += y2 ;
+check-equal set-append-non-empty : $(v2) : x2 y2 ;
+v3 ?= y3 ;
+check-equal set-default-non-empty : $(v3) : x3 ;
+
+v1 = ;
+v2 = ;
+v3 = ;
+$(vars) = z ;
+check-equal set-set-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = ;
+v2 = ;
+v3 = ;
+$(vars) += z ;
+check-equal set-append-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = ;
+v2 = ;
+v3 = ;
+$(vars) ?= z ;
+check-equal set-default-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = x1 ;
+v2 = x2 ;
+v3 = x3 ;
+$(vars) = z ;
+check-equal set-set-non-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = x1 ;
+v2 = x2 ;
+v3 = x3 ;
+$(vars) += z ;
+check-equal set-append-non-empty-group : $(v1) - $(v2) - $(v3) : x1 z - x2 z - x3 z ;
+
+v1 = x1 ;
+v2 = x2 ;
+v3 = x3 ;
+$(vars) ?= z ;
+check-equal set-default-non-empty-group : $(v1) - $(v2) - $(v3) : x1 - x2 - x3 ;
+
+v1 = x1 ;
+v2 = ;
+v3 = x3 ;
+$(vars) = z ;
+check-equal set-set-mixed-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = x1 ;
+v2 = ;
+v3 = x3 ;
+$(vars) += z ;
+check-equal set-append-mixed-group : $(v1) - $(v2) - $(v3) : x1 z - z - x3 z ;
+
+v1 = x1 ;
+v2 = ;
+v3 = x3 ;
+$(vars) ?= z ;
+check-equal set-default-mixed-group : $(v1) - $(v2) - $(v3) : x1 - z - x3 ;
+
+vars = v1 v1 ;
+
+v1 = ;
+$(vars) = z ;
+check-equal set-set-duplicate-empty : $(v1) : z ;
+v1 = ;
+$(vars) += z ;
+check-equal set-append-duplicate-empty : $(v1) : z z ;
+v1 = ;
+$(vars) ?= z ;
+check-equal set-default-duplicate-empty : $(v1) : z ;
+
+v1 = x1 ;
+$(vars) = z ;
+check-equal set-set-duplicate-non-empty : $(v1) : z ;
+v1 = x1 ;
+$(vars) += z ;
+check-equal set-append-duplicate-non-empty : $(v1) : x1 z z ;
+v1 = x1 ;
+$(vars) ?= z ;
+check-equal set-default-duplicate-non-empty : $(v1) : x1 ;
+
+rule test-rule { v1 = x1 ; }
+check-equal set-set-result : [ test-rule ] : x1 ;
+rule test-rule { v1 += x1 ; }
+check-equal set-append-result : [ test-rule ] : x1 ;
+rule test-rule { v1 ?= x1 ; }
+check-equal set-default-result : [ test-rule ] : x1 ;
+
+[ mark-order r1 ] = [ mark-order r2 ] ;
+check-order set-set-order : r1 r2 ;
+[ mark-order r1 ] += [ mark-order r2 ] ;
+check-order set-append-order : r1 r2 ;
+[ mark-order r1 ] ?= [ mark-order r2 ] ;
+check-order set-default-order : r1 r2 ;
+
+}
+
+# Check setcomp
+
+{
+#FIXME
+# Expand arguments
+# Don't expand name
+}
+
+# Check setexec
+
+{
+#FIXME:
+# Don't expand name
+# Evaluate bindlist
+}
+
+# Check settings ;
+
+{
+
+local target1 = test-settings-target1 ;
+local target2 = test-settings-target2 ;
+local target3 = test-settings-target3 ;
+local targets = $(target2) $(target3) ;
+
+local vars = v1 v2 v3 ;
+
+v1 on $(target1) = x1 ;
+check-equal settings-set-empty : [ on $(target1) return $(v1) ] : x1 ;
+v2 on $(target1) += x2 ;
+check-equal settings-append-empty : [ on $(target1) return $(v2) ] : x2 ;
+v3 on $(target1) ?= x3 ;
+check-equal settings-default-empty : [ on $(target1) return $(v3) ] : x3 ;
+
+v1 on $(target1) = y1 ;
+check-equal settings-set-non-empty : [ on $(target1) return $(v1) ] : y1 ;
+v2 on $(target1) += y2 ;
+check-equal settings-append-non-empty : [ on $(target1) return $(v2) ] : x2 y2 ;
+v3 on $(target1) ?= y3 ;
+check-equal settings-default-non-empty : [ on $(target1) return $(v3) ] : x3 ;
+
+$(vars) on setting-target2 = z ;
+check-equal settings-set-empty-group : [ on setting-target2 return $(v1) ] - [ on setting-target2 return $(v2) ] - [ on setting-target2 return $(v3) ] : z - z - z ;
+
+$(vars) on setting-target3 += z ;
+check-equal settings-append-empty-group : [ on setting-target3 return $(v1) ] - [ on setting-target3 return $(v2) ] - [ on setting-target3 return $(v3) ] : z - z - z ;
+
+$(vars) on setting-target4 ?= z ;
+check-equal settings-default-empty-group : [ on setting-target4 return $(v1) ] - [ on setting-target4 return $(v2) ] - [ on setting-target4 return $(v3) ] : z - z - z ;
+
+v1 on $(target1) = x1 ;
+v2 on $(target1) = x2 ;
+v3 on $(target1) = x3 ;
+$(vars) on $(target1) = z ;
+check-equal settings-set-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : z - z - z ;
+
+v1 on $(target1) = x1 ;
+v2 on $(target1) = x2 ;
+v3 on $(target1) = x3 ;
+$(vars) on $(target1) += z ;
+check-equal settings-append-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : x1 z - x2 z - x3 z ;
+
+v1 on $(target1) = x1 ;
+v2 on $(target1) = x2 ;
+v3 on $(target1) = x3 ;
+$(vars) on $(target1) ?= z ;
+check-equal settings-default-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : x1 - x2 - x3 ;
+
+v1 on setting-target5 = x1 ;
+v3 on setting-target5 = x3 ;
+$(vars) on setting-target5 = z ;
+check-equal settings-set-mixed-group : [ on setting-target5 return $(v1) ] - [ on setting-target5 return $(v2) ] - [ on setting-target5 return $(v3) ] : z - z - z ;
+
+v1 on setting-target6 = x1 ;
+v3 on setting-target6 = x3 ;
+$(vars) on setting-target6 += z ;
+check-equal settings-append-mixed-group : [ on setting-target6 return $(v1) ] - [ on setting-target6 return $(v2) ] - [ on setting-target6 return $(v3) ] : x1 z - z - x3 z ;
+
+v1 on setting-target7 = x1 ;
+v3 on setting-target7 = x3 ;
+$(vars) on setting-target7 ?= z ;
+check-equal settings-default-mixed-group : [ on setting-target7 return $(v1) ] - [ on setting-target7 return $(v2) ] - [ on setting-target7 return $(v3) ] : x1 - z - x3 ;
+
+vars = v1 v1 ;
+
+$(vars) on setting-target8 = z ;
+check-equal settings-set-duplicate-empty : [ on setting-target8 return $(v1) ] : z ;
+$(vars) on setting-target9 += z ;
+check-equal settings-append-duplicate-empty : [ on setting-target9 return $(v1) ] : z z ;
+$(vars) on setting-target10 ?= z ;
+check-equal settings-default-duplicate-empty : [ on setting-target10 return $(v1) ] : z ;
+
+v1 on $(target1) = x1 ;
+$(vars) on $(target1) = z ;
+check-equal settings-set-duplicate-non-empty : [ on $(target1) return $(v1) ] : z ;
+v1 on $(target1) = x1 ;
+$(vars) on $(target1) += z ;
+check-equal settings-append-duplicate-non-empty : [ on $(target1) return $(v1) ] : x1 z z ;
+v1 on $(target1) = x1 ;
+$(vars) on $(target1) ?= z ;
+check-equal settings-default-duplicate-non-empty : [ on $(target1) return $(v1) ] : x1 ;
+
+v1 on $(target1) = ;
+v1 on $(target1) ?= z ;
+check-equal settings-default-set-but-empty : [ on $(target1) return $(v1) ] : ;
+
+v1 on $(targets) = multi ;
+check-equal settings-set-multi-empty : [ on $(target2) return $(v1) ] - [ on $(target3) return $(v1) ] : multi - multi ;
+v2 on $(targets) += multi ;
+check-equal settings-append-multi-empty : [ on $(target2) return $(v2) ] - [ on $(target3) return $(v2) ] : multi - multi ;
+v3 on $(targets) ?= multi ;
+check-equal settings-default-multi-empty : [ on $(target2) return $(v3) ] - [ on $(target3) return $(v3) ] : multi - multi ;
+
+v1 on $(targets) = multi2 ;
+check-equal settings-set-multi-empty : [ on $(target2) return $(v1) ] - [ on $(target3) return $(v1) ] : multi2 - multi2 ;
+v2 on $(targets) += multi2 ;
+check-equal settings-append-multi-empty : [ on $(target2) return $(v2) ] - [ on $(target3) return $(v2) ] : multi multi2 - multi multi2 ;
+v3 on $(targets) ?= multi2 ;
+check-equal settings-default-multi-empty : [ on $(target2) return $(v3) ] - [ on $(target3) return $(v3) ] : multi - multi ;
+
+rule test-rule { v1 on $(target1) = x1 ; }
+check-equal settings-set-result : [ test-rule ] : x1 ;
+rule test-rule { v1 on $(target1) += x1 ; }
+check-equal settings-append-result : [ test-rule ] : x1 ;
+rule test-rule { v1 on $(target1) ?= x1 ; }
+check-equal settings-default-result : [ test-rule ] : x1 ;
+
+[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] = [ mark-order r2 : value ] ;
+check-order settings-set-order : r1 r2 r3 ;
+[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] += [ mark-order r2 : value ] ;
+check-order settings-append-order : r1 r2 r3 ;
+[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] ?= [ mark-order r2 : value ] ;
+check-order settings-default-order : r1 r2 r3 ;
+
+}
+
+# Check switch
+
+{
+
+local pattern = * ;
+
+switch value
+{
+ case * : mark-order r1 ;
+}
+
+check-order switch-match-any : r1 ;
+
+switch value
+{
+ case v2 : mark-order r1 ;
+}
+
+check-order switch-no-match : ;
+
+switch value
+{
+ case $(pattern) : mark-order r1 ;
+}
+
+check-order switch-no-expand : ;
+
+switch value
+{
+ case value : mark-order r1 ;
+ case * : mark-order r2 ;
+}
+
+check-order switch-match-several : r1 ;
+
+rule test-rule ( value )
+{
+ switch $(value)
+ {
+ case value : return 1 ;
+ }
+}
+
+check-equal switch-result-match : [ test-rule value ] : 1 ;
+check-equal switch-result-no-match : [ test-rule v1 ] : ;
+
+switch $()
+{
+ case "" : mark-order r1 ;
+ case * : mark-order r2 ;
+}
+
+check-order switch-empty : r1 ;
+
+local values = v1 v2 v3 ;
+switch $(values)
+{
+ case v1 : mark-order r1 ;
+ case v2 : mark-order r2 ;
+ case v3 : mark-order r3 ;
+}
+
+check-order switch-multiple : r1 ;
+
+# Test glob matching
+
+switch value { case * : mark-order r1 ; }
+check-order switch-glob-star : r1 ;
+
+switch value { case va*e : mark-order r1 ; }
+check-order switch-glob-star-1 : r1 ;
+
+switch value { case *a* : mark-order r1 ; }
+check-order switch-glob-star-2 : r1 ;
+
+switch value { case *a*ue* : mark-order r1 ; }
+check-order switch-glob-star-3 : r1 ;
+
+switch value { case *[eaiou]*ue : mark-order r1 ; }
+check-order switch-glob-group : r1 ;
+
+switch value { case *[eaiou]ue : mark-order r1 ; }
+check-order switch-glob-group-fail : ;
+
+switch value { case ?a?ue : mark-order r1 ; }
+check-order switch-glob-any : r1 ;
+
+switch value { case ?lue : mark-order r1 ; }
+check-order switch-glob-any-fail : ;
+
+}
+
+# Test while
+
+{
+
+local value = 1 2 3 ;
+
+while $(value)
+{
+ mark-order r$(value[1]) ;
+ value = $(value[2-]) ;
+}
+
+check-order while-exec : r1 r2 r3 ;
+
+rule test-rule
+{
+ local value = 1 2 3 ;
+ while $(value)
+ {
+ value = $(value[2-]) ;
+ return x ;
+ }
+}
+
+check-equal while-result : [ test-rule ] : x ;
+
+rule test-rule
+{
+ local value = 1 2 ;
+ while $(value)
+ {
+ value = $(value[2-]) ;
+ local inner = $(value) ;
+ while $(inner)
+ {
+ inner = $(inner[2-]) ;
+ return x ;
+ }
+ }
+}
+
+check-equal while-result-2 : [ test-rule ] : x ;
+
+}
+
+
+#
+# test break
+#
+
+{
+
+local z = original ;
+local done ;
+while ! $(done)
+{
+ local z = inner ;
+ mark-order r1 ;
+ break ;
+ mark-order r2 ;
+ done = true ;
+}
+
+check-order break-while-exec : r1 ;
+check-equal break-while-cleanup : $(z) : original ;
+
+local values = v1 v2 ;
+
+for y in $(values)
+{
+ local z = inner ;
+ mark-order r1-$(y) ;
+ break ;
+ mark-order r2-$(y) ;
+}
+
+check-order break-for-exec : r1-v1 ;
+check-equal break-for-cleanup : $(z) : original ;
+
+for local y in $(values)
+{
+ local z = inner ;
+ mark-order r1-$(y) ;
+ break ;
+ mark-order r2-$(y) ;
+}
+
+check-order break-for-local-exec : r1-v1 ;
+check-equal break-for-local-cleanup : $(z) : original ;
+
+local z1 = z1val ;
+local z2 = z2val ;
+done = ;
+while ! $(done)
+{
+ local z1 = z1new ;
+ mark-order r1 ;
+ for local y in $(values)
+ {
+ local z2 = z2new ;
+ mark-order r2 ;
+ break ;
+ mark-order r3 ;
+ }
+ mark-order r4 ;
+ break ;
+ mark-order r5 ;
+ done = true ;
+}
+
+check-order break-nested-exec : r1 r2 r4 ;
+check-equal break-nested-cleanup1 : $(z1) : z1val ;
+check-equal break-nested-cleanup2 : $(z2) : z2val ;
+
+}
+
+#
+# test continue
+#
+
+{
+
+local z = original ;
+local done ;
+while ! [ mark-order r1 : $(done) ]
+{
+ local z = inner ;
+ done = true ;
+ mark-order r2 ;
+ continue ;
+ mark-order r3 ;
+}
+
+check-order continue-while-exec : r1 r2 r1 ;
+check-equal continue-while-cleanup : $(z) : original ;
+
+local values = v1 v2 ;
+for y in $(values)
+{
+ local z = inner ;
+ mark-order r1-$(y) ;
+ continue ;
+ mark-order r2-$(y) ;
+}
+
+check-order continue-for-exec : r1-v1 r1-v2 ;
+check-equal continue-for-cleanup : $(z) : original ;
+
+for local y in $(values)
+{
+ local z = inner ;
+ mark-order r1-$(y) ;
+ continue ;
+ mark-order r2-$(y) ;
+}
+
+check-order continue-for-local-exec : r1-v1 r1-v2 ;
+check-equal continue-for-local-cleanup : $(z) : original ;
+
+local z1 = z1val ;
+local z2 = z2val ;
+done = ;
+while ! [ mark-order r1 : $(done) ]
+{
+ local z1 = z1new ;
+ done = true ;
+ mark-order r2 ;
+ for local y in $(values)
+ {
+ local z2 = z2new ;
+ mark-order r3-$(y) ;
+ continue ;
+ mark-order r4-$(y) ;
+ }
+ mark-order r5 ;
+ continue ;
+ mark-order r6 ;
+}
+
+check-order continue-nested-exec : r1 r2 r3-v1 r3-v2 r5 r1 ;
+check-equal continue-nested-cleanup1 : $(z1) : z1val ;
+check-equal continue-nested-cleanup2 : $(z2) : z2val ;
+
+}
+
+#
+# test CALLER_MODULE and backtrace
+#
+
+{
+ local base = [ BACKTRACE ] ;
+ base = $(base[2]) ;
+ rule backtrace ( )
+ {
+ local bt = [ BACKTRACE ] ;
+ check-equal backtrace-1-file : $(bt) :
+ test.jam [ CALC $(base) + 4 ] "" backtrace
+ test.jam [ CALC $(base) + 28 ] module2. module2.f
+ test.jam [ CALC $(base) + 19 ] module1. module1.f
+ test.jam [ CALC $(base) + 32 ] "" "module scope"
+ ;
+ }
+ module module1
+ {
+ IMPORT_MODULE module2 : module1 ;
+ rule f ( )
+ {
+ local m = [ CALLER_MODULE ] ;
+ check-equal caller-module-root : $(m) ;
+ module2.f ;
+ }
+ }
+ module module2
+ {
+ rule f ( )
+ {
+ local m = [ CALLER_MODULE ] ;
+ check-equal caller-module : module1 : $(m) ;
+ backtrace ;
+ }
+ }
+ IMPORT_MODULE module1 ;
+ module1.f ;
+}
+
+
+# Test NORMALIZE_PATH
+
+{
+check-equal normalize-path : "." : [ NORMALIZE_PATH ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "." ] ;
+check-equal normalize-path : ".." : [ NORMALIZE_PATH ".." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "//" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "//\\\\//\\\\" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/./" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\///.///\\\\\\" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "./././././." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/./././././." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////././." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////./././" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo////.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "///foo/\\\\/.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\\\foo\\//\\.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/./.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/././././.." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/./././bar/./././.././././baz/./././.." ] ;
+check-equal normalize-path : "/foo" : [ NORMALIZE_PATH "/foo/./././bar/./././.././././baz/./././.." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/./././bar/./././////.././././baz/./././.." ] ;
+check-equal normalize-path : "/foo" : [ NORMALIZE_PATH "/foo/./././bar/./././////.././././baz/./././.." ] ;
+check-equal normalize-path : ".." : [ NORMALIZE_PATH "./.." ] ;
+check-equal normalize-path : ".." : [ NORMALIZE_PATH "././././.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "../.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "./../.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "././././../.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "./.././././.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "././././.././././.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "..//\\\\\\//.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "../..\\\\/\\\\" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/../bar/../baz/.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo////..////bar////.//////.////../baz/.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/foo/../bar/../baz/.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/foo////..////bar////.//////.////../baz/.." ] ;
+
+# Invalid rooted paths with leading dotdots.
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../" ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "//\\\\//\\\\/.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "\\\\//\\\\//\\.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../../.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/foo/bar/../baz/../../.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../for/././../././bar/././../././.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../foo/bar" ] ;
+
+}
+
+# Test W32_GETREGNAMES
+
+{
+
+if $(NT)
+{
+ local sound = "Beep" "ExtendedSounds" ;
+ local r1 = [ W32_GETREGNAMES "HKEY_CURRENT_USER\\Control Panel\\Sound" :
+ values ] ;
+ check-equal w32_getregnames : $(sound:L) : [ SORT $(r1:L) ] ;
+ local r2 = [ W32_GETREGNAMES "HKCU\\Control Panel\\Sound" : values ] ;
+ check-equal w32_getregnames : $(sound:L) : [ SORT $(r2:L) ] ;
+
+ # Some Windows platforms may have additional keys under 'CurrentControlSet',
+ # which we remove here so they are not reported as errors by our test.
+ local rule remove-policies ( param * )
+ {
+ local found ;
+ local r ;
+ for local x in $(param:L)
+ {
+ if ! x in $(found) &&
+ $(x) in "addservices" "policies" "deleted device ids" "software"
+ {
+ found += $(x) ;
+ }
+ else
+ {
+ r += $(x) ;
+ }
+ }
+ return $(r) ;
+ }
+ local CurrentControlSet = "Control" "Enum" "Hardware Profiles" "Services" ;
+ local r3 = [ W32_GETREGNAMES "HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet"
+ : subkeys ] ;
+ check-equal w32_getregnames : $(CurrentControlSet:L) : [ remove-policies
+ $(r3:L) ] ;
+ local r4 = [ W32_GETREGNAMES "HKLM\\SYSTEM\\CurrentControlSet" : subkeys ] ;
+ check-equal w32_getregnames : $(CurrentControlSet:L) : [ remove-policies
+ $(r4:L) ] ;
+}
+
+}
+
+# Test SHELL
+
+{
+
+local c = "echo value" ;
+if $(OS) = VMS { c = "PIPE WRITE SYS$OUTPUT \"value\"" ; }
+
+check-equal shell : "value\n" : [ SHELL $(c) ] ;
+check-equal shell : "" : [ SHELL $(c) : no-output ] ;
+check-equal shell : "value\n" 0 : [ SHELL $(c) : exit-status ] ;
+check-equal shell : "" 0 : [ SHELL $(c) : no-output : exit-status ] ;
+check-equal shell : "" 0 : [ SHELL $(c) : no-output : exit-status : strip-eol ] ;
+check-equal command : "value\n" : [ COMMAND $(c) ] ;
+check-equal command : "" : [ COMMAND $(c) : no-output ] ;
+check-equal command : "value\n" 0 : [ COMMAND $(c) : exit-status ] ;
+check-equal command : "" 0 : [ COMMAND $(c) : no-output : exit-status ] ;
+
+# buffered output
+
+local expected = "When the shell output buffer splits on whitespace, the whitespace shouldn't be trimmed. end." ;
+local buffered = "echo \"$(expected)\"" ;
+if $(OS) = VMS { buffered = "PIPE WRITE SYS$OUTPUT \"$(expected)\"" ; }
+if $(OS) = NT { buffered = "echo $(expected)" ; }
+
+check-equal shell : "$(expected)\n" : [ SHELL $(buffered) ] ;
+check-equal shell : "" : [ SHELL $(buffered) : no-output ] ;
+check-equal shell : "$(expected)\n" 0 : [ SHELL $(buffered) : exit-status ] ;
+check-equal shell : "$(expected)" 0 : [ SHELL $(buffered) : strip-eol : exit-status ] ;
+check-equal shell : "" 0 : [ SHELL $(buffered) : no-output : exit-status ] ;
+check-equal shell : "" 0 : [ SHELL $(buffered) : no-output : exit-status : strip-eol ] ;
+check-equal shell : "$(expected)" 0 : [ SHELL $(buffered) : strip-eol : exit-status ] ;
+
+check-equal command : "$(expected)\n" : [ COMMAND $(buffered) ] ;
+check-equal command : "" : [ COMMAND $(buffered) : no-output ] ;
+check-equal command : "$(expected)\n" 0 : [ COMMAND $(buffered) : exit-status ] ;
+check-equal command : "$(expected)" 0 : [ COMMAND $(buffered) : strip-eol : exit-status ] ;
+check-equal command : "" 0 : [ COMMAND $(buffered) : no-output : exit-status ] ;
+
+}
+
+# Test SUBST
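+#
+# (SUBST applies the regular expression in its second argument to the string
+# in its first argument and, on a match, returns one result per remaining
+# argument with \1, \2, ... replaced by the captured groups; without a match
+# it returns an empty list.)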
+
+{
+
+# Check that unmatched subst returns an empty list
+check-equal subst-nomatch : [ SUBST "abc" "d+" x ] : ;
+
+# Check that a matched subst works
+check-equal subst-match : [ SUBST "ddd" "d+" x ] : x ;
+
+# Check that we can get multiple substitutions from a single invocation
+check-equal subst-multiple : [ SUBST "x/y/z" "([^/]*)/([^/]*).*" "\\1" "\\2" "\\1-\\2" ] : x y x-y ;
+
+}
+
+# Test summary
+
+if $(failed) = 0
+{
+ status = 0 ;
+}
+else
+{
+ status = 1 ;
+}
+
+EXIT $(passed) passed $(failed) failed : $(status) ;
diff --git a/src/boost/tools/build/test/core_action_output.py b/src/boost/tools/build/test/core_action_output.py
new file mode 100755
index 000000000..757519863
--- /dev/null
+++ b/src/boost/tools/build/test/core_action_output.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test correct "-p" option handling.
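+#
+# As the expectations below exercise: with the default or "-p0" both the
+# action's stdout and stderr end up in the build output; "-p1" keeps only the
+# action's stdout; "-p2" drops the action's stdout and forwards its stderr to
+# bjam's own stderr; "-p3" keeps stdout in the output and forwards stderr.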
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d1"], pass_toolset=False)
+
+t.write("file.jam", """\
+prefix = "echo \\"" ;
+suffix = "\\"" ;
+if $(NT)
+{
+ prefix = "(echo " ;
+ suffix = ")" ;
+}
+actions go
+{
+ $(prefix)stdout$(suffix)
+ $(prefix)stderr$(suffix) 1>&2
+}
+ECHO "{{{" $(XXX) "}}}" ;
+ALWAYS all ;
+go all ;
+""")
+
+t.run_build_system(["-ffile.jam", "-sXXX=1"], stderr="")
+t.expect_output_lines("{{{ 1 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr")
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=2", "-p0"], stderr="")
+t.expect_output_lines("{{{ 2 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr")
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=3", "-p1"], stderr="")
+t.expect_output_lines("{{{ 3 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr*", False)
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=4", "-p2"], stderr="stderr\n")
+t.expect_output_lines("{{{ 4 }}}")
+t.expect_output_lines("stdout*", False)
+t.expect_output_lines("stderr*", False)
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=5", "-p3"], stderr="stderr\n")
+t.expect_output_lines("{{{ 5 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr*", False)
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_action_status.py b/src/boost/tools/build/test/core_action_status.py
new file mode 100755
index 000000000..7ebd43869
--- /dev/null
+++ b/src/boost/tools/build/test/core_action_status.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+actions quietly .a. { $(ACTION) }
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+DEPENDS all : subtest_a ;
+""")
+
+t.run_build_system(["-ffile.jam", "-sACTION=invalid"], status=1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_actions_quietly.py b/src/boost/tools/build/test/core_actions_quietly.py
new file mode 100755
index 000000000..c020846d5
--- /dev/null
+++ b/src/boost/tools/build/test/core_actions_quietly.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+actions quietly .a.
+{
+echo [$(<:B)] 0
+echo [$(<:B)] 1
+echo [$(<:B)] 2
+}
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+.a. subtest_b : subtest ;
+DEPENDS all : subtest_a subtest_b ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d2"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+.a. subtest_a
+
+echo [subtest_a] 0
+echo [subtest_a] 1
+echo [subtest_a] 2
+
+[subtest_a] 0
+[subtest_a] 1
+[subtest_a] 2
+.a. subtest_b
+
+echo [subtest_b] 0
+echo [subtest_b] 1
+echo [subtest_b] 2
+
+[subtest_b] 0
+[subtest_b] 1
+[subtest_b] 2
+...updated 2 targets...
+""")
+
+t.run_build_system(["-ffile.jam", "-d1"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+...updated 2 targets...
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_arguments.py b/src/boost/tools/build/test/core_arguments.py
new file mode 100755
index 000000000..1e0bd4b0c
--- /dev/null
+++ b/src/boost/tools/build/test/core_arguments.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+# Copyright 2001 Dave Abrahams
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+def simple_args(start, finish):
+ return " : ".join("%d" % x for x in range(start, finish + 1))
+
+
+def test(t, type, input, output, status=0):
+ code = ["include echo_args.jam ; echo_%s" % type]
+ if input: code.append(input)
+ code.append(";")
+ t.write("file.jam", " ".join(code))
+ t.run_build_system(["-ffile.jam"], status=status)
+ t.expect_output_lines(output)
+
+
+def test_args(t, *args, **kwargs):
+ test(t, "args", *args, **kwargs)
+
+
+def test_varargs(t, *args, **kwargs):
+ test(t, "varargs", *args, **kwargs)
+
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("echo_args.jam", """\
+NOCARE all ;
+
+rule echo_args ( a b ? c ? : d + : e * )
+{
+ ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ;
+}
+
+rule echo_varargs ( a b ? c ? : d + : e * : * )
+{
+ ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e)
+ ": rest= "$(4[1]) $(4[2-])
+ ": "$(5[1]) $(5[2-]) ": "$(6[1]) $(6[2-]) ": "$(7[1]) $(7[2-])
+ ": "$(8[1]) $(8[2-]) ": "$(9[1]) $(9[2-]) ": "$(10[1]) $(10[2-])
+ ": "$(11[1]) $(11[2-]) ": "$(12[1]) $(12[2-]) ": "$(13[1]) $(13[2-])
+ ": "$(14[1]) $(14[2-]) ": "$(15[1]) $(15[2-]) ": "$(16[1]) $(16[2-])
+ ": "$(17[1]) $(17[2-]) ": "$(18[1]) $(18[2-]) ": "$(19[1]) $(19[2-])
+ ": "$(20[1]) $(20[2-]) ": "$(21[1]) $(21[2-]) ": "$(22[1]) $(22[2-])
+ ": "$(23[1]) $(23[2-]) ": "$(24[1]) $(24[2-]) ": "$(25[1]) $(25[2-]) ;
+}
+""")
+
+test_args(t, "", "* missing argument a", status=1)
+test_args(t, "1 2 : 3 : 4 : 5", "* extra argument 5", status=1)
+test_args(t, "a b c1 c2 : d", "* extra argument c2", status=1)
+
+# Check modifier '?'
+test_args(t, "1 2 3 : 4", "a= 1 b= 2 c= 3 : d= 4 : e=")
+test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=")
+test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=")
+test_args(t, "1 : 2", "a= 1 b= c= : d= 2 : e=")
+
+# Check modifier '+'
+test_args(t, "1", "* missing argument d", status=1)
+test_args(t, "1 : 2 3", "a= 1 b= c= : d= 2 3 : e=")
+test_args(t, "1 : 2 3 4", "a= 1 b= c= : d= 2 3 4 : e=")
+
+# Check modifier '*'
+test_args(t, "1 : 2 : 3", "a= 1 b= c= : d= 2 : e= 3")
+test_args(t, "1 : 2 : 3 4", "a= 1 b= c= : d= 2 : e= 3 4")
+test_args(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5")
+
+# Check varargs
+test_varargs(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5")
+test_varargs(t, "1 : 2 : 3 4 5 : 6", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7",
+ "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8",
+ "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9",
+ "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : 9")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : "
+ "16 : 17 : 18 : 19a 19b", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : "
+ "9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : "
+ "16 : 17 : 18 : 19a 19b 19c : 20", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= "
+ "6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b 19c : "
+ "20")
+
+# Check varargs upper limit
+expected = "a= 1 b= c= : d= 2 : e= 3 : rest= " + simple_args(4, 19)
+test_varargs(t, simple_args(1, 19), expected)
+test_varargs(t, simple_args(1, 19) + " 19b 19c 19d", expected + " 19b 19c 19d")
+test_varargs(t, simple_args(1, 19) + " 19b 19c 19d : 20", expected + " 19b "
+ "19c 19d")
+test_varargs(t, simple_args(1, 20), expected)
+test_varargs(t, simple_args(1, 50), expected)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_at_file.py b/src/boost/tools/build/test/core_at_file.py
new file mode 100755
index 000000000..50fa51220
--- /dev/null
+++ b/src/boost/tools/build/test/core_at_file.py
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
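+# An "@(name:E=content)" expression makes bjam write 'content' to a response
+# file called 'name' and expands to that file name; when the name is
+# $(STDOUT), the content is written to standard output instead, as the later
+# cases below exercise.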
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+EXIT "file:" "@(o$(name) .txt:E= test -D$(contents))" : 0 ;
+""")
+
+t.run_build_system()
+t.expect_output_lines("file: on1 on2 .txt");
+t.expect_addition("on1 on2 .txt")
+t.expect_content("on1 on2 .txt", " test -DM1 -DM2", True)
+
+t.rm(".")
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+actions run { echo file: "@(o$(name) .txt:E= test -D$(contents))" }
+run all ;
+""")
+
+t.run_build_system(["-d2"])
+t.expect_output_lines(' echo file: "on1 on2 .txt" ');
+t.expect_addition("on1 on2 .txt")
+t.expect_content("on1 on2 .txt", " test -DM1 -DM2", True)
+
+t.rm(".")
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+file = "@($(STDOUT):E= test -D$(contents)\n)" ;
+actions run { $(file) }
+run all ;
+""")
+
+t.run_build_system(["-d1"])
+t.expect_output_lines(" test -DM1 -DM2")
+
+t.rm(".")
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+actions run { @($(STDOUT):E= test -D$(contents)\n) }
+run all ;
+""")
+
+t.run_build_system(["-d1"])
+t.expect_output_lines(" test -DM1 -DM2")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_bindrule.py b/src/boost/tools/build/test/core_bindrule.py
new file mode 100755
index 000000000..f97a31f56
--- /dev/null
+++ b/src/boost/tools/build/test/core_bindrule.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+
+# Copyright 2001 Dave Abrahams
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(["-d1"], pass_toolset=0)
+
+t.write("subdir1/file-to-bind", "# This file intentionally left blank")
+
+t.write("file.jam", """\
+rule do-nothing ( target : source )
+{
+ DEPENDS $(target) : $(source) ;
+}
+actions quietly do-nothing { }
+
+# Make a non-file target which depends on a file that exists
+NOTFILE fake-target ;
+SEARCH on file-to-bind = subdir1 ;
+
+do-nothing fake-target : file-to-bind ;
+
+# Set jam up to call our bind-rule
+BINDRULE = bind-rule ;
+
+rule bind-rule ( target : path )
+{
+ ECHO "found:" $(target) at $(path) ;
+}
+
+DEPENDS all : fake-target ;
+""")
+
+t.run_build_system(["-ffile.jam"], stdout="""\
+found: all at all
+found: file-to-bind at subdir1%sfile-to-bind
+...found 3 targets...
+""" % os.sep)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_d12.py b/src/boost/tools/build/test/core_d12.py
new file mode 100644
index 000000000..370fc4bf1
--- /dev/null
+++ b/src/boost/tools/build/test/core_d12.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests correct handling of "-d1" and "-d2" options.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+t.write("file.jam", """\
+actions a { }
+actions quietly b { }
+ALWAYS all ;
+a all ;
+b all ;
+""")
+
+t.run_build_system(["-d0"], stdout="")
+
+t.run_build_system(["-d1"])
+t.expect_output_lines("a all")
+t.expect_output_lines("b all", False)
+
+t.run_build_system(["-d2"])
+t.expect_output_lines("a all")
+t.expect_output_lines("b all")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_delete_module.py b/src/boost/tools/build/test/core_delete_module.py
new file mode 100644
index 000000000..d56ffe6e7
--- /dev/null
+++ b/src/boost/tools/build/test/core_delete_module.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the facilities for deleting modules.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """
+module foo
+{
+ rule bar { }
+ var = x y ;
+}
+DELETE_MODULE foo ;
+if [ RULENAMES foo ]
+{
+ EXIT DELETE_MODULE failed to kill foo's rules: [ RULENAMES foo ] ;
+}
+
+module foo
+{
+ if $(var)
+ {
+ EXIT DELETE_MODULE failed to kill foo's variables ;
+ }
+
+ rule bar { }
+ var = x y ;
+
+ DELETE_MODULE foo ;
+
+ if $(var)
+ {
+ EXIT internal DELETE_MODULE failed to kill foo's variables ;
+ }
+ if [ RULENAMES foo ]
+ {
+ EXIT internal DELETE_MODULE failed to kill foo's rules: [ RULENAMES foo ] ;
+ }
+}
+DEPENDS all : xx ;
+NOTFILE xx ;
+""")
+
+t.run_build_system(["-ffile.jam"], status=0)
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_dependencies.py b/src/boost/tools/build/test/core_dependencies.py
new file mode 100644
index 000000000..4c60537d9
--- /dev/null
+++ b/src/boost/tools/build/test/core_dependencies.py
@@ -0,0 +1,157 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests correct handling of dependencies, specifically dependencies on
+# generated sources and dependencies from generated sources.
+
+import BoostBuild
+
+import string
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("core-dependency-helpers", """
+rule hdrrule
+{
+ INCLUDES $(1) : $(2) ;
+}
+actions copy
+{
+ cp $(>) $(<)
+}
+""")
+
+code = """include core-dependency-helpers ;
+DEPENDS all : a ;
+DEPENDS a : b ;
+
+actions create-b
+{
+ echo '#include <foo.h>' > $(<)
+}
+copy a : b ;
+create-b b ;
+HDRRULE on b foo.h bar.h = hdrrule ;
+HDRSCAN on b foo.h bar.h = \"#include <(.*)>\" ;
+"""
+
+# This creates 'a' which depends on 'b', which is generated. The generated 'b'
+# contains '#include <foo.h>' and no rules for foo.h are given. The system
+# should error out on the first invocation.
+t.run_build_system("-f-", stdin=code)
+t.fail_test(t.stdout().find("...skipped a for lack of foo.h...") == -1)
+
+t.rm('b')
+
+# Now test that if target 'c' also depends on 'b', then it will not be built
+# either.
+t.run_build_system("-f-", stdin=code + " copy c : b ; DEPENDS c : b ; DEPENDS all : c ; ")
+t.fail_test(t.stdout().find("...skipped c for lack of foo.h...") == -1)
+
+t.rm('b')
+
+# Now add a rule for creating foo.h.
+code += """
+actions create-foo
+{
+ echo // > $(<)
+}
+create-foo foo.h ;
+"""
+t.run_build_system("-f-", stdin=code)
+
+# Run two times, adding explicit dependency from all to foo.h at the beginning
+# and at the end, to make sure that foo.h is generated before 'a' in all cases.
+
+def mk_correct_order_func(s1, s2):
+ def correct_order(s):
+ n1 = s.find(s1)
+ n2 = s.find(s2)
+ return ( n1 != -1 ) and ( n2 != -1 ) and ( n1 < n2 )
+ return correct_order
+
+correct_order = mk_correct_order_func("create-foo", "copy a")
+
+t.rm(["a", "b", "foo.h"])
+t.run_build_system("-d+2 -f-", stdin=code + " DEPENDS all : foo.h ;")
+t.fail_test(not correct_order(t.stdout()))
+
+t.rm(["a", "b", "foo.h"])
+t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : foo.h ; " + code)
+t.fail_test(not correct_order(t.stdout()))
+
+# Now foo.h exists. Test include from b -> foo.h -> bar.h -> biz.h. b and foo.h
+# already have updating actions.
+t.rm(["a", "b"])
+t.write("foo.h", "#include <bar.h>")
+t.write("bar.h", "#include <biz.h>")
+t.run_build_system("-d+2 -f-", stdin=code)
+t.fail_test(t.stdout().find("...skipped a for lack of biz.h...") == -1)
+
+# Add an action for biz.h.
+code += """
+actions create-biz
+{
+ echo // > $(<)
+}
+create-biz biz.h ;
+"""
+
+t.rm(["b"])
+correct_order = mk_correct_order_func("create-biz", "copy a")
+t.run_build_system("-d+2 -f-", stdin=code + " DEPENDS all : biz.h ;")
+t.fail_test(not correct_order(t.stdout()))
+
+t.rm(["a", "biz.h"])
+t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : biz.h ; " + code)
+t.fail_test(not correct_order(t.stdout()))
+
+t.write("a", "")
+
+code="""
+DEPENDS all : main d ;
+
+actions copy
+{
+ cp $(>) $(<) ;
+}
+
+DEPENDS main : a ;
+copy main : a ;
+
+INCLUDES a : <1>c ;
+
+NOCARE <1>c ;
+SEARCH on <1>c = . ;
+
+actions create-c
+{
+ echo d > $(<)
+}
+
+actions create-d
+{
+ echo // > $(<)
+}
+
+create-c <2>c ;
+LOCATE on <2>c = . ;
+create-d d ;
+
+HDRSCAN on <1>c = (.*) ;
+HDRRULE on <1>c = hdrrule ;
+
+rule hdrrule
+{
+ INCLUDES $(1) : d ;
+}
+"""
+
+correct_order = mk_correct_order_func("create-d", "copy main")
+t.run_build_system("-d2 -f-", stdin=code)
+t.fail_test(not correct_order(t.stdout()))
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_fail_expected.py b/src/boost/tools/build/test/core_fail_expected.py
new file mode 100644
index 000000000..0865a0b7a
--- /dev/null
+++ b/src/boost/tools/build/test/core_fail_expected.py
@@ -0,0 +1,139 @@
+#!/usr/bin/python
+
+# Copyright 2017 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_basic():
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+ actions fail
+ {
+ invalid-dd0eeb5899734622
+ }
+
+ FAIL_EXPECTED t1 ;
+ fail t1 ;
+
+ UPDATE t1 ;
+ """)
+
+ t.run_build_system(["-ffile.jam"])
+ t.expect_output_lines("...failed*", False)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_error():
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+ actions pass
+ {
+ echo okay >$(<)
+ }
+
+ FAIL_EXPECTED t1 ;
+ pass t1 ;
+
+ UPDATE t1 ;
+ """)
+
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("...failed pass t1...")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_multiple_actions():
+ """FAIL_EXPECTED targets are considered to pass if the first
+ updating action fails. Further actions will be skipped."""
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+ actions fail
+ {
+ invalid-dd0eeb5899734622
+ }
+
+ actions pass
+ {
+ echo okay >$(<)
+ }
+
+ FAIL_EXPECTED t1 ;
+ fail t1 ;
+ pass t1 ;
+
+ UPDATE t1 ;
+ """)
+
+ t.run_build_system(["-ffile.jam", "-d1"])
+ t.expect_output_lines("...failed*", False)
+ t.expect_output_lines("pass t1", False)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_quitquick():
+ """Tests that FAIL_EXPECTED targets do not cause early exit
+ on failure."""
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+ actions fail
+ {
+ invalid-dd0eeb5899734622
+ }
+
+ actions pass
+ {
+ echo okay >$(<)
+ }
+
+ FAIL_EXPECTED t1 ;
+ fail t1 ;
+
+ pass t2 ;
+
+ UPDATE t1 t2 ;
+ """)
+
+ t.run_build_system(["-ffile.jam", "-q", "-d1"])
+ t.expect_output_lines("pass t2")
+ t.expect_addition("t2")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_quitquick_error():
+ """FAIL_EXPECTED targets should cause early exit if they unexpectedly pass."""
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+ actions pass
+ {
+ echo okay >$(<)
+ }
+
+ FAIL_EXPECTED t1 ;
+ pass t1 ;
+ pass t2 ;
+
+ UPDATE t1 t2 ;
+ """)
+
+ t.run_build_system(["-ffile.jam", "-q", "-d1"], status=1)
+ t.expect_output_lines("pass t2", False)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+test_basic()
+test_error()
+test_multiple_actions()
+test_quitquick()
+test_quitquick_error()
diff --git a/src/boost/tools/build/test/core_import_module.py b/src/boost/tools/build/test/core_import_module.py
new file mode 100644
index 000000000..5903dcd64
--- /dev/null
+++ b/src/boost/tools/build/test/core_import_module.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("code", """\
+module a
+{
+ rule r1 ( )
+ {
+ ECHO R1 ;
+ }
+
+ local rule l1 ( )
+ {
+ ECHO A.L1 ;
+ }
+}
+module a2
+{
+ rule r2 ( )
+ {
+ ECHO R2 ;
+ }
+}
+IMPORT a2 : r2 : : a2.r2 ;
+
+rule a.l1 ( )
+{
+ ECHO L1 ;
+}
+
+module b
+{
+ IMPORT_MODULE a : b ;
+ rule test
+ {
+ # Call rule visible via IMPORT_MODULE
+ a.r1 ;
+ # Call rule in global scope
+ a2.r2 ;
+ # Call rule in global scope. Doesn't find local rule
+ a.l1 ;
+ # Make l1 visible
+ EXPORT a : l1 ;
+ a.l1 ;
+ }
+}
+
+IMPORT b : test : : test ;
+test ;
+
+module c
+{
+ rule test
+ {
+ ECHO CTEST ;
+ }
+}
+
+IMPORT_MODULE c : ;
+c.test ;
+
+EXIT : 0 ;
+""")
+
+t.run_build_system(["-fcode"], stdout="""\
+R1
+R2
+L1
+A.L1
+CTEST
+
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_jamshell.py b/src/boost/tools/build/test/core_jamshell.py
new file mode 100644
index 000000000..7020ac0b6
--- /dev/null
+++ b/src/boost/tools/build/test/core_jamshell.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import sys
+
+t = BoostBuild.Tester(pass_toolset=False)
+
+t.write("file.jam", """
+actions run {
+ $(ACTION)
+}
+
+# Raw commands only work on Windows
+if $(OS) = NT
+{
+ JAMSHELL on test-raw = % ;
+ JAMSHELL on test-raw-fail = % ;
+}
+ACTION on test-raw = "\"$(PYTHON)\" -V" ;
+run test-raw ;
+
+ACTION on test-raw-fail = missing-executable ;
+run test-raw-fail ;
+
+# On Windows, the command is stored in a temporary
+# file. On other systems it is passed directly.
+if $(OS) = NT
+{
+ JAMSHELL on test-py = $(PYTHON) ;
+}
+else
+{
+ JAMSHELL on test-py = $(PYTHON) -c ;
+}
+ACTION on test-py = "
+from __future__ import print_function
+print(\\\",\\\".join([str(x) for x in range(3)]))
+" ;
+run test-py ;
+
+DEPENDS all : test-raw test-raw-fail test-py ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d1", "-sPYTHON=" + sys.executable], status=1)
+t.expect_output_lines([
+ "...failed run test-raw-fail...",
+ "0,1,2",
+ "...failed updating 1 target...",
+ "...updated 2 targets..."])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_language.py b/src/boost/tools/build/test/core_language.py
new file mode 100755
index 000000000..88a6d1934
--- /dev/null
+++ b/src/boost/tools/build/test/core_language.py
@@ -0,0 +1,12 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+t.set_tree("core-language")
+t.run_build_system(["-ftest.jam"])
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_modifiers.py b/src/boost/tools/build/test/core_modifiers.py
new file mode 100644
index 000000000..241443583
--- /dev/null
+++ b/src/boost/tools/build/test/core_modifiers.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the "existing" and "updated" modifiers on actions.
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+code = """
+DEPENDS all : a ;
+ALWAYS a ;
+NOTFILE a ;
+
+actions existing make-a
+{
+ echo $(>) > list
+}
+make-a a : a-1 a-2 a-3 ;
+DEPENDS a : a-1 a-2 a-3 ;
+NOCARE a-1 a-2 ;
+
+actions make-a3
+{
+ echo foo > $(<)
+}
+make-a3 a-3 ;
+"""
+
+t.write("file.jam", code)
+t.write("a-1", "")
+
+t.run_build_system("-ffile.jam")
+t.fail_test(string.strip(t.read("list")) != "a-1")
+t.rm(["a-3", "list"])
+
+code = code.replace("existing", "updated")
+t.write("file.jam", code)
+t.run_build_system("-ffile.jam")
+t.fail_test(string.strip(t.read("list")) != "a-3")
+
+code = code.replace("updated", "existing updated")
+t.write("file.jam", code)
+t.run_build_system("-ffile.jam")
+t.fail_test(string.strip(t.read("list")) != "a-1 a-3")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_multifile_actions.py b/src/boost/tools/build/test/core_multifile_actions.py
new file mode 100755
index 000000000..a9c7f4790
--- /dev/null
+++ b/src/boost/tools/build/test/core_multifile_actions.py
@@ -0,0 +1,202 @@
+#!/usr/bin/python
+
+# Copyright 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that actions that produce multiple targets are handled
+# correctly. The rules are as follows:
+#
+# - If any action that updates a target is run, then the target
+# is considered to be out-of-date and all of its updating actions
+# are run in order.
+# - A target is considered updated when all of its updating actions
+# have completed successfully.
+# - If any updating action for a target fails, then the remaining
+# actions are skipped and the target is marked as failed.
+#
+# Note that this is a more thorough test case for the same
+# problem that core_parallel_multifile_actions_N.py checks for.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d1"], pass_toolset=0)
+
+t.write("file.jam", """
+actions update
+{
+ echo updating $(<)
+}
+
+update x1 x2 ;
+update x2 x3 ;
+""")
+
+# Updating x1 should force x2 to update as well.
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 3 targets...
+...updating 3 targets...
+update x1
+updating x1 x2
+update x2
+updating x2 x3
+...updated 3 targets...
+""")
+
+# If x1 is up-to-date, we don't need to update x2,
+# even though x2 is missing.
+t.write("x1", "")
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 1 target...
+""")
+
+# Building x3 should update x1 and x2, even though
+# x1 would be considered up-to-date, taken alone.
+t.run_build_system(["-ffile.jam", "x3"], stdout="""\
+...found 3 targets...
+...updating 2 targets...
+update x1
+updating x1 x2
+update x2
+updating x2 x3
+...updated 3 targets...
+""")
+
+# Updating x2 should succeed, but x3 should be skipped
+t.rm("x1")
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+actions fail
+{
+ echo failed $(<)
+ exit 1
+}
+
+update x1 x2 ;
+fail x1 ;
+update x1 x3 ;
+update x2 ;
+update x3 ;
+""")
+
+t.run_build_system(["-ffile.jam", "x3"], status=1, stdout="""\
+...found 3 targets...
+...updating 3 targets...
+update x1
+updating x1 x2
+fail x1
+failed x1
+
+ echo failed x1
+ exit 1
+
+...failed fail x1...
+update x2
+updating x2
+...failed updating 2 targets...
+...updated 1 target...
+""")
+
+# Make sure that dependencies of targets that are
+# updated as a result of a multifile action are
+# processed correctly.
+t.rm("x1")
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+
+update x1 ;
+update x2 ;
+DEPENDS x2 : x1 ;
+update x2 x3 ;
+""")
+t.run_build_system(["-ffile.jam", "x3"], stdout="""\
+...found 3 targets...
+...updating 3 targets...
+update x1
+updating x1
+update x2
+updating x2
+update x2
+updating x2 x3
+...updated 3 targets...
+""")
+
+# JAM_SEMAPHORE rules:
+#
+# - if two updating actions have targets that share a semaphore,
+# these actions cannot be run in parallel.
+#
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+
+targets = x1 x2 ;
+JAM_SEMAPHORE on $(targets) = <s>update_sem ;
+update x1 x2 ;
+""")
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 2 targets...
+...updating 2 targets...
+update x1
+updating x1 x2
+...updated 2 targets...
+""")
+
+# A target can appear multiple times in an action
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+
+update x1 x1 ;
+""")
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 1 target...
+...updating 1 target...
+update x1
+updating x1 x1
+...updated 1 target...
+""")
+
+# Together actions should check that all the targets are the same
+# before combining.
+t.write("file.jam", """\
+actions together update
+{
+ echo updating $(<) : $(>)
+}
+
+update x1 x2 : s1 ;
+update x1 x2 : s2 ;
+
+update x3 : s3 ;
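+# In the signatures below '?' marks an optional argument, '+' requires one or
+# more values and '*' accepts zero or more values.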
+update x3 x4 : s4 ;
+update x4 x3 : s5 ;
+DEPENDS all : x1 x2 x3 x4 ;
+""")
+t.run_build_system(["-ffile.jam"], stdout="""\
+...found 5 targets...
+...updating 4 targets...
+update x1
+updating x1 x2 : s1 s2
+update x3
+updating x3 : s3
+update x3
+updating x3 x4 : s4
+update x4
+updating x4 x3 : s5
+...updated 4 targets...
+""")
+
+
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_nt_cmd_line.py b/src/boost/tools/build/test/core_nt_cmd_line.py
new file mode 100755
index 000000000..be21f6ca5
--- /dev/null
+++ b/src/boost/tools/build/test/core_nt_cmd_line.py
@@ -0,0 +1,266 @@
+#!/usr/bin/python
+
+# Copyright 2001 Dave Abrahams
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests Windows command line construction.
+#
+# Note that the regular 'echo' is an internal shell command on Windows and
+# therefore can not be called directly as a standalone Windows process.
+
+import BoostBuild
+import os
+import re
+import sys
+
+
+executable = sys.executable.replace("\\", "/")
+if " " in executable:
+ executable = '"%s"' % executable
+
+
+def string_of_length(n):
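+    # For illustration: string_of_length(21) returns "$(1x10-1) $(1x10-1) x",
+    # which expands to exactly 21 characters (9 + 1 + 9 + 1 + 1) once bjam
+    # substitutes the variables set up in file.jam below.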
+ if n <= 0:
+ return ""
+ n -= 1
+ y = ['', '$(1x10-1)', '$(10x10-1)', '$(100x10-1)', '$(1000x10-1)']
+ result = []
+ for i in reversed(range(5)):
+ x, n = divmod(n, 10 ** i)
+ result += [y[i]] * x
+ result.append('x')
+ return " ".join(result)
+
+
+# Boost Jam currently does not allow preparing actions with completely empty
+# content and always requires at least a single whitespace character after the
+# opening brace in order to satisfy its language grammar rules.
+def test_raw_empty():
+ whitespace_in = " \n\n\r\r\v\v\t\t \t \r\r \n\n"
+
+ # We tell the testing system to read its child process output as raw
+ # binary data but the bjam process we run will read its input file and
+ # write out its output as text, i.e. convert all of our "\r\n" sequences to
+ # "\n" on input and all of its "\n" characters back to "\r\n" on output.
+    # This means that any lone "\n" input characters not preceded by "\r" will
+    # get an extra "\r" added in front of them on output.
+ whitespace_out = whitespace_in.replace("\r\n", "\n").replace("\n", "\r\n")
+
+ t = BoostBuild.Tester(["-d2", "-d+4"], pass_toolset=0,
+ use_test_config=False)
+ t.write("file.jam", """\
+actions do_empty {%s}
+JAMSHELL = %% ;
+do_empty all ;
+""" % (whitespace_in))
+ t.run_build_system(["-ffile.jam"], universal_newlines=False)
+ t.expect_output_lines("do_empty all")
+ t.expect_output_lines("Executing raw command directly", False)
+ if "\r\n%s\r\n" % whitespace_out not in t.stdout():
+ BoostBuild.annotation("failure", "Whitespace action content not found "
+ "on stdout.")
+ t.fail_test(1, dump_difference=False)
+ t.cleanup()
+
+
+def test_raw_nt(n=None, error=False):
+ t = BoostBuild.Tester(["-d1", "-d+4"], pass_toolset=0,
+ use_test_config=False)
+
+ cmd_prefix = "%s -c \"print('XXX: " % executable
+ cmd_suffix = "')\""
+ cmd_extra_length = len(cmd_prefix) + len(cmd_suffix)
+
+ if n == None:
+ n = cmd_extra_length
+
+ data_length = n - cmd_extra_length
+ if data_length < 0:
+ BoostBuild.annotation("failure", """\
+Can not construct Windows command of desired length. Requested command length
+too short for the current test configuration.
+ Requested command length: %d
+ Minimal supported command length: %d
+""" % (n, cmd_extra_length))
+ t.fail_test(1, dump_difference=False)
+
+ # Each $(Xx10-1) variable contains X words of 9 characters each, which,
+ # including spaces between words, brings the total number of characters in
+ # its string representation to X * 10 - 1 (X * 9 characters + (X - 1)
+ # spaces).
+ t.write("file.jam", """\
+ten = 0 1 2 3 4 5 6 7 8 9 ;
+
+1x10-1 = 123456789 ;
+10x10-1 = $(ten)12345678 ;
+100x10-1 = $(ten)$(ten)1234567 ;
+1000x10-1 = $(ten)$(ten)$(ten)123456 ;
+
+actions do_echo
+{
+ %s%s%s
+}
+JAMSHELL = %% ;
+do_echo all ;
+""" % (cmd_prefix, string_of_length(data_length), cmd_suffix))
+ if error:
+ expected_status = 1
+ else:
+ expected_status = 0
+ t.run_build_system(["-ffile.jam"], status=expected_status)
+ if error:
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("do_echo action is too long (%d, max 32766):" % n
+ )
+ t.expect_output_lines("XXX: *", False)
+ else:
+ t.expect_output_lines("Executing raw command directly")
+ t.expect_output_lines("do_echo action is too long*", False)
+
+ m = re.search("^XXX: (.*)$", t.stdout(), re.MULTILINE)
+ if not m:
+ BoostBuild.annotation("failure", "Expected output line starting "
+ "with 'XXX: ' not found.")
+ t.fail_test(1, dump_difference=False)
+ if len(m.group(1)) != data_length:
+ BoostBuild.annotation("failure", """Unexpected output data length.
+ Expected: %d
+    Received: %d""" % (data_length, len(m.group(1))))
+ t.fail_test(1, dump_difference=False)
+
+ t.cleanup()
+
+
+def test_raw_to_shell_fallback_nt():
+ t = BoostBuild.Tester(["-d1", "-d+4"], pass_toolset=0,
+ use_test_config=False)
+
+ cmd_prefix = '%s -c print(' % executable
+ cmd_suffix = ')'
+
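+    # As the cases below exercise, bjam runs an action "raw" (without going
+    # through cmd.exe) only when it is a single-line command free of shell
+    # redirections, pipes and escaped quotes; otherwise it falls back to
+    # running "cmd.exe /Q/C" on a command file.
+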
+ t.write("file_multiline.jam", """\
+actions do_multiline
+{
+ echo one
+
+
+ echo two
+}
+JAMSHELL = % ;
+do_multiline all ;
+""")
+ t.run_build_system(["-ffile_multiline.jam"])
+ t.expect_output_lines("do_multiline all")
+ t.expect_output_lines("one")
+ t.expect_output_lines("two")
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+
+ t.write("file_redirect.jam", """\
+actions do_redirect { echo one > two.txt }
+JAMSHELL = % ;
+do_redirect all ;
+""")
+ t.run_build_system(["-ffile_redirect.jam"])
+ t.expect_output_lines("do_redirect all")
+ t.expect_output_lines("one", False)
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+ t.expect_addition("two.txt")
+
+ t.write("file_pipe.jam", """\
+actions do_pipe
+{
+ echo one | echo two
+}
+JAMSHELL = % ;
+do_pipe all ;
+""")
+ t.run_build_system(["-ffile_pipe.jam"])
+ t.expect_output_lines("do_pipe all")
+ t.expect_output_lines("one*", False)
+ t.expect_output_lines("two")
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+
+ t.write("file_single_quoted.jam", """\
+actions do_single_quoted { %s'5>10'%s }
+JAMSHELL = %% ;
+do_single_quoted all ;
+""" % (cmd_prefix, cmd_suffix))
+ t.run_build_system(["-ffile_single_quoted.jam"])
+ t.expect_output_lines("do_single_quoted all")
+ t.expect_output_lines("5>10")
+ t.expect_output_lines("Executing raw command directly")
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C", False)
+ t.expect_nothing_more()
+
+ t.write("file_double_quoted.jam", """\
+actions do_double_quoted { %s"5>10"%s }
+JAMSHELL = %% ;
+do_double_quoted all ;
+""" % (cmd_prefix, cmd_suffix))
+ t.run_build_system(["-ffile_double_quoted.jam"])
+ t.expect_output_lines("do_double_quoted all")
+ # The difference between this example and the similar previous one using
+    # single instead of double quotes stems from how the Python executable in
+    # use parses the command-line string it receives from Windows.
+ t.expect_output_lines("False")
+ t.expect_output_lines("Executing raw command directly")
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C", False)
+ t.expect_nothing_more()
+
+ t.write("file_escaped_quote.jam", """\
+actions do_escaped_quote { %s\\"5>10\\"%s }
+JAMSHELL = %% ;
+do_escaped_quote all ;
+""" % (cmd_prefix, cmd_suffix))
+ t.run_build_system(["-ffile_escaped_quote.jam"])
+ t.expect_output_lines("do_escaped_quote all")
+ t.expect_output_lines("5>10")
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+if os.name == 'nt':
+ test_raw_empty()
+
+    # Cannot test much shorter lengths as the shortest possible command-line
+    # length constructed in this test depends on the runtime environment, e.g.
+    # the path to the Python executable running this test.
+ test_raw_nt()
+ test_raw_nt(255)
+ test_raw_nt(1000)
+ test_raw_nt(8000)
+ test_raw_nt(8191)
+ test_raw_nt(8192)
+ test_raw_nt(10000)
+ test_raw_nt(30000)
+ test_raw_nt(32766)
+ # CreateProcessA() Windows API places a limit of 32768 on the allowed
+ # command-line length, including a trailing Unicode (2-byte) nul-terminator
+ # character.
+ test_raw_nt(32767, error=True)
+ test_raw_nt(40000, error=True)
+ test_raw_nt(100001, error=True)
+
+ test_raw_to_shell_fallback_nt() \ No newline at end of file
diff --git a/src/boost/tools/build/test/core_option_d2.py b/src/boost/tools/build/test/core_option_d2.py
new file mode 100755
index 000000000..8e6b05a45
--- /dev/null
+++ b/src/boost/tools/build/test/core_option_d2.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+actions .a.
+{
+echo [$(<:B)] 0
+echo [$(<:B)] 1
+echo [$(<:B)] 2
+}
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+.a. subtest_b : subtest ;
+DEPENDS all : subtest_a subtest_b ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d2"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+.a. subtest_a
+
+echo [subtest_a] 0
+echo [subtest_a] 1
+echo [subtest_a] 2
+
+[subtest_a] 0
+[subtest_a] 1
+[subtest_a] 2
+.a. subtest_b
+
+echo [subtest_b] 0
+echo [subtest_b] 1
+echo [subtest_b] 2
+
+[subtest_b] 0
+[subtest_b] 1
+[subtest_b] 2
+...updated 2 targets...
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_option_l.py b/src/boost/tools/build/test/core_option_l.py
new file mode 100755
index 000000000..e237dcf63
--- /dev/null
+++ b/src/boost/tools/build/test/core_option_l.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
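+# Test the "-l" option: it limits each action to the given number of seconds
+# of run time, so the sleeper action below, which takes about 4 seconds, must
+# be reported as exceeding the 2 second limit.
+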
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ SLEEP = @call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+actions .a. {
+echo 001
+$(SLEEP) 4
+echo 002
+}
+
+.a. sleeper ;
+
+DEPENDS all : sleeper ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d1", "-l2"], status=1)
+t.expect_output_lines("2 second time limit exceeded")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_option_n.py b/src/boost/tools/build/test/core_option_n.py
new file mode 100755
index 000000000..af3ee0c3e
--- /dev/null
+++ b/src/boost/tools/build/test/core_option_n.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
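+# Test the "-n" option: actions are displayed but not executed, so the action
+# text is echoed while the "[subtest_*] N" lines that running the echo
+# commands would produce must not appear.
+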
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+actions .a.
+{
+echo [$(<:B)] 0
+echo [$(<:B)] 1
+echo [$(<:B)] 2
+}
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+.a. subtest_b : subtest ;
+FAIL_EXPECTED subtest_b ;
+DEPENDS all : subtest_a subtest_b ;
+""")
+
+t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+.a. subtest_a
+
+echo [subtest_a] 0
+echo [subtest_a] 1
+echo [subtest_a] 2
+
+.a. subtest_b
+
+echo [subtest_b] 0
+echo [subtest_b] 1
+echo [subtest_b] 2
+
+...updated 2 targets...
+""")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_parallel_actions.py b/src/boost/tools/build/test/core_parallel_actions.py
new file mode 100755
index 000000000..4f1627c20
--- /dev/null
+++ b/src/boost/tools/build/test/core_parallel_actions.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+# Copyright 2006 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d1"], pass_toolset=0)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ actions sleeper
+ {
+ echo [$(<:S)] 0
+ call sleep.bat 1
+ echo [$(<:S)] 1
+ call sleep.bat 1
+ echo [$(<:S)] 2
+ call sleep.bat $(<:B)
+ }
+}
+else
+{
+ actions sleeper
+ {
+ echo "[$(<:S)] 0"
+ sleep 1
+ echo "[$(<:S)] 1"
+ sleep 1
+ echo "[$(<:S)] 2"
+ sleep $(<:B)
+ }
+}
+
+rule sleeper
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE front ;
+sleeper 1.a : front ;
+sleeper 2.a : front ;
+sleeper 3.a : front ;
+sleeper 4.a : front ;
+NOTFILE choke ;
+DEPENDS choke : 1.a 2.a 3.a 4.a ;
+sleeper 1.b : choke ;
+sleeper 2.b : choke ;
+sleeper 3.b : choke ;
+sleeper 4.b : choke ;
+DEPENDS bottom : 1.b 2.b 3.b 4.b ;
+DEPENDS all : bottom ;
+""")
+
+t.run_build_system(["-ffile.jam", "-j4"], stdout="""\
+...found 12 targets...
+...updating 8 targets...
+sleeper 1.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 2.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 3.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 4.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 1.b
+[.b] 0
+[.b] 1
+[.b] 2
+sleeper 2.b
+[.b] 0
+[.b] 1
+[.b] 2
+sleeper 3.b
+[.b] 0
+[.b] 1
+[.b] 2
+sleeper 4.b
+[.b] 0
+[.b] 1
+[.b] 2
+...updated 8 targets...
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_parallel_multifile_actions_1.py b/src/boost/tools/build/test/core_parallel_multifile_actions_1.py
new file mode 100755
index 000000000..4b800a788
--- /dev/null
+++ b/src/boost/tools/build/test/core_parallel_multifile_actions_1.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Added to guard against a bug causing targets to be used before they
+# themselves have finished building. This used to happen for targets built by a
+# multi-file action that got triggered by another target.
+#
+# Example:
+# When target A and target B were declared as created by a single action and
+# target A triggered running that action then, while the action was still
+# running, target B was already reported as built, causing other targets
+# depending on target B to be built prematurely.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d1"], pass_toolset=0)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ SLEEP = @call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+actions .gen.
+{
+ echo 001
+ $(SLEEP) 4
+ echo 002
+}
+rule .use.1 { DEPENDS $(<) : $(>) ; }
+actions .use.1
+{
+ echo 003
+}
+
+rule .use.2 { DEPENDS $(<) : $(>) ; }
+actions .use.2
+{
+ $(SLEEP) 1
+ echo 004
+}
+
+.gen. g1.generated g2.generated ;
+.use.1 u1.user : g1.generated ;
+.use.2 u2.user : g2.generated ;
+
+DEPENDS all : u1.user u2.user ;
+""")
+
+t.run_build_system(["-ffile.jam", "-j2"], stdout="""\
+...found 5 targets...
+...updating 4 targets...
+.gen. g1.generated
+001
+002
+.use.1 u1.user
+003
+.use.2 u2.user
+004
+...updated 4 targets...
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_parallel_multifile_actions_2.py b/src/boost/tools/build/test/core_parallel_multifile_actions_2.py
new file mode 100755
index 000000000..c49e92380
--- /dev/null
+++ b/src/boost/tools/build/test/core_parallel_multifile_actions_2.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic, Vladimir Prus
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Added to guard against a bug causing targets to be used before they
+# themselves have finished building. This used to happen for targets built by a
+# multi-file action that got triggered by another target, except when the
+# target triggering the action was the first one in the list of targets
+# produced by that action.
+#
+# Example:
+# When target A and target B were declared as created by a single action with
+# A being the first one listed, and target B triggered running that action
+# then, while the action was still running, target A was already reported as
+# built, causing other targets depending on target A to be built prematurely.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ SLEEP = @call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+actions link
+{
+ $(SLEEP) 1
+ echo 001 - linked
+}
+
+link dll lib ;
+
+actions install
+{
+ echo 002 - installed
+}
+
+install installed_dll : dll ;
+DEPENDS installed_dll : dll ;
+
+DEPENDS all : lib installed_dll ;
+""")
+
+t.run_build_system(["-ffile.jam", "-j2"], stdout="""\
+...found 4 targets...
+...updating 3 targets...
+link dll
+001 - linked
+install installed_dll
+002 - installed
+...updated 3 targets...
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_scanner.py b/src/boost/tools/build/test/core_scanner.py
new file mode 100644
index 000000000..af078a00b
--- /dev/null
+++ b/src/boost/tools/build/test/core_scanner.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the parsing of tokens
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+rule test1 ( args * )
+{
+ EXIT $(args) : 0 ;
+}
+
+test1
+a # a comment
+# another comment
+b
+c #| a multiline comment |# d
+#| another
+multiline
+comment
+|#
+e "#f" ;
+""")
+
+t.run_build_system(["-ffile.jam"], stdout="""\
+a b c d e #f
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_source_line_tracking.py b/src/boost/tools/build/test/core_source_line_tracking.py
new file mode 100755
index 000000000..61526a2c5
--- /dev/null
+++ b/src/boost/tools/build/test/core_source_line_tracking.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test Boost Jam parser's source line tracking & reporting.
+
+import BoostBuild
+
+
+def test_eof_in_string():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", '\n\n\naaa = "\n\n\n\n\n\n')
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines('file.jam:4: unmatched " in string at keyword =')
+ t.expect_output_lines("file.jam:4: syntax error at EOF")
+ t.cleanup()
+
+
+def test_error_missing_argument(eof):
+ """
+ This use case used to cause a missing argument error to be reported in
+ module '(builtin)' in line -1 when the input file did not contain a
+ trailing newline.
+
+ """
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", """\
+rule f ( param ) { }
+f ;%s""" % __trailing_newline(eof))
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("file.jam:2: in module scope")
+ t.expect_output_lines("file.jam:1:see definition of rule 'f' being called")
+ t.cleanup()
+
+
+def test_error_syntax(eof):
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", "ECHO%s" % __trailing_newline(eof))
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("file.jam:1: syntax error at EOF")
+ t.cleanup()
+
+
+def test_traceback():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", """\
+NOTFILE all ;
+ECHO [ BACKTRACE ] ;""")
+ t.run_build_system(["-ffile.jam"])
+ t.expect_output_lines("file.jam 2 module scope")
+ t.cleanup()
+
+
+def __trailing_newline(eof):
+ """
+    Helper function returning an empty string or a newline character to
+ append to the current output line depending on whether we want that line to
+ be the last line in the file (eof == True) or not (eof == False).
+
+ """
+ if eof:
+ return ""
+ return "\n"
+
+
+test_error_missing_argument(eof=False)
+test_error_missing_argument(eof=True)
+test_error_syntax(eof=False)
+test_error_syntax(eof=True)
+test_traceback()
+test_eof_in_string()
diff --git a/src/boost/tools/build/test/core_syntax_error_exit_status.py b/src/boost/tools/build/test/core_syntax_error_exit_status.py
new file mode 100644
index 000000000..452ea4304
--- /dev/null
+++ b/src/boost/tools/build/test/core_syntax_error_exit_status.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+
+# Copyright (C) Mateusz Loskot 2020.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that Jam syntax error results in non-zero exit status
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester()
+
+# Create the needed files.
+t.write("jamroot.jam", """
+exe hello : hello.cpp
+
+""")
+
+t.run_build_system(status=1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_typecheck.py b/src/boost/tools/build/test/core_typecheck.py
new file mode 100644
index 000000000..de1bece16
--- /dev/null
+++ b/src/boost/tools/build/test/core_typecheck.py
@@ -0,0 +1,47 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the typechecking facilities.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+t.write("file.jam", """
+module .typecheck
+{
+ rule "[path]" ( x )
+ {
+ if ! [ MATCH "^(::)" : $(x) ]
+ {
+ ECHO "Error: $(x) is not a path" ;
+ return true ;
+ }
+ }
+}
+
+rule do ( [path] a )
+{
+}
+
+do $(ARGUMENT) ;
+
+actions dummy { }
+dummy all ;
+""")
+
+t.run_build_system(["-sARGUMENT=::a/b/c"])
+t.run_build_system(["-sARGUMENT=a/b/c"], status=1, stdout="""\
+Error: a/b/c is not a path
+file.jam:18: in module scope
+*** argument error
+* rule do ( [path] a )
+* called with: ( a/b/c )
+* true a
+file.jam:16:see definition of rule 'do' being called
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_update_now.py b/src/boost/tools/build/test/core_update_now.py
new file mode 100755
index 000000000..627594bf8
--- /dev/null
+++ b/src/boost/tools/build/test/core_update_now.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python
+
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+
+def basic():
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+do-print target1 ;
+
+UPDATE_NOW target1 ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam"], stdout="""\
+...found 1 target...
+...updating 1 target...
+do-print target1
+updating target1
+...updated 1 target...
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def ignore_minus_n():
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+do-print target1 ;
+
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 1 target...
+...updating 1 target...
+do-print target1
+
+ echo updating target1
+
+updating target1
+...updated 1 target...
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def failed_target():
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+actions fail
+{
+ exit 1
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+fail target1 ;
+
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target2 ;
+do-print target2 ;
+DEPENDS target2 : target1 ;
+
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 target2 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 1 target...
+...updating 1 target...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+...failed updating 1 target...
+...found 2 targets...
+...updating 1 target...
+do-print target2
+
+ echo updating target2
+
+...updated 1 target...
+""")
+
+ t.cleanup()
+
+
+def missing_target():
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target2 ;
+do-print target2 ;
+DEPENDS target2 : target1 ;
+
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 target2 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], status=1, stdout="""\
+don't know how to make target1
+...found 1 target...
+...can't find 1 target...
+...found 2 targets...
+...can't make 1 target...
+""")
+
+ t.cleanup()
+
+
+def build_once():
+ """
+ Make sure that if we call UPDATE_NOW with ignore-minus-n, the target gets
+ updated exactly once regardless of previous calls to UPDATE_NOW with -n in
+ effect.
+
+ """
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+do-print target1 ;
+
+UPDATE_NOW target1 ;
+UPDATE_NOW target1 : : ignore-minus-n ;
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 1 target...
+...updating 1 target...
+do-print target1
+
+ echo updating target1
+
+...updated 1 target...
+do-print target1
+
+ echo updating target1
+
+updating target1
+...updated 1 target...
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def return_status():
+ """
+ Make sure that UPDATE_NOW returns a failure status if
+ the target failed in a previous call to UPDATE_NOW
+ """
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+actions fail
+{
+ exit 1
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+fail target1 ;
+
+ECHO "update1:" [ UPDATE_NOW target1 ] ;
+ECHO "update2:" [ UPDATE_NOW target1 ] ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam"], status=1, stdout="""\
+...found 1 target...
+...updating 1 target...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+...failed updating 1 target...
+update1:
+update2:
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def save_restore():
+ """Tests that ignore-minus-n and ignore-minus-q are
+ local to the call to UPDATE_NOW"""
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("actions.jam", """\
+rule fail
+{
+ NOTFILE $(<) ;
+ ALWAYS $(<) ;
+}
+actions fail
+{
+ exit 1
+}
+
+rule pass
+{
+ NOTFILE $(<) ;
+ ALWAYS $(<) ;
+}
+actions pass
+{
+ echo updating $(<)
+}
+""")
+ t.write("file.jam", """
+include actions.jam ;
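+# The third and fourth UPDATE_NOW arguments (ignore-minus-n / ignore-minus-q)
+# are injected from the command line via -sIGNORE_MINUS_N=1 or
+# -sIGNORE_MINUS_Q=1 in the run_build_system calls below, so the four runs
+# combine -n or -q with each override in turn, while the second UPDATE_NOW
+# call always uses the default behaviour.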
+fail target1 ;
+fail target2 ;
+UPDATE_NOW target1 target2 : : $(IGNORE_MINUS_N) : $(IGNORE_MINUS_Q) ;
+fail target3 ;
+fail target4 ;
+UPDATE_NOW target3 target4 ;
+UPDATE ;
+""")
+ t.run_build_system(['-n', '-sIGNORE_MINUS_N=1', '-ffile.jam'],
+ stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+fail target2
+
+ exit 1
+
+...failed fail target2...
+...failed updating 2 targets...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+fail target4
+
+ exit 1
+
+...updated 2 targets...
+''')
+
+ t.run_build_system(['-q', '-sIGNORE_MINUS_N=1', '-ffile.jam'],
+ status=1, stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+...failed updating 1 target...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+...failed fail target3...
+...failed updating 1 target...
+''')
+
+ t.run_build_system(['-n', '-sIGNORE_MINUS_Q=1', '-ffile.jam'],
+ stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+fail target2
+
+ exit 1
+
+...updated 2 targets...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+fail target4
+
+ exit 1
+
+...updated 2 targets...
+''')
+
+ t.run_build_system(['-q', '-sIGNORE_MINUS_Q=1', '-ffile.jam'],
+ status=1, stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+fail target2
+
+ exit 1
+
+...failed fail target2...
+...failed updating 2 targets...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+...failed fail target3...
+...failed updating 1 target...
+''')
+
+ t.cleanup()
+
+
+basic()
+ignore_minus_n()
+failed_target()
+missing_target()
+build_once()
+return_status()
+save_restore()
diff --git a/src/boost/tools/build/test/core_variables_in_actions.py b/src/boost/tools/build/test/core_variables_in_actions.py
new file mode 100755
index 000000000..77834f430
--- /dev/null
+++ b/src/boost/tools/build/test/core_variables_in_actions.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that variables in actions get expanded, but that double quote
+# characters are treated as regular characters rather than string literal
+# delimiters when determining the string tokens concatenated to the variable
+# being expanded.
+#
+# We also take care to make this test work correctly with both the Windows and
+# the Unix echo command variants. That is why we add the extra single quotes
+# around the text being echoed - they make the double quotes display as regular
+# characters in both cases, while the single quotes themselves are displayed
+# only by the Windows cmd shell's echo command.
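+#
+# For example, with a = 1 2 3, the rule-context expansion of $(a)" seconds"
+# yields "1 seconds 2 seconds 3 seconds", whereas in the action the quotes are
+# ordinary characters, so the expansion is 1" 2" 3" seconds" (see the expected
+# output lines at the bottom of this test).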
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+t.write("file.jam", """\
+rule dummy ( i )
+{
+ local a = 1 2 3 ;
+ ECHO From "rule:" $(a)" seconds" ;
+ a on $(i) = $(a) ;
+}
+
+actions dummy
+{
+ echo 'From action: $(a)" seconds"'
+}
+
+dummy all ;
+""")
+t.run_build_system(["-ffile.jam", "-d1"])
+t.expect_output_lines("From rule: 1 seconds 2 seconds 3 seconds")
+t.expect_output_lines('*From action: 1" 2" 3" seconds"*')
+t.cleanup()
diff --git a/src/boost/tools/build/test/core_varnames.py b/src/boost/tools/build/test/core_varnames.py
new file mode 100644
index 000000000..6b61ffcd3
--- /dev/null
+++ b/src/boost/tools/build/test/core_varnames.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the core rule for enumerating the variable names in a module.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+module foo
+{
+ rule bar { }
+ var1 = x y ;
+ var2 = fubar ;
+}
+
+expected = var1 var2 ;
+names = [ VARNAMES foo ] ;
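+# Jam's 'in' operator tests that every element of the left list appears in the
+# right list, so checking both directions verifies that the two lists contain
+# the same elements regardless of order.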
+if $(names) in $(expected) && $(expected) in $(names)
+{
+ # everything OK
+}
+else
+{
+ EXIT expected to find variables $(expected:J=", ") in module foo,
+ but found $(names:J=", ") instead. ;
+}
+DEPENDS all : xx ;
+NOTFILE xx ;
+""")
+
+t.run_build_system(["-ffile.jam"], status=0)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/custom_generator.py b/src/boost/tools/build/test/custom_generator.py
new file mode 100644
index 000000000..00860f643
--- /dev/null
+++ b/src/boost/tools/build/test/custom_generator.py
@@ -0,0 +1,66 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Attempt to declare a generator for creating OBJ from RC files. That generator
+# should be considered together with standard CPP->OBJ generators and
+# successfully create the target. Since we do not have an RC compiler everywhere,
+# we fake the action. The resulting OBJ will be unusable, but it must be
+# created.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """
+import rcc ;
+""")
+
+t.write("rcc.jam", """
+import type ;
+import generators ;
+import print ;
+
+# Use 'RCC' to avoid conflicts with definitions in the standard rc.jam and
+# msvc.jam
+type.register RCC : rcc ;
+
+rule resource-compile ( targets * : sources * : properties * )
+{
+ print.output $(targets[1]) ;
+ print.text "rc-object" ;
+}
+
+generators.register-standard rcc.resource-compile : RCC : OBJ ;
+""")
+
+t.write("rcc.py", """
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+# Use 'RCC' to avoid conflicts with definitions in the standard rc.jam and
+# msvc.jam
+type.register('RCC', ['rcc'])
+
+generators.register_standard("rcc.resource-compile", ["RCC"], ["OBJ"])
+
+get_manager().engine().register_action(
+ "rcc.resource-compile",
+ '@($(STDOUT):E=rc-object) > "$(<)"')
+""")
+
+t.write("jamfile.jam", """
+obj r : r.rcc ;
+""")
+
+t.write("r.rcc", """
+""")
+
+t.run_build_system()
+t.expect_content("bin/r.obj", "rc-object")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/debugger-mi.py b/src/boost/tools/build/test/debugger-mi.py
new file mode 100644
index 000000000..fda2bd80f
--- /dev/null
+++ b/src/boost/tools/build/test/debugger-mi.py
@@ -0,0 +1,326 @@
+#!/usr/bin/python
+
+# Copyright 2016 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the mi interface for the debugger
+
+import BoostBuild
+import TestCmd
+import re
+
+def split_stdin_stdout(text):
+ """stdin is all text after the prompt up to and including
+ the next newline. Everything else is stdout. stdout
+ may contain regular expressions enclosed in {{}}."""
+ prompt = re.escape('(gdb) \n')
+ pattern = re.compile('(?<=%s)((?:\d*-.*)\n)' % prompt)
+ stdin = ''.join(re.findall(pattern, text))
+ stdout = re.sub(pattern, '', text)
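+ # outside_pattern matches maximal runs of text lying outside {{...}} markers:
+ # a run starts at the beginning of the string or just after a closing '}}'
+ # and stops before the next opening '{{'. escape_line() below re.escape()s
+ # those literal runs while keeping the {{...}} contents verbatim as regular
+ # expression fragments.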
+ outside_pattern = re.compile(r'(?:\A|(?<=\}\}))(?:[^\{]|(?:\{(?!\{)))*(?:(?=\{\{)|\Z)')
+
+ def escape_line(line):
+ line = re.sub(outside_pattern, lambda m: re.escape(m.group(0)), line)
+ return re.sub(r'\{\{|\}\}', '', line)
+
+ stdout = '\n'.join([escape_line(line) for line in stdout.split('\n')])
+ return (stdin,stdout)
+
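+# run() takes a single session transcript, splits it into the simulated command
+# input (everything typed after a '(gdb) ' prompt) and the expected output, and
+# feeds both to run_build_system with regular expression matching.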
+def run(tester, io):
+ (input,output) = split_stdin_stdout(io)
+ tester.run_build_system(stdin=input, stdout=output, match=TestCmd.match_re)
+
+def make_tester():
+ return BoostBuild.Tester(["-dmi"], pass_toolset=False, pass_d0=False,
+ use_test_config=False, ignore_toolset_requirements=False, match=TestCmd.match_re)
+
+def test_exec_run():
+ t = make_tester()
+ t.write("test.jam", """\
+ UPDATE ;
+ """)
+
+ run(t, """\
+=thread-group-added,id="i1"
+(gdb)
+72-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+72^running
+(gdb)
+*stopped,reason="exited-normally"
+(gdb)
+73-gdb-exit
+73^exit
+""")
+
+ t.cleanup()
+
+def test_exit_status():
+ t = make_tester()
+ t.write("test.jam", """\
+ EXIT : 1 ;
+ """)
+ run(t, """\
+=thread-group-added,id="i1"
+(gdb)
+72-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+72^running
+(gdb)
+
+*stopped,reason="exited",exit-code="1"
+(gdb)
+73-gdb-exit
+73^exit
+""")
+ t.cleanup()
+
+def test_exec_step():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule g ( )
+ {
+ a = 1 ;
+ b = 2 ;
+ }
+ rule f ( )
+ {
+ g ;
+ c = 3 ;
+ }
+ f ;
+ """)
+ run(t, """\
+=thread-group-added,id="i1"
+(gdb)
+-break-insert f
+^done,bkpt={number="1",type="breakpoint",disp="keep",enabled="y",func="f"}
+(gdb)
+72-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+72^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="1",disp="keep",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="8"},thread-id="1",stopped-threads="all"
+(gdb)
+1-exec-step
+1^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="g",args=[],file="test.jam",fullname="{{.*}}test.jam",line="3"},thread-id="1"
+(gdb)
+2-exec-step
+2^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="g",args=[],file="test.jam",fullname="{{.*}}test.jam",line="4"},thread-id="1"
+(gdb)
+3-exec-step
+3^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="9"},thread-id="1"
+(gdb)
+73-gdb-exit
+73^exit
+""")
+ t.cleanup()
+
+def test_exec_next():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule g ( )
+ {
+ a = 1 ;
+ }
+ rule f ( )
+ {
+ g ;
+ b = 2 ;
+ c = 3 ;
+ }
+ rule h ( )
+ {
+ f ;
+ g ;
+ }
+ h ;
+ d = 4 ;
+ """)
+ run(t, """\
+=thread-group-added,id="i1"
+(gdb)
+-break-insert f
+^done,bkpt={number="1",type="breakpoint",disp="keep",enabled="y",func="f"}
+(gdb)
+72-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+72^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="1",disp="keep",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="7"},thread-id="1",stopped-threads="all"
+(gdb)
+1-exec-next
+1^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="8"},thread-id="1"
+(gdb)
+2-exec-next
+2^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="9"},thread-id="1"
+(gdb)
+3-exec-next
+3^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="h",args=[],file="test.jam",fullname="{{.*}}test.jam",line="14"},thread-id="1"
+(gdb)
+4-exec-next
+4^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="module scope",args=[],file="test.jam",fullname="{{.*}}test.jam",line="17"},thread-id="1"
+(gdb)
+73-gdb-exit
+73^exit
+""")
+ t.cleanup()
+
+def test_exec_finish():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( )
+ {
+ a = 1 ;
+ }
+ rule g ( )
+ {
+ f ;
+ b = 2 ;
+ i ;
+ }
+ rule h ( )
+ {
+ g ;
+ i ;
+ }
+ rule i ( )
+ {
+ c = 3 ;
+ }
+ h ;
+ d = 4 ;
+ """)
+ run(t, """\
+=thread-group-added,id="i1"
+(gdb)
+-break-insert f
+^done,bkpt={number="1",type="breakpoint",disp="keep",enabled="y",func="f"}
+(gdb)
+72-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+72^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="1",disp="keep",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="3"},thread-id="1",stopped-threads="all"
+(gdb)
+1-exec-finish
+1^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="g",args=[],file="test.jam",fullname="{{.*}}test.jam",line="8"},thread-id="1"
+(gdb)
+2-exec-finish
+2^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="h",args=[],file="test.jam",fullname="{{.*}}test.jam",line="14"},thread-id="1"
+(gdb)
+3-exec-finish
+3^running
+(gdb)
+*stopped,reason="end-stepping-range",frame={func="module scope",args=[],file="test.jam",fullname="{{.*}}test.jam",line="21"},thread-id="1"
+(gdb)
+73-gdb-exit
+73^exit
+""")
+ t.cleanup()
+
+
+def test_breakpoints():
+ """Tests the interaction between the following commands:
+ break, clear, delete, disable, enable"""
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( )
+ {
+ a = 1 ;
+ }
+ rule g ( )
+ {
+ b = 2 ;
+ }
+ rule h ( )
+ {
+ c = 3 ;
+ d = 4 ;
+ }
+ f ;
+ g ;
+ h ;
+ UPDATE ;
+ """)
+ run(t, """\
+=thread-group-added,id="i1"
+(gdb)
+-break-insert f
+^done,bkpt={number="1",type="breakpoint",disp="keep",enabled="y",func="f"}
+(gdb)
+72-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+72^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="1",disp="keep",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="3"},thread-id="1",stopped-threads="all"
+(gdb)
+-interpreter-exec console kill
+^done
+(gdb)
+-break-insert g
+^done,bkpt={number="2",type="breakpoint",disp="keep",enabled="y",func="g"}
+(gdb)
+-break-disable 1
+^done
+(gdb)
+73-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+73^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="2",disp="keep",frame={func="g",args=[],file="test.jam",fullname="{{.*}}test.jam",line="7"},thread-id="1",stopped-threads="all"
+(gdb)
+-interpreter-exec console kill
+^done
+(gdb)
+-break-enable 1
+^done
+(gdb)
+74-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+74^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="1",disp="keep",frame={func="f",args=[],file="test.jam",fullname="{{.*}}test.jam",line="3"},thread-id="1",stopped-threads="all"
+(gdb)
+-interpreter-exec console kill
+^done
+(gdb)
+-break-delete 1
+^done
+(gdb)
+75-exec-run -ftest.jam
+=thread-created,id="1",group-id="i1"
+75^running
+(gdb)
+*stopped,reason="breakpoint-hit",bkptno="2",disp="keep",frame={func="g",args=[],file="test.jam",fullname="{{.*}}test.jam",line="7"},thread-id="1",stopped-threads="all"
+(gdb)
+76-gdb-exit
+76^exit
+""")
+ t.cleanup()
+
+test_exec_run()
+test_exit_status()
+test_exec_step()
+test_exec_next()
+test_exec_finish()
+test_breakpoints()
diff --git a/src/boost/tools/build/test/debugger.py b/src/boost/tools/build/test/debugger.py
new file mode 100644
index 000000000..24bbb9c4a
--- /dev/null
+++ b/src/boost/tools/build/test/debugger.py
@@ -0,0 +1,674 @@
+#!/usr/bin/python
+
+# Copyright 2016 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test for the debugger
+
+import BoostBuild
+import TestCmd
+import re
+
+def split_stdin_stdout(text):
+ """stdin is all text after the prompt up to and including
+ the next newline. Everything else is stdout. stdout
+ may contain regular expressions enclosed in {{}}."""
+ prompt = re.escape('(b2db) ')
+ pattern = re.compile('(?<=%s)(.*\n)' % prompt)
+ text = text.replace("{{bjam}}", "{{.*}}b2{{(?:\\.exe)?}}")
+ stdin = ''.join(re.findall(pattern, text))
+ stdout = re.sub(pattern, '', text)
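+ # As in debugger-mi.py: text outside {{...}} markers is escaped literally,
+ # while the {{...}} contents are kept as regular expression fragments.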
+ outside_pattern = re.compile(r'(?:\A|(?<=\}\}))(?:[^\{]|(?:\{(?!\{)))*(?:(?=\{\{)|\Z)')
+
+ def escape_line(line):
+ line = re.sub(outside_pattern, lambda m: re.escape(m.group(0)), line)
+ return re.sub(r'\{\{|\}\}', '', line)
+
+ stdout = '\n'.join([escape_line(line) for line in stdout.split('\n')])
+ return (stdin,stdout)
+
+def run(tester, io):
+ (input,output) = split_stdin_stdout(io)
+ tester.run_build_system(stdin=input, stdout=output, match=TestCmd.match_re)
+
+def make_tester():
+ return BoostBuild.Tester(["-dconsole"], pass_toolset=False, pass_d0=False,
+ use_test_config=False, ignore_toolset_requirements=False, match=TestCmd.match_re)
+
+def test_run():
+ t = make_tester()
+ t.write("test.jam", """\
+ UPDATE ;
+ """)
+
+ run(t, """\
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Child {{\d+}} exited with status 0
+(b2db) quit
+""")
+
+ t.cleanup()
+
+def test_exit_status():
+ t = make_tester()
+ t.write("test.jam", """\
+ EXIT : 1 ;
+ """)
+ run(t, """\
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+
+Child {{\d+}} exited with status 1
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_step():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule g ( )
+ {
+ a = 1 ;
+ b = 2 ;
+ }
+ rule f ( )
+ {
+ g ;
+ c = 3 ;
+ }
+ f ;
+ """)
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( ) at test.jam:8
+8 g ;
+(b2db) step
+3 a = 1 ;
+(b2db) step
+4 b = 2 ;
+(b2db) step
+9 c = 3 ;
+(b2db) quit
+""")
+ t.cleanup()
+
+# Note: step doesn't need to worry about breakpoints,
+# as it always stops at the next line executed.
+
+def test_next():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule g ( )
+ {
+ a = 1 ;
+ }
+ rule f ( )
+ {
+ g ;
+ b = 2 ;
+ c = 3 ;
+ }
+ rule h ( )
+ {
+ f ;
+ g ;
+ }
+ h ;
+ d = 4 ;
+ """)
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( ) at test.jam:7
+7 g ;
+(b2db) next
+8 b = 2 ;
+(b2db) next
+9 c = 3 ;
+(b2db) next
+14 g ;
+(b2db) next
+17 d = 4 ;
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_next_breakpoint():
+ """next should stop if it encounters a breakpoint.
+ If the normal end point happens to be a breakpoint,
+ then it should be reported as normal stepping."""
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( recurse ? )
+ {
+ if $(recurse) { f ; }
+ a = 1 ;
+ }
+ rule g ( )
+ {
+ b = 2 ;
+ }
+ f true ;
+ g ;
+ """)
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) break g
+Breakpoint 2 set at g
+(b2db) break test.jam:4
+Breakpoint 3 set at test.jam:4
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( true ) at test.jam:3
+3 if $(recurse) { f ; }
+(b2db) next
+Breakpoint 1, f ( ) at test.jam:3
+3 if $(recurse) { f ; }
+(b2db) next
+4 a = 1 ;
+(b2db) next
+4 a = 1 ;
+(b2db) next
+11 g ;
+(b2db) next
+Breakpoint 2, g ( ) at test.jam:8
+8 b = 2 ;
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_finish():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( )
+ {
+ a = 1 ;
+ }
+ rule g ( )
+ {
+ f ;
+ b = 2 ;
+ i ;
+ }
+ rule h ( )
+ {
+ g ;
+ i ;
+ }
+ rule i ( )
+ {
+ c = 3 ;
+ }
+ h ;
+ d = 4 ;
+ """)
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( ) at test.jam:3
+3 a = 1 ;
+(b2db) finish
+8 b = 2 ;
+(b2db) finish
+14 i ;
+(b2db) finish
+21 d = 4 ;
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_finish_breakpoints():
+ """finish should stop when it reaches a breakpoint."""
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( recurse * )
+ {
+ if $(recurse)
+ {
+ a = [ f $(recurse[2-]) ] ;
+ }
+ }
+ rule g ( list * )
+ {
+ for local v in $(list)
+ {
+ x = $(v) ;
+ }
+ }
+ f 1 2 ;
+ g 1 2 ;
+ """)
+ run(t, """\
+(b2db) break test.jam:5
+Breakpoint 1 set at test.jam:5
+(b2db) break test.jam:12
+Breakpoint 2 set at test.jam:12
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( 1 2 ) at test.jam:5
+5 a = [ f $(recurse[2-]) ] ;
+(b2db) finish
+Breakpoint 1, f ( 2 ) at test.jam:5
+5 a = [ f $(recurse[2-]) ] ;
+(b2db) finish
+5 a = [ f $(recurse[2-]) ] ;
+(b2db) finish
+16 g 1 2 ;
+(b2db) finish
+Breakpoint 2, g ( 1 2 ) at test.jam:12
+12 x = $(v) ;
+(b2db) finish
+Breakpoint 2, g ( 1 2 ) at test.jam:12
+12 x = $(v) ;
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_continue_breakpoints():
+ """continue should stop when it reaches a breakpoint"""
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( recurse * )
+ {
+ if $(recurse)
+ {
+ a = [ f $(recurse[2-]) ] ;
+ }
+ }
+ rule g ( list * )
+ {
+ for local v in $(list)
+ {
+ x = $(v) ;
+ }
+ }
+ f 1 2 ;
+ g 1 2 ;
+ """)
+ run(t, """\
+(b2db) break test.jam:5
+Breakpoint 1 set at test.jam:5
+(b2db) break test.jam:12
+Breakpoint 2 set at test.jam:12
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( 1 2 ) at test.jam:5
+5 a = [ f $(recurse[2-]) ] ;
+(b2db) continue
+Breakpoint 1, f ( 2 ) at test.jam:5
+5 a = [ f $(recurse[2-]) ] ;
+(b2db) continue
+Breakpoint 1, f ( 1 2 ) at test.jam:5
+5 a = [ f $(recurse[2-]) ] ;
+(b2db) continue
+Breakpoint 2, g ( 1 2 ) at test.jam:12
+12 x = $(v) ;
+(b2db) continue
+Breakpoint 2, g ( 1 2 ) at test.jam:12
+12 x = $(v) ;
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_breakpoints():
+ """Tests the interaction between the following commands:
+ break, clear, delete, disable, enable"""
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( )
+ {
+ a = 1 ;
+ }
+ rule g ( )
+ {
+ b = 2 ;
+ }
+ rule h ( )
+ {
+ c = 3 ;
+ d = 4 ;
+ }
+ f ;
+ g ;
+ h ;
+ UPDATE ;
+ """)
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( ) at test.jam:3
+3 a = 1 ;
+(b2db) kill
+(b2db) break g
+Breakpoint 2 set at g
+(b2db) disable 1
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 2, g ( ) at test.jam:7
+7 b = 2 ;
+(b2db) kill
+(b2db) enable 1
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( ) at test.jam:3
+3 a = 1 ;
+(b2db) kill
+(b2db) delete 1
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 2, g ( ) at test.jam:7
+7 b = 2 ;
+(b2db) kill
+(b2db) break test.jam:12
+Breakpoint 3 set at test.jam:12
+(b2db) clear g
+Deleted breakpoint 2
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 3, h ( ) at test.jam:12
+12 d = 4 ;
+(b2db) kill
+(b2db) clear test.jam:12
+Deleted breakpoint 3
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Child {{\d+}} exited with status 0
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_breakpoints_running():
+ """Tests that breakpoints can be added and modified
+ while the program is running."""
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( )
+ {
+ a = 1 ;
+ }
+ rule g ( )
+ {
+ b = 2 ;
+ }
+ rule h ( )
+ {
+ c = 3 ;
+ d = 4 ;
+ }
+ f ;
+ g ;
+ h ;
+ UPDATE ;
+ """)
+ run(t, """\
+(b2db) break test.jam:14
+Breakpoint 1 set at test.jam:14
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:14
+14 f ;
+(b2db) break f
+Breakpoint 2 set at f
+(b2db) continue
+Breakpoint 2, f ( ) at test.jam:3
+3 a = 1 ;
+(b2db) kill
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:14
+14 f ;
+(b2db) break g
+Breakpoint 3 set at g
+(b2db) disable 2
+(b2db) continue
+Breakpoint 3, g ( ) at test.jam:7
+7 b = 2 ;
+(b2db) kill
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:14
+14 f ;
+(b2db) enable 2
+(b2db) continue
+Breakpoint 2, f ( ) at test.jam:3
+3 a = 1 ;
+(b2db) kill
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:14
+14 f ;
+(b2db) delete 2
+(b2db) continue
+Breakpoint 3, g ( ) at test.jam:7
+7 b = 2 ;
+(b2db) kill
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:14
+14 f ;
+(b2db) break test.jam:12
+Breakpoint 4 set at test.jam:12
+(b2db) clear g
+Deleted breakpoint 3
+(b2db) continue
+Breakpoint 4, h ( ) at test.jam:12
+12 d = 4 ;
+(b2db) kill
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:14
+14 f ;
+(b2db) clear test.jam:12
+Deleted breakpoint 4
+(b2db) continue
+Child {{\d+}} exited with status 0
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_backtrace():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( x * : y * : z * )
+ {
+ return $(x) ;
+ }
+ rule g ( x * : y * : z * )
+ {
+ return [ f $(x) : $(y) : $(z) ] ;
+ }
+ g 1 : 2 : 3 ;
+ """)
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( 1 : 2 : 3 ) at test.jam:3
+3 return $(x) ;
+(b2db) backtrace
+#0 in f ( 1 : 2 : 3 ) at test.jam:3
+#1 in g ( 1 : 2 : 3 ) at test.jam:7
+#2 in module scope at test.jam:9
+(b2db) quit
+""")
+ t.cleanup()
+
+def test_print():
+ t = make_tester()
+ t.write("test.jam", """\
+ rule f ( args * )
+ {
+ return $(args) ;
+ }
+ f x ;
+ f x y ;
+ """)
+
+ run(t, """\
+(b2db) break f
+Breakpoint 1 set at f
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, f ( x ) at test.jam:3
+3 return $(args) ;
+(b2db) print $(args)
+x
+(b2db) continue
+Breakpoint 1, f ( x y ) at test.jam:3
+3 return $(args) ;
+(b2db) print $(args)
+x y
+(b2db) disable 1
+(b2db) print [ f z ]
+z
+(b2db) quit
+""")
+
+ t.cleanup()
+
+def test_run_running():
+ t = make_tester()
+ t.write("test.jam", """\
+ UPDATE ;
+ """)
+
+ run(t, """\
+(b2db) break test.jam:1
+Breakpoint 1 set at test.jam:1
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:1
+1 UPDATE ;
+(b2db) run -ftest.jam
+Child {{\d+}} exited with status 0
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:1
+1 UPDATE ;
+(b2db) quit
+""")
+
+ t.cleanup()
+
+def test_error_not_running():
+ t = make_tester()
+ run(t, """\
+(b2db) continue
+The program is not being run.
+(b2db) step
+The program is not being run.
+(b2db) next
+The program is not being run.
+(b2db) finish
+The program is not being run.
+(b2db) kill
+The program is not being run.
+(b2db) backtrace
+The program is not being run.
+(b2db) print 1
+The program is not being run.
+(b2db) quit
+""")
+
+ t.cleanup()
+
+def test_bad_arguments():
+ t = make_tester()
+ t.write("test.jam", """\
+ UPDATE ;
+ """)
+
+ run(t, """\
+(b2db) break test.jam:1
+Breakpoint 1 set at test.jam:1
+(b2db) run -ftest.jam
+Starting program: {{bjam}} -ftest.jam
+Breakpoint 1, module scope at test.jam:1
+1 UPDATE ;
+(b2db) continue 1
+Too many arguments to continue.
+(b2db) step 1
+Too many arguments to step.
+(b2db) next 1
+Too many arguments to next.
+(b2db) finish 1
+Too many arguments to finish.
+(b2db) break
+Missing argument to break.
+(b2db) break x y
+Too many arguments to break.
+(b2db) disable
+Missing argument to disable.
+(b2db) disable 1 2
+Too many arguments to disable.
+(b2db) disable x
+Invalid breakpoint number x.
+(b2db) disable 2
+Unknown breakpoint 2.
+(b2db) enable
+Missing argument to enable.
+(b2db) enable 1 2
+Too many arguments to enable.
+(b2db) enable x
+Invalid breakpoint number x.
+(b2db) enable 2
+Unknown breakpoint 2.
+(b2db) delete
+Missing argument to delete.
+(b2db) delete 1 2
+Too many arguments to delete.
+(b2db) delete x
+Invalid breakpoint number x.
+(b2db) delete 2
+Unknown breakpoint 2.
+(b2db) clear
+Missing argument to clear.
+(b2db) clear test.jam:1 test.jam:1
+Too many arguments to clear.
+(b2db) clear test.jam:2
+No breakpoint at test.jam:2.
+(b2db) quit
+""")
+
+ t.cleanup()
+
+def test_unknown_command():
+ t = make_tester()
+ run(t, """\
+(b2db) xyzzy
+Unknown command: xyzzy
+(b2db) gnusto rezrov
+Unknown command: gnusto
+(b2db) quit
+""")
+
+ t.cleanup()
+
+test_run()
+test_exit_status()
+test_step()
+test_next()
+test_next_breakpoint()
+test_finish()
+test_finish_breakpoints()
+test_continue_breakpoints()
+test_breakpoints()
+test_breakpoints_running()
+test_backtrace()
+test_print()
+test_run_running()
+test_error_not_running()
+test_bad_arguments()
+test_unknown_command()
diff --git a/src/boost/tools/build/test/default_build.py b/src/boost/tools/build/test/default_build.py
new file mode 100644
index 000000000..6206507f4
--- /dev/null
+++ b/src/boost/tools/build/test/default_build.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the default-build clause actually has an effect.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "exe a : a.cpp : : debug release ;")
+t.write("a.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+t.expect_addition("bin/$toolset/release*/a.exe")
+
+# Check that an explicitly-specified build variant suppresses default-build.
+t.rm("bin")
+t.run_build_system(["release"])
+t.expect_addition(BoostBuild.List("bin/$toolset/release*/") * "a.exe a.obj")
+t.expect_nothing_more()
+
+# Now check that we can specify an explicit build request and that
+# default-build will be combined with it.
+t.run_build_system(["optimization=space"])
+t.expect_addition("bin/$toolset/debug/optimization-space*/a.exe")
+t.expect_addition("bin/$toolset/release/optimization-space*/a.exe")
+
+# Test that default-build must be identical in all alternatives. Error case.
+t.write("jamfile.jam", """\
+exe a : a.cpp : : debug ;
+exe a : b.cpp : : ;
+""")
+t.run_build_system(["-n", "--no-error-backtrace"], status=1)
+t.fail_test(t.stdout().find("default build must be identical in all alternatives") == -1)
+
+# Test that default-build must be identical in all alternatives. This is the
+# no-error case: the default-build is empty.
+t.write("jamfile.jam", """\
+exe a : a.cpp : <variant>debug ;
+exe a : b.cpp : <variant>release ;
+""")
+t.run_build_system(["-n", "--no-error-backtrace"], status=0)
+
+# Now try a harder example: default build which contains <define> should cause
+# <define> to be present when "b" is compiled. This happens only if
+# "build-project b" is placed first.
+t.write("jamfile.jam", """\
+project : default-build <define>FOO ;
+build-project a ;
+build-project b ;
+""")
+
+t.write("a/jamfile.jam", "exe a : a.cpp ../b//b ;")
+t.write("a/a.cpp", """\
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void foo();
+int main() { foo(); }
+""")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ;")
+t.write("b/b.cpp", """\
+#ifdef FOO
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+#endif
+""")
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/default_features.py b/src/boost/tools/build/test/default_features.py
new file mode 100644
index 000000000..1d6d72a6f
--- /dev/null
+++ b/src/boost/tools/build/test/default_features.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that features with default values are always present in build properties
+# of any target.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Declare *non-propagated* feature foo.
+t.write("jamroot.jam", """
+import feature : feature ;
+feature foo : on off ;
+""")
+
+# Note that '<foo>on' will not be propagated to 'd/l'.
+t.write("jamfile.jam", """
+exe hello : hello.cpp d//l ;
+""")
+
+t.write("hello.cpp", """
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void foo();
+int main() { foo(); }
+""")
+
+t.write("d/jamfile.jam", """
+lib l : l.cpp : <foo>on:<define>FOO ;
+""")
+
+t.write("d/l.cpp", """
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+#ifdef FOO
+void foo() {}
+#endif
+""")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/default_toolset.py b/src/boost/tools/build/test/default_toolset.py
new file mode 100755
index 000000000..682e7fcc1
--- /dev/null
+++ b/src/boost/tools/build/test/default_toolset.py
@@ -0,0 +1,215 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the expected default toolset is used when no toolset is explicitly
+# specified on the command line or used from code via the using rule. Test that
+# the default toolset is correctly used just like any other explicitly used
+# toolset (e.g. toolset prerequisites, properties conditioned on
+# toolset-related features, etc.).
+#
+# Note that we need to ignore regular site/user/test configuration files to
+# avoid them marking any toolsets not under our control as used.
+
+import BoostBuild
+
+
+# Line displayed by Boost Build when using the default toolset.
+configuring_default_toolset_message = \
+ 'warning: Configuring default toolset "%s".'
+
+
+###############################################################################
+#
+# test_conditions_on_default_toolset()
+# ------------------------------------
+#
+###############################################################################
+
+def test_conditions_on_default_toolset():
+ """Test that toolset and toolset subfeature conditioned properties get
+ applied correctly when the toolset is selected by default. Implicitly tests
+ that we can use the set-default-toolset rule to set the default toolset to
+ be used by Boost Build.
+ """
+
+ t = BoostBuild.Tester("--user-config= --ignore-site-config",
+ pass_toolset=False, use_test_config=False)
+
+ toolset_name = "myCustomTestToolset"
+ toolset_version = "v"
+ toolset_version_unused = "v_unused"
+ message_loaded = "Toolset '%s' loaded." % toolset_name
+ message_initialized = "Toolset '%s' initialized." % toolset_name ;
+
+ # Custom toolset.
+ t.write(toolset_name + ".jam", """
+import feature ;
+ECHO "%(message_loaded)s" ;
+feature.extend toolset : %(toolset_name)s ;
+feature.subfeature toolset %(toolset_name)s : version : %(toolset_version)s %(toolset_version_unused)s ;
+rule init ( version ) { ECHO "%(message_initialized)s" ; }
+""" % {'message_loaded' : message_loaded ,
+ 'message_initialized' : message_initialized,
+ 'toolset_name' : toolset_name ,
+ 'toolset_version' : toolset_version ,
+ 'toolset_version_unused': toolset_version_unused})
+
+ # Main Boost Build project script.
+ t.write("jamroot.jam", """
+import build-system ;
+import errors ;
+import feature ;
+import notfile ;
+
+build-system.set-default-toolset %(toolset_name)s : %(toolset_version)s ;
+
+feature.feature description : : free incidental ;
+
+# We use a rule instead of an action to avoid problems with action output not
+# getting piped to stdout by the testing system.
+rule buildRule ( names : targets ? : properties * )
+{
+ local descriptions = [ feature.get-values description : $(properties) ] ;
+ ECHO "descriptions:" /$(descriptions)/ ;
+ local toolset = [ feature.get-values toolset : $(properties) ] ;
+ ECHO "toolset:" /$(toolset)/ ;
+ local toolset-version = [ feature.get-values "toolset-$(toolset):version" : $(properties) ] ;
+ ECHO "toolset-version:" /$(toolset-version)/ ;
+}
+
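+# The <description> values conditioned on the toolset and its version should
+# take effect only when the default toolset (and its default version) is
+# actually selected; the condition on the unused version must not fire.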
+notfile testTarget
+ : @buildRule
+ :
+ :
+ <description>stand-alone
+ <toolset>%(toolset_name)s:<description>toolset
+ <toolset>%(toolset_name)s-%(toolset_version)s:<description>toolset-version
+ <toolset>%(toolset_name)s-%(toolset_version_unused)s:<description>toolset-version-unused ;
+""" % {'toolset_name' : toolset_name ,
+ 'toolset_version' : toolset_version,
+ 'toolset_version_unused': toolset_version_unused})
+
+ t.run_build_system()
+ t.expect_output_lines(configuring_default_toolset_message % toolset_name)
+ t.expect_output_lines(message_loaded)
+ t.expect_output_lines(message_initialized)
+ t.expect_output_lines("descriptions: /stand-alone/ /toolset/ "
+ "/toolset-version/")
+ t.expect_output_lines("toolset: /%s/" % toolset_name)
+ t.expect_output_lines("toolset-version: /%s/" % toolset_version)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_default_toolset_on_os()
+# ----------------------------
+#
+###############################################################################
+
+def test_default_toolset_on_os( os, expected_toolset ):
+ """Test that the given toolset is used as the default toolset on the given
+ os. Uses hardcoded knowledge of how Boost Build decides on which host OS it
+ is currently running. Note that we must not do much after tricking Boost
+ Build into believing it has a specific host OS as this might mess up other
+ important internal Boost Build state.
+ """
+
+ t = BoostBuild.Tester("--user-config= --ignore-site-config",
+ pass_toolset=False, use_test_config=False)
+
+ t.write("jamroot.jam", "modules.poke os : .name : %s ;" % os)
+
+ # We need to tell the test system to ignore stderr output as attempting to
+ # load missing toolsets might cause random failures with which we are not
+ # concerned in this test.
+ t.run_build_system(stderr=None)
+ t.expect_output_lines(configuring_default_toolset_message %
+ expected_toolset)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_default_toolset_requirements()
+# -----------------------------------
+#
+###############################################################################
+
+def test_default_toolset_requirements():
+ """Test that default toolset's requirements get applied correctly.
+ """
+
+ t = BoostBuild.Tester("--user-config= --ignore-site-config",
+ pass_toolset=False, use_test_config=False,
+ ignore_toolset_requirements=False)
+
+ toolset_name = "customTestToolsetWithRequirements"
+
+ # Custom toolset.
+ t.write(toolset_name + ".jam", """
+import feature ;
+import toolset ;
+feature.extend toolset : %(toolset_name)s ;
+toolset.add-requirements <description>toolset-requirement ;
+rule init ( ) { }
+""" % {'toolset_name': toolset_name})
+
+ # Main Boost Build project script.
+ t.write("jamroot.jam", """
+import build-system ;
+import errors ;
+import feature ;
+import notfile ;
+
+build-system.set-default-toolset %(toolset_name)s ;
+
+feature.feature description : : free incidental ;
+
+# We use a rule instead of an action to avoid problems with action output not
+# getting piped to stdout by the testing system.
+rule buildRule ( names : targets ? : properties * )
+{
+ local descriptions = [ feature.get-values description : $(properties) ] ;
+ ECHO "descriptions:" /$(descriptions)/ ;
+ local toolset = [ feature.get-values toolset : $(properties) ] ;
+ ECHO "toolset:" /$(toolset)/ ;
+}
+
+notfile testTarget
+ : @buildRule
+ :
+ :
+ <description>target-requirement
+ <description>toolset-requirement:<description>conditioned-requirement
+ <description>unrelated-condition:<description>unrelated-description ;
+""" % {'toolset_name': toolset_name})
+
+ t.run_build_system()
+ t.expect_output_lines(configuring_default_toolset_message % toolset_name)
+ t.expect_output_lines("descriptions: /conditioned-requirement/ "
+ "/target-requirement/ /toolset-requirement/")
+ t.expect_output_lines("toolset: /%s/" % toolset_name)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_default_toolset_on_os("NT" , "msvc")
+test_default_toolset_on_os("LINUX" , "gcc" )
+test_default_toolset_on_os("CYGWIN" , "gcc" )
+test_default_toolset_on_os("SomeOtherOS", "gcc" )
+test_default_toolset_requirements()
+test_conditions_on_default_toolset()
diff --git a/src/boost/tools/build/test/dependency_property.py b/src/boost/tools/build/test/dependency_property.py
new file mode 100644
index 000000000..b67539255
--- /dev/null
+++ b/src/boost/tools/build/test/dependency_property.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test: virtual targets with different dependency properties were
+# considered different by 'virtual-target.register', but the code which
+# determined the actual target paths ignored dependency properties so both
+# targets ended up being in the same location.
+#
+# This test has flip-flopped several times between passing and failing.
+# Currently, the library is only considered relevant for linking and thus
+# does not cause a conflict. SJW 20180115
+
+import BoostBuild
+
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """\
+lib foo : foo.cpp ;
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp : <library>foo ;
+""")
+
+t.write("hello.cpp", "int main() {}\n")
+
+t.write("foo.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""")
+
+t.run_build_system(["--no-error-backtrace"], status=0)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/dependency_test.py b/src/boost/tools/build/test/dependency_test.py
new file mode 100644
index 000000000..852955775
--- /dev/null
+++ b/src/boost/tools/build/test/dependency_test.py
@@ -0,0 +1,239 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+def test_basic():
+ t = BoostBuild.Tester(["-d3", "-d+12"], use_test_config=False)
+
+ t.write("a.cpp", """
+#include <a.h>
+# include "a.h"
+#include <x.h>
+int main() {}
+""")
+ t.write("a.h", "\n")
+ t.write("a_c.c", """\
+#include <a.h>
+# include "a.h"
+#include <x.h>
+""")
+ t.write("b.cpp", """\
+#include "a.h"
+int main() {}
+""")
+ t.write("b.h", "\n")
+ t.write("c.cpp", """\
+#include "x.h"
+int main() {}
+""")
+ t.write("e.cpp", """\
+#include "x.h"
+int main() {}
+""")
+ t.write("x.foo", "")
+ t.write("y.foo", "")
+
+ t.write("src1/a.h", '#include "b.h"\n')
+ t.write("src1/b.h", '#include "c.h"\n')
+ t.write("src1/c.h", "\n")
+ t.write("src1/z.h", """\
+extern int dummy_variable_suppressing_empty_file_warning_on_hp_cxx_compiler;
+""")
+
+ t.write("src2/b.h", "\n")
+
+ t.write("jamroot.jam", """\
+import foo ;
+import types/cpp ;
+import types/exe ;
+
+project test : requirements <include>src1 ;
+
+exe a : x.foo a.cpp a_c.c ;
+exe b : b.cpp ;
+
+# Because of <define>FOO, c.cpp will be compiled to a different directory than
+# everything for main target "a". Therefore, without <implicit-dependency>, the
+# C preprocessor scan of that module will not find "x.h", which is part of
+# "a"'s dependency graph.
+#
+# --------------------------
+# More detailed explanation:
+# --------------------------
+# c.cpp includes x.h, which does not exist on the current include path, so
+# Boost Jam will try to match it to existing Jam targets to cover cases such as
+# this one, where the file is generated by the same build.
+#
+# However, as x.h is not part of "c" metatarget's dependency graph, Boost
+# Build will not actualize its target by default, i.e. create its Jam target.
+#
+# To get the Jam target created in time, we use the <implicit-dependency>
+# feature. This tells Boost Build that it needs to actualize the dependency
+# graph for metatarget "a", even though that metatarget has not been directly
+# mentioned and is not a dependency for any of the metatargets mentioned in the
+# current build request.
+#
+# Note that Boost Build does not automatically add a dependency between the Jam
+# targets in question. So if Boost Jam does not itself add a dependency on a
+# target from that other dependency graph (x.h in our case), i.e. if c.cpp does
+# not actually include x.h, then our actualizing it has no effect in the end,
+# as Boost Jam has no reason to actually build those targets despite knowing
+# about them.
+exe c : c.cpp : <define>FOO <implicit-dependency>a ;
+""")
+
+ t.write("foo.jam", """\
+import generators ;
+import modules ;
+import os ;
+import print ;
+import type ;
+import types/cpp ;
+
+type.register FOO : foo ;
+
+generators.register-standard foo.foo : FOO : CPP H ;
+
+nl = "
+" ;
+
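+# The FOO generator produces both a CPP and an H file from each .foo source:
+# the CPP output gets a dummy (optionally dllexport-ed) definition, while the
+# H output includes <z.h> so that the test below can verify that generated
+# headers are themselves scanned for dependencies.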
+rule foo ( targets * : sources * : properties * )
+{
+ # On NT, you need an exported symbol in order to have an import library
+ # generated. We will not really use the symbol defined here, just force the
+ # import library creation.
+ if ( [ os.name ] = NT || [ modules.peek : OS ] in CYGWIN ) &&
+ <main-target-type>LIB in $(properties)
+ {
+ .decl = "void __declspec(dllexport) foo() {}" ;
+ }
+ print.output $(<[1]) ;
+ print.text $(.decl:E="//")$(nl) ;
+ print.output $(<[2]) ;
+ print.text "#include <z.h>"$(nl) ;
+}
+""")
+
+ t.write("foo.py",
+r"""import bjam
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+type.register("FOO", ["foo"])
+generators.register_standard("foo.foo", ["FOO"], ["CPP", "H"])
+
+def prepare_foo(targets, sources, properties):
+ if properties.get('os') in ['windows', 'cygwin']:
+ bjam.call('set-target-variable', targets, "DECL",
+ "void __declspec(dllexport) foo() {}")
+
+get_manager().engine().register_action("foo.foo",
+ "echo -e $(DECL:E=//)\\n > $(<[1])\n"
+ "echo -e "#include <z.h>\\n" > $(<[2])\n", function=prepare_foo)
+""")
+
+ # Check that main target 'c' was able to find 'x.h' from 'a's dependency
+ # graph.
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/c.exe")
+
+ # Check handling of first level includes.
+
+ # Both 'a' and 'b' include "a.h" and should be updated.
+ t.touch("a.h")
+ t.run_build_system()
+
+ t.expect_touch("bin/$toolset/debug*/a.exe")
+ t.expect_touch("bin/$toolset/debug*/a.obj")
+ t.expect_touch("bin/$toolset/debug*/a_c.obj")
+ t.expect_touch("bin/$toolset/debug*/b.exe")
+ t.expect_touch("bin/$toolset/debug*/b.obj")
+ t.expect_nothing_more()
+
+ # Only source files using include <a.h> should be compiled.
+ t.touch("src1/a.h")
+ t.run_build_system()
+
+ t.expect_touch("bin/$toolset/debug*/a.exe")
+ t.expect_touch("bin/$toolset/debug*/a.obj")
+ t.expect_touch("bin/$toolset/debug*/a_c.obj")
+ t.expect_nothing_more()
+
+ # "src/a.h" includes "b.h" (in the same dir).
+ t.touch("src1/b.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/a.exe")
+ t.expect_touch("bin/$toolset/debug*/a.obj")
+ t.expect_touch("bin/$toolset/debug*/a_c.obj")
+ t.expect_nothing_more()
+
+ # Included by "src/b.h". We had a bug: file included using double quotes
+ # (e.g. "b.h") was not scanned at all in this case.
+ t.touch("src1/c.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/a.exe")
+
+ t.touch("b.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ # Test dependency on a generated header.
+ #
+ # TODO: we have also to check that generated header is found correctly if
+ # it is different for different subvariants. Lacking any toolset support,
+ # this check will be implemented later.
+ t.touch("x.foo")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/a.obj")
+ t.expect_touch("bin/$toolset/debug*/a_c.obj")
+
+ # Check that generated headers are scanned for dependencies as well.
+ t.touch("src1/z.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/a.obj")
+ t.expect_touch("bin/$toolset/debug*/a_c.obj")
+
+ t.cleanup()
+
+
+def test_scanned_includes_with_absolute_paths():
+ """
+ Regression test: on Windows, <includes> with absolute paths were not
+ considered when scanning dependencies.
+
+ """
+ t = BoostBuild.Tester(["-d3", "-d+12"])
+
+ t.write("jamroot.jam", """\
+path-constant TOP : . ;
+exe app : main.cpp : <include>$(TOP)/include ;
+""");
+
+ t.write("main.cpp", """\
+#include <dir/header.h>
+int main() {}
+""")
+
+ t.write("include/dir/header.h", "\n")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/main.obj")
+
+ t.touch("include/dir/header.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/main.obj")
+
+ t.cleanup()
+
+
+test_basic()
+test_scanned_includes_with_absolute_paths()
diff --git a/src/boost/tools/build/test/disambiguation.py b/src/boost/tools/build/test/disambiguation.py
new file mode 100644
index 000000000..9544aa0f6
--- /dev/null
+++ b/src/boost/tools/build/test/disambiguation.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that it is possible to add a suffix to a main target name to disambiguate
+# that main target from another, and that this does not affect the names of the
+# generated targets.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+exe hello.exe : hello.obj ;
+obj hello.obj : hello.cpp : <variant>debug ;
+obj hello.obj2 : hello.cpp : <variant>release ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+""")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+t.expect_addition("bin/$toolset/debug*/hello.obj")
+t.expect_addition("bin/$toolset/release*/hello.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/dll_path.py b/src/boost/tools/build/test/dll_path.py
new file mode 100644
index 000000000..f7331cdfb
--- /dev/null
+++ b/src/boost/tools/build/test/dll_path.py
@@ -0,0 +1,163 @@
+#!/usr/bin/python
+
+# Copyright (C) 2003. Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the <dll-path> property is correctly set when using
+# <hardcode-dll-paths>true.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# The point of this test is to have exe "main" which uses library "b", which
+# uses library "a". When "main" is built with <hardcode-dll-paths>true, paths
+# to both libraries should be present as values of the <dll-path> feature. We
+# create a special target type which reports the <dll-path> values found on its
+# sources, and compare the list of found values with our expectations.
+
+t.write("jamroot.jam", "using dll_paths ;")
+t.write("jamfile.jam", """\
+exe main : main.cpp b//b ;
+explicit main ;
+path-list mp : main ;
+""")
+
+t.write("main.cpp", "int main() {}\n")
+t.write("dll_paths.jam", """\
+import "class" : new ;
+import feature ;
+import generators ;
+import print ;
+import sequence ;
+import type ;
+
+rule init ( )
+{
+ type.register PATH_LIST : pathlist ;
+
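+ # A generator that turns an EXE into a PATH_LIST. It collects the <dll-path>
+ # values from the property sets of the actions that produced its sources and
+ # adds them to the generated target's properties, so the 'list' action below
+ # can print them into the .pathlist file.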
+ class dll-paths-list-generator : generator
+ {
+ rule __init__ ( )
+ {
+ generator.__init__ dll_paths.list : EXE : PATH_LIST ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local dll-paths ;
+ for local s in $(sources)
+ {
+ local a = [ $(s).action ] ;
+ if $(a)
+ {
+ local p = [ $(a).properties ] ;
+ dll-paths += [ $(p).get <dll-path> ] ;
+ }
+ }
+ return [ generator.generated-targets $(sources) :
+ [ $(property-set).add-raw $(dll-paths:G=<dll-path>) ] :
+ $(project) $(name) ] ;
+ }
+ }
+ generators.register [ new dll-paths-list-generator ] ;
+}
+
+rule list ( target : sources * : properties * )
+{
+ local paths = [ feature.get-values <dll-path> : $(properties) ] ;
+ paths = [ sequence.insertion-sort $(paths) ] ;
+ print.output $(target) ;
+ print.text $(paths) ;
+}
+""")
+
+t.write("dll_paths.py", """\
+import bjam
+
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+def init():
+ type.register("PATH_LIST", ["pathlist"])
+
+ class DllPathsListGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, "dll_paths.list", False,
+ ["EXE"], ["PATH_LIST"])
+
+ def generated_targets(self, sources, ps, project, name):
+ dll_paths = []
+ for s in sources:
+ a = s.action()
+ if a:
+ p = a.properties()
+ dll_paths += p.get('dll-path')
+ dll_paths.sort()
+ return generators.Generator.generated_targets(self, sources,
+ ps.add_raw(["<dll-path>" + p for p in dll_paths]), project,
+ name)
+
+ generators.register(DllPathsListGenerator())
+
+command = \"\"\"
+echo $(PATHS) > $(<[1])
+\"\"\"
+def function(target, sources, ps):
+ bjam.call('set-target-variable', target, "PATHS", ps.get('dll-path'))
+
+get_manager().engine().register_action("dll_paths.list", command,
+ function=function)
+""")
+
+t.write("a/jamfile.jam", "lib a : a.cpp ;")
+t.write("a/a.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../a//a ;")
+t.write("b/b.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+bar() {}
+""")
+
+t.run_build_system(["hardcode-dll-paths=true"])
+
+t.expect_addition("bin/$toolset/debug*/mp.pathlist")
+
+es1 = t.adjust_name("a/bin/$toolset/debug*")
+es2 = t.adjust_name("b/bin/$toolset/debug*")
+
+t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es1)
+t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es2)
+
+t.rm("bin/$toolset/debug*/mp.pathlist")
+
+# Now run the same checks with pre-built libraries
+adll = t.glob_file("a/bin/$toolset/debug*/a.dll")
+bdll = t.glob_file("b/bin/$toolset/debug*/b.dll")
+t.write("b/jamfile.jam", """
+local bdll = %s ;
+# Make sure that it is found even with multiple source-locations
+project : source-location c $(bdll:D) ;
+lib b : ../a//a : <file>$(bdll:D=) ;
+""" % bdll.replace("\\", "\\\\"))
+t.run_build_system(["hardcode-dll-paths=true"])
+t.expect_addition("bin/$toolset/debug*/mp.pathlist")
+
+t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es1)
+t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es2)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/double_loading.py b/src/boost/tools/build/test/double_loading.py
new file mode 100644
index 000000000..c708b00f7
--- /dev/null
+++ b/src/boost/tools/build/test/double_loading.py
@@ -0,0 +1,31 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+# Regression test for double loading of the same Jamfile.
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "build-project subdir ;")
+t.write("subdir/jamfile.jam", 'ECHO "Loaded subdir" ;')
+
+t.run_build_system(subdir="subdir")
+t.expect_output_lines("Loaded subdir")
+
+
+# Regression test for a more contrived case. The top-level Jamfile refers to
+# subdir via use-project while subdir's Jamfile is being loaded. An explanation
+# of why referring to subprojects via use-project is useful can be found at:
+# http://article.gmane.org/gmane.comp.lib.boost.build/3906
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "use-project /subdir : subdir ;")
+t.write("subdir/jamfile.jam", "project subdir ;")
+
+t.run_build_system(subdir="subdir");
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/duplicate.py b/src/boost/tools/build/test/duplicate.py
new file mode 100644
index 000000000..1d0d5f7f2
--- /dev/null
+++ b/src/boost/tools/build/test/duplicate.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This test tries to stage the same file to the same location using *two*
+# different stage rules, in two different projects. This is not exactly a good
+# thing to do, but still, V2 should handle it. We had two bugs:
+# - since the file is referred to from two projects, we created two different
+#   virtual targets
+# - we also failed to figure out that the two targets corresponding to the
+#   copied files (created in the two projects) are actually equivalent.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("a.cpp", """
+""")
+
+t.write("jamroot.jam", """
+build-project a ;
+build-project b ;
+""")
+
+t.write("a/jamfile.jam", """
+stage bin : ../a.cpp : <location>../dist ;
+""")
+
+t.write("b/jamfile.jam", """
+stage bin : ../a.cpp : <location>../dist ;
+""")
+
+t.run_build_system()
+t.expect_addition("dist/a.cpp")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/example_customization.py b/src/boost/tools/build/test/example_customization.py
new file mode 100644
index 000000000..462de5622
--- /dev/null
+++ b/src/boost/tools/build/test/example_customization.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'customization' example.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.set_tree("../example/customization")
+
+t.run_build_system()
+
+t.expect_addition(["bin/$toolset/debug*/codegen.exe",
+ "bin/$toolset/debug*/usage.cpp"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/example_gettext.py b/src/boost/tools/build/test/example_gettext.py
new file mode 100644
index 000000000..aa836130e
--- /dev/null
+++ b/src/boost/tools/build/test/example_gettext.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'gettext' example.
+
+import BoostBuild
+import os
+import string
+
+t = BoostBuild.Tester()
+
+t.set_tree("../example/gettext")
+
+t.run_build_system(stderr=None)
+
+t.expect_addition(["bin/$toolset/debug*/main.exe",
+ "bin/$toolset/debug*/russian.mo"])
+
+file = t.adjust_names(["bin/$toolset/debug*/main.exe"])[0]
+
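+# Run the freshly built program and require that its output starts with the
+# expected greeting ("international hello"); str.find() returns 0 only when
+# the match is at the very beginning of the output.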
+input_fd = os.popen(file)
+input = input_fd.read()
+
+t.fail_test(input.find("international hello") != 0)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/example_libraries.py b/src/boost/tools/build/test/example_libraries.py
new file mode 100644
index 000000000..60607b149
--- /dev/null
+++ b/src/boost/tools/build/test/example_libraries.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'libraries' example.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.set_tree("../example/libraries")
+
+t.run_build_system()
+
+t.expect_addition(["app/bin/$toolset/debug*/app.exe",
+ "util/foo/bin/$toolset/debug*/bar.dll"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/example_make.py b/src/boost/tools/build/test/example_make.py
new file mode 100644
index 000000000..d72423cb2
--- /dev/null
+++ b/src/boost/tools/build/test/example_make.py
@@ -0,0 +1,17 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'make' example.
+
+import BoostBuild
+import sys
+
+t = BoostBuild.Tester(['example.python.interpreter=%s' % sys.executable])
+t.set_tree("../example/make")
+t.run_build_system()
+t.expect_addition(["bin/main.cpp"])
+t.cleanup()
diff --git a/src/boost/tools/build/test/example_qt4.py b/src/boost/tools/build/test/example_qt4.py
new file mode 100644
index 000000000..936e6f718
--- /dev/null
+++ b/src/boost/tools/build/test/example_qt4.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'qt4' examples.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.set_tree("../example/qt/qt4/hello")
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug*/threading-multi/arrow"])
+
+t.set_tree("../example/qt/qt4/moccable-cpp")
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug*/threading-multi/main"])
+
+t.set_tree("../example/qt/qt4/uic")
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug*/threading-multi/hello"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/exit_status.py b/src/boost/tools/build/test/exit_status.py
new file mode 100755
index 000000000..11c4abf76
--- /dev/null
+++ b/src/boost/tools/build/test/exit_status.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2010.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a build failure results in a non-zero exit status.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester()
+
+# Create the needed files.
+t.write("jamroot.jam", """
+exe hello : hello.cpp ;
+""")
+
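+# The source below is deliberately malformed (the function body is never
+# closed), so compilation, and therefore the build, must fail.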
+t.write("hello.cpp", """
+int main() {
+""")
+
+t.run_build_system(status=1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/expansion.py b/src/boost/tools/build/test/expansion.py
new file mode 100644
index 000000000..9042407f7
--- /dev/null
+++ b/src/boost/tools/build/test/expansion.py
@@ -0,0 +1,140 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+import BoostBuild
+
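+# No toolset is passed and --config= is left empty (presumably suppressing any
+# user/site configuration), since the checks below only exercise property
+# expansion.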
+t = BoostBuild.Tester(arguments=["--config="], pass_toolset=0)
+
+t.write("source.input", "")
+
+t.write("test-properties.jam", """
+import feature : feature ;
+import generators ;
+import toolset ;
+import type ;
+
+# We're not using the toolset at all, and we want to
+# suppress toolset initialization to avoid surprises.
+feature.extend toolset : null ;
+
+type.register CHECK : check ;
+type.register INPUT : input ;
+feature expected-define : : free ;
+feature unexpected-define : : free ;
+toolset.flags test-properties DEFINES : <define> ;
+toolset.flags test-properties EXPECTED : <expected-define> ;
+toolset.flags test-properties UNEXPECTED : <unexpected-define> ;
+generators.register-standard test-properties.check : INPUT : CHECK ;
+rule check ( target : source : properties * )
+{
+ local defines = [ on $(target) return $(DEFINES) ] ;
+ for local macro in [ on $(target) return $(EXPECTED) ]
+ {
+ if ! ( $(macro) in $(defines) )
+ {
+ EXIT expected $(macro) for $(target) in $(properties) : 1 ;
+ }
+ }
+ for local macro in [ on $(target) return $(UNEXPECTED) ]
+ {
+ if $(macro) in $(defines)
+ {
+ EXIT unexpected $(macro) for $(target) in $(properties) : 1 ;
+ }
+ }
+}
+actions check
+{
+ echo okay > $(<)
+}
+""")
+
+t.write("jamfile.jam", """
+import test-properties ;
+# See if the default value of the composite feature 'cf' is expanded to
+# <define>CF_IS_OFF.
+check a : source.input : <expected-define>CF_IS_OFF ;
+
+# See if a subfeature in the requirements is expanded.
+check b : source.input : <cf>on-1
+ <expected-define>CF_1 <unexpected-define>CF_IS_OFF ;
+
+# See if conditional requirements are recursively expanded.
+check c : source.input : <toolset>null:<variant>release
+ <variant>release:<define>FOO <expected-define>FOO
+ ;
+
+# Composites specified in the default build should not
+# be expanded if they are overridden in the requirements.
+check d : source.input : <cf>on <unexpected-define>CF_IS_OFF : <cf>off ;
+
+# Overriding a feature should clear subfeatures and
+# apply default values of subfeatures.
+check e : source.input : <cf>always
+ <unexpected-define>CF_IS_OFF <expected-define>CF_2 <unexpected-define>CF_1
+ : <cf>on-1 ;
+
+# Subfeatures should not be changed if the parent feature does not change.
+check f : source.input : <cf>on <expected-define>CF_1 : <cf>on-1 ;
+
+# If a subfeature is not specific to the value of the parent feature,
+# then changing the parent value should not clear the subfeature.
+check g : source.input : <fopt>off <expected-define>FOPT_2 : <fopt>on-2 ;
+
+# If the default value of a composite feature adds an optional
+# feature which has a subfeature with a default, then that
+# default should be added.
+check h : source.input : <expected-define>CX_2 ;
+
+# If the default value of a feature is used, then the
+# default value of its subfeatures should also be used.
+check i : source.input : <expected-define>SF_1 ;
+
+# Subfeatures should be expanded when listed in a
+# target reference.
+check j-impl : source.input : <expected-define>CF_1 ;
+explicit j-impl ;
+alias j : j-impl/<cf>on-1 ;
+""")
+
+t.write("jamroot.jam", """
+import feature ;
+feature.feature cf : off on always : composite incidental ;
+feature.compose <cf>off : <define>CF_IS_OFF ;
+feature.subfeature cf on : version : 1 2 : composite optional incidental ;
+feature.compose <cf-on:version>1 : <define>CF_1 ;
+feature.subfeature cf always : version : 1 2 : composite incidental ;
+feature.compose <cf-always:version>1 : <define>CF_2 ;
+feature.feature fopt : on off : optional incidental ;
+feature.subfeature fopt : version : 1 2 : composite incidental ;
+feature.compose <fopt-version>2 : <define>FOPT_2 ;
+
+feature.feature cx1 : on : composite incidental ;
+feature.feature cx2 : on : optional incidental ;
+feature.subfeature cx2 on : sub : 1 : composite incidental ;
+feature.compose <cx1>on : <cx2>on ;
+feature.compose <cx2-on:sub>1 : <define>CX_2 ;
+
+feature.feature sf : a : incidental ;
+feature.subfeature sf a : sub : 1 : composite incidental ;
+feature.compose <sf-a:sub>1 : <define>SF_1 ;
+""")
+
+t.expand_toolset("jamfile.jam")
+
+t.run_build_system()
+t.expect_addition(["bin/debug/a.check",
+ "bin/debug/b.check",
+ "bin/null/release/c.check",
+ "bin/debug/d.check",
+ "bin/debug/e.check",
+ "bin/debug/f.check",
+ "bin/debug/g.check",
+ "bin/debug/h.check",
+ "bin/debug/i.check"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/explicit.py b/src/boost/tools/build/test/explicit.py
new file mode 100644
index 000000000..14d421752
--- /dev/null
+++ b/src/boost/tools/build/test/explicit.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+explicit hello2 ;
+""")
+
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.ignore("*.tds")
+t.expect_addition(BoostBuild.List("bin/$toolset/debug*/hello") * \
+ [".exe", ".obj"])
+t.expect_nothing_more()
+
+t.run_build_system(["hello2"])
+t.expect_addition("bin/$toolset/debug*/hello2.exe")
+
+t.rm(".")
+
+
+# Test that 'explicit' used in a helper rule applies to the current project, and
+# not to the Jamfile where the helper rule is defined.
+t.write("jamroot.jam", """\
+rule myinstall ( name : target )
+{
+ install $(name)-bin : $(target) ;
+ explicit $(name)-bin ;
+ alias $(name) : $(name)-bin ;
+}
+""")
+
+t.write("sub/a.cpp", "\n")
+t.write("sub/jamfile.jam", "myinstall dist : a.cpp ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("sub/dist-bin/a.cpp")
+
+t.rm("sub/dist-bin")
+
+t.write("sub/jamfile.jam", """\
+myinstall dist : a.cpp ;
+explicit dist ;
+""")
+
+t.run_build_system(subdir="sub")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/feature_cxxflags.py b/src/boost/tools/build/test/feature_cxxflags.py
new file mode 100755
index 000000000..a4eeb52d4
--- /dev/null
+++ b/src/boost/tools/build/test/feature_cxxflags.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the cxxflags feature
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# cxxflags should be applied to C++ compilation,
+# but not to C.
+t.write("Jamroot.jam", """
+obj test-cpp : test.cpp : <cxxflags>-DOKAY ;
+obj test-c : test.c : <cxxflags>-DBAD ;
+""")
+
+t.write("test.cpp", """
+#ifndef OKAY
+#error Cannot compile without OKAY
+#endif
+""")
+
+t.write("test.c", """
+#ifdef BAD
+#error Cannot compile with BAD
+#endif
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/test-cpp.obj")
+t.expect_addition("bin/$toolset/debug*/test-c.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/feature_implicit_dependency.py b/src/boost/tools/build/test/feature_implicit_dependency.py
new file mode 100644
index 000000000..0b40da1bf
--- /dev/null
+++ b/src/boost/tools/build/test/feature_implicit_dependency.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+
+# Copyright (c) Steven Watanabe 2018.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that a single main target can be used for
+# implicit dependencies of multiple different types.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=False)
+
+t.write("input.sss", "")
+
+t.write("Jamroot.jam", """
+import type ;
+import common ;
+import generators ;
+import "class" : new ;
+import feature : feature ;
+import toolset : flags ;
+
+type.register AAA : aaa ;
+type.register BBB : bbb ;
+type.register CCC : ccc ;
+type.register DDD : ddd ;
+type.register SSS : sss ;
+
+feature aaa-path : : free path ;
+feature bbb-path : : free path ;
+
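+# The aaa/bbb action classes below override adjust-properties so that include
+# paths computed from <implicit-dependency> targets (via the subvariant's
+# implicit-includes) are added as <aaa-path>/<bbb-path> properties of the
+# consuming action.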
+class aaa-action : action
+{
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes aaa-path : AAA ] ] ;
+ }
+}
+
+class aaa-generator : generator
+{
+ rule action-class ( )
+ {
+ return aaa-action ;
+ }
+}
+
+class bbb-action : action
+{
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes bbb-path : BBB ] ] ;
+ }
+}
+
+class bbb-generator : generator
+{
+ rule action-class ( )
+ {
+ return bbb-action ;
+ }
+}
+
+generators.register-standard common.copy : SSS : AAA ;
+generators.register-standard common.copy : SSS : BBB ;
+
+# Produce two targets from a single source
+rule make-aaa-bbb ( project name ? : property-set : sources * )
+{
+ local result ;
+ local aaa = [ generators.construct $(project) $(name) : AAA :
+ [ $(property-set).add-raw <location-prefix>a-loc ] : $(sources) ] ;
+ local bbb = [ generators.construct $(project) $(name) : BBB :
+ [ $(property-set).add-raw <location-prefix>b-loc ] : $(sources) ] ;
+ return [ $(aaa[1]).add $(bbb[1]) ] $(aaa[2-]) $(bbb[2-]) ;
+}
+
+generate input : input.sss : <generating-rule>@make-aaa-bbb ;
+explicit input ;
+
+flags make-ccc AAAPATH : <aaa-path> ;
+rule make-ccc ( target : sources * : properties * )
+{
+ ECHO aaa path\: [ on $(target) return $(AAAPATH) ] ;
+ common.copy $(target) : $(sources) ;
+}
+
+flags make-ddd BBBPATH : <bbb-path> ;
+rule make-ddd ( target : sources * : properties * )
+{
+ ECHO bbb path\: [ on $(target) return $(BBBPATH) ] ;
+ common.copy $(target) : $(sources) ;
+}
+
+generators.register [ new aaa-generator $(__name__).make-ccc : SSS : CCC ] ;
+generators.register [ new bbb-generator $(__name__).make-ddd : SSS : DDD ] ;
+
+# This should have <aaapath>bin/a-loc
+ccc output-c : input.sss : <implicit-dependency>input ;
+# This should have <bbbpath>bin/b-loc
+ddd output-d : input.sss : <implicit-dependency>input ;
+""")
+
+t.run_build_system()
+t.expect_output_lines(["aaa path: bin/a-loc", "bbb path: bin/b-loc"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/feature_relevant.py b/src/boost/tools/build/test/feature_relevant.py
new file mode 100644
index 000000000..4e7a0c451
--- /dev/null
+++ b/src/boost/tools/build/test/feature_relevant.py
@@ -0,0 +1,142 @@
+#!/usr/bin/python
+
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the <relevant> feature
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("xxx.jam", """
+import type ;
+import feature : feature ;
+import toolset : flags ;
+import generators ;
+type.register XXX : xxx ;
+type.register YYY : yyy ;
+feature xxxflags : : free ;
+generators.register-standard xxx.run : YYY : XXX ;
+# xxxflags is relevant because it is used by flags
+flags xxx.run OPTIONS : <xxxflags> ;
+actions run
+{
+ echo okay > $(<)
+}
+""")
+
+t.write("zzz.jam", """
+import xxx ;
+import type ;
+import feature : feature ;
+import generators ;
+type.register ZZZ : zzz ;
+feature zzz.enabled : off on : propagated ;
+# zzz.enabled is relevant because it is used in the generator's
+# requirements
+generators.register-standard zzz.run : XXX : ZZZ : <zzz.enabled>on ;
+actions run
+{
+ echo okay > $(<)
+}
+""")
+
+t.write("aaa.jam", """
+import zzz ;
+import type ;
+import feature : feature ;
+import generators ;
+import toolset : flags ;
+type.register AAA : aaa ;
+feature aaaflags : : free ;
+generators.register-standard aaa.run : ZZZ : AAA ;
+flags aaa.run OPTIONS : <aaaflags> ;
+actions run
+{
+ echo okay > $(<)
+}
+""")
+
+t.write("Jamroot.jam", """
+import xxx ;
+import zzz ;
+import aaa ;
+import feature : feature ;
+
+# f1 is relevant, because it is composite and <xxxflags> is relevant
+feature f1 : n y : composite propagated ;
+feature.compose <f1>y : <xxxflags>-no1 ;
+# f2 is relevant, because it is used in a conditional
+feature f2 : n y : propagated ;
+# f3 is relevant, because it is used to choose the target alternative
+feature f3 : n y : propagated ;
+# f4 is relevant, because it is marked as such explicitly
+feature f4 : n y : propagated ;
+# f5 is relevant because of the conditional usage-requirements
+feature f5 : n y : propagated ;
+# f6 is relevant because the indirect conditional indicates so
+feature f6 : n y : propagated ;
+# f7 is relevant because the icond7 says so
+feature f7 : n y : propagated ;
+
+# The same as f[n], except not propagated
+feature g1 : n y : composite ;
+feature.compose <g1>y : <xxxflags>-no1 ;
+feature g2 : n y ;
+feature g3 : n y ;
+feature g4 : n y ;
+feature g5 : n y ;
+feature g6 : n y ;
+feature g7 : n y ;
+
+project : default-build
+ <f1>y <f2>y <f3>y <f4>y <f5>y <f6>y <f7>y
+ <g1>y <g2>y <g3>y <g4>y <g5>y <g6>y <g7>y <zzz.enabled>on ;
+
+rule icond6 ( properties * )
+{
+ local result ;
+ if <f6>y in $(properties) || <g6>y in $(properties)
+ {
+ result += <xxxflags>-yes6 ;
+ }
+ return $(result)
+ <relevant>xxxflags:<relevant>f6
+ <relevant>xxxflags:<relevant>g6 ;
+}
+
+rule icond7 ( properties * )
+{
+ local result ;
+ if <f7>y in $(properties) || <g7>y in $(properties)
+ {
+ result += <aaaflags>-yes7 ;
+ }
+ return $(result)
+ <relevant>aaaflags:<relevant>f7
+ <relevant>aaaflags:<relevant>g7 ;
+}
+
+zzz out : in.yyy
+ : <f2>y:<xxxflags>-no2 <g2>y:<xxxflags>-no2 <relevant>f4 <relevant>g4
+ <conditional>@icond6
+ :
+ : <f5>y:<aaaflags>-yes5 <g5>y:<aaaflags>-yes5 <conditional>@icond7
+ ;
+alias out : : <f3>n ;
+alias out : : <g3>n ;
+# Features that are relevant for out are also relevant for check-propagate
+aaa check-propagate : out ;
+""")
+
+t.write("in.yyy", "")
+
+t.run_build_system()
+t.expect_addition("bin/f1-y/f2-y/f3-y/f4-y/f6-y/g1-y/g2-y/g3-y/g4-y/g6-y/out.xxx")
+t.expect_addition("bin/f1-y/f2-y/f3-y/f4-y/f6-y/g1-y/g2-y/g3-y/g4-y/g6-y/zzz.enabled-on/out.zzz")
+t.expect_addition("bin/f1-y/f2-y/f3-y/f4-y/f5-y/f6-y/f7-y/zzz.enabled-on/check-propagate.aaa")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/feature_suppress_import_lib.py b/src/boost/tools/build/test/feature_suppress_import_lib.py
new file mode 100644
index 000000000..8dc666727
--- /dev/null
+++ b/src/boost/tools/build/test/feature_suppress_import_lib.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the suppress-import-lib feature
+
+# This used to cause the pdb and the import lib to get mixed up
+# if there are any exports.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("Jamroot.jam", """
+lib l : l.cpp : <suppress-import-lib>true ;
+""")
+
+t.write("l.cpp", """
+void
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+f() {}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/l.obj")
+t.expect_addition("bin/$toolset/debug*/l.dll")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/file_types.py b/src/boost/tools/build/test/file_types.py
new file mode 100644
index 000000000..e6d9bf840
--- /dev/null
+++ b/src/boost/tools/build/test/file_types.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+#
+# Copyright 2018 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the mapping of various suffixes
+# In particular, .so[.version] needs to
+# be mapped as a SHARED_LIB.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("Jamroot.jam", """\
+import type : type ;
+ECHO [ type source.c ] ;
+ECHO [ type source.cc ] ;
+ECHO [ type source.cxx ] ;
+ECHO [ type source.cpp ] ;
+ECHO [ type source.o ] ;
+ECHO [ type source.obj ] ;
+ECHO [ type boost_system.lib ] ;
+ECHO [ type boost_system.so ] ;
+ECHO [ type boost_system.dll ] ;
+EXIT [ type boost_system.so.1.66.0 ] : 0 ;
+""")
+
+t.run_build_system(stdout="""\
+C
+CPP
+CPP
+CPP
+OBJ
+OBJ
+STATIC_LIB
+SHARED_LIB
+SHARED_LIB
+SHARED_LIB
+""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/flags.py b/src/boost/tools/build/test/flags.py
new file mode 100644
index 000000000..bdb2b6b7d
--- /dev/null
+++ b/src/boost/tools/build/test/flags.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+
+# Copyright (C) Steven Watanabe 2018
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the check-has-flag rule
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# We need an object file before we can run the actual test.
+t.write('input.cpp', 'void f() {}\n')
+t.write('Jamroot.jam', 'obj input : input.cpp ;')
+t.run_build_system()
+
+linker_input = t.glob_file('bin/$toolset/debug*/input.obj')
+
+# Check every possible result of pass or fail.
+t.write('Jamroot.jam', '''
+import flags ;
+import modules ;
+OBJECT_FILE = [ modules.peek : OBJECT_FILE ] ;
+obj fail_cpp : test.cpp : [ check-has-flag <cxxflags>--illegal-flag-cpp
+ : <define>ERROR : <define>OK ] ;
+obj pass_cpp : test.cpp : [ check-has-flag <cxxflags>-DMACRO_CPP
+ : <define>OK : <define>ERROR ] ;
+obj fail_c : test.cpp : [ check-has-flag <cflags>--illegal-flag-c
+ : <define>ERROR : <define>OK ] ;
+obj pass_c : test.cpp : [ check-has-flag <cflags>-DMACRO_C
+ : <define>OK : <define>ERROR ] ;
+obj fail_link : test.cpp : [ check-has-flag <linkflags>--illegal-flag-link
+ : <define>ERROR : <define>OK ] ;
+# The only thing that we can be certain the linker
+# will accept is the name of an object file.
+obj pass_link : test.cpp : [ check-has-flag <linkflags>$(OBJECT_FILE)
+ : <define>OK : <define>ERROR ] ;
+''')
+
+t.write('test.cpp', '''
+#ifdef ERROR
+#error ERROR defined
+#endif
+#ifndef OK
+#error ERROR not defined
+#endif
+''')
+
+# Don't check the status immediately, so that we have a chance
+# to print config.log. Also, we need at least -d2 to make
+# sure that we always see the commands and their output.
+t.run_build_system(['-sOBJECT_FILE=' + linker_input, '-d2'], status=None)
+
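+# On failure, surface bin/config.log in the test annotation so the reason for
+# the failed flag check is visible, then fail the test explicitly.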
+if t.status != 0:
+ log_file = t.read('bin/config.log')
+ BoostBuild.annotation("config.log", log_file)
+ t.fail_test(True)
+
+t.expect_output_lines([' - has --illegal-flag-cpp : no',
+ ' - has -DMACRO_CPP : yes',
+ ' - has --illegal-flag-c : no',
+ ' - has -DMACRO_C : yes',
+ ' - has --illegal-flag-link : no',
+ ' - has *bin*/input.* : yes'])
+t.expect_addition('bin/$toolset/debug*/fail_cpp.obj')
+t.expect_addition('bin/$toolset/debug*/pass_cpp.obj')
+t.expect_addition('bin/$toolset/debug*/fail_c.obj')
+t.expect_addition('bin/$toolset/debug*/pass_c.obj')
+t.expect_addition('bin/$toolset/debug*/fail_link.obj')
+t.expect_addition('bin/$toolset/debug*/pass_link.obj')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/gcc_runtime.py b/src/boost/tools/build/test/gcc_runtime.py
new file mode 100644
index 000000000..684afed10
--- /dev/null
+++ b/src/boost/tools/build/test/gcc_runtime.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that on gcc, we correctly report a problem when static runtime is
+# requested for building a shared library.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+t.write("jamroot.jam", "lib hello : hello.cpp ;")
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system(["runtime-link=static"])
+t.expect_output_lines("warning: On gcc, DLLs can not be built with "
+ "'<runtime-link>static'.")
+t.expect_nothing_more()
+
+t.run_build_system(["link=static", "runtime-link=static"])
+t.expect_addition("bin/$toolset/debug*/link-static*/hello.obj")
+t.expect_addition("bin/$toolset/debug*/link-static*/hello.lib")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/generator_selection.py b/src/boost/tools/build/test/generator_selection.py
new file mode 100755
index 000000000..50d4ff3cd
--- /dev/null
+++ b/src/boost/tools/build/test/generator_selection.py
@@ -0,0 +1,158 @@
+#!/usr/bin/python
+
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that generators get selected correctly.
+#
+# We do not use the internal C++-compiler CPP --> OBJ generator to avoid
+# problems with specific compilers or their configurations, e.g. IBM's AIX test
+# runner 'AIX Version 5.3 TL7 SP5 (5300-07-05-0831)' using the 'IBM XL C/C++
+# for AIX, V12.1 (Version: 12.01.0000.0000)' reporting errors when run with a
+# source file whose suffix is not '.cpp'.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_generator_added_after_already_building_a_target_of_its_target_type()
+# -------------------------------------------------------------------------
+#
+###############################################################################
+
+def test_generator_added_after_already_building_a_target_of_its_target_type():
+ """
+ Regression test for a Boost Build bug causing it to not use a generator
+ if it got added after already building a target of its target type.
+
+ """
+ t = BoostBuild.Tester()
+
+ t.write("dummy.cpp", "void f() {}\n")
+
+ t.write("jamroot.jam", """\
+import common ;
+import generators ;
+import type ;
+type.register MY_OBJ : my_obj ;
+generators.register-standard common.copy : CPP : MY_OBJ ;
+
+# Building this dummy target must not prevent a later defined CPP target type
+# generator from being recognized as viable.
+my-obj dummy : dummy.cpp ;
+alias the-other-obj : Other//other-obj ;
+""")
+
+ t.write("Other/source.extension", "A dummy source file.")
+
+ t.write("Other/mygen.jam", """\
+import common ;
+import generators ;
+import type ;
+type.register MY_TYPE : extension ;
+generators.register-standard $(__name__).generate-a-cpp-file : MY_TYPE : CPP ;
+rule generate-a-cpp-file { ECHO Generating a CPP file... ; }
+CREATE-FILE = [ common.file-creation-command ] ;
+actions generate-a-cpp-file { $(CREATE-FILE) "$(<)" }
+""")
+
+ t.write("Other/mygen.py", """\
+from __future__ import print_function
+import b2.build.generators as generators
+import b2.build.type as type
+
+from b2.manager import get_manager
+
+import os
+
+type.register('MY_TYPE', ['extension'])
+generators.register_standard('mygen.generate-a-cpp-file', ['MY_TYPE'], ['CPP'])
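+# The generated C++ snippet needs different quoting on Windows (cmd.exe) and
+# POSIX shells.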
+if os.name == 'nt':
+ action = 'echo void g() {} > "$(<)"'
+else:
+ action = 'echo "void g() {}" > "$(<)"'
+def f(*args):
+ print("Generating a CPP file...")
+
+get_manager().engine().register_action("mygen.generate-a-cpp-file", action,
+ function=f)
+""")
+
+ t.write("Other/jamfile.jam", """\
+import mygen ;
+my-obj other-obj : source.extension ;
+""")
+
+ t.run_build_system()
+ t.expect_output_lines("Generating a CPP file...")
+ t.expect_addition("bin/dummy.my_obj")
+ t.expect_addition("Other/bin/other-obj.cpp")
+ t.expect_addition("Other/bin/other-obj.my_obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_using_a_derived_source_type_created_after_generator_already_used()
+# -----------------------------------------------------------------------
+#
+###############################################################################
+
+def test_using_a_derived_source_type_created_after_generator_already_used():
+ """
+ Regression test for a Boost Build bug causing it to not use a generator
+ with a source type derived from one of the generator's sources but created
+    only after the generator had already been used.
+
+ """
+ t = BoostBuild.Tester()
+
+ t.write("dummy.xxx", "Hello. My name is Peter Pan.\n")
+
+ t.write("jamroot.jam", """\
+import common ;
+import generators ;
+import type ;
+type.register XXX : xxx ;
+type.register YYY : yyy ;
+generators.register-standard common.copy : XXX : YYY ;
+
+# Building this dummy target must not prevent a later defined XXX2 target type
+# from being recognized as a viable source type for building YYY targets.
+yyy dummy : dummy.xxx ;
+alias the-test-output : Other//other ;
+""")
+
+ t.write("Other/source.xxx2", "Hello. My name is Tinkerbell.\n")
+
+ t.write("Other/jamfile.jam", """\
+import type ;
+type.register XXX2 : xxx2 : XXX ;
+# We are careful not to do anything between defining our new XXX2 target type
+# and using the XXX --> YYY generator that could potentially mask the Boost
+# Build bug by clearing its internal viable source target type state.
+yyy other : source.xxx2 ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/dummy.yyy")
+ t.expect_addition("Other/bin/other.yyy")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_generator_added_after_already_building_a_target_of_its_target_type()
+test_using_a_derived_source_type_created_after_generator_already_used()
diff --git a/src/boost/tools/build/test/generators_test.py b/src/boost/tools/build/test/generators_test.py
new file mode 100644
index 000000000..f612a25eb
--- /dev/null
+++ b/src/boost/tools/build/test/generators_test.py
@@ -0,0 +1,433 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import re
+
+
+def test_basic():
+ t = BoostBuild.Tester()
+ __write_appender(t, "appender.jam")
+ t.write("a.cpp", "")
+ t.write("b.cxx", "")
+ t.write("c.tui", "")
+ t.write("d.wd", "")
+ t.write("e.cpp", "")
+ t.write("x.l", "")
+ t.write("y.x_pro", "")
+ t.write("z.cpp", "")
+ t.write("lib/c.cpp", "int bar() { return 0; }\n")
+ t.write("lib/jamfile.jam", "my-lib auxilliary : c.cpp ;")
+ t.write("jamroot.jam",
+r"""import appender ;
+
+import "class" : new ;
+import generators ;
+import type ;
+
+
+################################################################################
+#
+# We use our own custom EXE, LIB & OBJ target generators as using the regular
+# ones would force us to deal with different compiler/linker-specific
+# 'features' that really have nothing to do with this test. For example, IBM XL
+# C/C++ for AIX, V12.1 (Version: 12.01.0000.0000) compiler exits with a non-zero
+# exit code and thus fails our build when run with a source file using an
+# unknown suffix like '.marked_cpp'.
+#
+################################################################################
+
+type.register MY_EXE : my_exe ;
+type.register MY_LIB : my_lib ;
+type.register MY_OBJ : my_obj ;
+
+appender.register compile-c : C : MY_OBJ ;
+appender.register compile-cpp : CPP : MY_OBJ ;
+appender.register link-lib composing : MY_OBJ : MY_LIB ;
+appender.register link-exe composing : MY_OBJ MY_LIB : MY_EXE ;
+
+
+################################################################################
+#
+# LEX --> C
+#
+################################################################################
+
+type.register LEX : l ;
+
+appender.register lex-to-c : LEX : C ;
+
+
+################################################################################
+#
+# /--> tUI_H --\
+# tUI --< >--> CPP
+# \------------/
+#
+################################################################################
+
+type.register tUI : tui ;
+type.register tUI_H : tui_h ;
+
+appender.register ui-to-cpp : tUI tUI_H : CPP ;
+appender.register ui-to-h : tUI : tUI_H ;
+
+
+################################################################################
+#
+# /--> X1 --\
+# X_PRO --< >--> CPP
+# \--> X2 --/
+#
+################################################################################
+
+type.register X1 : x1 ;
+type.register X2 : x2 ;
+type.register X_PRO : x_pro ;
+
+appender.register x1-x2-to-cpp : X1 X2 : CPP ;
+appender.register x-pro-to-x1-x2 : X_PRO : X1 X2 ;
+
+
+################################################################################
+#
+# When the main target type is NM_EXE, build OBJ from CPP-MARKED and not from
+# anything else, e.g. directly from CPP.
+#
+################################################################################
+
+type.register CPP_MARKED : marked_cpp : CPP ;
+type.register POSITIONS : positions ;
+type.register NM.TARGET.CPP : target_cpp : CPP ;
+type.register NM_EXE : : MY_EXE ;
+
+appender.register marked-to-target-cpp : CPP_MARKED : NM.TARGET.CPP ;
+appender.register cpp-to-marked-positions : CPP : CPP_MARKED POSITIONS ;
+
+class "nm::target::cpp-obj-generator" : generator
+{
+ rule __init__ ( id )
+ {
+ generator.__init__ $(id) : NM.TARGET.CPP : MY_OBJ ;
+ generator.set-rule-name appender.appender ;
+ }
+
+ rule requirements ( )
+ {
+ return <main-target-type>NM_EXE ;
+ }
+
+ rule run ( project name ? : properties * : source : multiple ? )
+ {
+ if [ $(source).type ] = CPP
+ {
+ local converted = [ generators.construct $(project) : NM.TARGET.CPP
+ : $(properties) : $(source) ] ;
+ if $(converted)
+ {
+ return [ generators.construct $(project) : MY_OBJ :
+ $(properties) : $(converted[2]) ] ;
+ }
+ }
+ }
+}
+generators.register [ new "nm::target::cpp-obj-generator" target-obj ] ;
+generators.override target-obj : all ;
+
+
+################################################################################
+#
+# A more complex test case scenario with the following generators:
+# 1. WHL --> CPP, WHL_LR0, H, H(%_symbols)
+# 2. DLP --> CPP
+# 3. WD --> WHL(%_parser) DLP(%_lexer)
+# 4. A custom generator of higher priority than generators 1. & 2. that helps
+# disambiguate between them when generating CPP files from WHL and DLP
+# sources.
+#
+################################################################################
+
+type.register WHL : whl ;
+type.register DLP : dlp ;
+type.register WHL_LR0 : lr0 ;
+type.register WD : wd ;
+
+local whale-generator-id = [ appender.register whale : WHL : CPP WHL_LR0 H
+ H(%_symbols) ] ;
+local dolphin-generator-id = [ appender.register dolphin : DLP : CPP ] ;
+appender.register wd : WD : WHL(%_parser) DLP(%_lexer) ;
+
+class wd-to-cpp : generator
+{
+ rule __init__ ( id : sources * : targets * )
+ {
+ generator.__init__ $(id) : $(sources) : $(targets) ;
+ }
+
+ rule run ( project name ? : property-set : source )
+ {
+ local new-sources = $(source) ;
+ if ! [ $(source).type ] in WHL DLP
+ {
+ local r1 = [ generators.construct $(project) $(name) : WHL :
+ $(property-set) : $(source) ] ;
+ local r2 = [ generators.construct $(project) $(name) : DLP :
+ $(property-set) : $(source) ] ;
+ new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
+ }
+
+ local result ;
+ for local i in $(new-sources)
+ {
+ local t = [ generators.construct $(project) $(name) : CPP :
+ $(property-set) : $(i) ] ;
+ result += $(t[2-]) ;
+ }
+ return $(result) ;
+ }
+}
+generators.override $(__name__).wd-to-cpp : $(whale-generator-id) ;
+generators.override $(__name__).wd-to-cpp : $(dolphin-generator-id) ;
+generators.register [ new wd-to-cpp $(__name__).wd-to-cpp : : CPP ] ;
+
+
+################################################################################
+#
+# Declare build targets.
+#
+################################################################################
+
+# This should not cause two CPP --> MY_OBJ constructions for a.cpp or b.cpp.
+my-exe a : a.cpp b.cxx obj_1 obj_2 c.tui d.wd x.l y.x_pro lib//auxilliary ;
+my-exe f : a.cpp b.cxx obj_1 obj_2 lib//auxilliary ;
+
+# This should cause two CPP --> MY_OBJ constructions for z.cpp.
+my-obj obj_1 : z.cpp ;
+my-obj obj_2 : z.cpp ;
+
+nm-exe e : e.cpp ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/" * BoostBuild.List("a.my_exe "
+ "a.my_obj b.my_obj c.tui_h c.cpp c.my_obj d_parser.whl d_lexer.dlp "
+ "d_parser.cpp d_lexer.cpp d_lexer.my_obj d_parser.lr0 d_parser.h "
+ "d_parser.my_obj d_parser_symbols.h x.c x.my_obj y.x1 y.x2 y.cpp "
+ "y.my_obj e.marked_cpp e.positions e.target_cpp e.my_obj e.my_exe "
+ "f.my_exe obj_1.my_obj obj_2.my_obj"))
+ t.expect_addition("lib/bin/" * BoostBuild.List("c.my_obj "
+ "auxilliary.my_lib"))
+ t.expect_nothing_more()
+
+ folder = "bin"
+ t.expect_content_lines("%s/obj_1.my_obj" % folder, " Sources: 'z.cpp'")
+ t.expect_content_lines("%s/obj_2.my_obj" % folder, " Sources: 'z.cpp'")
+ t.expect_content_lines("%s/a.my_obj" % folder, " Sources: 'a.cpp'")
+
+ lines = t.stdout().splitlines()
+ source_lines = [x for x in lines if re.match("^ Sources: '", x)]
+ if not __match_count_is(source_lines, "'z.cpp'", 2):
+ BoostBuild.annotation("failure", "z.cpp must be compiled exactly "
+ "twice.")
+ t.fail_test(1)
+ if not __match_count_is(source_lines, "'a.cpp'", 1):
+ BoostBuild.annotation("failure", "a.cpp must be compiled exactly "
+ "once.")
+ t.fail_test(1)
+ t.cleanup()
+
+
+def test_generated_target_names():
+ """
+ Test generator generated target names. Unless given explicitly, target
+ names should be determined based on their specified source names. All
+ sources for generating a target need to have matching names in order for
+ Boost Build to be able to implicitly determine the target's name.
+
+ We use the following target generation structure with differently named
+ BBX targets:
+ /---> BB1 ---\
+ AAA --<----> BB2 ---->--> CCC --(composing)--> DDD
+ \---> BB3 ---/
+
+ The extra generator at the end is needed because generating a top-level
+    CCC target directly would require us to explicitly specify a name for it.
+ The extra generator needs to be composing in order not to explicitly
+ request a specific name for its CCC source target based on its own target
+ name.
+
+ We also check for a regression where only the first two sources were
+ checked to see if their names match. Note that we need to try out all file
+ renaming combinations as we do not know what ordering Boost Build is going
+ to use when passing in those files as generator sources.
+
+ """
+ jamfile_template = """\
+import type ;
+type.register AAA : _a ;
+type.register BB1 : _b1 ;
+type.register BB2 : _b2 ;
+type.register BB3 : _b3 ;
+type.register CCC : _c ;
+type.register DDD : _d ;
+
+import appender ;
+appender.register aaa-to-bbX : AAA : BB1%s BB2%s BB3%s ;
+appender.register bbX-to-ccc : BB1 BB2 BB3 : CCC ;
+appender.register ccc-to-ddd composing : CCC : DDD ;
+
+ddd _xxx : _xxx._a ;
+"""
+
+ t = BoostBuild.Tester()
+ __write_appender(t, "appender.jam")
+ t.write("_xxx._a", "")
+
+ def test_one(t, rename1, rename2, rename3, status):
+ def f(rename):
+ if rename: return "(%_x)"
+ return ""
+
+ jamfile = jamfile_template % (f(rename1), f(rename2), f(rename3))
+ t.write("jamroot.jam", jamfile, wait=False)
+
+ # Remove any preexisting targets left over from a previous test run
+ # so we do not have to be careful about tracking which files have been
+ # newly added and which preexisting ones have only been modified.
+ t.rm("bin")
+
+ t.run_build_system(status=status)
+
+ if status:
+ t.expect_output_lines("*.bbX-to-ccc: source targets have "
+ "different names: cannot determine target name")
+ else:
+ def suffix(rename):
+ if rename: return "_x"
+ return ""
+ name = "bin/_xxx"
+ e = t.expect_addition
+ e("%s%s._b1" % (name, suffix(rename1)))
+ e("%s%s._b2" % (name, suffix(rename2)))
+ e("%s%s._b3" % (name, suffix(rename3)))
+ e("%s%s._c" % (name, suffix(rename1 and rename2 and rename3)))
+ e("%s._d" % name)
+ t.expect_nothing_more()
+
+ test_one(t, False, False, False, status=0)
+ test_one(t, True , False, False, status=1)
+ test_one(t, False, True , False, status=1)
+ test_one(t, False, False, True , status=1)
+ test_one(t, True , True , False, status=1)
+ test_one(t, True , False, True , status=1)
+ test_one(t, False, True , True , status=1)
+ test_one(t, True , True , True , status=0)
+ t.cleanup()
+
+
+def __match_count_is(lines, pattern, expected):
+ count = 0
+ for x in lines:
+ if re.search(pattern, x):
+ count += 1
+ if count > expected:
+ return False
+ return count == expected
+
+
+def __write_appender(t, name):
+ t.write(name,
+r"""# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for registering test generators that construct their targets by
+# simply appending their given input data, e.g. list of sources & targets.
+
+import "class" : new ;
+import generators ;
+import modules ;
+import sequence ;
+
+rule register ( id composing ? : source-types + : target-types + )
+{
+ local caller-module = [ CALLER_MODULE ] ;
+ id = $(caller-module).$(id) ;
+ local g = [ new generator $(id) $(composing) : $(source-types) :
+ $(target-types) ] ;
+ $(g).set-rule-name $(__name__).appender ;
+ generators.register $(g) ;
+ return $(id) ;
+}
+
+if [ modules.peek : NT ]
+{
+ X = ")" ;
+ ECHO_CMD = (echo. ;
+}
+else
+{
+ X = \" ;
+ ECHO_CMD = "echo $(X)" ;
+}
+
+local appender-runs ;
+
+# We set up separate actions for building each target in order to avoid having
+# to iterate over them in action (i.e. shell) code. We have to be extra careful
+# though to achieve the exact same effect as if doing all the work in just one
+# action. Otherwise Boost Jam might, under some circumstances, run only some of
+# our actions. To achieve this we register a series of actions for all the
+# targets (since they all have the same target list - either all or none of them
+# get run independent of which target actually needs to get built), each
+# building only a single target. Since all our actions use the same targets, we
+# can not use 'on-target' parameters to pass data to a specific action so we
+# pass them using the second 'sources' parameter which our actions then know how
+# to interpret correctly. This works well since Boost Jam does not automatically
+# add dependency relations between specified action targets & sources and so the
+# second argument, even though most often used to pass in a list of sources, can
+# actually be used for passing in any type of information.
+rule appender ( targets + : sources + : properties * )
+{
+ appender-runs = [ CALC $(appender-runs:E=0) + 1 ] ;
+ local target-index = 0 ;
+ local target-count = [ sequence.length $(targets) ] ;
+ local original-targets ;
+ for t in $(targets)
+ {
+ target-index = [ CALC $(target-index) + 1 ] ;
+ local appender-run = $(appender-runs) ;
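+        # Tag the run with an "[index/count]" marker, but only when this
+        # action produces more than one target.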
+ if $(targets[2])-defined
+ {
+ appender-run += "[$(target-index)/$(target-count)]" ;
+ }
+ append $(targets) : $(appender-run:J=" ") $(t) $(sources) ;
+ }
+}
+
+actions append
+{
+ $(ECHO_CMD)-------------------------------------------------$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)>> "$(>[2])"
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)>> "$(>[2])"
+ $(ECHO_CMD) Target: '$(>[2])'$(X)
+ $(ECHO_CMD) Target: '$(>[2])'$(X)>> "$(>[2])"
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)>> "$(>[2])"
+ $(ECHO_CMD)=================================================$(X)
+ $(ECHO_CMD)-------------------------------------------------$(X)>> "$(>[2])"
+}
+""")
+
+
+test_basic()
+test_generated_target_names()
diff --git a/src/boost/tools/build/test/implicit_dependency.py b/src/boost/tools/build/test/implicit_dependency.py
new file mode 100644
index 000000000..dac9c7c54
--- /dev/null
+++ b/src/boost/tools/build/test/implicit_dependency.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that <implicit-dependency> is respected even if the target referred to
+# is not built itself, but only referred to via <implicit-dependency>.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+make a.h : : gen-header ;
+explicit a.h ;
+
+exe hello : hello.cpp : <implicit-dependency>a.h ;
+
+import os ;
+if [ os.name ] = NT
+{
+ actions gen-header
+ {
+ echo int i; > $(<)
+ }
+}
+else
+{
+ actions gen-header
+ {
+ echo "int i;" > $(<)
+ }
+}
+""")
+
+t.write("hello.cpp", """
+#include "a.h"
+int main() { return i; }
+""")
+
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+t.rm("bin")
+
+t.write("jamroot.jam", """
+make dir/a.h : : gen-header ;
+explicit dir/a.h ;
+
+exe hello : hello.cpp : <implicit-dependency>dir/a.h ;
+
+import os ;
+if [ os.name ] = NT
+{
+ actions gen-header
+ {
+ echo int i; > $(<)
+ }
+}
+else
+{
+ actions gen-header
+ {
+ echo "int i;" > $(<)
+ }
+}
+""")
+
+t.write("hello.cpp", """
+#include "dir/a.h"
+int main() { return i; }
+""")
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/indirect_conditional.py b/src/boost/tools/build/test/indirect_conditional.py
new file mode 100644
index 000000000..b59c7800d
--- /dev/null
+++ b/src/boost/tools/build/test/indirect_conditional.py
@@ -0,0 +1,148 @@
+#!/usr/bin/python
+
+# Copyright (C) 2006. Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_basic():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe a1 : a1.cpp : <conditional>@a1-rule ;
+rule a1-rule ( properties * )
+{
+ if <variant>debug in $(properties)
+ {
+ return <define>OK ;
+ }
+}
+
+exe a2 : a2.cpp : <conditional>@$(__name__).a2-rule
+ <variant>debug:<optimization>speed ;
+rule a2-rule ( properties * )
+{
+ if <optimization>speed in $(properties)
+ {
+ return <define>OK ;
+ }
+}
+
+exe a3 : a3.cpp :
+ <conditional>@$(__name__).a3-rule-1
+ <conditional>@$(__name__).a3-rule-2 ;
+rule a3-rule-1 ( properties * )
+{
+ if <optimization>speed in $(properties)
+ {
+ return <define>OK ;
+ }
+}
+rule a3-rule-2 ( properties * )
+{
+ if <variant>debug in $(properties)
+ {
+ return <optimization>speed ;
+ }
+}
+""")
+
+ t.write("a1.cpp", "#ifdef OK\nint main() {}\n#endif\n")
+ t.write("a2.cpp", "#ifdef OK\nint main() {}\n#endif\n")
+ t.write("a3.cpp", "#ifdef OK\nint main() {}\n#endif\n")
+
+ t.run_build_system()
+
+ t.expect_addition("bin/$toolset/debug*/a1.exe")
+ t.expect_addition("bin/$toolset/debug/optimization-speed*/a2.exe")
+ t.expect_addition("bin/$toolset/debug/optimization-speed*/a3.exe")
+
+ t.cleanup()
+
+def test_inherit():
+ """Tests that paths etc. are handled correctly when an indirect
+ conditional is inherited by a subproject."""
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("Jamroot.jam", """
+import feature ;
+import indirect ;
+exe d1 : d1.cpp ;
+explicit d1 ;
+project : requirements <conditional>@c1 ;
+build-project subdir ;
+feature.feature myrule : : free ;
+rule c1 ( properties * )
+{
+ return <dependency>d1 <include>include <myrule>@parent-generate ;
+}
+rule parent-generate ( project name : property-set : sources * )
+{
+ return $(sources) ;
+}
+rule my-generate ( project name : property-set : sources * )
+{
+ local r = [ $(property-set).get <myrule> ] ;
+ r = [ MATCH @(.*) : $(r) ] ;
+ return [ indirect.call
+ $(r) $(project) $(name) : $(property-set) : $(sources) ] ;
+}
+""")
+ t.write("d1.cpp", "int main(){}\n")
+ t.write("subdir/Jamfile", """
+generate srcs : main.cpp : <generating-rule>@my-generate ;
+exe main : srcs ;
+""")
+ t.write("include/a.h", "")
+ t.write("subdir/main.cpp", "#include <a.h>\nint main() {}\n")
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/d1.obj")
+ t.expect_addition("bin/$toolset/debug*/d1.exe")
+ t.expect_addition("subdir/bin/$toolset/debug*/main.obj")
+ t.expect_addition("subdir/bin/$toolset/debug*/main.exe")
+ t.expect_nothing_more()
+ t.cleanup()
+
+def test_glob_in_indirect_conditional():
+ """
+ Regression test: project-rules.glob rule run from inside an indirect
+ conditional should report an error as it depends on the 'currently loaded
+ project' concept and indirect conditional rules get called only after all
+ the project modules have already finished loading.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+use-project /library-example/foo : util/foo ;
+build-project app ;
+""")
+ t.write("app/app.cpp", "int main() {}\n");
+ t.write("app/jamfile.jam", "exe app : app.cpp /library-example/foo//bar ;")
+ t.write("util/foo/bar.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""")
+ t.write("util/foo/jamfile.jam", """\
+rule print-my-sources ( properties * )
+{
+ ECHO My sources: ;
+ ECHO [ glob *.cpp ] ;
+}
+lib bar : bar.cpp : <conditional>@print-my-sources ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_output_lines(["My sources:", "bar.cpp"], False)
+ t.expect_output_lines("error: Reference to the project currently being "
+ "loaded requested when there was no project module being loaded.")
+
+ t.cleanup()
+
+
+test_basic()
+test_inherit()
+test_glob_in_indirect_conditional()
diff --git a/src/boost/tools/build/test/inherit_toolset.py b/src/boost/tools/build/test/inherit_toolset.py
new file mode 100644
index 000000000..f80b1fec7
--- /dev/null
+++ b/src/boost/tools/build/test/inherit_toolset.py
@@ -0,0 +1,100 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("a.cpp", "\n")
+
+t.write("yfc1.jam", """\
+import feature ;
+import generators ;
+
+feature.extend toolset : yfc1 ;
+rule init ( ) { }
+
+generators.register-standard yfc1.compile : CPP : OBJ : <toolset>yfc1 ;
+generators.register-standard yfc1.link : OBJ : EXE : <toolset>yfc1 ;
+
+actions compile { yfc1-compile }
+actions link { yfc1-link }
+""")
+
+t.write(
+ 'yfc1.py',
+"""
+from b2.build import feature, generators
+from b2.manager import get_manager
+
+MANAGER = get_manager()
+ENGINE = MANAGER.engine()
+
+feature.extend('toolset', ['yfc1'])
+
+generators.register_standard('yfc1.compile', ['CPP'], ['OBJ'], ['<toolset>yfc1'])
+generators.register_standard('yfc1.link', ['OBJ'], ['EXE'], ['<toolset>yfc1'])
+
+ENGINE.register_action(
+ 'yfc1.compile',
+ 'yfc1-compile'
+)
+
+ENGINE.register_action(
+ 'yfc1.link',
+ 'yfc1-link'
+)
+
+def init(*args):
+ pass
+
+"""
+)
+
+t.write("yfc2.jam", """\
+import feature ;
+import toolset ;
+
+feature.extend toolset : yfc2 ;
+toolset.inherit yfc2 : yfc1 ;
+rule init ( ) { }
+
+actions link { yfc2-link }
+""")
+
+t.write(
+ 'yfc2.py',
+"""
+from b2.build import feature, toolset
+from b2.manager import get_manager
+
+MANAGER = get_manager()
+ENGINE = MANAGER.engine()
+
+feature.extend('toolset', ['yfc2'])
+toolset.inherit('yfc2', 'yfc1')
+
+ENGINE.register_action('yfc2.link', 'yfc2-link')
+
+def init(*args):
+ pass
+"""
+)
+
+t.write("jamfile.jam", "exe a : a.cpp ;")
+t.write("jamroot.jam", "using yfc1 ;")
+
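+# A dry run (-n) with -d2 prints the commands that would be executed, so we
+# can check which link action was chosen.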
+t.run_build_system(["-n", "-d2", "yfc1"])
+t.fail_test(t.stdout().find("yfc1-link") == -1)
+
+# Make sure we do not have to explicitly 'use' yfc1.
+t.write("jamroot.jam", "using yfc2 ;")
+
+t.run_build_system(["-n", "-d2", "yfc2"])
+t.fail_test(t.stdout().find("yfc2-link") == -1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/inherited_dependency.py b/src/boost/tools/build/test/inherited_dependency.py
new file mode 100755
index 000000000..ae939f487
--- /dev/null
+++ b/src/boost/tools/build/test/inherited_dependency.py
@@ -0,0 +1,237 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt) or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+tester = BoostBuild.Tester(use_test_config=False)
+
+
+################################################################################
+#
+# Test without giving the project an explicit id.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.write("test.cpp", """
+#ifdef _WIN32
+ __declspec(dllexport)
+#endif
+void foo() {}
+""")
+
+tester.write("a/test1.cpp", """
+int main() {}
+""")
+
+tester.write("a/jamfile.jam", """
+exe test1 : test1.cpp ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug*/test.obj")
+tester.expect_addition("a/bin/$toolset/debug*/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Run the same test from the "a" directory.
+#
+################################################################################
+
+tester.run_build_system(subdir="a")
+
+tester.expect_addition("bin/$toolset/debug*/test.obj")
+tester.expect_addition("a/bin/$toolset/debug*/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# This time, do give the project an id.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project test_project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug*/test.obj")
+tester.expect_addition("a/bin/$toolset/debug*/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Now, give the project an id in its attributes.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project : id test_project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug*/test.obj")
+tester.expect_addition("a/bin/$toolset/debug*/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Give the project an id in both ways at once.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project test_project1 : id test_project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug*/test.obj")
+tester.expect_addition("a/bin/$toolset/debug*/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Test an absolute path in native format.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+import path ;
+path-constant here : . ;
+current-location = [ path.native [ path.root [ path.make $(here) ] [ path.pwd ]
+ ] ] ;
+project test : requirements <source>$(current-location)/a/test1.cpp ;
+exe test : test.cpp ;
+""")
+
+tester.run_build_system()
+tester.expect_addition("bin/$toolset/debug*/test.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Test an absolute path in canonical format.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+import path ;
+path-constant here : . ;
+current-location = [ path.root [ path.make $(here) ] [ path.pwd ] ] ;
+project test : requirements <source>$(current-location)/a/test1.cpp ;
+exe test : test.cpp ;
+""")
+
+tester.run_build_system()
+tester.expect_addition("bin/$toolset/debug*/test.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Test dependency properties (e.g. <source>) whose targets are specified using a
+# relative path.
+#
+################################################################################
+
+# Use jamroot.jam rather than jamfile.jam to avoid inheriting the <source> from
+# the parent, as that would make test3 a source of itself.
+tester.write("b/jamroot.jam", """
+obj test3 : test3.cpp ;
+""")
+
+tester.write("b/test3.cpp", """
+void bar() {}
+""")
+
+tester.write("jamroot.jam", """
+project test : requirements <source>b//test3 ;
+build-project a ;
+""")
+
+tester.write("a/jamfile.jam", """
+exe test : test1.cpp ;
+""")
+
+tester.write("a/test1.cpp", """
+void bar();
+int main() { bar(); }
+""")
+
+tester.run_build_system()
+tester.expect_addition("b/bin/$toolset/debug*/test3.obj")
+tester.expect_addition("a/bin/$toolset/debug*/test.exe")
+
+tester.rm("bin")
+tester.rm("a")
+tester.rm("jamroot.jam")
+tester.rm("test.cpp")
+
+
+################################################################################
+#
+# Test that source-location is respected.
+#
+################################################################################
+
+tester.write("build/jamroot.jam", """
+project : requirements <source>test.cpp : source-location ../src ;
+""")
+
+tester.write("src/test.cpp", """
+int main() {}
+""")
+
+tester.write("build/a/jamfile.jam", """
+project : source-location ../../a_src ;
+exe test : test1.cpp ;
+""")
+
+tester.write("a_src/test1.cpp", """
+""")
+
+tester.run_build_system(subdir="build/a")
+tester.expect_addition("build/a/bin/$toolset/debug*/test.exe")
+
+tester.cleanup()
diff --git a/src/boost/tools/build/test/inline.py b/src/boost/tools/build/test/inline.py
new file mode 100644
index 000000000..03d91a5d0
--- /dev/null
+++ b/src/boost/tools/build/test/inline.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+project : requirements <link>static ;
+exe a : a.cpp [ lib helper : helper.cpp ] ;
+""")
+
+t.write("a.cpp", """\
+extern void helper();
+int main() {}
+""")
+
+t.write("helper.cpp", "void helper() {}\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a__helper.lib")
+t.rm("bin/$toolset/debug*/a__helper.lib")
+
+t.run_build_system(["a__helper"])
+t.expect_addition("bin/$toolset/debug*/a__helper.lib")
+
+t.rm("bin")
+
+
+# Now check that inline targets with the same name but present in different
+# places are not confused with each other or with top-level targets.
+t.write("jamroot.jam", """\
+project : requirements <link>static ;
+exe a : a.cpp [ lib helper : helper.cpp ] ;
+exe a2 : a.cpp [ lib helper : helper.cpp ] ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/link-static*/a.exe")
+t.expect_addition("bin/$toolset/debug*/a__helper.lib")
+t.expect_addition("bin/$toolset/debug*/a2__helper.lib")
+
+
+# Check that the 'alias' target does not change the name of inline targets, and
+# that inline targets are explicit.
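+# (So nothing gets built by default, and an explicit "a" request produces
+# helper.lib rather than a mangled a__helper.lib.)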
+t.write("jamroot.jam", """\
+project : requirements <link>static ;
+alias a : [ lib helper : helper.cpp ] ;
+explicit a ;
+""")
+t.rm("bin")
+
+t.run_build_system()
+t.expect_nothing_more()
+
+t.run_build_system(["a"])
+t.expect_addition("bin/$toolset/debug*/helper.lib")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/lib_source_property.py b/src/boost/tools/build/test/lib_source_property.py
new file mode 100644
index 000000000..24a90773c
--- /dev/null
+++ b/src/boost/tools/build/test/lib_source_property.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test: if a library had no explicit sources, but only <source>
+# properties, it was built as if it were a searched library, and the specified
+# sources were not compiled.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+lib a : : <source>a.cpp ;
+""")
+
+t.write("a.cpp", """
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.obj")
+
+t.rm("bin")
+
+
+# Now try with <conditional>.
+t.write("jamroot.jam", """
+rule test ( properties * )
+{
+ return <source>a.cpp ;
+}
+lib a : : <conditional>@test ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/lib_zlib.py b/src/boost/tools/build/test/lib_zlib.py
new file mode 100755
index 000000000..04d32ba70
--- /dev/null
+++ b/src/boost/tools/build/test/lib_zlib.py
@@ -0,0 +1,184 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("zlib/zlib.h", 'zlib')
+t.write("zlib/deflate.c", 'deflate')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <source>$(here)/zlib ;
+alias zlib : /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('deflate.c', 'deflate')
+action('-c -x c -I./zlib -o $deflate.o $deflate.c')
+action('-c -x c -I./zlib -DZLIB_DLL -o $deflate-shared.o $deflate.c')
+action('--dll $deflate-shared.o -o $deflate.so')
+action('--archive $deflate.o -o $deflate.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/zlib/mock/debug/z.dll')
+t.expect_addition('bin/standalone/zlib/mock/debug/link-static/z.lib')
+
+# Build from source specified in the environment
+t.rm('bin')
+t.rm('zlib')
+
+t.write("zlib root/zlib.h", 'zlib')
+t.write("zlib root/deflate.c", 'deflate')
+
+t.write("Jamroot.jam", """
+using zlib ;
+alias zlib : /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('deflate.c', 'deflate')
+action(['-c', '-x', 'c', '-I./zlib root', '-o', '$deflate.o', '$deflate.c'])
+action(['-c', '-x', 'c', '-I./zlib root', '-DZLIB_DLL', '-o', '$deflate-shared.o', '$deflate.c'])
+action('--dll $deflate-shared.o -o $deflate.so')
+action('--archive $deflate.o -o $deflate.a')
+''')
+t.run_build_system(['-sZLIB_SOURCE=zlib root'])
+t.expect_addition('bin/standalone/zlib/mock/debug/z.dll')
+t.expect_addition('bin/standalone/zlib/mock/debug/link-static/z.lib')
+
+
+t.rm('zlib root')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('zlib.h.cpp', '#include <zlib.h>\\n')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=z -o $config.exe')
+action('-c -x c++ $zlib.h.cpp -o $zlib.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=z -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=z -o $config.exe')
+action('-c -x c++ $zlib.h.cpp -o $zlib.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=z -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <name>myzlib <include>$(here)/zlib <search>$(here)/zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+t.write('zlib/zlib.h', 'zlib')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zlib --static-lib=myzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./zlib -o $test.o')
+action('$test.o -L./zlib --static-lib=myzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <name>myzlib <include>$(here)/zlib <search>$(here)/zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zlib --shared-lib=myzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./zlib -o $test.o')
+action('$test.o -L./zlib --shared-lib=myzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - both static and shared libraries
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <name>myzlib <include>$(here)/zlib <search>$(here)/zlib ;
+exe test : test.cpp /zlib//zlib
+ : <link>shared:<define>SHARED : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zlib --static-lib=myzlib -o $config.exe')
+action('$main.o -L./zlib --shared-lib=myzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./zlib -o $test-static.o')
+action('-c -x c++ $test.cpp -I./zlib -DSHARED -o $test-shared.o')
+action('$test-static.o -L./zlib --static-lib=myzlib -o $test')
+action('$test-shared.o -L./zlib --shared-lib=myzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization from the environment
+t.rm('bin')
+t.write('Jamroot.jam', """
+using zlib ;
+exe test : test.cpp /zlib//zlib
+ : : <link>static <link>shared ;
+""")
+t.write('zlib root/zlib.h', 'zlib')
+MockToolset.set_expected(t, common_stuff + '''
+action(['$main.o', '-L./zlib root', '--shared-lib=myzlib', '-o', '$config.exe'])
+action(['-c', '-x', 'c++', '$test.cpp', '-I./zlib root', '-o', '$test.o'])
+action(['$test.o', '-L./zlib root', '--shared-lib=myzlib', '-o', '$test'])
+''')
+t.run_build_system(['-sZLIB_INCLUDE=zlib root',
+ '-sZLIB_LIBRARY_PATH=zlib root',
+ '-sZLIB_NAME=myzlib'])
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/libjpeg.py b/src/boost/tools/build/test/libjpeg.py
new file mode 100755
index 000000000..e6a5c2ba2
--- /dev/null
+++ b/src/boost/tools/build/test/libjpeg.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("libjpeg/jpeglib.h", 'libjpeg')
+t.write("libjpeg/jerror.c", 'jpeg')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg : : <source>$(here)/libjpeg ;
+alias libjpeg : /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('jerror.c', 'jpeg')
+action('-c -x c -I./libjpeg -o $jerror.o $jerror.c')
+action('--dll $jerror.o -o $jpeg.so')
+action('--archive $jerror.o -o $jpeg.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/libjpeg/mock/debug/jpeg.dll')
+t.expect_addition('bin/standalone/libjpeg/mock/debug/link-static/jpeg.lib')
+
+t.rm('libjpeg')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('jpeg.h.cpp', '#include <stdio.h>\\n#include <jpeglib.h>\\n')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=jpeg -o $config.exe')
+action('-c -x c++ $jpeg.h.cpp -o $jpeg.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=jpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=jpeg -o $config.exe')
+action('-c -x c++ $jpeg.h.cpp -o $jpeg.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=jpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg : : <name>mylibjpeg <include>$(here)/libjpeg <search>$(here)/libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+t.write('libjpeg/jpeglib.h', 'libjpeg')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libjpeg --static-lib=mylibjpeg -o $config.exe')
+action('-c -x c++ $test.cpp -I./libjpeg -o $test.o')
+action('$test.o -L./libjpeg --static-lib=mylibjpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg : : <name>mylibjpeg <include>$(here)/libjpeg <search>$(here)/libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libjpeg --shared-lib=mylibjpeg -o $config.exe')
+action('-c -x c++ $test.cpp -I./libjpeg -o $test.o')
+action('$test.o -L./libjpeg --shared-lib=mylibjpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/liblzma.py b/src/boost/tools/build/test/liblzma.py
new file mode 100755
index 000000000..6bc767fbb
--- /dev/null
+++ b/src/boost/tools/build/test/liblzma.py
@@ -0,0 +1,118 @@
+#!/usr/bin/python
+
+# Copy-paste-modify from zlib.py
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('lzma.h.cpp', '#include <lzma.h>\\n')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using lzma ;
+exe test : test.cpp /lzma//lzma : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=lzma -o $config.exe')
+action('-c -x c++ $lzma.h.cpp -o $lzma.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=lzma -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using lzma ;
+exe test : test.cpp /lzma//lzma : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=lzma -o $config.exe')
+action('-c -x c++ $lzma.h.cpp -o $lzma.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=lzma -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using lzma : : <name>mylzma <include>$(here)/lzma <search>$(here)/lzma ;
+exe test : test.cpp /lzma//lzma : : <link>static <link>shared ;
+""")
+
+t.write('lzma/lzma.h', 'lzma')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./lzma --static-lib=mylzma -o $config.exe')
+action('-c -x c++ $test.cpp -I./lzma -o $test.o')
+action('$test.o -L./lzma --static-lib=mylzma -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using lzma : : <name>mylzma <include>$(here)/lzma <search>$(here)/lzma ;
+exe test : test.cpp /lzma//lzma : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./lzma --shared-lib=mylzma -o $config.exe')
+action('-c -x c++ $test.cpp -I./lzma -o $test.o')
+action('$test.o -L./lzma --shared-lib=mylzma -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - both static and shared libraries
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using lzma : : <name>mylzma <include>$(here)/lzma <search>$(here)/lzma ;
+exe test : test.cpp /lzma//lzma
+ : <link>shared:<define>SHARED : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./lzma --static-lib=mylzma -o $config.exe')
+action('$main.o -L./lzma --shared-lib=mylzma -o $config.exe')
+action('-c -x c++ $test.cpp -I./lzma -o $test-static.o')
+action('-c -x c++ $test.cpp -I./lzma -DSHARED -o $test-shared.o')
+action('$test-static.o -L./lzma --static-lib=mylzma -o $test')
+action('$test-shared.o -L./lzma --shared-lib=mylzma -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/libpng.py b/src/boost/tools/build/test/libpng.py
new file mode 100755
index 000000000..3e7e5cd2d
--- /dev/null
+++ b/src/boost/tools/build/test/libpng.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("libpng/png.h", 'libpng')
+t.write("libpng/png.c", 'png')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng : : <source>$(here)/libpng ;
+alias libpng : /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('png.c', 'png')
+action('-c -x c -I./libpng -o $png.o $png.c')
+action('--dll $png.o -o $png.so')
+action('--archive $png.o -o $png.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/libpng/mock/debug/png.dll')
+t.expect_addition('bin/standalone/libpng/mock/debug/link-static/png.lib')
+
+t.rm('libpng')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('png.h.cpp', '#include <png.h>')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=png -o $config.exe')
+action('-c -x c++ $png.h.cpp -o $png.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=png -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=png -o $config.exe')
+action('-c -x c++ $png.h.cpp -o $png.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=png -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng : : <name>mylibpng <include>$(here)/libpng <search>$(here)/libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+t.write('libpng/png.h', 'libpng')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libpng --static-lib=mylibpng -o $config.exe')
+action('-c -x c++ $test.cpp -I./libpng -o $test.o')
+action('$test.o -L./libpng --static-lib=mylibpng -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng : : <name>mylibpng <include>$(here)/libpng <search>$(here)/libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libpng --shared-lib=mylibpng -o $config.exe')
+action('-c -x c++ $test.cpp -I./libpng -o $test.o')
+action('$test.o -L./libpng --shared-lib=mylibpng -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/library_chain.py b/src/boost/tools/build/test/library_chain.py
new file mode 100644
index 000000000..6a977cda6
--- /dev/null
+++ b/src/boost/tools/build/test/library_chain.py
@@ -0,0 +1,152 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a chain of libraries works correctly, whether we use static or
+# shared linking.
+
+import BoostBuild
+import os
+import string
+import sys
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Stage the binary so that it will be relinked without hardcode-dll-paths. That
+# checks that we pass the correct -rpath-link even when not passing -rpath.
+t.write("jamfile.jam", """\
+stage dist : main ;
+exe main : main.cpp b ;
+""")
+
+t.write("main.cpp", """\
+void foo();
+int main() { foo(); }
+""")
+
+t.write("jamroot.jam", "")
+
+t.write("a/a.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+gee() {}
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+geek() {}
+""")
+
+t.write("a/jamfile.jam", "lib a : a.cpp ;")
+
+t.write("b/b.cpp", """\
+void geek();
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() { geek(); }
+""")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../a//a ;")
+
+t.run_build_system(["-d2"], stderr=None)
+t.expect_addition("bin/$toolset/debug*/main.exe")
+t.rm(["bin", "a/bin", "b/bin"])
+
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static*/main.exe")
+t.rm(["bin", "a/bin", "b/bin"])
+
+
+# Check that <library> works for static linking.
+t.write("b/jamfile.jam", "lib b : b.cpp : <library>../a//a ;")
+
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static*/main.exe")
+
+t.rm(["bin", "a/bin", "b/bin"])
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../a//a/<link>shared : <link>static ;")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/main.exe")
+
+t.rm(["bin", "a/bin", "b/bin"])
+
+
+# Test that putting a library in the sources of a searched library works.
+t.write("jamfile.jam", """\
+exe main : main.cpp png ;
+lib png : z : <name>png ;
+lib z : : <name>zzz ;
+""")
+
+t.run_build_system(["-a", "-d+2"], status=None, stderr=None)
+# Try to find the "zzz" string either in the response file (for Windows
+# compilers) or in the standard output.
+rsp = t.adjust_names("bin/$toolset/debug*/main.exe.rsp")[0]
+if os.path.exists(rsp) and ( open(rsp).read().find("zzz") != -1 ):
+ pass
+elif t.stdout().find("zzz") != -1:
+ pass
+else:
+ t.fail_test(1)
+
+# Test main -> libb -> liba chain in the case where liba is a file and not a
+# B2 target.
+t.rm(".")
+
+t.write("jamroot.jam", "")
+t.write("a/jamfile.jam", """\
+lib a : a.cpp ;
+install dist : a ;
+""")
+
+t.write("a/a.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+void a() {}
+""")
+
+t.run_build_system(subdir="a")
+t.expect_addition("a/dist/a.dll")
+
+if sys.platform == 'win32':
+ # This is a Windows import library.
+ file = t.adjust_name("a.implib")
+else:
+ file = t.adjust_name("a.dll")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../a/dist/%s ;" % file)
+
+t.write("b/b.cpp", """\
+#if defined(_WIN32)
+__declspec(dllimport)
+#endif
+void a();
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+void b() { a(); }
+""")
+
+t.write("jamroot.jam", "exe main : main.cpp b//b ;")
+
+t.write("main.cpp", """\
+#if defined(_WIN32)
+__declspec(dllimport)
+#endif
+void b();
+int main() { b(); }
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/main.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/library_order.py b/src/boost/tools/build/test/library_order.py
new file mode 100644
index 000000000..4b0585e00
--- /dev/null
+++ b/src/boost/tools/build/test/library_order.py
@@ -0,0 +1,94 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that, on compilers sensitive to library order on the linker's command
+# line, we generate the correct order.
+
+import BoostBuild
+
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("main.cpp", """\
+void a();
+int main() { a(); }
+""")
+
+t.write("a.cpp", """\
+void b();
+void a() { b(); }
+""")
+
+t.write("b.cpp", """\
+void c();
+void b() { c(); }
+""")
+
+t.write("c.cpp", """\
+void d();
+void c() { d(); }
+""")
+
+t.write("d.cpp", """\
+void d() {}
+""")
+
+# The order of libraries in 'main' is crafted so that we get an error unless we
+# do something about the order ourselves.
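+# With a single-pass linker the working order is liba libb libc libd (each
+# library listed before the ones it depends on); the jamroot below lists them
+# reversed and relies on the <use> relationships to let B2 reorder them.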
+t.write("jamroot.jam", """\
+exe main : main.cpp libd libc libb liba ;
+lib libd : d.cpp ;
+lib libc : c.cpp : <link>static <use>libd ;
+lib libb : b.cpp : <use>libc ;
+lib liba : a.cpp : <use>libb ;
+""")
+
+t.run_build_system(["-d2"])
+t.expect_addition("bin/$toolset/debug*/main.exe")
+
+
+# Test the order between searched libraries.
+t.write("jamroot.jam", """\
+exe main : main.cpp png z ;
+lib png : z : <name>png ;
+lib z : : <name>zzz ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("png") > t.stdout().find("zzz"))
+
+t.write("jamroot.jam", """\
+exe main : main.cpp png z ;
+lib png : : <name>png ;
+lib z : png : <name>zzz ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("png") < t.stdout().find("zzz"))
+
+
+# Test the order between prebuilt libraries.
+t.write("first.a", "")
+t.write("second.a", "")
+t.write("jamroot.jam", """\
+exe main : main.cpp first second ;
+lib first : second : <file>first.a ;
+lib second : : <file>second.a ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("first") > t.stdout().find("second"))
+
+t.write("jamroot.jam", """
+exe main : main.cpp first second ;
+lib first : : <file>first.a ;
+lib second : first : <file>second.a ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("first") < t.stdout().find("second"))
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/library_property.py b/src/boost/tools/build/test/library_property.py
new file mode 100644
index 000000000..6dc571440
--- /dev/null
+++ b/src/boost/tools/build/test/library_property.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the <library> property has no effect on "obj" targets. Previously,
+# it affected all targets, so
+#
+# project : requirements <library>foo ;
+# exe a : a.cpp helper ;
+# obj helper : helper.cpp : <optimization>off ;
+#
+# caused 'foo' to be built with and without optimization.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+project : requirements <library>lib//x ;
+exe a : a.cpp foo ;
+obj foo : foo.cpp : <variant>release ;
+""")
+
+t.write("a.cpp", """
+void aux();
+int main() { aux(); }
+""")
+
+t.write("foo.cpp", """
+void gee();
+void aux() { gee(); }
+""")
+
+t.write("lib/x.cpp", """
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+gee() {}
+""")
+
+t.write("lib/jamfile.jam", """
+lib x : x.cpp ;
+""")
+
+t.write("lib/jamroot.jam", """
+""")
+
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+t.expect_nothing("lib/bin/$toolset/release/x.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/libtiff.py b/src/boost/tools/build/test/libtiff.py
new file mode 100755
index 000000000..cb0d07b0f
--- /dev/null
+++ b/src/boost/tools/build/test/libtiff.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("libtiff/tiff.h", 'libtiff')
+t.write("libtiff/tiff.c", 'tiff')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff : : <source>$(here)/libtiff ;
+alias libtiff : /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('tiff.c', 'tiff')
+action('-c -x c -I./libtiff -o $tiff.o $tiff.c')
+action('--dll $tiff.o -o $tiff.so')
+action('--archive $tiff.o -o $tiff.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/libtiff/mock/debug/tiff.dll')
+t.expect_addition('bin/standalone/libtiff/mock/debug/link-static/tiff.lib')
+
+t.rm('libtiff')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('tiff.h.cpp', '#include <tiff.h>')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=tiff -o $config.exe')
+action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=tiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=tiff -o $config.exe')
+action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=tiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+t.write('libtiff/tiff.h', 'libtiff')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libtiff --static-lib=mylibtiff -o $config.exe')
+action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
+action('$test.o -L./libtiff --static-lib=mylibtiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libtiff --shared-lib=mylibtiff -o $config.exe')
+action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
+action('$test.o -L./libtiff --shared-lib=mylibtiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/libzstd.py b/src/boost/tools/build/test/libzstd.py
new file mode 100755
index 000000000..c582c6ad9
--- /dev/null
+++ b/src/boost/tools/build/test/libzstd.py
@@ -0,0 +1,118 @@
+#!/usr/bin/python
+
+# Copy-paste-modify from zlib.py
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('zstd.h.cpp', '#include <zstd.h>\\n')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zstd ;
+exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=zstd -o $config.exe')
+action('-c -x c++ $zstd.h.cpp -o $zstd.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=zstd -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zstd ;
+exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=zstd -o $config.exe')
+action('-c -x c++ $zstd.h.cpp -o $zstd.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=zstd -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zstd : : <name>myzstd <include>$(here)/zstd <search>$(here)/zstd ;
+exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
+""")
+
+t.write('zstd/zstd.h', 'zstd')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zstd --static-lib=myzstd -o $config.exe')
+action('-c -x c++ $test.cpp -I./zstd -o $test.o')
+action('$test.o -L./zstd --static-lib=myzstd -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zstd : : <name>myzstd <include>$(here)/zstd <search>$(here)/zstd ;
+exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zstd --shared-lib=myzstd -o $config.exe')
+action('-c -x c++ $test.cpp -I./zstd -o $test.o')
+action('$test.o -L./zstd --shared-lib=myzstd -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - both static and shared libraries
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zstd : : <name>myzstd <include>$(here)/zstd <search>$(here)/zstd ;
+exe test : test.cpp /zstd//zstd
+ : <link>shared:<define>SHARED : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zstd --static-lib=myzstd -o $config.exe')
+action('$main.o -L./zstd --shared-lib=myzstd -o $config.exe')
+action('-c -x c++ $test.cpp -I./zstd -o $test-static.o')
+action('-c -x c++ $test.cpp -I./zstd -DSHARED -o $test-shared.o')
+action('$test-static.o -L./zstd --static-lib=myzstd -o $test')
+action('$test-shared.o -L./zstd --shared-lib=myzstd -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/link.py b/src/boost/tools/build/test/link.py
new file mode 100755
index 000000000..e0524ef0e
--- /dev/null
+++ b/src/boost/tools/build/test/link.py
@@ -0,0 +1,350 @@
+#!/usr/bin/python
+
+# Copyright 2014-2015 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the link-directory rule used to create the
+# common boost/ directory in the new git layout.
+
+import BoostBuild
+
+def ignore_config(t):
+ """These files are created by the configuration logic in link.jam
+ They may or may not exist, depending on the system."""
+ t.ignore("bin/symlink/test-hardlink")
+ t.ignore("bin/test-hardlink-source")
+ t.ignore("bin/test-symlink")
+ t.ignore("bin/test-symlink-source")
+
+def test_basic():
+ """Test creation of a single link"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+
+ t.run_build_system()
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ ignore_config(t)
+ t.expect_nothing_more()
+ t.cleanup()
+
+def test_merge_two():
+ """Test merging two directories"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+
+ t.run_build_system()
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ t.expect_addition("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ ignore_config(t)
+ t.expect_nothing_more()
+ t.cleanup()
+
+def test_merge_existing(group1, group2):
+ """Test adding a link when a different symlink already exists"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+
+ t.run_build_system(group1)
+
+ if "dir1-link" in group1:
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ if "dir2-link" in group1:
+ t.expect_addition("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.run_build_system(group2)
+
+ if "dir1-link" in group2:
+ if "dir1-link" not in group1:
+ t.expect_addition("include/file1.h")
+ else:
+ # When a directory is split the link needs to be recreated.
+ # On Windows, the test system checks the mod time of the
+ # link rather than the link target, so this may be seen as
+ # an update.
+ t.ignore_touch("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ else:
+ t.ignore_removal("include/file1.h")
+
+ if "dir2-link" in group2:
+ if "dir2-link" not in group1:
+ t.expect_addition("include/file2.h")
+ else:
+ t.ignore_touch("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ else:
+ t.ignore_removal("include/file2.h")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_merge_existing_all():
+ test_merge_existing(["dir1-link"], ["dir2-link"])
+ test_merge_existing(["dir2-link"], ["dir1-link"])
+ test_merge_existing(["dir1-link"], ["dir1-link", "dir2-link"])
+ test_merge_existing(["dir2-link"], ["dir1-link", "dir2-link"])
+
+def test_merge_recursive():
+ "Test merging several directories including common prefixes"
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ link-directory dir3-link : src/dir3/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+ t.write("src/dir2/include/nested/file3.h", "file3")
+ t.write("src/dir3/include/nested/file4.h", "file4")
+
+ t.run_build_system()
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ t.expect_addition("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ t.expect_addition("include/nested/file3.h")
+ t.expect_content("include/nested/file3.h", "file3")
+ t.expect_addition("include/nested/file4.h")
+ t.expect_content("include/nested/file4.h", "file4")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_merge_recursive_existing(group1, group2):
+ "Test merging several directories including common prefixes."
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ link-directory dir3-link : src/dir3/include : <location>. ;
+ link-directory dir4-link : src/dir4/include : <location>. ;
+ link-directory dir5-link : src/dir5/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/nested/file2.h", "file2")
+ t.write("src/dir3/include/nested/file3.h", "file3")
+ t.write("src/dir4/include/nested/xxx/yyy/file4.h", "file4")
+ t.write("src/dir5/include/nested/xxx/yyy/file5.h", "file5")
+
+ t.run_build_system(group1)
+ t.run_build_system(group2 + ["-d+12"])
+
+ def check_file(target, file):
+ if target in group2:
+ if target in group1:
+ t.ignore_touch(file)
+ else:
+ t.expect_addition(file)
+
+ check_file("dir1-link", "include/file1.h")
+ check_file("dir2-link", "include/nested/file2.h")
+ check_file("dir3-link", "include/nested/file3.h")
+ check_file("dir4-link", "include/nested/xxx/yyy/file4.h")
+ check_file("dir5-link", "include/nested/xxx/yyy/file5.h")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_merge_recursive_existing_all():
+ # These should create a link
+ test_merge_recursive_existing(["dir2-link"], ["dir2-link", "dir1-link"])
+ test_merge_recursive_existing(["dir2-link"], ["dir1-link", "dir2-link"])
+ # These should create a directory
+ test_merge_recursive_existing(["dir2-link"], ["dir2-link", "dir3-link"])
+ test_merge_recursive_existing(["dir2-link"], ["dir3-link", "dir2-link"])
+ # It should work even if we have to create many intermediate subdirectories
+ test_merge_recursive_existing(["dir4-link"], ["dir4-link", "dir5-link"])
+ test_merge_recursive_existing(["dir4-link"], ["dir5-link", "dir4-link"])
+
+def test_include_scan():
+ """Make sure that the #include scanner finds the headers"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ obj test : test.cpp :
+ <include>include
+ <implicit-dependency>dir1-link
+ <implicit-dependency>dir2-link ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "#include <file2.h>\n")
+ t.write("src/dir2/include/file2.h", "int f();\n")
+ t.write("test.cpp", """\
+ #include <file1.h>
+ int main() { f(); }
+ """);
+
+ t.run_build_system(["test"])
+
+ t.expect_addition("bin/$toolset/debug*/test.obj")
+
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_include_scan_merge_existing():
+ """Make sure that files are replaced if needed when merging in
+ a new directory"""
+ t = BoostBuild.Tester()
+
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ obj test : test.cpp :
+ <include>include
+ <implicit-dependency>dir1-link
+ <implicit-dependency>dir2-link ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "int f();")
+ t.write("src/dir2/include/file2.h", "#include <file1.h>")
+ t.write("test.cpp", """\
+ #include <file2.h>
+ int main() { f(); }
+ """)
+
+ t.run_build_system(["dir2-link"])
+
+ t.run_build_system(["test"])
+ t.expect_addition("include/file1.h")
+ t.expect_addition("bin/$toolset/debug*/test.obj")
+ t.ignore_touch("include/file2.h")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_update_file_link(params1, params2):
+ """Tests the behavior of updates when changing the link mode.
+ The link needs to be updated iff the original was a copy."""
+ t = BoostBuild.Tester()
+
+ t.write("jamroot.jam", """\
+ import link ;
+ import project ;
+ import property-set ;
+ import modules ;
+
+ if --no-symlinks in [ modules.peek : ARGV ]
+ {
+ modules.poke link : .can-symlink : false ;
+ }
+
+ if --no-hardlinks in [ modules.peek : ARGV ]
+ {
+ modules.poke link : .can-hardlink : false ;
+ }
+
+ .project = [ project.current ] ;
+ .has-files = [ glob include/file1.h ] ;
+
+ rule can-link ( properties * ) {
+ if ( ! [ link.can-symlink $(.project) ] ) &&
+ ( ! [ link.can-hardlink $(.project) ] )
+ {
+ ECHO links unsupported ;
+ }
+ }
+
+ # Use two directories so that we link to individual files.
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ alias check-linking : : <conditional>@can-link ;
+ """)
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+
+ t.run_build_system(params1)
+ ignore_config(t)
+ t.expect_addition("include/file1.h")
+ t.expect_addition("include/file2.h")
+ t.expect_nothing_more()
+
+ using_links = "links unsupported" not in t.stdout()
+
+ t.touch("src/dir1/include/file1.h")
+
+ t.run_build_system(params2)
+ if not using_links: t.expect_touch("include/file1.h")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_update_file_link_all():
+ """Test all nine possible combinations of two runs."""
+ possible_args = [[], ["--no-symlinks"], ["--no-symlinks", "--no-hardlinks"]]
+ for arg1 in possible_args:
+ for arg2 in possible_args:
+ test_update_file_link(arg1, arg2)
+
+def test_error_duplicate():
+ """Test that linking a single file from
+ multiple sources causes a hard error."""
+ t = BoostBuild.Tester()
+
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file1.h", "file2")
+
+ t.run_build_system(status=1)
+ t.expect_output_lines(
+ ["error: Cannot create link include/file1.h to src/dir2/include/file1.h.",
+ "error: Link previously defined to another file, src/dir1/include/file1.h."])
+
+ t.cleanup()
+
+test_basic()
+test_merge_two()
+test_merge_existing_all()
+test_merge_recursive()
+test_merge_recursive_existing_all()
+test_include_scan()
+test_include_scan_merge_existing()
+test_update_file_link_all()
+test_error_duplicate()
diff --git a/src/boost/tools/build/test/load_dir.py b/src/boost/tools/build/test/load_dir.py
new file mode 100644
index 000000000..1b082b1c6
--- /dev/null
+++ b/src/boost/tools/build/test/load_dir.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+"""
+Traverses a directory and outputs the code that would create the same directory
+structure during testing. Assumes that the instance of Tester is called 't'.
+"""
+
+from __future__ import print_function
+
+import sys
+import os
+import stat
+import string
+
+def usage():
+ print("usage: load_dir.py directory")
+
+
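+# Strip the leading path component, e.g. "dir1/dir2/file.h" -> "dir2/file.h".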
+def remove_first_component(path):
+ result = [path]
+ while 1:
+ s = os.path.split(result[0])
+ if not s[0]:
+ break
+ result[:1] = list(s)
+ return os.path.join(*result[1:])
+
+
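+# Emit a t.write() call for every regular file under dirname. The 'arg'
+# parameter is unused; the signature mirrors the old os.path.walk callback.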
+def create_file(arg, dirname, fnames):
+ for n in fnames:
+ path = os.path.join(dirname, n)
+ if not os.path.isdir(path):
+ print("t.write(\"%s\", \"\"\"" % (remove_first_component(path),),)
+ f = open(path, "r")
+ for l in f:
+ print(l)
+ print('\n""")\n')
+
+
+header = """#!/usr/bin/python
+
+# Copyright (C) FILL SOMETHING HERE 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+"""
+
+footer = """
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/FILL_SOME_HERE.exe")
+
+t.cleanup()
+"""
+
+
+def main():
+ if len(sys.argv) != 2:
+ usage()
+ else:
+ path = sys.argv[1]
+
+ if not os.access(path, os.F_OK):
+ print("Path '%s' does not exist" % (path,))
+ sys.exit(1)
+
+ if not os.path.isdir(path):
+ print("Path '%s' is not a directory" % (path,))
+
+ print(header)
+
+ for root, _, files in os.walk(path):
+ create_file(None, root, files)
+
+ print(footer)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/boost/tools/build/test/load_order.py b/src/boost/tools/build/test/load_order.py
new file mode 100644
index 000000000..6e0055026
--- /dev/null
+++ b/src/boost/tools/build/test/load_order.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we load parent projects before loading children.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+use-project /child : child ;
+ECHO "Setting parent requirements" ;
+project : requirements <define>PASS_THE_TEST ;
+alias x : child//main ;
+""")
+
+t.write("child/jamfile.jam", """\
+ECHO "Setting child requirements" ;
+project /child ;
+exe main : main.cpp ;
+""")
+
+t.write("child/main.cpp", """\
+#if defined(PASS_THE_TEST)
+int main() {}
+#endif
+""")
+
+t.run_build_system()
+
+t.expect_addition("child/bin/$toolset/debug*/main.exe")
+t.fail_test(t.stdout().find("Setting child requirements") < t.stdout().find(
+ "Setting parent requirements"))
+
+
+# Regression test: parent requirements were ignored in some cases.
+t.rm(".")
+t.write("jamroot.jam", "build-project src ;")
+t.write("src/jamfile.jam", "project : requirements <define>EVERYTHING_OK ;")
+t.write("src/app/jamfile.jam", "exe test : test.cpp ;")
+t.write("src/app/test.cpp", """\
+#ifdef EVERYTHING_OK
+int main() {}
+#endif
+""")
+
+t.run_build_system(subdir="src/app")
+t.expect_addition("src/app/bin/$toolset/debug*/test.exe")
+
+
+# child/child2 used to be loaded before child
+t.rm(".")
+t.write("jamroot.jam", """\
+use-project /child/child2 : child/child2 ;
+rule parent-rule ( )
+{
+ ECHO "Running parent-rule" ;
+}
+""")
+t.write("child/jamfile.jam", "")
+t.write("child/child1/jamfile.jam", "")
+t.write("child/child2/jamfile.jam", "parent-rule ;")
+
+t.run_build_system(subdir="child/child1")
+t.expect_output_lines("Running parent-rule")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/loop.py b/src/boost/tools/build/test/loop.py
new file mode 100644
index 000000000..ffb99181d
--- /dev/null
+++ b/src/boost/tools/build/test/loop.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """\
+lib main : main.cpp l ;
+lib l : l.cpp main ;
+""")
+
+t.write("main.cpp", "")
+t.write("l.cpp", "")
+
+t.run_build_system(["--no-error-backtrace"], status=1)
+t.fail_test(t.stdout().find(
+ "error: Recursion in main target references") == -1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/make_rule.py b/src/boost/tools/build/test/make_rule.py
new file mode 100644
index 000000000..ad8fd42fd
--- /dev/null
+++ b/src/boost/tools/build/test/make_rule.py
@@ -0,0 +1,54 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'make' rule.
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester(pass_toolset=1)
+
+t.write("jamroot.jam", """\
+import feature ;
+feature.feature test_feature : : free ;
+
+import toolset ;
+toolset.flags creator STRING : <test_feature> ;
+
+actions creator
+{
+ echo $(STRING) > $(<)
+}
+
+make foo.bar : : creator : <test_feature>12345678 ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/foo.bar")
+t.fail_test(t.read("bin/foo.bar").find("12345678") == -1)
+
+
+# Regression test. Make sure that if a main target is requested twice, and the
+# build requests differ only in incidental properties, the main target is
+# created only once. The bug was discovered by Kirill Lapshin.
+t.write("jamroot.jam", """\
+exe a : dir//hello1.cpp ;
+exe b : dir//hello1.cpp/<hardcode-dll-paths>true ;
+""")
+
+t.write("dir/jamfile.jam", """\
+import common ;
+make hello1.cpp : hello.cpp : common.copy ;
+""")
+
+t.write("dir/hello.cpp", "int main() {}\n")
+
+# Show only action names.
+t.run_build_system(["-d1", "-n"])
+t.fail_test(t.stdout().count("copy") != 1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/message.py b/src/boost/tools/build/test/message.py
new file mode 100755
index 000000000..30edced4d
--- /dev/null
+++ b/src/boost/tools/build/test/message.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2003.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'message' main target rule.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(["-d0"], use_test_config=False)
+
+t.write("Jamroot.jam", """
+project
+ :
+ : usage-requirements <define>TEST=1
+ : default-build <link>static
+;
+message hello : "Hello World!" ;
+alias hello : : <link>shared ;
+obj test : test.cpp hello : <link>static ;
+""")
+
+t.write("test.cpp", """
+#ifndef TEST
+#error TEST not defined
+#endif
+""")
+
+t.run_build_system(["test"], stdout="""Hello World!
+""")
+
+t.expect_addition("bin/$toolset/link-static*/test.obj")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/module_actions.py b/src/boost/tools/build/test/module_actions.py
new file mode 100644
index 000000000..33c563526
--- /dev/null
+++ b/src/boost/tools/build/test/module_actions.py
@@ -0,0 +1,105 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Demonstration that module variables have the correct effect in actions.
+
+import BoostBuild
+import os
+import re
+
+t = BoostBuild.Tester(["-d+1"], pass_toolset=0)
+
+t.write("boost-build.jam", "boost-build . ;")
+t.write("bootstrap.jam", """\
+# Top-level rule causing a target to be built by invoking the specified action.
+rule make ( target : sources * : act )
+{
+ DEPENDS all : $(target) ;
+ DEPENDS $(target) : $(sources) ;
+ $(act) $(target) : $(sources) ;
+}
+
+X1 = X1-global ;
+X2 = X2-global ;
+X3 = X3-global ;
+
+module A
+{
+ X1 = X1-A ;
+
+ rule act ( target )
+ {
+ NOTFILE $(target) ;
+ ALWAYS $(target) ;
+ }
+
+ actions act { echo A.act $(<): $(X1) $(X2) $(X3) }
+
+ make t1 : : A.act ;
+ make t2 : : A.act ;
+ make t3 : : A.act ;
+}
+
+module B
+{
+ X2 = X2-B ;
+
+ actions act { echo B.act $(<): $(X1) $(X2) $(X3) }
+
+ make t1 : : B.act ;
+ make t2 : : B.act ;
+ make t3 : : B.act ;
+}
+
+actions act { echo act $(<): $(X1) $(X2) $(X3) }
+
+make t1 : : act ;
+make t2 : : act ;
+make t3 : : act ;
+
+X1 on t1 = X1-t1 ;
+X2 on t2 = X2-t2 ;
+X3 on t3 = X3-t3 ;
+
+DEPENDS all : t1 t2 t3 ;
+""")
+
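+# The expected output shows the binding rules: a target-specific setting
+# ("X<n> on t<n>") is visible to every action and overrides anything else; a
+# variable set inside a module is visible only to that module's actions; and
+# global values are seen only by the action defined in the root module.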
+expected_lines = [
+ "...found 4 targets...",
+ "...updating 3 targets...",
+ "A.act t1",
+ "A.act t1: X1-t1 ",
+ "B.act t1",
+ "B.act t1: X1-t1 X2-B ",
+ "act t1",
+ "act t1: X1-t1 X2-global X3-global ",
+ "A.act t2",
+ "A.act t2: X1-A X2-t2 ",
+ "B.act t2",
+ "B.act t2: X2-t2 ",
+ "act t2",
+ "act t2: X1-global X2-t2 X3-global ",
+ "A.act t3",
+ "A.act t3: X1-A X3-t3 ",
+ "B.act t3",
+ "B.act t3: X2-B X3-t3 ",
+ "act t3",
+ "act t3: X1-global X2-global X3-t3 ",
+ "...updated 3 targets...",
+ ""]
+
+# Account for the fact that on Unix, an 'echo' command whose arguments carry
+# extra or trailing spaces still prints them separated by single spaces, so
+# normalize the expected lines accordingly.
+if os.name != 'nt':
+ expected_lines = [re.sub(" +", " ", x.rstrip()) for x in expected_lines]
+
+t.run_build_system()
+t.expect_output_lines(expected_lines)
+t.expect_nothing_more()
+t.cleanup()
diff --git a/src/boost/tools/build/test/ndebug.py b/src/boost/tools/build/test/ndebug.py
new file mode 100644
index 000000000..87fbc6c6d
--- /dev/null
+++ b/src/boost/tools/build/test/ndebug.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that building with optimization brings in the NDEBUG define and, more
+# importantly, that dependency targets are built with NDEBUG as well, even if
+# they are not directly requested.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "exe hello : hello.cpp lib//lib1 ;")
+t.write("hello.cpp", """\
+#ifdef NDEBUG
+void foo();
+int main() { foo(); }
+#endif
+""")
+t.write("lib/jamfile.jam", "lib lib1 : lib1.cpp ;")
+t.write("lib/lib1.cpp", """\
+#ifdef NDEBUG
+void foo() {}
+#endif
+""")
+
+# 'release' builds should get the NDEBUG define. We use static linking to avoid
+# messing with imports/exports on Windows.
+t.run_build_system(["link=static", "release"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/no_type.py b/src/boost/tools/build/test/no_type.py
new file mode 100644
index 000000000..0384ec604
--- /dev/null
+++ b/src/boost/tools/build/test/no_type.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we cannot specify targets of unknown type as sources. This is based
+# on the fact that Unix 'ar' will happily consume just about anything.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", "static-lib a : a.foo ;")
+t.write("a.foo", "")
+
+t.run_build_system(status=1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/notfile.py b/src/boost/tools/build/test/notfile.py
new file mode 100644
index 000000000..eebc457ba
--- /dev/null
+++ b/src/boost/tools/build/test/notfile.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Basic tests for the 'notfile' rule.
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """\
+import notfile ;
+notfile say : "echo hi" ;
+exe hello : hello.cpp ;
+notfile hello_valgrind : @valgrind : hello ;
+actions valgrind { valgrind $(>[1]) }
+""")
+
+t.write("hello.cpp", """\
+#include <iostream>
+int main() { std::cout << "Hello!\\n"; }
+""")
+
+t.run_build_system(["-n", "-d+2"])
+
+t.fail_test(t.stdout().find("echo hi") == -1)
+
+name = t.adjust_names("bin/$toolset/debug*/hello.exe")[0]
+name = os.path.join(*name.split("/"))
+t.expect_output_lines(" valgrind *%s " % name)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/ordered_include.py b/src/boost/tools/build/test/ordered_include.py
new file mode 100644
index 000000000..ef1d8745d
--- /dev/null
+++ b/src/boost/tools/build/test/ordered_include.py
@@ -0,0 +1,251 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_default_order():
+ tester = BoostBuild.Tester(use_test_config=False)
+ tester.write("jamroot.jam", """
+
+ import order ;
+ import "class" : new ;
+
+ obj test : test.cpp : <include>b <include>a ;
+ """)
+
+ tester.write("test.cpp", """
+ #include <test.hpp>
+ int main() { f(); }
+ """)
+
+ tester.write("a/test.hpp", """
+ void f();
+ """)
+
+ tester.write("b/test.hpp", """
+ """)
+
+ tester.run_build_system()
+
+ tester.expect_addition("bin/$toolset/debug*/test.obj")
+
+ # Check that the dependencies are correct
+ tester.touch("a/test.hpp")
+ tester.run_build_system()
+ tester.expect_touch("bin/$toolset/debug*/test.obj")
+ tester.expect_nothing_more()
+
+ tester.touch("b/test.hpp")
+ tester.run_build_system()
+ tester.expect_nothing_more()
+
+ tester.cleanup()
+
+def test_default_order_mixed():
+ tester = BoostBuild.Tester(use_test_config=False)
+ tester.write("jamroot.jam", """
+
+ import order ;
+ import "class" : new ;
+
+ obj test : test.cpp : <include>b <include>a <include>c&&d ;
+ """)
+
+ tester.write("test.cpp", """
+ #include <test.hpp>
+ int main() { f(); }
+ """)
+
+ tester.write("a/test.hpp", """
+ void f();
+ """)
+
+ tester.write("b/test.hpp", """
+ """)
+
+ tester.run_build_system()
+
+ tester.expect_addition("bin/$toolset/debug*/test.obj")
+
+ # Check that the dependencies are correct
+ tester.touch("a/test.hpp")
+ tester.run_build_system()
+ tester.expect_touch("bin/$toolset/debug*/test.obj")
+ tester.expect_nothing_more()
+
+ tester.touch("b/test.hpp")
+ tester.run_build_system()
+ tester.expect_nothing_more()
+
+ tester.cleanup()
+
+def test_basic():
+ tester = BoostBuild.Tester(use_test_config=False)
+ tester.write("jamroot.jam", """
+ obj test : test.cpp : <include>a&&b ;
+ """)
+
+ tester.write("test.cpp", """
+ #include <test1.hpp>
+ #include <test2.hpp>
+ int main() {}
+ """)
+
+ tester.write("a/test1.hpp", """
+ """)
+
+ tester.write("b/test2.hpp", """
+ """)
+
+ tester.run_build_system()
+
+ tester.expect_addition("bin/$toolset/debug*/test.obj")
+
+ # Check that the dependencies are correct
+ tester.touch("a/test1.hpp")
+ tester.run_build_system()
+ tester.expect_touch("bin/$toolset/debug*/test.obj")
+
+ tester.touch("b/test2.hpp")
+ tester.run_build_system()
+ tester.expect_touch("bin/$toolset/debug*/test.obj")
+
+ tester.cleanup()
+
+def test_order1():
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("jamroot.jam", """
+ obj test : test.cpp : <include>a&&b ;
+ """)
+ t.write("test.cpp", """
+ #include <test.h>
+ int main() {}
+ """)
+ t.write("a/test.h", """
+ """)
+ t.write("b/test.h", """
+ #error should find a/test.h
+ """)
+ t.run_build_system()
+
+ t.touch("a/test.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("b/test.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_order2():
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("jamroot.jam", """
+ obj test : test.cpp : <include>b&&a ;
+ """)
+ t.write("test.cpp", """
+ #include <test.h>
+ int main() {}
+ """)
+ t.write("a/test.h", """
+ #error should find b/test.h
+ """)
+ t.write("b/test.h", """
+ """)
+ t.run_build_system()
+
+ t.touch("a/test.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("b/test.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/test.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_order_graph():
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("jamroot.jam", """
+ obj test : test.cpp :
+ <include>b&&a
+ <include>c&&b
+ <include>a
+ <include>c
+ <include>b
+ <include>e&&b&&d
+ ;
+ """)
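+    # The ordered (&&) groups above impose the constraints b<a, c<b, e<b and
+    # b<d on the final include order, so each header below must be picked up
+    # from the directory that the constraints place first.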
+ t.write("test.cpp", """
+ #include <test1.h>
+ #include <test2.h>
+ #include <test3.h>
+ #include <test4.h>
+ int main() {}
+ """)
+ t.write("b/test1.h", "")
+ t.write("a/test1.h", "#error should find b/test1.h\n")
+
+ t.write("c/test2.h", "")
+ t.write("b/test2.h", "#error should find c/test2.h\n")
+
+ t.write("e/test3.h", "")
+ t.write("b/test3.h", "#error should find e/test3.h\n")
+
+ t.write("b/test4.h", "")
+ t.write("d/test4.h", "#error should find b/test4.h\n")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/test.obj")
+
+ t.touch("b/test1.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("a/test1.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("c/test2.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("b/test2.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("e/test3.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("b/test3.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("b/test4.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug*/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("d/test4.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+test_default_order()
+test_default_order_mixed()
+test_basic()
+test_order1()
+test_order2()
+test_order_graph()
diff --git a/src/boost/tools/build/test/ordered_properties.py b/src/boost/tools/build/test/ordered_properties.py
new file mode 100644
index 000000000..23068a01c
--- /dev/null
+++ b/src/boost/tools/build/test/ordered_properties.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This checks that B2 does not reorder <include> properties
+# lexicographically.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("a.cpp", """
+#include <a.h>
+int main() { foo(); }
+""")
+
+t.write("jamroot.jam", """
+exe a : a.cpp : <include>d2 <include>d1 ;
+""")
+
+t.write("d1/a.h", """
+""")
+
+t.write("d2/a.h", """
+inline void foo() {}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/out_of_tree.py b/src/boost/tools/build/test/out_of_tree.py
new file mode 100644
index 000000000..400101e93
--- /dev/null
+++ b/src/boost/tools/build/test/out_of_tree.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that we can build a project when the current directory is outside of
+# that project's tree, i.e. that 'bjam some_dir' works.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+# Create the needed files.
+t.write("p1/jamroot.jam", "exe hello : hello.cpp ;")
+t.write("p1/hello.cpp", "int main() {}\n")
+t.write("p2/jamroot.jam", """\
+exe hello2 : hello.cpp ;
+exe hello3 : hello.cpp ;
+""")
+t.write("p2/hello.cpp", "int main() {}\n")
+
+t.run_build_system(["p1", "p2//hello3"])
+t.expect_addition("p1/bin/$toolset/debug*/hello.exe")
+t.expect_addition("p2/bin/$toolset/debug*/hello3.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/package.py b/src/boost/tools/build/test/package.py
new file mode 100644
index 000000000..7cc5e33c0
--- /dev/null
+++ b/src/boost/tools/build/test/package.py
@@ -0,0 +1,231 @@
+#!/usr/bin/python
+
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the package module.
+
+import BoostBuild
+import os
+
+def setup():
+ t = BoostBuild.Tester(["p//install", "p//data"],
+ use_test_config=False)
+
+ t.write("p/jamroot.jam", "")
+ t.write("p/jamfile.jam", """\
+ import package ;
+ exe a : a.cpp ;
+ lib b : b.cpp ;
+ package.install install Test :
+ : a
+ : b/<link>static b/<link>shared
+ : a.h ;
+ package.install-data data : Test : a.txt ;
+ """)
+ t.write("p/a.cpp", "int main() {}")
+ t.write("p/b.cpp", """
+ int
+ #ifdef _WIN32
+ __declspec(dllexport)
+ #endif
+ must_export_something;
+ """)
+ t.write("p/a.h", "")
+ t.write("p/a.txt", "")
+ return t
+
+def test_defaults():
+ t = setup()
+
+    # Since the default install location is outside our test area,
+ # we don't want to actually execute the build.
+ t.run_build_system(["-n", "-d1"])
+
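+    # With no prefix configured, the test expects the package module's default
+    # install prefix: /usr/local on Unix-like systems and C:/Test (derived from
+    # the package name "Test") on Windows.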
+ installdir = "C:/Test" if os.name == 'nt' else "/usr/local"
+ t.expect_output_lines([
+ x.replace('/', os.sep) for x in
+ ["common.copy %s/bin/%s" % (installdir, t.adjust_name("a.exe")),
+ "common.copy %s/lib/%s" % (installdir, t.adjust_name("b.dll")),
+ "common.copy %s/lib/%s" % (installdir, t.adjust_name("b.lib")),
+ "common.copy %s/include/a.h" % installdir,
+ "common.copy %s/share/Test/a.txt" % installdir]])
+
+ t.cleanup()
+
+def test_prefix():
+ t = setup()
+    # An explicit --prefix on the command line should override all of these:
+ t.write("project-config.jam", """
+ option.set prefix : bad ;
+ option.set bindir : bad/bin ;
+ option.set libdir : bad/lib ;
+ option.set includedir : bad/include ;
+ option.set datarootdir : bad/share ;
+ """)
+
+ t.run_build_system(["--prefix=installdir"])
+ t.expect_addition("installdir/bin/a.exe")
+ t.expect_addition("installdir/lib/b.dll")
+ t.expect_addition("installdir/lib/b.lib")
+ t.expect_addition("installdir/include/a.h")
+ t.expect_addition("installdir/share/Test/a.txt")
+
+ t.cleanup()
+
+def test_subdirs():
+ t = setup()
+ # Command line options override config files
+ t.write("project-config.jam", """
+ option.set prefix : bad ;
+ option.set bindir : bad/bin ;
+ option.set libdir : bad/lib ;
+ option.set includedir : bad/include ;
+ option.set datarootdir : bad/share ;
+ """)
+
+ t.run_build_system(["--libdir=installdir/lib64",
+ "--bindir=installdir/binx",
+ "--includedir=installdir/includex",
+ "--datarootdir=installdir/sharex"])
+ t.expect_addition("installdir/binx/a.exe")
+ t.expect_addition("installdir/lib64/b.dll")
+ t.expect_addition("installdir/lib64/b.lib")
+ t.expect_addition("installdir/includex/a.h")
+ t.expect_addition("installdir/sharex/Test/a.txt")
+
+ t.cleanup()
+
+def test_subdirs_with_prefix():
+ t = setup()
+ # Command line options override config files
+ t.write("project-config.jam", """
+ option.set prefix : bad ;
+ option.set bindir : bad/bin ;
+ option.set libdir : bad/lib ;
+ option.set includedir : bad/include ;
+ option.set datarootdir : bad/share ;
+ """)
+
+ t.run_build_system(["--prefix=bad",
+ "--libdir=installdir/lib64",
+ "--bindir=installdir/binx",
+ "--includedir=installdir/includex",
+ "--datarootdir=installdir/sharex"])
+ t.expect_addition("installdir/binx/a.exe")
+ t.expect_addition("installdir/lib64/b.dll")
+ t.expect_addition("installdir/lib64/b.lib")
+ t.expect_addition("installdir/includex/a.h")
+ t.expect_addition("installdir/sharex/Test/a.txt")
+
+ t.cleanup()
+
+def test_prefix_config_file():
+ t = setup()
+    # The prefix set in the config file should be used when no --prefix is given:
+ t.write("project-config.jam", """
+ option.set prefix : installdir ;
+ """)
+
+ t.run_build_system()
+ t.expect_addition("installdir/bin/a.exe")
+ t.expect_addition("installdir/lib/b.dll")
+ t.expect_addition("installdir/lib/b.lib")
+ t.expect_addition("installdir/include/a.h")
+ t.expect_addition("installdir/share/Test/a.txt")
+
+ t.cleanup()
+
+def test_subdirs_config_file():
+ t = setup()
+    # Directory options set in the config file should be used when none are given on the command line:
+ t.write("project-config.jam", """
+ option.set prefix : installdir ;
+ option.set libdir : installdir/lib64 ;
+ option.set bindir : installdir/binx ;
+ option.set includedir : installdir/includex ;
+ option.set datarootdir : installdir/sharex ;
+ """)
+
+ t.run_build_system()
+ t.expect_addition("installdir/binx/a.exe")
+ t.expect_addition("installdir/lib64/b.dll")
+ t.expect_addition("installdir/lib64/b.lib")
+ t.expect_addition("installdir/includex/a.h")
+ t.expect_addition("installdir/sharex/Test/a.txt")
+
+ t.cleanup()
+
+def test_multiple():
+    '''If no prefix is specified, different install targets may each use
+    their own default install prefix.'''
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("p/jamroot.jam", "")
+ t.write("p/jamfile.jam", """\
+ import package ;
+ exe a : a.cpp ;
+ lib b : b.cpp ;
+ package.install installx TestX : <install-default-prefix>xxx
+ : a
+ : b/<link>static b/<link>shared
+ : a.h ;
+ package.install instally TestY : <install-default-prefix>yyy
+ : a
+ : b/<link>static b/<link>shared
+ : a.h ;
+ """)
+ t.write("p/a.cpp", "int main() {}")
+ t.write("p/b.cpp", """
+ int
+ #ifdef _WIN32
+ __declspec(dllexport)
+ #endif
+ must_export_something;
+ """)
+ t.write("p/a.h", "")
+ t.run_build_system(["p//installx", "p//instally"])
+ t.expect_addition("p/xxx/bin/a.exe")
+ t.expect_addition("p/xxx/lib/b.dll")
+ t.expect_addition("p/xxx/lib/b.lib")
+ t.expect_addition("p/xxx/include/a.h")
+ t.expect_addition("p/yyy/bin/a.exe")
+ t.expect_addition("p/yyy/lib/b.dll")
+ t.expect_addition("p/yyy/lib/b.lib")
+ t.expect_addition("p/yyy/include/a.h")
+
+def test_paths():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("Jamroot.jam", """\
+ import package ;
+ import assert ;
+ import os ;
+ if [ os.name ] = NT
+ {
+ default-prefix = "/C:/Test" ;
+ }
+ else
+ {
+ default-prefix = /usr/local ;
+ }
+ paths = [ package.paths Test ] ;
+ assert.result $(default-prefix) : $(paths).prefix ;
+ assert.result $(default-prefix)/lib : $(paths).libdir ;
+ assert.result $(default-prefix)/bin : $(paths).bindir ;
+ assert.result $(default-prefix)/include : $(paths).includedir ;
+ assert.result $(default-prefix)/share : $(paths).datarootdir ;
+ package.add-path-option bardir : bar : libdir ;
+ assert.result $(default-prefix)/lib/bar : $(paths).get bardir ;
+ """)
+ t.run_build_system()
+ t.cleanup()
+
+test_defaults()
+test_prefix()
+test_subdirs()
+test_subdirs_with_prefix()
+test_prefix_config_file()
+test_subdirs_config_file()
+test_multiple()
+test_paths()
diff --git a/src/boost/tools/build/test/param.py b/src/boost/tools/build/test/param.py
new file mode 100644
index 000000000..14b3d7d94
--- /dev/null
+++ b/src/boost/tools/build/test/param.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
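+# Test param.handle-named-params: rule arguments may be given positionally or
+# introduced by a parameter name such as 'sources' or 'requirements', each name
+# may be used at most once, and positional arguments must come first.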
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("Jamroot.jam", """\
+import param ;
+import assert ;
+import errors : try catch ;
+rule test1 ( )
+{
+ param.handle-named-params ;
+}
+test1 ;
+rule test2 ( sources * )
+{
+ param.handle-named-params sources ;
+ return $(sources) ;
+}
+assert.result : test2 ;
+assert.result test1.cpp test2.cpp : test2 test1.cpp test2.cpp ;
+assert.result test1.cpp test2.cpp : test2 sources test1.cpp test2.cpp ;
+rule test3 ( sources * : requirements * )
+{
+ param.handle-named-params sources requirements ;
+ return $(sources) -- $(requirements) ;
+}
+assert.result -- : test3 ;
+assert.result -- <link>shared : test3 : <link>shared ;
+assert.result test1.cpp -- <link>shared : test3 test1.cpp : <link>shared ;
+assert.result test1.cpp -- <link>shared
+ : test3 test1.cpp : requirements <link>shared ;
+assert.result test1.cpp -- <link>shared
+ : test3 sources test1.cpp : requirements <link>shared ;
+assert.result test1.cpp -- <link>shared
+ : test3 requirements <link>shared : sources test1.cpp ;
+assert.result -- : test3 sources ;
+assert.result -- : test3 requirements ;
+assert.result -- <link>shared : test3 requirements <link>shared ;
+try ;
+{
+ test3 sources test1.cpp : sources test2.cpp ;
+}
+catch Parameter 'sources' passed more than once. ;
+try ;
+{
+ test3 sources test1.cpp : <link>shared ;
+}
+catch "Positional arguments must appear first." ;
+EXIT : 0 ;
+""")
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/path_features.py b/src/boost/tools/build/test/path_features.py
new file mode 100644
index 000000000..5b23150be
--- /dev/null
+++ b/src/boost/tools/build/test/path_features.py
@@ -0,0 +1,163 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_basic():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "lib a : a.cpp : <include>. ;")
+ t.write("a.cpp", """\
+#include <a.h>
+void
+# ifdef _WIN32
+__declspec(dllexport)
+# endif
+foo() {}
+""")
+ t.write("a.h", "//empty file\n")
+ t.write("d/jamfile.jam", "exe b : b.cpp ..//a ;")
+ t.write("d/b.cpp", """\
+void foo();
+int main() { foo(); }
+""")
+ t.run_build_system(subdir="d")
+
+ # Path features with condition.
+ t.write("jamroot.jam", "lib a : a.cpp : <variant>debug:<include>. ;")
+ t.rm("bin")
+ t.run_build_system(subdir="d")
+
+
+ # Path features with condition in usage requirements.
+ t.write("jamroot.jam", """\
+lib a : a.cpp : <include>. : : <variant>debug:<include>. ;
+""")
+ t.write("d/b.cpp", """\
+#include <a.h>
+void foo();
+int main() { foo(); }
+""")
+ t.rm("d/bin")
+ t.run_build_system(subdir="d")
+
+ t.cleanup()
+
+
+def test_absolute_paths():
+ """
+    Test that absolute paths inside requirements work correctly. The problem
+    appeared only when building targets in subprojects.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "build-project x ;")
+ t.write("x/jamfile.jam", """\
+local pwd = [ PWD ] ;
+project : requirements <include>$(pwd)/x/include ;
+exe m : m.cpp : <include>$(pwd)/x/include2 ;
+""")
+ t.write("x/m.cpp", """\
+#include <h1.hpp>
+#include <h2.hpp>
+int main() {}
+""")
+ t.write("x/include/h1.hpp", "\n")
+ t.write("x/include2/h2.hpp", "\n")
+
+ t.run_build_system()
+ t.expect_addition("x/bin/$toolset/debug*/m.exe")
+
+ t.cleanup()
+
+
+def test_ordered_paths():
+ """Test that "&&" in path features is handled correctly."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "build-project sub ;")
+ t.write("sub/jamfile.jam", "exe a : a.cpp : <include>../h1&&../h2 ;")
+ t.write("sub/a.cpp", """\
+#include <header.h>
+int main() { return OK; }
+""")
+ t.write("h2/header.h", "int const OK = 0;\n")
+ t.run_build_system()
+ t.expect_addition("sub/bin/$toolset/debug*/a.exe")
+
+ t.cleanup()
+
+
+def test_paths_set_by_indirect_conditionals():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ header = "child_dir/folder_to_include/some_header.h"
+
+ t.write("jamroot.jam", """
+build-project child_dir ;
+rule attach-include-parent ( properties * )
+{
+ return <include>another_folder ;
+}
+# requirements inherited from a parent project will bind paths
+# relative to the project that actually names the rule.
+project : requirements <conditional>@attach-include-parent ;
+""")
+ t.write("child_dir/jamfile.jam", """\
+import remote/remote ;
+
+# If we set the <include>folder_to_include property directly, it will work
+obj x1 : x.cpp : <conditional>@attach-include-local ;
+obj x2 : x.cpp : <conditional>@remote.attach-include-remote ;
+
+rule attach-include-local ( properties * )
+{
+ return <include>folder_to_include ;
+}
+""")
+ t.write("child_dir/remote/remote.jam", """\
+rule attach-include-remote ( properties * )
+{
+ return <include>folder_to_include ;
+}
+""")
+ t.write("child_dir/x.cpp", """\
+#include <some_header.h>
+#include <header2.h>
+int main() {}
+""")
+ t.write(header, "int some_func();\n")
+ t.write("another_folder/header2.h", "int f2();\n")
+ t.write("child_dir/folder_to_include/jamfile.jam", "")
+
+ expected_x1 = "child_dir/bin/$toolset/debug*/x1.obj"
+ expected_x2 = "child_dir/bin/$toolset/debug*/x2.obj"
+
+ t.run_build_system()
+ t.expect_addition(expected_x1)
+ t.expect_addition(expected_x2)
+
+ t.touch(header)
+ t.run_build_system(subdir="child_dir")
+ t.expect_touch(expected_x1)
+ t.expect_touch(expected_x2)
+
+ t.touch(header)
+ t.run_build_system([".."], subdir="child_dir/folder_to_include")
+ t.expect_touch(expected_x1)
+ t.expect_touch(expected_x2)
+
+ t.cleanup()
+
+
+test_basic()
+test_absolute_paths()
+test_ordered_paths()
+test_paths_set_by_indirect_conditionals()
diff --git a/src/boost/tools/build/test/pch.py b/src/boost/tools/build/test/pch.py
new file mode 100644
index 000000000..58cf894a9
--- /dev/null
+++ b/src/boost/tools/build/test/pch.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+
+# Copyright 2006 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+from time import sleep
+
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """
+import pch ;
+cpp-pch pch : pch.hpp : <toolset>msvc:<source>pch.cpp <include>. ;
+exe hello : hello.cpp pch : <include>. ;
+""")
+
+t.write("pch.hpp.bad", """
+THIS WILL NOT COMPILE
+""")
+
+# Note that pch.hpp is written after pch.hpp.bad, so its timestamp will not be
+# older than that of pch.hpp.bad.
+sleep(1)
+t.write("pch.hpp", """
+class TestClass
+{
+public:
+ TestClass( int, int ) {}
+};
+""")
+
+t.write("pch.cpp", """#include <pch.hpp>
+""")
+
+t.write("hello.cpp", """#include <pch.hpp>
+int main() { TestClass c(1, 2); }
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+
+# Now make the header unusable without changing its timestamp. If everything is
+# OK, B2 will not recreate the PCH, and the compiler will happily use the
+# pre-compiled header, not noticing that the real header is bad.
+
+t.copy_preserving_timestamp("pch.hpp.bad", "pch.hpp")
+
+t.rm("bin/$toolset/debug/hello.obj")
+t.rm("bin/$toolset/debug/*/hello.obj")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/prebuilt.py b/src/boost/tools/build/test/prebuilt.py
new file mode 100644
index 000000000..e67b726e8
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt.py
@@ -0,0 +1,43 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can use already built sources
+
+import BoostBuild
+
+t = BoostBuild.Tester(["debug", "release"], use_test_config=False)
+
+t.set_tree('prebuilt')
+
+t.expand_toolset("ext/jamroot.jam")
+t.expand_toolset("jamroot.jam")
+
+# First, build the external project.
+t.run_build_system(subdir="ext")
+
+# Then pretend that we do not have the sources for the external project, and
+# can only use compiled binaries.
+t.copy("ext/jamfile2.jam", "ext/jamfile.jam")
+t.expand_toolset("ext/jamfile.jam")
+
+# Now check that we can build the main project and that the correct prebuilt
+# file is picked depending on the variant. This also checks that the correct
+# includes for prebuilt libraries are used.
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+t.expect_addition("bin/$toolset/release*/hello.exe")
+
+t.rm("bin")
+
+
+# Now test that prebuilt file specified by absolute name works too.
+t.copy("ext/jamfile3.jam", "ext/jamfile.jam")
+t.expand_toolset("ext/jamfile.jam")
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+t.expect_addition("bin/$toolset/release*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/prebuilt/ext/a.cpp b/src/boost/tools/build/test/prebuilt/ext/a.cpp
new file mode 100644
index 000000000..c49a04153
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/a.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+#ifdef RELEASE
+void release() {}
+#else
+void debug() {}
+#endif
diff --git a/src/boost/tools/build/test/prebuilt/ext/debug/a.h b/src/boost/tools/build/test/prebuilt/ext/debug/a.h
new file mode 100644
index 000000000..31b318226
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/debug/a.h
@@ -0,0 +1,13 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void debug();
diff --git a/src/boost/tools/build/test/prebuilt/ext/jamfile.jam b/src/boost/tools/build/test/prebuilt/ext/jamfile.jam
new file mode 100644
index 000000000..e563f0d74
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/jamfile.jam
@@ -0,0 +1,13 @@
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project ext
+ : requirements <variant>release:<define>RELEASE
+ ;
+
+lib a : a.cpp ;
+
+install dist : a : <variant>release:<location>release
+ <variant>debug:<location>debug ;
diff --git a/src/boost/tools/build/test/prebuilt/ext/jamfile2.jam b/src/boost/tools/build/test/prebuilt/ext/jamfile2.jam
new file mode 100644
index 000000000..6481808c6
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/jamfile2.jam
@@ -0,0 +1,41 @@
+
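+# Pick the platform-specific prefix and suffix used to name the prebuilt
+# library files referenced via <file> below. ($toolset is a placeholder that
+# the test driver expands to the actual toolset name.)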
+import os ;
+
+local dll-suffix = so ;
+local prefix = "lib" ;
+if [ os.name ] in NT
+{
+ if [ MATCH ^(gcc) : $toolset ]
+ {
+ dll-suffix = dll.a ;
+ prefix = lib ;
+ }
+ else
+ {
+ dll-suffix = lib ;
+ prefix = "" ;
+ }
+}
+else if [ os.name ] in CYGWIN
+{
+ dll-suffix = dll ;
+}
+else if [ os.name ] in MACOSX
+{
+ dll-suffix = dylib ;
+}
+
+project ext ;
+
+lib a :
+ : <file>debug/$(prefix)a.$(dll-suffix) <variant>debug
+ :
+ : <include>debug
+ ;
+
+lib a :
+ : <file>release/$(prefix)a.$(dll-suffix) <variant>release
+ :
+ : <include>release
+ ;
+
diff --git a/src/boost/tools/build/test/prebuilt/ext/jamfile3.jam b/src/boost/tools/build/test/prebuilt/ext/jamfile3.jam
new file mode 100644
index 000000000..be2257fa2
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/jamfile3.jam
@@ -0,0 +1,48 @@
+
+# This Jamfile is the same as jamfile2.jam, except that it accesses the
+# prebuilt targets using absolute paths, which used to be broken on Windows.
+
+import os ;
+
+local dll-suffix = so ;
+local prefix = "lib" ;
+if [ os.name ] in NT
+{
+ if [ MATCH ^(gcc) : $toolset ]
+ {
+ dll-suffix = dll.a ;
+ prefix = lib ;
+ }
+ else
+ {
+ dll-suffix = lib ;
+ prefix = "" ;
+ }
+}
+else if [ os.name ] in CYGWIN
+{
+ dll-suffix = dll ;
+}
+else if [ os.name ] in MACOSX
+{
+ dll-suffix = dylib ;
+}
+
+project ext ;
+
+# Assumes bjam was invoked from the project root.
+local pwd = [ PWD ] ;
+
+lib a :
+ : <file>$(pwd)/ext/debug/$(prefix)a.$(dll-suffix) <variant>debug
+ :
+ : <include>debug
+ ;
+
+lib a :
+ : <file>$(pwd)/ext/release/$(prefix)a.$(dll-suffix) <variant>release
+ :
+ : <include>release
+ ;
+
diff --git a/src/boost/tools/build/test/prebuilt/ext/jamroot.jam b/src/boost/tools/build/test/prebuilt/ext/jamroot.jam
new file mode 100644
index 000000000..c7617d5d3
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/jamroot.jam
@@ -0,0 +1,5 @@
+# Copyright 2002, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
diff --git a/src/boost/tools/build/test/prebuilt/ext/release/a.h b/src/boost/tools/build/test/prebuilt/ext/release/a.h
new file mode 100644
index 000000000..9ab71d88c
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/ext/release/a.h
@@ -0,0 +1,13 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void release();
diff --git a/src/boost/tools/build/test/prebuilt/hello.cpp b/src/boost/tools/build/test/prebuilt/hello.cpp
new file mode 100644
index 000000000..4c1ab7036
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/hello.cpp
@@ -0,0 +1,20 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <a.h>
+
+int main()
+{
+ #ifdef RELEASE
+ release();
+ #else
+ debug();
+ #endif
+ return 0;
+}
diff --git a/src/boost/tools/build/test/prebuilt/jamfile.jam b/src/boost/tools/build/test/prebuilt/jamfile.jam
new file mode 100644
index 000000000..18b731ae1
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/jamfile.jam
@@ -0,0 +1,13 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project test
+ : requirements <variant>release:<define>RELEASE
+ ;
+
+use-project /ext : ext ;
+
+exe hello : hello.cpp /ext//a ;
+
diff --git a/src/boost/tools/build/test/prebuilt/jamroot.jam b/src/boost/tools/build/test/prebuilt/jamroot.jam
new file mode 100644
index 000000000..f022c0d64
--- /dev/null
+++ b/src/boost/tools/build/test/prebuilt/jamroot.jam
@@ -0,0 +1,4 @@
+# Copyright 2002, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
diff --git a/src/boost/tools/build/test/preprocessor.py b/src/boost/tools/build/test/preprocessor.py
new file mode 100755
index 000000000..715ae3e58
--- /dev/null
+++ b/src/boost/tools/build/test/preprocessor.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the C/C++ preprocessor.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """
+project ;
+preprocessed hello : hello.cpp ;
+preprocessed a : a.c ;
+exe hello.exe : hello a : <define>FAIL ;
+""")
+
+t.write("hello.cpp", """
+#ifndef __cplusplus
+#error "This file must be compiled as C++"
+#endif
+#ifdef FAIL
+#error "Not preprocessed?"
+#endif
+extern "C" int foo();
+int main() { return foo(); }
+""")
+
+t.write("a.c", """
+/* This will not compile unless in C mode. */
+#ifdef __cplusplus
+#error "This file must be compiled as C"
+#endif
+#ifdef FAIL
+#error "Not preprocessed?"
+#endif
+int foo()
+{
+ int new = 0;
+ new = (new+1)*7;
+ return new;
+}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/hello.ii")
+t.expect_addition("bin/$toolset/debug*/a.i")
+t.expect_addition("bin/$toolset/debug*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/print.py b/src/boost/tools/build/test/print.py
new file mode 100644
index 000000000..6579bce54
--- /dev/null
+++ b/src/boost/tools/build/test/print.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+
+# Copyright 2003 Douglas Gregor
+# Copyright 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", "import gcc ;")
+
+t.write("jamfile.jam", """
+import print ;
+print.output foo ;
+print.text \\\"Something\\\" ;
+DEPENDS all : foo ;
+ALWAYS foo ;
+""")
+
+t.run_build_system()
+t.expect_content("foo", """\"Something\"""")
+
+t.write("jamfile.jam", """
+import print ;
+print.output foo ;
+print.text \\\n\\\"Somethingelse\\\" ;
+DEPENDS all : foo ;
+ALWAYS foo ;
+""")
+
+t.run_build_system()
+t.expect_content("foo", """\"Something\"
+\"Somethingelse\"""")
+
+t.write("jamfile.jam", """
+import print ;
+print.output foo ;
+print.text \\\"Different\\\" : true ;
+DEPENDS all : foo ;
+ALWAYS foo ;
+""")
+
+t.run_build_system()
+t.expect_content("foo", """\"Different\"""")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/project-test3/a.cpp b/src/boost/tools/build/test/project-test3/a.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/a.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test3/jamfile.jam b/src/boost/tools/build/test/project-test3/jamfile.jam
new file mode 100644
index 000000000..f07960770
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/jamfile.jam
@@ -0,0 +1,13 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+use-project /lib2 : lib2 ;
+use-project /lib3 : lib3 ;
+
+make a.exe : a.obj lib//b.obj /lib2//c.obj lib2//d.obj lib2/helper//e.obj /lib3//f.obj : yfc-link ;
+make a.obj : a.cpp : yfc-compile ;
+
+build-project lib2 ;
+build-project lib ;
diff --git a/src/boost/tools/build/test/project-test3/jamroot.jam b/src/boost/tools/build/test/project-test3/jamroot.jam
new file mode 100644
index 000000000..d7cd490eb
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/jamroot.jam
@@ -0,0 +1,67 @@
+# Copyright 2002-2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import os ;
+import gcc ;
+import property ;
+import toolset ;
+
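+# 'yfc-compile' and 'yfc-link' are stub build actions: instead of invoking a
+# real compiler they write the non-incidental build properties and the source
+# names into the target file, so tests can check which properties were used.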
+rule properties-as-path ( properties * )
+{
+ local r ;
+ for local p in $(properties)
+ {
+ if $(p:G) != <action>
+ {
+ r += $(p) ;
+ }
+ }
+ return [ property.as-path
+ [ property.remove incidental : $(r) ] ] ;
+}
+
+toolset.flags yfc-compile KNOWN-PROPERTIES : <toolset> <optimization> ;
+toolset.flags yfc-link KNOWN-PROPERTIES : <toolset> <optimization> ;
+
+rule yfc-compile ( target : sources * : property-set * )
+{
+ PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ;
+}
+
+actions yfc-compile
+{
+ echo $(PROPERTIES) > $(<)
+ echo $(>) >> $(<)
+}
+
+rule yfc-link ( target : sources * : property-set * )
+{
+ PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ;
+}
+
+actions yfc-link
+{
+ echo $(PROPERTIES) > $(<)
+ echo $(>) >> $(<)
+}
+
+if [ os.name ] = VMS
+{
+ actions yfc-compile
+ {
+ PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W)
+ PIPE WRITE SYS$OUTPUT "$(>:J= ",")" | APPEND /NEW SYS$INPUT $(<:W)
+ }
+
+ actions yfc-link
+ {
+ PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W)
+ OPEN /APPEND FOUT $(<:W)
+ WRITE FOUT "$(>:J= ",")"
+ CLOSE FOUT
+ }
+}
+
+IMPORT $(__name__) : yfc-compile yfc-link : : yfc-compile yfc-link ;
diff --git a/src/boost/tools/build/test/project-test3/lib/b.cpp b/src/boost/tools/build/test/project-test3/lib/b.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib/b.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test3/lib/jamfile.jam b/src/boost/tools/build/test/project-test3/lib/jamfile.jam
new file mode 100644
index 000000000..76b0829a9
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib/jamfile.jam
@@ -0,0 +1,9 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+use-project /lib2 : ../lib2 ;
+
+make b.obj : b.cpp : yfc-compile ;
+make m.exe : b.obj /lib2//c.obj : yfc-link ;
diff --git a/src/boost/tools/build/test/project-test3/lib2/c.cpp b/src/boost/tools/build/test/project-test3/lib2/c.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib2/c.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test3/lib2/d.cpp b/src/boost/tools/build/test/project-test3/lib2/d.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib2/d.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test3/lib2/helper/e.cpp b/src/boost/tools/build/test/project-test3/lib2/helper/e.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib2/helper/e.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test3/lib2/helper/jamfile.jam b/src/boost/tools/build/test/project-test3/lib2/helper/jamfile.jam
new file mode 100644
index 000000000..0c82f9248
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib2/helper/jamfile.jam
@@ -0,0 +1,9 @@
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project lib2/helper ;
+
+make e.obj : e.cpp : yfc-compile ;
+
diff --git a/src/boost/tools/build/test/project-test3/lib2/jamfile.jam b/src/boost/tools/build/test/project-test3/lib2/jamfile.jam
new file mode 100644
index 000000000..b6b0abc44
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib2/jamfile.jam
@@ -0,0 +1,11 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project lib2 ;
+use-project /lib2/helper : helper ;
+
+make c.obj : c.cpp : yfc-compile ;
+make d.obj : d.cpp : yfc-compile ;
+make l.exe : c.obj ..//a.obj : yfc-link ;
diff --git a/src/boost/tools/build/test/project-test3/lib3/f.cpp b/src/boost/tools/build/test/project-test3/lib3/f.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib3/f.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test3/lib3/jamfile.jam b/src/boost/tools/build/test/project-test3/lib3/jamfile.jam
new file mode 100644
index 000000000..261062994
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib3/jamfile.jam
@@ -0,0 +1,47 @@
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This project-root.jam also serves the role of Jamfile
+project lib3 ;
+
+use-project /lib2/helper : ../lib2/helper ;
+
+import os ;
+import property ;
+
+rule properties-as-path ( properties * )
+{
+ local r ;
+ for local p in $(properties)
+ {
+ if $(p:G) != <action>
+ {
+ r += $(p) ;
+ }
+ }
+ return [ property.as-path
+ [ property.remove incidental : $(r) ] ] ;
+}
+
+rule mfc-compile ( target : sources * : property-set * )
+{
+ PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ;
+}
+
+actions mfc-compile
+{
+ echo $(PROPERTIES) > $(<)
+ echo $(>) >> $(<)
+}
+
+if [ os.name ] = VMS
+{
+ actions mfc-compile
+ {
+ PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W)
+ PIPE WRITE SYS$OUTPUT "$(>:J= ",")" | APPEND /NEW SYS$INPUT $(<:W)
+ }
+}
+
+make f.obj : f.cpp /lib2/helper//e.obj : mfc-compile ;
diff --git a/src/boost/tools/build/test/project-test3/lib3/jamroot.jam b/src/boost/tools/build/test/project-test3/lib3/jamroot.jam
new file mode 100644
index 000000000..971f03096
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/lib3/jamroot.jam
@@ -0,0 +1,5 @@
+# Copyright 2002 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
diff --git a/src/boost/tools/build/test/project-test3/readme.txt b/src/boost/tools/build/test/project-test3/readme.txt
new file mode 100644
index 000000000..da27e54b2
--- /dev/null
+++ b/src/boost/tools/build/test/project-test3/readme.txt
@@ -0,0 +1,7 @@
+Copyright 2002 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This test checks that we have a minimally working 'make' rule and that we can
+use targets from a different project with a different project root.
diff --git a/src/boost/tools/build/test/project-test4/a.cpp b/src/boost/tools/build/test/project-test4/a.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/a.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test4/a_gcc.cpp b/src/boost/tools/build/test/project-test4/a_gcc.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/a_gcc.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test4/jamfile.jam b/src/boost/tools/build/test/project-test4/jamfile.jam
new file mode 100644
index 000000000..a34d5f2db
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/jamfile.jam
@@ -0,0 +1,11 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project test : requirements <include>everything <threading>single ;
+
+make a.exe : a.obj lib//b.obj/<optimization>speed : yfc-link ;
+make b.exe : a.obj : yfc-link : <define>MACROS ;
+make a.obj : a.cpp : yfc-compile ;
diff --git a/src/boost/tools/build/test/project-test4/jamfile3.jam b/src/boost/tools/build/test/project-test4/jamfile3.jam
new file mode 100644
index 000000000..9770362d7
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/jamfile3.jam
@@ -0,0 +1,5 @@
+
+make a.exe : a.obj lib//b.obj/<optimization>on a_gcc.obj : yfc-link : <toolset>gcc ;
+make a.exe : a.obj lib//b.obj/<optimization>on : yfc-link : <threading>multi ;
+make a.obj : a.cpp : yfc-compile ;
+make a_gcc.obj : a_gcc.cpp : yfc-compile ;
diff --git a/src/boost/tools/build/test/project-test4/jamfile4.jam b/src/boost/tools/build/test/project-test4/jamfile4.jam
new file mode 100644
index 000000000..e3257801a
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/jamfile4.jam
@@ -0,0 +1,4 @@
+
+project test : requirements <include>everything <threading>single ;
+
+build-project lib2 ;
diff --git a/src/boost/tools/build/test/project-test4/jamfile5.jam b/src/boost/tools/build/test/project-test4/jamfile5.jam
new file mode 100644
index 000000000..1010be5e4
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/jamfile5.jam
@@ -0,0 +1,6 @@
+
+project test : requirements <include>everything <threading>single ;
+
+make a.exe : a.obj lib//b.obj/<variant>release : yfc-link ;
+make b.exe : a.obj : yfc-link : <define>MACROS ;
+make a.obj : a.cpp : yfc-compile ;
diff --git a/src/boost/tools/build/test/project-test4/jamroot.jam b/src/boost/tools/build/test/project-test4/jamroot.jam
new file mode 100644
index 000000000..d8cf571ae
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/jamroot.jam
@@ -0,0 +1,68 @@
+# Copyright 2002, 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import os ;
+import gcc ;
+import property ;
+import toolset ;
+
+rule properties-as-path ( properties * )
+{
+ local r ;
+ for local p in $(properties)
+ {
+ if $(p:G) != <action>
+ {
+ r += $(p) ;
+ }
+ }
+ return [ property.as-path
+ [ property.remove incidental : $(r) ] ] ;
+}
+
+
+toolset.flags yfc-compile KNOWN-PROPERTIES : <toolset> <optimization> ;
+toolset.flags yfc-link KNOWN-PROPERTIES : <toolset> <optimization> ;
+
+
+rule yfc-compile ( target : sources * : property-set * )
+{
+ PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ;
+}
+
+actions yfc-compile
+{
+ echo $(PROPERTIES) > $(<)
+ echo $(>) >> $(<)
+}
+
+rule yfc-link ( target : sources * : property-set * )
+{
+ PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ;
+}
+
+actions yfc-link
+{
+ echo $(PROPERTIES) > $(<)
+ echo $(>) >> $(<)
+}
+
+if [ os.name ] = VMS
+{
+ actions yfc-compile
+ {
+ PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W)
+ PIPE WRITE SYS$OUTPUT "$(>:J= ",")" | APPEND /NEW SYS$INPUT $(<:W)
+ }
+
+ actions yfc-link
+ {
+ PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W)
+ OPEN /APPEND FOUT $(<:W)
+ WRITE FOUT "$(>:J= ",")"
+ CLOSE FOUT
+ }
+}
+
+#IMPORT $(__name__) : yfc-compile yfc-link : : yfc-compile yfc-link ;
diff --git a/src/boost/tools/build/test/project-test4/lib/b.cpp b/src/boost/tools/build/test/project-test4/lib/b.cpp
new file mode 100644
index 000000000..ccecbb414
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib/b.cpp
@@ -0,0 +1,8 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
diff --git a/src/boost/tools/build/test/project-test4/lib/jamfile.jam b/src/boost/tools/build/test/project-test4/lib/jamfile.jam
new file mode 100644
index 000000000..1bdb7c122
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib/jamfile.jam
@@ -0,0 +1,6 @@
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+make b.obj : b.cpp : yfc-compile ;
diff --git a/src/boost/tools/build/test/project-test4/lib/jamfile1.jam b/src/boost/tools/build/test/project-test4/lib/jamfile1.jam
new file mode 100644
index 000000000..be2c3649a
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib/jamfile1.jam
@@ -0,0 +1,2 @@
+
+make b.obj : b.cpp : yfc-compile ;
diff --git a/src/boost/tools/build/test/project-test4/lib/jamfile2.jam b/src/boost/tools/build/test/project-test4/lib/jamfile2.jam
new file mode 100644
index 000000000..d47274bdf
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib/jamfile2.jam
@@ -0,0 +1,4 @@
+
+project lib : requirements <threading>multi ;
+
+make b.obj : b.cpp : yfc-compile ;
diff --git a/src/boost/tools/build/test/project-test4/lib/jamfile3.jam b/src/boost/tools/build/test/project-test4/lib/jamfile3.jam
new file mode 100644
index 000000000..73a78324b
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib/jamfile3.jam
@@ -0,0 +1,2 @@
+
+make b.obj : b.cpp : yfc-compile : <rtti>off ;
diff --git a/src/boost/tools/build/test/project-test4/lib2/jamfile.jam b/src/boost/tools/build/test/project-test4/lib2/jamfile.jam
new file mode 100644
index 000000000..389492bf0
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib2/jamfile.jam
@@ -0,0 +1,8 @@
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+project
+ : requirements <rtti>off
+ ;
diff --git a/src/boost/tools/build/test/project-test4/lib2/jamfile2.jam b/src/boost/tools/build/test/project-test4/lib2/jamfile2.jam
new file mode 100644
index 000000000..94b144d06
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/lib2/jamfile2.jam
@@ -0,0 +1,4 @@
+
+project mylib
+ : requirements <rtti>off
+ ;
diff --git a/src/boost/tools/build/test/project-test4/readme.txt b/src/boost/tools/build/test/project-test4/readme.txt
new file mode 100644
index 000000000..0c0ba2ca4
--- /dev/null
+++ b/src/boost/tools/build/test/project-test4/readme.txt
@@ -0,0 +1,6 @@
+Copyright 2002 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This test checks for correct properties of generated and used targets.
diff --git a/src/boost/tools/build/test/project_dependencies.py b/src/boost/tools/build/test/project_dependencies.py
new file mode 100644
index 000000000..600bc0ed5
--- /dev/null
+++ b/src/boost/tools/build/test/project_dependencies.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can specify a dependency property in project requirements, and
+# that it will not cause every main target in the project to be generated in its
+# own subdirectory.
+
+# The whole test is somewhat moot now.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "build-project src ;")
+
+t.write("lib/jamfile.jam", "lib lib1 : lib1.cpp ;")
+
+t.write("lib/lib1.cpp", """
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}\n
+""")
+
+t.write("src/jamfile.jam", """
+project : requirements <library>../lib//lib1 ;
+exe a : a.cpp ;
+exe b : b.cpp ;
+""")
+
+t.write("src/a.cpp", """
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void foo();
+int main() { foo(); }
+""")
+
+t.copy("src/a.cpp", "src/b.cpp")
+
+t.run_build_system()
+
+# Test that there is no "main-target-a" part.
+# t.expect_addition("src/bin/$toolset/debug*/a.exe")
+# t.expect_addition("src/bin/$toolset/debug*/b.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/project_glob.py b/src/boost/tools/build/test/project_glob.py
new file mode 100644
index 000000000..362b450d9
--- /dev/null
+++ b/src/boost/tools/build/test/project_glob.py
@@ -0,0 +1,212 @@
+#!/usr/bin/python
+
+# Copyright (C) 2003. Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'glob' rule in Jamfile context.
+
+import BoostBuild
+
+
+def test_basic():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/a.cpp", "int main() {}\n")
+ t.write("d1/jamfile.jam", "exe a : [ glob *.cpp ] ../d2/d//l ;")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+ t.write("d3/d/jamfile.jam", "exe a : [ glob ../*.cpp ] ;")
+ t.write("d3/a.cpp", "int main() {}\n")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug*/a.exe")
+
+ t.run_build_system(subdir="d3/d")
+ t.expect_addition("d3/d/bin/$toolset/debug*/a.exe")
+
+ t.rm("d2/d/bin")
+ t.run_build_system(subdir="d2/d")
+ t.expect_addition("d2/d/bin/$toolset/debug*/l.dll")
+
+ t.cleanup()
+
+
+def test_source_location():
+ """
+    Test that when 'source-location' is explicitly specified, glob works
+ relative to the source location.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/a.cpp", "very bad non-compilable file\n")
+ t.write("d1/src/a.cpp", "int main() {}\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob *.cpp ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug*/a.exe")
+
+ t.cleanup()
+
+
+def test_wildcards_and_exclusion_patterns():
+ """
+ Test that wildcards can include directories. Also test exclusion
+ patterns.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ t.write("d1/src/bar/bad.cpp", "very bad non-compilable file\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob foo/*.cpp bar/*.cpp : bar/bad* ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug*/a.exe")
+
+ t.cleanup()
+
+
+def test_glob_tree():
+ """Test that 'glob-tree' works."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ t.write("d1/src/bar/bad.cpp", "very bad non-compilable file\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob-tree *.cpp : bad* ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug*/a.exe")
+
+ t.cleanup()
+
+
+def test_directory_names_in_glob_tree():
+ """Test that directory names in patterns for 'glob-tree' are rejected."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/a.cpp", "very bad non-compilable file\n")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ t.write("d1/src/bar/bad.cpp", "very bad non-compilable file\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob-tree foo/*.cpp bar/*.cpp : bad* ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1", status=1)
+ t.expect_output_lines("error: The patterns * may not include directory")
+
+ t.cleanup()
+
+
+def test_glob_with_absolute_names():
+ """Test that 'glob' works with absolute names."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/a.cpp", "very bad non-compilable file\n")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ # Note that to get the current dir, we use bjam's PWD, not Python's
+ # os.getcwd(), because the former will always return a long path while the
+ # latter might return a short path, which would confuse path.glob.
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+local pwd = [ PWD ] ; # Always absolute.
+exe a : [ glob $(pwd)/src/foo/*.cpp $(pwd)/src/bar/*.cpp ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug*/a.exe")
+
+ t.cleanup()
+
+
+def test_glob_excludes_in_subdirectory():
+ """
+ Regression test: glob excludes used to be broken when building from a
+ subdirectory.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "build-project p ;")
+ t.write("p/p.c", "int main() {}\n")
+ t.write("p/p_x.c", "very bad non-compilable file\n")
+ t.write("p/jamfile.jam", "exe p : [ glob *.c : p_x.c ] ;")
+
+ t.run_build_system(subdir="p")
+ t.expect_addition("p/bin/$toolset/debug*/p.exe")
+
+ t.cleanup()
+
+
+test_basic()
+test_source_location()
+test_wildcards_and_exclusion_patterns()
+test_glob_tree()
+test_directory_names_in_glob_tree()
+test_glob_with_absolute_names()
+test_glob_excludes_in_subdirectory()
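
Note: together the cases above cover the main glob idioms, namely plain
wildcards, exclusion patterns, recursive 'glob-tree', and absolute names. As a
compact reference, here is a minimal sketch in the same BoostBuild.Tester
style, combining a wildcard with an exclusion (the file and target names are
illustrative only):

    import BoostBuild

    t = BoostBuild.Tester(use_test_config=False)

    # One good source plus a deliberately broken one that the exclusion skips.
    t.write("jamroot.jam", "")
    t.write("app/good.cpp", "int main() {}\n")
    t.write("app/broken.cpp", "deliberately not compilable\n")
    t.write("app/jamfile.jam", "exe app : [ glob *.cpp : broken.cpp ] ;")

    t.run_build_system(subdir="app")
    t.expect_addition("app/bin/$toolset/debug*/app.exe")
    t.cleanup()
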
diff --git a/src/boost/tools/build/test/project_id.py b/src/boost/tools/build/test/project_id.py
new file mode 100755
index 000000000..6477f5957
--- /dev/null
+++ b/src/boost/tools/build/test/project_id.py
@@ -0,0 +1,414 @@
+#!/usr/bin/python
+
+# Copyright (C) 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests Boost Build's project-id handling.
+
+import BoostBuild
+import sys
+
+
+def test_assigning_project_ids():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("jamroot.jam", """\
+import assert ;
+import modules ;
+import notfile ;
+import project ;
+
+rule assert-project-id ( id ? : module-name ? )
+{
+ module-name ?= [ CALLER_MODULE ] ;
+ assert.result $(id) : project.attribute $(module-name) id ;
+}
+
+# Project rule modifies the main project id.
+assert-project-id ; # Initial project id is empty
+project foo ; assert-project-id /foo ;
+project ; assert-project-id /foo ;
+project foo ; assert-project-id /foo ;
+project bar ; assert-project-id /bar ;
+project /foo ; assert-project-id /foo ;
+project "" ; assert-project-id /foo ;
+
+# Calling the use-project rule does not modify the project's main id.
+use-project id1 : a ;
+# We need to load the 'a' Jamfile module manually as the use-project rule will
+# only schedule the load to be done after the current module load finishes.
+a-module = [ project.load a ] ;
+assert-project-id : $(a-module) ;
+use-project id2 : a ;
+assert-project-id : $(a-module) ;
+modules.call-in $(a-module) : project baz ;
+assert-project-id /baz : $(a-module) ;
+use-project id3 : a ;
+assert-project-id /baz : $(a-module) ;
+
+# Make sure the project id still holds after all the scheduled use-project loads
+# complete. We do this by scheduling the assert for the Jam action scheduling
+# phase.
+notfile x : @assert-a-rule ;
+rule assert-a-rule ( target : : properties * )
+{
+ assert-project-id /baz : $(a-module) ;
+}
+""")
+ t.write("a/jamfile.jam", """\
+# Initial project id for this module is empty.
+assert-project-id ;
+""")
+ t.run_build_system()
+ t.cleanup()
+
+
+def test_using_project_ids_in_target_references():
+ t = BoostBuild.Tester()
+ __write_appender(t, "appender.jam")
+ t.write("jamroot.jam", """\
+import type ;
+type.register AAA : _a ;
+type.register BBB : _b ;
+
+import appender ;
+appender.register aaa-to-bbb : AAA : BBB ;
+
+use-project id1 : a ;
+use-project /id2 : a ;
+
+bbb b1 : /id1//target ;
+bbb b2 : /id2//target ;
+bbb b3 : /id3//target ;
+bbb b4 : a//target ;
+bbb b5 : /project-a1//target ;
+bbb b6 : /project-a2//target ;
+bbb b7 : /project-a3//target ;
+
+use-project id3 : a ;
+""")
+ t.write("a/source._a", "")
+ t.write("a/jamfile.jam", """\
+project project-a1 ;
+project /project-a2 ;
+import alias ;
+alias target : source._a ;
+project /project-a3 ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/b%d._b" % x for x in range(1, 8))
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+def test_repeated_ids_for_different_projects():
+ t = BoostBuild.Tester()
+
+ t.write("a/jamfile.jam", "")
+ t.write("jamroot.jam", "project foo ; use-project foo : a ;")
+ t.run_build_system(status=1)
+ t.expect_output_lines("""\
+error: Attempt to redeclare already registered project id '/foo'.
+error: Original project:
+error: Name: Jamfile<*>
+error: Module: Jamfile<*>
+error: Main id: /foo
+error: File: jamroot.jam
+error: Location: .
+error: New project:
+error: Module: Jamfile<*>
+error: File: a*jamfile.jam
+error: Location: a""")
+
+ t.write("jamroot.jam", "use-project foo : a ; project foo ;")
+ t.run_build_system(status=1)
+ t.expect_output_lines("""\
+error: Attempt to redeclare already registered project id '/foo'.
+error: Original project:
+error: Name: Jamfile<*>
+error: Module: Jamfile<*>
+error: Main id: /foo
+error: File: jamroot.jam
+error: Location: .
+error: New project:
+error: Module: Jamfile<*>
+error: File: a*jamfile.jam
+error: Location: a""")
+
+ t.write("jamroot.jam", """\
+import modules ;
+import project ;
+modules.call-in [ project.load a ] : project foo ;
+project foo ;
+""")
+ t.run_build_system(status=1)
+ t.expect_output_lines("""\
+error: at jamroot.jam:4
+error: Attempt to redeclare already registered project id '/foo'.
+error: Original project:
+error: Name: Jamfile<*>
+error: Module: Jamfile<*>
+error: Main id: /foo
+error: File: a*jamfile.jam
+error: Location: a
+error: New project:
+error: Module: Jamfile<*>
+error: File: jamroot.jam
+error: Location: .""")
+
+ t.cleanup()
+
+
+def test_repeated_ids_for_same_project():
+ t = BoostBuild.Tester()
+
+ t.write("jamroot.jam", "project foo ; project foo ;")
+ t.run_build_system()
+
+ t.write("jamroot.jam", "project foo ; use-project foo : . ;")
+ t.run_build_system()
+
+ t.write("jamroot.jam", "project foo ; use-project foo : ./. ;")
+ t.run_build_system()
+
+ t.write("jamroot.jam", """\
+project foo ;
+use-project foo : . ;
+use-project foo : ./aaa/.. ;
+use-project foo : ./. ;
+""")
+ t.run_build_system()
+
+ # On Windows we have a case-insensitive file system and we can use
+ # backslashes as path separators.
+ # FIXME: Make a similar test pass on Cygwin.
+ if sys.platform in ['win32']:
+ t.write("a/fOo bAr/b/jamfile.jam", "")
+ t.write("jamroot.jam", r"""
+use-project bar : "a/foo bar/b" ;
+use-project bar : "a/foO Bar/b" ;
+use-project bar : "a/foo BAR/b/" ;
+use-project bar : "a\\.\\FOO bar\\b\\" ;
+""")
+ t.run_build_system()
+ t.rm("a")
+
+ t.write("bar/jamfile.jam", "")
+ t.write("jamroot.jam", """\
+use-project bar : bar ;
+use-project bar : bar/ ;
+use-project bar : bar// ;
+use-project bar : bar/// ;
+use-project bar : bar//// ;
+use-project bar : bar/. ;
+use-project bar : bar/./ ;
+use-project bar : bar/////./ ;
+use-project bar : bar/../bar/xxx/.. ;
+use-project bar : bar/..///bar/xxx///////.. ;
+use-project bar : bar/./../bar/xxx/.. ;
+use-project bar : bar/.////../bar/xxx/.. ;
+use-project bar : bar/././../bar/xxx/.. ;
+use-project bar : bar/././//////////../bar/xxx/.. ;
+use-project bar : bar/.///.////../bar/xxx/.. ;
+use-project bar : bar/./././xxx/.. ;
+use-project bar : bar/xxx////.. ;
+use-project bar : bar/xxx/.. ;
+use-project bar : bar///////xxx/.. ;
+""")
+ t.run_build_system()
+ t.rm("bar")
+
+ # On Windows we have a case-insensitive file system and we can use
+ # backslashes as path separators.
+ # FIXME: Make a similar test pass on Cygwin.
+ if sys.platform in ['win32']:
+ t.write("baR/jamfile.jam", "")
+ t.write("jamroot.jam", r"""
+use-project bar : bar ;
+use-project bar : BAR ;
+use-project bar : bAr ;
+use-project bar : bAr/ ;
+use-project bar : bAr\\ ;
+use-project bar : bAr\\\\ ;
+use-project bar : bAr\\\\///// ;
+use-project bar : bAr/. ;
+use-project bar : bAr/./././ ;
+use-project bar : bAr\\.\\.\\.\\ ;
+use-project bar : bAr\\./\\/.\\.\\ ;
+use-project bar : bAr/.\\././ ;
+use-project bar : Bar ;
+use-project bar : BaR ;
+use-project bar : BaR/./../bAr/xxx/.. ;
+use-project bar : BaR/./..\\bAr\\xxx/.. ;
+use-project bar : BaR/xxx/.. ;
+use-project bar : BaR///\\\\\\//xxx/.. ;
+use-project bar : Bar\\xxx/.. ;
+use-project bar : BAR/xXx/.. ;
+use-project bar : BAR/xXx\\\\/\\/\\//\\.. ;
+""")
+ t.run_build_system()
+ t.rm("baR")
+
+ t.cleanup()
+
+
+def test_unresolved_project_references():
+ t = BoostBuild.Tester()
+
+ __write_appender(t, "appender.jam")
+ t.write("a/source._a", "")
+ t.write("a/jamfile.jam", "import alias ; alias target : source._a ;")
+ t.write("jamroot.jam", """\
+import type ;
+type.register AAA : _a ;
+type.register BBB : _b ;
+
+import appender ;
+appender.register aaa-to-bbb : AAA : BBB ;
+
+use-project foo : a ;
+
+bbb b1 : a//target ;
+bbb b2 : /foo//target ;
+bbb b-invalid : invalid//target ;
+bbb b-root-invalid : /invalid//target ;
+bbb b-missing-root : foo//target ;
+bbb b-invalid-target : /foo//invalid ;
+""")
+
+ t.run_build_system(["b1", "b2"])
+ t.expect_addition("bin/b%d._b" % x for x in range(1, 3))
+ t.expect_nothing_more()
+
+ t.run_build_system(["b-invalid"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: 'invalid//target'
+error: referred to from project at
+error: '.'
+error: could not resolve project reference 'invalid'""")
+
+ t.run_build_system(["b-root-invalid"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: '/invalid//target'
+error: referred to from project at
+error: '.'
+error: could not resolve project reference '/invalid'""")
+
+ t.run_build_system(["b-missing-root"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: 'foo//target'
+error: referred to from project at
+error: '.'
+error: could not resolve project reference 'foo' - possibly missing a """
+ "leading slash ('/') character.")
+
+ t.run_build_system(["b-invalid-target"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: '/foo//invalid'
+error: referred to from project at
+error: '.'""")
+ t.expect_output_lines("*could not resolve project reference*", False)
+
+ t.cleanup()
+
+
+def __write_appender(t, name):
+ t.write(name,
+r"""# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for registering test generators that construct their targets by
+# simply appending their given input data, e.g. list of sources & targets.
+
+import "class" : new ;
+import generators ;
+import modules ;
+import sequence ;
+
+rule register ( id composing ? : source-types + : target-types + )
+{
+ local caller-module = [ CALLER_MODULE ] ;
+ id = $(caller-module).$(id) ;
+ local g = [ new generator $(id) $(composing) : $(source-types) :
+ $(target-types) ] ;
+ $(g).set-rule-name $(__name__).appender ;
+ generators.register $(g) ;
+ return $(id) ;
+}
+
+if [ modules.peek : NT ]
+{
+ X = ")" ;
+ ECHO_CMD = (echo. ;
+}
+else
+{
+ X = \" ;
+ ECHO_CMD = "echo $(X)" ;
+}
+
+local appender-runs ;
+
+# We set up separate actions for building each target in order to avoid having
+# to iterate over them in action (i.e. shell) code. We have to be extra careful
+# though to achieve the exact same effect as if doing all the work in just one
+# action. Otherwise Boost Jam might, under some circumstances, run only some of
+# our actions. To achieve this we register a series of actions for all the
+# targets (since they all have the same target list - either all or none of them
+# get run independent of which target actually needs to get built), each
+# building only a single target. Since all our actions use the same targets, we
+# cannot use 'on-target' parameters to pass data to a specific action, so we
+# pass them using the second 'sources' parameter which our actions then know how
+# to interpret correctly. This works well since Boost Jam does not automatically
+# add dependency relations between specified action targets & sources and so the
+# second argument, even though most often used to pass in a list of sources, can
+# actually be used for passing in any type of information.
+rule appender ( targets + : sources + : properties * )
+{
+ appender-runs = [ CALC $(appender-runs:E=0) + 1 ] ;
+ local target-index = 0 ;
+ local target-count = [ sequence.length $(targets) ] ;
+ local original-targets ;
+ for t in $(targets)
+ {
+ target-index = [ CALC $(target-index) + 1 ] ;
+ local appender-run = $(appender-runs) ;
+ if $(targets[2])-defined
+ {
+ appender-run += [$(target-index)/$(target-count)] ;
+ }
+ append $(targets) : $(appender-run:J=" ") $(t) $(sources) ;
+ }
+}
+
+actions append
+{
+ $(ECHO_CMD)-------------------------------------------------$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)>> "$(>[2])"
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)>> "$(>[2])"
+ $(ECHO_CMD) Target: '$(>[2])'$(X)
+ $(ECHO_CMD) Target: '$(>[2])'$(X)>> "$(>[2])"
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)>> "$(>[2])"
+ $(ECHO_CMD)=================================================$(X)
+ $(ECHO_CMD)-------------------------------------------------$(X)>> "$(>[2])"
+}
+""")
+
+
+test_assigning_project_ids()
+test_using_project_ids_in_target_references()
+test_repeated_ids_for_same_project()
+test_repeated_ids_for_different_projects()
+test_unresolved_project_references()
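
Note: the tests above rely on two ways of giving a project a global id, the
'project' rule inside the project's own Jamfile and the 'use-project' rule in a
referring project; once registered, targets are addressed as /id//target. A
minimal sketch of that reference form in the same harness (the target and file
names are assumptions, not taken from the test above):

    import BoostBuild

    t = BoostBuild.Tester(use_test_config=False)

    # Register the subproject under the global id /mylib and refer to it by id.
    t.write("jamroot.jam", """\
    use-project /mylib : lib ;
    exe app : app.cpp /mylib//l ;
    """)
    t.write("app.cpp", """\
    #ifdef _WIN32
    __declspec(dllimport)
    #endif
    void foo();
    int main() { foo(); }
    """)
    t.write("lib/jamfile.jam", "lib l : l.cpp ;")
    t.write("lib/l.cpp", """\
    #ifdef _WIN32
    __declspec(dllexport)
    #endif
    void foo() {}
    """)

    t.run_build_system()
    t.expect_addition("bin/$toolset/debug*/app.exe")
    t.cleanup()
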
diff --git a/src/boost/tools/build/test/project_root_constants.py b/src/boost/tools/build/test/project_root_constants.py
new file mode 100644
index 000000000..25006a015
--- /dev/null
+++ b/src/boost/tools/build/test/project_root_constants.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester()
+
+# Create the needed files.
+t.write("jamroot.jam", """\
+constant FOO : foobar gee ;
+ECHO $(FOO) ;
+""")
+
+t.run_build_system()
+t.expect_output_lines("foobar gee")
+
+# Regression test: when absolute paths were passed to path-constant rule,
+# B2 failed to recognize path as absolute and prepended the current
+# dir.
+t.write("jamroot.jam", """\
+import path ;
+local here = [ path.native [ path.pwd ] ] ;
+path-constant HERE : $(here) ;
+if $(HERE) != $(here)
+{
+ ECHO "PWD =" $(here) ;
+ ECHO "path constant =" $(HERE) ;
+ EXIT ;
+}
+""")
+t.write("jamfile.jam", "")
+
+t.run_build_system()
+
+t.write("jamfile.jam", """\
+# This tests that rule 'hello' will be imported to children unlocalized, and
+# will still access variables in this Jamfile.
+x = 10 ;
+constant FOO : foo ;
+rule hello ( ) { ECHO "Hello $(x)" ; }
+""")
+
+t.write("d/jamfile.jam", """\
+ECHO "d: $(FOO)" ;
+constant BAR : bar ;
+""")
+
+t.write("d/d2/jamfile.jam", """\
+ECHO "d2: $(FOO)" ;
+ECHO "d2: $(BAR)" ;
+hello ;
+""")
+
+t.run_build_system(subdir="d/d2")
+t.expect_output_lines("d: foo\nd2: foo\nd2: bar\nHello 10")
+
+t.cleanup()
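
Note: constants declared in jamroot.jam are visible in every child Jamfile,
which is the main reason to use them; a path-constant in particular yields an
absolute path that stays valid however deep the referring Jamfile sits. A small
sketch of the typical use, anchoring an include directory to the project root
(all names here are illustrative):

    import BoostBuild

    t = BoostBuild.Tester(use_test_config=False)

    t.write("jamroot.jam", """\
    path-constant TOP : . ;
    build-project src ;
    """)
    t.write("include/answer.h", "#define ANSWER 42\n")
    t.write("src/jamfile.jam", "exe app : app.cpp : <include>$(TOP)/include ;")
    t.write("src/app.cpp", """\
    #include <answer.h>
    int main() { return ANSWER - 42; }
    """)

    t.run_build_system()
    t.expect_addition("src/bin/$toolset/debug*/app.exe")
    t.cleanup()
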
diff --git a/src/boost/tools/build/test/project_root_rule.py b/src/boost/tools/build/test/project_root_rule.py
new file mode 100644
index 000000000..503b3cad2
--- /dev/null
+++ b/src/boost/tools/build/test/project_root_rule.py
@@ -0,0 +1,34 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that we can declare a rule in Jamroot that can then be called in a
+# child Jamfile to declare a target. Specifically tests the use of 'glob' in
+# that rule.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+t.write("jamroot.jam", """
+project : requirements <link>static ;
+rule my-lib ( name ) { lib $(name) : [ glob *.cpp ] ; }
+""")
+
+t.write("sub/a.cpp", """
+""")
+
+t.write("sub/jamfile.jam", """
+my-lib foo ;
+""")
+
+
+t.run_build_system(subdir="sub")
+
+t.expect_addition("sub/bin/$toolset/debug*/foo.lib")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/project_test3.py b/src/boost/tools/build/test/project_test3.py
new file mode 100644
index 000000000..9203cd885
--- /dev/null
+++ b/src/boost/tools/build/test/project_test3.py
@@ -0,0 +1,135 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(translate_suffixes=0)
+
+# First check some startup.
+t.set_tree("project-test3")
+os.remove("jamroot.jam")
+t.run_build_system(status=1)
+
+t.expect_output_lines("*.yfc-compile\" unknown in module*")
+
+t.set_tree("project-test3")
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/a.obj")
+t.expect_content("bin/$toolset/debug*/a.obj", """\
+$toolset/debug*
+a.cpp
+""")
+
+t.expect_addition("bin/$toolset/debug*/a.exe")
+t.expect_content("bin/$toolset/debug*/a.exe",
+"$toolset/debug*\n" +
+"bin/$toolset/debug*/a.obj lib/bin/$toolset/debug*/b.obj " +
+"lib2/bin/$toolset/debug*/c.obj lib2/bin/$toolset/debug*/d.obj " +
+"lib2/helper/bin/$toolset/debug*/e.obj " +
+"lib3/bin/$toolset/debug*/f.obj\n"
+)
+
+t.expect_addition("lib/bin/$toolset/debug*/b.obj")
+t.expect_content("lib/bin/$toolset/debug*/b.obj", """\
+$toolset/debug*
+lib/b.cpp
+""")
+
+t.expect_addition("lib/bin/$toolset/debug*/m.exe")
+t.expect_content("lib/bin/$toolset/debug*/m.exe", """\
+$toolset/debug*
+lib/bin/$toolset/debug*/b.obj lib2/bin/$toolset/debug*/c.obj
+""")
+
+t.expect_addition("lib2/bin/$toolset/debug*/c.obj")
+t.expect_content("lib2/bin/$toolset/debug*/c.obj", """\
+$toolset/debug*
+lib2/c.cpp
+""")
+
+t.expect_addition("lib2/bin/$toolset/debug*/d.obj")
+t.expect_content("lib2/bin/$toolset/debug*/d.obj", """\
+$toolset/debug*
+lib2/d.cpp
+""")
+
+t.expect_addition("lib2/bin/$toolset/debug*/l.exe")
+t.expect_content("lib2/bin/$toolset/debug*/l.exe", """\
+$toolset/debug*
+lib2/bin/$toolset/debug*/c.obj bin/$toolset/debug*/a.obj
+""")
+
+t.expect_addition("lib2/helper/bin/$toolset/debug*/e.obj")
+t.expect_content("lib2/helper/bin/$toolset/debug*/e.obj", """\
+$toolset/debug*
+lib2/helper/e.cpp
+""")
+
+t.expect_addition("lib3/bin/$toolset/debug*/f.obj")
+t.expect_content("lib3/bin/$toolset/debug*/f.obj", """\
+$toolset/debug*
+lib3/f.cpp lib2/helper/bin/$toolset/debug*/e.obj
+""")
+
+t.touch("a.cpp")
+t.run_build_system()
+t.expect_touch(["bin/$toolset/debug*/a.obj",
+ "bin/$toolset/debug*/a.exe",
+ "lib2/bin/$toolset/debug*/l.exe"])
+
+t.run_build_system(["release", "optimization=off,speed"])
+t.expect_addition(["bin/$toolset/release/optimization-off*/a.exe",
+ "bin/$toolset/release/optimization-off*/a.obj",
+ "bin/$toolset/release*/a.exe",
+ "bin/$toolset/release*/a.obj"])
+
+t.run_build_system(["--clean-all"])
+t.expect_removal(["bin/$toolset/debug*/a.obj",
+ "bin/$toolset/debug*/a.exe",
+ "lib/bin/$toolset/debug*/b.obj",
+ "lib/bin/$toolset/debug*/m.exe",
+ "lib2/bin/$toolset/debug*/c.obj",
+ "lib2/bin/$toolset/debug*/d.obj",
+ "lib2/bin/$toolset/debug*/l.exe",
+ "lib3/bin/$toolset/debug*/f.obj"])
+
+# Now test target ids in command line.
+t.set_tree("project-test3")
+t.run_build_system(["lib//b.obj"])
+t.expect_addition("lib/bin/$toolset/debug*/b.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["--clean", "lib//b.obj"])
+t.expect_removal("lib/bin/$toolset/debug*/b.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["lib//b.obj"])
+t.expect_addition("lib/bin/$toolset/debug*/b.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["release", "lib2/helper//e.obj", "/lib3//f.obj"])
+t.expect_addition("lib2/helper/bin/$toolset/release*/e.obj")
+t.expect_addition("lib3/bin/$toolset/release*/f.obj")
+t.expect_nothing_more()
+
+# Test project ids in command line work as well.
+t.set_tree("project-test3")
+t.run_build_system(["/lib2"])
+t.expect_addition("lib2/bin/$toolset/debug*/" *
+ BoostBuild.List("c.obj d.obj l.exe"))
+t.expect_addition("bin/$toolset/debug*/a.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["lib"])
+t.expect_addition("lib/bin/$toolset/debug*/" *
+ BoostBuild.List("b.obj m.exe"))
+t.expect_nothing_more()
+
+t.cleanup()
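
Note on the helper used in the last two expectations: multiplying a string by a
BoostBuild.List prefixes every element with that string, so the expectation is
just a compact form of listing each path explicitly. Roughly, assuming the '*'
overload behaves as the expectations above read (this helper is part of the
test harness, not of B2 itself):

    import BoostBuild

    # Equivalent to spelling out both expected paths:
    expected = "lib/bin/$toolset/debug*/" * BoostBuild.List("b.obj m.exe")
    # -> ["lib/bin/$toolset/debug*/b.obj", "lib/bin/$toolset/debug*/m.exe"]
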
diff --git a/src/boost/tools/build/test/project_test4.py b/src/boost/tools/build/test/project_test4.py
new file mode 100644
index 000000000..45faf0dc5
--- /dev/null
+++ b/src/boost/tools/build/test/project_test4.py
@@ -0,0 +1,65 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(translate_suffixes=0)
+
+
+t.set_tree("project-test4")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/a.obj")
+t.expect_content("bin/$toolset/debug*/a.obj",
+"""$toolset/debug*/include-everything*
+a.cpp
+""")
+
+t.expect_addition("bin/$toolset/debug*/a.exe")
+t.expect_content("bin/$toolset/debug*/a.exe",
+"$toolset/debug*/include-everything*\n" +
+"bin/$toolset/debug*/a.obj lib/bin/$toolset/debug/optimization-speed*/b.obj\n"
+)
+
+t.expect_addition("lib/bin/$toolset/debug/optimization-speed*/b.obj")
+t.expect_content("lib/bin/$toolset/debug/optimization-speed*/b.obj",
+"""$toolset/debug/include-everything/optimization-speed*
+lib/b.cpp
+""")
+
+t.expect_addition("bin/$toolset/debug*/b.exe")
+t.expect_content("bin/$toolset/debug*/b.exe",
+"$toolset/debug/define-MACROS/include-everything*\n" +
+"bin/$toolset/debug*/a.obj\n"
+)
+
+t.copy("lib/jamfile3.jam", "lib/jamfile.jam")
+
+# Link-compatibility check for rtti is disabled...
+#t.run_build_system(status=None)
+#import string
+#t.fail_test(t.stdout().find(
+#"""warning: targets produced from b.obj are link incompatible
+#warning: with main target a.exe""") !=-0)
+
+# Test that specifying a composite property in a target reference works as
+# expected.
+
+t.copy("lib/jamfile1.jam", "lib/jamfile.jam")
+t.copy("jamfile5.jam", "jamfile.jam")
+
+t.run_build_system()
+
+t.expect_addition("lib/bin/$toolset/release*/b.obj")
+
+t.expect_content("bin/$toolset/debug*/a.exe",
+"$toolset/debug/include-everything*\n" +
+"bin/$toolset/debug*/a.obj lib/bin/$toolset/release*/b.obj\n"
+)
+
+t.cleanup()
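
Note: the swapped-in jamfile1.jam and jamfile5.jam are not shown in this patch,
but the expectation above (b.obj built under a release directory while a.exe
itself stays debug) matches the standard B2 form for attaching properties to a
single target reference, where the properties are appended to the reference.
A hypothetical sketch of that form; the exact files used by the test may
differ:

    import BoostBuild

    t = BoostBuild.Tester(use_test_config=False)

    t.write("jamroot.jam", "")
    t.write("lib/jamfile.jam", "obj b : b.cpp ;")
    t.write("lib/b.cpp", "void helper() {}\n")
    # '/<variant>release' on the reference requests just that dependency in
    # release while the referring target keeps its own build properties.
    t.write("jamfile.jam", "exe a : a.cpp lib//b/<variant>release ;")
    t.write("a.cpp", "int main() {}\n")

    t.run_build_system()
    t.expect_addition("lib/bin/$toolset/release*/b.obj")
    t.cleanup()
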
diff --git a/src/boost/tools/build/test/property_expansion.py b/src/boost/tools/build/test/property_expansion.py
new file mode 100644
index 000000000..53fc13616
--- /dev/null
+++ b/src/boost/tools/build/test/property_expansion.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a free property inside a variant definition is correctly expanded.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+variant debug-AA : debug : <define>AA ;
+alias all : hello ;
+exe hello : hello.cpp ;
+explicit hello ;
+""")
+
+t.write("hello.cpp", """\
+#ifdef AA
+int main() {}
+#endif
+""")
+
+t.run_build_system(["debug-AA"])
+
+t.cleanup()
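
Note: the 'variant' rule used above derives a new build variant from an
existing one and attaches extra properties to it; the new name can then be
requested directly on the command line, exactly as the test does with debug-AA.
The same pattern with different, made-up names:

    import BoostBuild

    t = BoostBuild.Tester(use_test_config=False)

    t.write("jamroot.jam", """\
    variant release-trace : release : <define>ENABLE_TRACE ;
    exe hello : hello.cpp ;
    """)
    t.write("hello.cpp", """\
    #ifdef ENABLE_TRACE
    int main() {}
    #endif
    """)

    # Requesting the derived variant by name injects its extra <define>.
    t.run_build_system(["release-trace"])
    t.cleanup()
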
diff --git a/src/boost/tools/build/test/qt4.py b/src/boost/tools/build/test/qt4.py
new file mode 100755
index 000000000..170f6079b
--- /dev/null
+++ b/src/boost/tools/build/test/qt4.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+# (c) Copyright Juergen Hunold 2008
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+# Run the test in its real directory so that Boost.Test can be found via the
+# Boost top-level Jamroot.
+qt4_dir = os.getcwd() + "/qt4"
+
+t = BoostBuild.Tester(workdir=qt4_dir)
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/qt4/jamroot.jam b/src/boost/tools/build/test/qt4/jamroot.jam
new file mode 100644
index 000000000..3d8e7d73b
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/jamroot.jam
@@ -0,0 +1,82 @@
+# (c) Copyright Juergen Hunold 2008
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import qt4 ;
+import testing ;
+import cast ;
+
+
+
+if [ qt4.initialized ]
+{
+ use-project /boost : ../../../.. ;
+
+ project qttest
+ : requirements
+ <library>/boost/test//boost_unit_test_framework
+ ;
+
+ alias qt-tests :
+ # Check for explicit libraries, <use>/qt should not link any lib
+ [ link-fail qtcorefail.cpp : <use>/qt ]
+
+ [ run qtcore.cpp /qt//QtCore ]
+ [ run qtsql.cpp /qt//QtSql ]
+ [ run qtxml.cpp /qt//QtXml ]
+ [ run qtnetwork.cpp /qt//QtNetwork ]
+ [ run qtscript.cpp /qt//QtScript ]
+ [ run qtscripttools.cpp /qt//QtScriptTools ]
+ [ run qtxmlpatterns.cpp /qt//QtXmlPatterns ]
+
+      # ToDo: runnable example code
+ [ link qtsvg.cpp /qt//QtSvg ]
+ [ link qtgui.cpp /qt//QtGui ]
+
+ # Multimedia toolkits.
+ [ link qtwebkit.cpp /qt//QtWebKit ]
+ [ link phonon.cpp /qt//phonon ]
+ [ link qtmultimedia.cpp /qt//QtMultimedia ]
+
+ # QML
+ [ link qtdeclarative.cpp /qt//QtDeclarative ]
+
+ # Help systems.
+ [ link qthelp.cpp /qt//QtHelp ]
+ [ link qtassistant.cpp /qt//QtAssistantClient : <conditional>@check_for_assistant ]
+
+ # Check working and disabled Qt3Support
+ [ link qt3support.cpp /qt//Qt3Support : <qt3support>on ]
+ [ compile-fail qt3support.cpp /qt//Qt3Support : <qt3support>off ]
+
+ # Testing using QtTest. Simple sample
+ # ToDo: better support for "automoc" aka '#include "qttest.moc"'
+ [ run qttest.cpp [ cast _ moccable-cpp : qttest.cpp ] /qt//QtTest : : : <define>TEST_MOCK ]
+
+ # Test moc rule
+ [ run mock.cpp mock.h /qt//QtCore : : : <define>TEST_MOCK ]
+
+ # Test resource compiler
+ [ run rcc.cpp rcc.qrc /qt//QtCore : : : <rccflags>"-compress 9 -threshold 10" ]
+
+ : # requirements
+ : # default-build
+ : # usage-requirements
+ ;
+}
+
+# QtAssistant was removed after Qt 4.6, so disable it for newer versions.
+rule check_for_assistant ( properties * )
+{
+    # Extract the Qt version number from the build properties.
+ local version = [ MATCH "<qt>([0-9.]+).*"
+ : $(properties) ] ;
+
+ if $(version) > "4.6.99"
+ {
+        return <build>no ;
+ }
+}
+
+
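
Note: the check_for_assistant rule above is an instance of the
<conditional>@rule mechanism: the named rule is called with the target's
expanded build properties, and whatever properties it returns are added to the
requirements, so returning <build>no skips the target for configurations it
does not apply to. A minimal, hypothetical sketch of the same mechanism in the
test-harness style used elsewhere in this directory (the feature choice and
names are made up):

    import BoostBuild

    t = BoostBuild.Tester(use_test_config=False)

    t.write("jamroot.jam", """\
    # Skip the target unless the build requests <threading>multi.
    rule multi-only ( properties * )
    {
        if ! <threading>multi in $(properties)
        {
            return <build>no ;
        }
    }
    exe app : app.cpp : <conditional>@multi-only ;
    """)
    t.write("app.cpp", "int main() {}\n")

    # app should only be built when threading=multi is requested.
    t.run_build_system(["threading=multi"])
    t.cleanup()
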
diff --git a/src/boost/tools/build/test/qt4/mock.cpp b/src/boost/tools/build/test/qt4/mock.cpp
new file mode 100644
index 000000000..8f7a35c2d
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/mock.cpp
@@ -0,0 +1,26 @@
+// (c) Copyright Juergen Hunold 2011
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtMoc
+
+#include "mock.h"
+
+#include <boost/test/unit_test.hpp>
+
+Mock::Mock()
+{
+}
+
+/*!
+ Check that the compiler get the correct #defines.
+  Check that the compiler gets the correct #defines.
+ */
+BOOST_AUTO_TEST_CASE(construct_mock)
+{
+ delete new Mock();
+
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(TEST_MOCK), true);
+}
diff --git a/src/boost/tools/build/test/qt4/mock.h b/src/boost/tools/build/test/qt4/mock.h
new file mode 100644
index 000000000..1cc95b057
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/mock.h
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2011
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtCore/QObject>
+
+class Mock : public QObject
+{
+ /*!
+    Test that the moc gets the necessary #defines.
+    Otherwise the moc will not see the Q_OBJECT macro, will issue a warning,
+    and linking will fail due to missing vtable symbols.
+ */
+#if defined(TEST_MOCK)
+ Q_OBJECT
+#endif
+ public:
+
+ Mock();
+};
diff --git a/src/boost/tools/build/test/qt4/phonon.cpp b/src/boost/tools/build/test/qt4/phonon.cpp
new file mode 100644
index 000000000..3151f5911
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/phonon.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtPhonon
+
+#include <phonon/MediaObject>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_PHONON_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( phonon_object)
+{
+ Phonon::MediaObject player;
+}
diff --git a/src/boost/tools/build/test/qt4/qt3support.cpp b/src/boost/tools/build/test/qt4/qt3support.cpp
new file mode 100644
index 000000000..35d8c73b9
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qt3support.cpp
@@ -0,0 +1,29 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE Qt3Support
+
+#include <Q3Table>
+
+#include <boost/test/unit_test.hpp>
+
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SQL_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NETWORK_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_QT3SUPPORT_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT3_SUPPORT), true);
+}
+
+BOOST_AUTO_TEST_CASE( q3table )
+{
+ Q3Table q3table;
+
+}
+
diff --git a/src/boost/tools/build/test/qt4/qtassistant.cpp b/src/boost/tools/build/test/qt4/qtassistant.cpp
new file mode 100644
index 000000000..e2a6ed7bd
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtassistant.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtAssistant
+
+#include <QAssistantClient>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( empty_assistant)
+{
+ QAssistantClient client(QString());
+}
diff --git a/src/boost/tools/build/test/qt4/qtcore.cpp b/src/boost/tools/build/test/qt4/qtcore.cpp
new file mode 100644
index 000000000..f3c09039b
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtcore.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qstring_test)
+{
+ QString dummy;
+
+ BOOST_CHECK_EQUAL(dummy.isEmpty(), true);
+}
diff --git a/src/boost/tools/build/test/qt4/qtcorefail.cpp b/src/boost/tools/build/test/qt4/qtcorefail.cpp
new file mode 100644
index 000000000..15fd36aef
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtcorefail.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCoreFail
+
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qstring_test)
+{
+ QString dummy;
+
+ BOOST_CHECK_EQUAL(dummy.isEmpty(), true);
+}
diff --git a/src/boost/tools/build/test/qt4/qtdeclarative.cpp b/src/boost/tools/build/test/qt4/qtdeclarative.cpp
new file mode 100644
index 000000000..817855bad
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtdeclarative.cpp
@@ -0,0 +1,27 @@
+// (c) Copyright Juergen Hunold 2011
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtDeclarative
+
+#include <QApplication>
+#include <QDeclarativeView>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_DECLARATIVE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( declarative )
+{
+ QApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QDeclarativeView view;
+}
diff --git a/src/boost/tools/build/test/qt4/qtgui.cpp b/src/boost/tools/build/test/qt4/qtgui.cpp
new file mode 100644
index 000000000..75d9dacbe
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtgui.cpp
@@ -0,0 +1,42 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtGui
+
+#include <QApplication>
+
+#include <boost/test/unit_test.hpp>
+
+struct Fixture
+{
+ Fixture()
+ : application(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv,
+ false)
+ {
+ BOOST_TEST_MESSAGE( "setup QApplication fixture" );
+ }
+
+ ~Fixture()
+ {
+ BOOST_TEST_MESSAGE( "teardown QApplication fixture" );
+ }
+
+ QApplication application;
+};
+
+BOOST_GLOBAL_FIXTURE( Fixture );
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qtgui_test)
+{
+ BOOST_CHECK_EQUAL(true, true);
+}
diff --git a/src/boost/tools/build/test/qt4/qthelp.cpp b/src/boost/tools/build/test/qt4/qthelp.cpp
new file mode 100644
index 000000000..32327de58
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qthelp.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtHelp
+
+#include <QtHelp>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( empty_engine)
+{
+ QHelpEngine engine(QString());
+}
diff --git a/src/boost/tools/build/test/qt4/qtmultimedia.cpp b/src/boost/tools/build/test/qt4/qtmultimedia.cpp
new file mode 100644
index 000000000..dc5914aff
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtmultimedia.cpp
@@ -0,0 +1,25 @@
+// (c) Copyright Juergen Hunold 2009
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtMultimedia
+
+#include <QAudioDeviceInfo>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_MULTIMEDIA_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( audiodevices)
+{
+ QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioOutput);
+ for(int i = 0; i < devices.size(); ++i) {
+ BOOST_TEST_MESSAGE(QAudioDeviceInfo(devices.at(i)).deviceName().constData());
+ }
+}
diff --git a/src/boost/tools/build/test/qt4/qtnetwork.cpp b/src/boost/tools/build/test/qt4/qtnetwork.cpp
new file mode 100644
index 000000000..3f628d880
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtnetwork.cpp
@@ -0,0 +1,33 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtNetwork
+
+#include <QHostInfo>
+
+#include <QTextStream>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NETWORK_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( hostname )
+{
+ QHostInfo info(QHostInfo::fromName("www.boost.org")); //blocking lookup
+
+ QTextStream stream(stdout, QIODevice::WriteOnly);
+
+ Q_FOREACH(QHostAddress address, info.addresses())
+ {
+ BOOST_CHECK_EQUAL(address.isNull(), false);
+ stream << address.toString() << endl;
+ }
+}
diff --git a/src/boost/tools/build/test/qt4/qtscript.cpp b/src/boost/tools/build/test/qt4/qtscript.cpp
new file mode 100644
index 000000000..65353daec
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtscript.cpp
@@ -0,0 +1,37 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScript
+
+#include <QScriptEngine>
+
+#include <QCoreApplication>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCRIPT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( script )
+{
+ QCoreApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QScriptEngine myEngine;
+ QScriptValue three = myEngine.evaluate("1 + 2");
+
+ BOOST_CHECK_EQUAL(three.toNumber(), 3);
+ BOOST_CHECK_EQUAL(three.toString(), QLatin1String("3"));
+}
diff --git a/src/boost/tools/build/test/qt4/qtscripttools.cpp b/src/boost/tools/build/test/qt4/qtscripttools.cpp
new file mode 100644
index 000000000..4d0b7f256
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtscripttools.cpp
@@ -0,0 +1,47 @@
+// (c) Copyright Juergen Hunold 2009
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScriptTools
+
+#include <QScriptEngine>
+
+#include <QScriptEngineDebugger>
+
+#include <QApplication>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+namespace utf = boost::unit_test::framework;
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCRIPTTOOLS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( script )
+{
+ QApplication app(utf::master_test_suite().argc,
+ utf::master_test_suite().argv);
+
+ QScriptEngine myEngine;
+ QScriptValue three = myEngine.evaluate("1 + 2");
+
+ QScriptEngineDebugger debugger;
+ debugger.attachTo(&myEngine);
+
+ BOOST_CHECK_EQUAL(three.toNumber(), 3);
+ BOOST_CHECK_EQUAL(three.toString(), QLatin1String("3"));
+
+ debugger.detach();
+}
diff --git a/src/boost/tools/build/test/qt4/qtsql.cpp b/src/boost/tools/build/test/qt4/qtsql.cpp
new file mode 100644
index 000000000..aa506b1c7
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtsql.cpp
@@ -0,0 +1,37 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSql
+
+#include <QSqlDatabase>
+
+#include <QTextStream>
+#include <QStringList>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SQL_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( drivers )
+{
+ QTextStream stream(stdout, QIODevice::WriteOnly);
+
+ Q_FOREACH(QString it, QSqlDatabase:: drivers())
+ {
+ stream << it << endl;
+ }
+}
+
+BOOST_AUTO_TEST_CASE( construct )
+{
+ QSqlDatabase database;
+ BOOST_CHECK_EQUAL(database.isOpen(), false);
+}
diff --git a/src/boost/tools/build/test/qt4/qtsvg.cpp b/src/boost/tools/build/test/qt4/qtsvg.cpp
new file mode 100644
index 000000000..8a1394380
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtsvg.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSvg
+
+#include <QtSvg>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SVG_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( generator_construct)
+{
+ QSvgGenerator generator;
+}
diff --git a/src/boost/tools/build/test/qt4/qttest.cpp b/src/boost/tools/build/test/qt4/qttest.cpp
new file mode 100644
index 000000000..a2744cdc7
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qttest.cpp
@@ -0,0 +1,30 @@
+// (c) Copyright Juergen Hunold 2008-2011
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtTest>
+
+class QtTest: public QObject
+{
+ /*!
+ Test if the moc gets the #define
+ */
+#if defined(TEST_MOCK)
+ Q_OBJECT
+#endif
+
+private Q_SLOTS:
+ void toUpper();
+};
+
+void
+QtTest::toUpper()
+{
+ QString str = "Hello";
+ QCOMPARE(str.toUpper(), QString("HELLO"));
+}
+
+QTEST_MAIN(QtTest)
+#include "qttest.moc"
+
diff --git a/src/boost/tools/build/test/qt4/qtwebkit.cpp b/src/boost/tools/build/test/qt4/qtwebkit.cpp
new file mode 100644
index 000000000..7d85f1473
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtwebkit.cpp
@@ -0,0 +1,24 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebKit
+
+#include <QWebPage>
+#include <QApplication>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBKIT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( webkit )
+{
+ QWebPage page;
+ BOOST_CHECK_EQUAL(page.isModified(), false);
+}
diff --git a/src/boost/tools/build/test/qt4/qtxml.cpp b/src/boost/tools/build/test/qt4/qtxml.cpp
new file mode 100644
index 000000000..8002c2658
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtxml.cpp
@@ -0,0 +1,29 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtXml
+
+#include <QtXml>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( reader_construct)
+{
+ QXmlStreamReader reader;
+ BOOST_CHECK_EQUAL(reader.atEnd(), false);
+}
+
+BOOST_AUTO_TEST_CASE( writer_construct)
+{
+ QXmlStreamWriter writer;
+ BOOST_CHECK_EQUAL(writer.device(), static_cast<QIODevice*>(0));
+}
+
diff --git a/src/boost/tools/build/test/qt4/qtxmlpatterns.cpp b/src/boost/tools/build/test/qt4/qtxmlpatterns.cpp
new file mode 100644
index 000000000..6835fdad8
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/qtxmlpatterns.cpp
@@ -0,0 +1,76 @@
+// (c) Copyright Juergen Hunold 2008
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtXmlPatterns
+
+#include <QXmlQuery>
+#include <QXmlSerializer>
+
+#include <QCoreApplication>
+#include <QString>
+#include <QTextStream>
+#include <QBuffer>
+
+#include <boost/test/unit_test.hpp>
+
+
+struct Fixture
+{
+ Fixture()
+ : application(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv)
+ {
+ BOOST_TEST_MESSAGE( "setup QCoreApplication fixture" );
+ }
+
+ ~Fixture()
+ {
+ BOOST_TEST_MESSAGE( "teardown QCoreApplication fixture" );
+ }
+
+ QCoreApplication application;
+};
+
+BOOST_GLOBAL_FIXTURE( Fixture );
+
+QByteArray doc("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<html xmlns=\"http://www.w3.org/1999/xhtml/\" xml:lang=\"en\" lang=\"en\">"
+" <head>"
+" <title>Global variables report for globals.gccxml</title>"
+" </head>"
+"<body><p>Some Test text</p></body></html>");
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XMLPATTERNS_LIB), true);
+
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), false);
+}
+
+BOOST_AUTO_TEST_CASE( extract )
+{
+
+ QBuffer buffer(&doc); // This is a QIODevice.
+ buffer.open(QIODevice::ReadOnly);
+ QXmlQuery query;
+ query.bindVariable("myDocument", &buffer);
+ query.setQuery("declare variable $myDocument external;"
+ "doc($myDocument)");///p[1]");
+
+ BOOST_CHECK_EQUAL(query.isValid(), true);
+
+ QByteArray result;
+ QBuffer out(&result);
+ out.open(QIODevice::WriteOnly);
+
+ QXmlSerializer serializer(query, &out);
+ BOOST_CHECK_EQUAL(query.evaluateTo(&serializer), true);
+
+ QTextStream stream(stdout);
+ BOOST_CHECK_EQUAL(result.isEmpty(), false);
+ stream << "hallo" << result << endl;
+}
+
diff --git a/src/boost/tools/build/test/qt4/rcc.cpp b/src/boost/tools/build/test/qt4/rcc.cpp
new file mode 100644
index 000000000..cae553bb1
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/rcc.cpp
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+std::ostream& operator<<(std::ostream& out, QString const& text)
+{
+ out << text.toUtf8().constData();
+ return out;
+}
+
+BOOST_AUTO_TEST_CASE (check_exists)
+{
+ BOOST_CHECK(QFile::exists(":/test/rcc.cpp"));
+}
diff --git a/src/boost/tools/build/test/qt4/rcc.qrc b/src/boost/tools/build/test/qt4/rcc.qrc
new file mode 100644
index 000000000..13ca38a5d
--- /dev/null
+++ b/src/boost/tools/build/test/qt4/rcc.qrc
@@ -0,0 +1,5 @@
+<!DOCTYPE RCC><RCC version="1.0">
+ <qresource prefix="/test/">
+ <file>rcc.cpp</file>
+ </qresource>
+</RCC>
diff --git a/src/boost/tools/build/test/qt5.py b/src/boost/tools/build/test/qt5.py
new file mode 100755
index 000000000..d9e1226e8
--- /dev/null
+++ b/src/boost/tools/build/test/qt5.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+# (c) Copyright Juergen Hunold 2012
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+# Run the test in its real directory so that Boost.Test can be found via the
+# Boost top-level Jamroot.
+qt5_dir = os.getcwd() + "/qt5"
+
+t = BoostBuild.Tester(workdir=qt5_dir)
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/qt5/jamroot.jam b/src/boost/tools/build/test/qt5/jamroot.jam
new file mode 100644
index 000000000..782922557
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/jamroot.jam
@@ -0,0 +1,104 @@
+# (c) Copyright Juergen Hunold 2008
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import qt5 ;
+import testing ;
+import cast ;
+
+path-constant CWD : . ;
+
+
+if [ qt5.initialized ]
+{
+ use-project /boost : ../../../.. ;
+
+ project qttest
+ : requirements
+ <library>/boost/test//boost_unit_test_framework
+ ;
+
+ alias qt-tests :
+ # Check for explicit libraries, <use>/qt should not link any lib
+ [ link-fail qtcorefail.cpp : <use>/qt ]
+
+ [ run qtcore.cpp /qt5//QtCore ]
+ [ run qtsql.cpp /qt5//QtSql ]
+ [ run qtxml.cpp /qt5//QtXml ]
+ [ run qtnetwork.cpp /qt5//QtNetwork ]
+ [ run qtscript.cpp /qt5//QtScript ]
+ [ run qtscripttools.cpp /qt5//QtScriptTools ]
+ [ run qtxmlpatterns.cpp /qt5//QtXmlPatterns ]
+
+ [ run qtpositioning.cpp /qt5//QtPositioning ]
+
+      # ToDo: runnable example code
+ [ link qtsvg.cpp /qt5//QtSvg ]
+ [ link qtwidgets.cpp /qt5//QtWidgets ]
+
+ # Multimedia toolkits.
+ [ link qtwebkit.cpp /qt5//QtWebKit ]
+ [ link qtwebkitwidgets.cpp /qt5//QtWebKitWidgets ]
+ [ link qtmultimedia.cpp /qt5//QtMultimedia ]
+
+ # QtQuick version1
+ [ link qtdeclarative.cpp /qt5//QtDeclarative ]
+
+ # QtQuick version2
+ [ run qtquick.cpp /qt5//QtQuick : "--" -platform offscreen : $(CWD)/qtquick.qml ]
+
+ [ run qtwebengine.cpp /qt5//QtWebEngine ]
+ [ run qtwebenginewidgets.cpp /qt5//QtWebEngineWidgets ]
+
+ # QtSerialPort
+ [ run qtserialport.cpp /qt5//QtSerialPort ]
+
+ [ run qtlocation.cpp /qt5//QtLocation ]
+
+ [ run qtwebchannel.cpp /qt5//QtWebChannel ]
+ [ run qtwebsockets.cpp /qt5//QtWebSockets ]
+ [ run qtwebview.cpp /qt5//QtWebView ]
+
+ [ run qtpurchasing.cpp /qt5//QtPurchasing ]
+
+ [ run qtcharts.cpp /qt5//QtCharts ]
+
+ [ run qt3dcore.cpp /qt5//Qt3DCore ]
+ [ run qt3drender.cpp /qt5//Qt3DRender ]
+ [ run qt3dinput.cpp /qt5//Qt3DInput ]
+ [ run qt3dlogic.cpp /qt5//Qt3DLogic ]
+
+ [ run qtdatavisualization.cpp /qt5//QtDataVisualization ]
+
+ # Qt Connectivity
+ [ run qtbluetooth.cpp /qt5//QtBluetooth ]
+ [ run qtnfc.cpp /qt5//QtNfc ]
+
+ [ run qtgamepad.cpp /qt5//QtGamepad ]
+
+ [ run qtscxml.cpp /qt5//QtScxml ]
+
+ [ run qtserialbus.cpp /qt5//QtSerialBus ]
+
+
+ # Help systems.
+ [ link qthelp.cpp /qt5//QtHelp ]
+
+ # Testing using QtTest. Simple sample
+ # ToDo: better support for "automoc" aka '#include "qttest.moc"'
+ [ run qttest.cpp [ cast _ moccable5-cpp : qttest.cpp ] /qt5//QtTest : : : <define>TEST_MOCK ]
+
+ # Test moc rule
+ [ run mock.cpp mock.h /qt5//QtCore : : : <define>TEST_MOCK ]
+
+ # Test resource compiler
+ [ run rcc.cpp rcc.qrc /qt5//QtCore : : : <rccflags>"-compress 9 -threshold 10" ]
+
+ : # requirements
+ : # default-build
+ : # usage-requirements
+ ;
+}
+
+
diff --git a/src/boost/tools/build/test/qt5/mock.cpp b/src/boost/tools/build/test/qt5/mock.cpp
new file mode 100644
index 000000000..82fc608dc
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/mock.cpp
@@ -0,0 +1,26 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtMoc
+
+#include "mock.h"
+
+#include <boost/test/unit_test.hpp>
+
+Mock::Mock()
+{
+}
+
+/*!
+ Check that the compiler gets the correct #defines.
+ The logic to test the moc is in the header file "mock.h".
+ */
+BOOST_AUTO_TEST_CASE(construct_mock)
+{
+ delete new Mock();
+
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(TEST_MOCK), true);
+}
diff --git a/src/boost/tools/build/test/qt5/mock.h b/src/boost/tools/build/test/qt5/mock.h
new file mode 100644
index 000000000..eac177d4d
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/mock.h
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtCore/QObject>
+
+class Mock : public QObject
+{
+ /*!
+ Test that the moc gets the necessary #defines.
+ Otherwise the moc will not see the Q_OBJECT macro, will issue a warning,
+ and linking will fail due to missing vtable symbols.
+ */
+#if defined(TEST_MOCK)
+ Q_OBJECT
+#endif
+ public:
+
+ Mock();
+};
diff --git a/src/boost/tools/build/test/qt5/qt3dcore.cpp b/src/boost/tools/build/test/qt5/qt3dcore.cpp
new file mode 100644
index 000000000..9d1871bdc
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qt3dcore.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2015
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE Qt3DCore
+#include <Qt3DCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE ( sample_code )
+{
+ Qt3DCore::QTransform torusTransform;
+ torusTransform.setScale3D(QVector3D(1.5, 1, 0.5));
+ torusTransform.setRotation(QQuaternion::fromAxisAndAngle(QVector3D(1, 0, 0), 45.0f));
+}
diff --git a/src/boost/tools/build/test/qt5/qt3dinput.cpp b/src/boost/tools/build/test/qt5/qt3dinput.cpp
new file mode 100644
index 000000000..46cee14a3
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qt3dinput.cpp
@@ -0,0 +1,24 @@
+// (c) Copyright Juergen Hunold 2015
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE Qt3DInput
+#include <Qt3DInput>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DINPUT_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DRENDER_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE ( sample_code )
+{
+ Qt3DCore::QEntity rootEntity;
+
+}
+
diff --git a/src/boost/tools/build/test/qt5/qt3dlogic.cpp b/src/boost/tools/build/test/qt5/qt3dlogic.cpp
new file mode 100644
index 000000000..088f42099
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qt3dlogic.cpp
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2015
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE Qt3DLogic
+#include <Qt3DLogic>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DLOGIC_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE ( sample_code )
+{
+ Qt3DLogic::QLogicAspect logicAspect;
+}
diff --git a/src/boost/tools/build/test/qt5/qt3drender.cpp b/src/boost/tools/build/test/qt5/qt3drender.cpp
new file mode 100644
index 000000000..d4578054d
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qt3drender.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2015
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE Qt3DRender
+#include <Qt3DRender>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DRENDER_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE ( sample_code )
+{
+ Qt3DCore::QEntity rootEntity;
+ Qt3DRender::QMaterial material(&rootEntity);
+}
diff --git a/src/boost/tools/build/test/qt5/qtassistant.cpp b/src/boost/tools/build/test/qt5/qtassistant.cpp
new file mode 100644
index 000000000..c15ee4eca
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtassistant.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtAssistant
+
+#include <QAssistantClient>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( empty_assistant)
+{
+ QAssistantClient client(QString());
+}
diff --git a/src/boost/tools/build/test/qt5/qtbluetooth.cpp b/src/boost/tools/build/test/qt5/qtbluetooth.cpp
new file mode 100644
index 000000000..53beff17b
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtbluetooth.cpp
@@ -0,0 +1,34 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtBluetooth
+
+#include <QtBluetooth>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_BLUETOOTH_LIB), true);
+}
+
+/*!
+ Try to detect a device
+ */
+BOOST_AUTO_TEST_CASE( bluetooth )
+{
+ QList<QBluetoothHostInfo> localAdapters = QBluetoothLocalDevice::allDevices();
+
+ if (!localAdapters.empty())
+ {
+ QBluetoothLocalDevice adapter(localAdapters.at(0).address());
+ adapter.setHostMode(QBluetoothLocalDevice::HostDiscoverable);
+ }
+ else
+ {
+ BOOST_TEST(localAdapters.size() == 0);
+ }
+}
diff --git a/src/boost/tools/build/test/qt5/qtcharts.cpp b/src/boost/tools/build/test/qt5/qtcharts.cpp
new file mode 100644
index 000000000..d29c4fd03
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtcharts.cpp
@@ -0,0 +1,15 @@
+// (c) Copyright Juergen Hunold 2015
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCharts
+#include <QtCharts>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WIDGETS_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CHARTS_LIB), true);
+}
diff --git a/src/boost/tools/build/test/qt5/qtcore.cpp b/src/boost/tools/build/test/qt5/qtcore.cpp
new file mode 100644
index 000000000..6a2c62c8d
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtcore.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qstring_test)
+{
+ QString dummy;
+
+ BOOST_CHECK_EQUAL(dummy.isEmpty(), true);
+}
diff --git a/src/boost/tools/build/test/qt5/qtcorefail.cpp b/src/boost/tools/build/test/qt5/qtcorefail.cpp
new file mode 100644
index 000000000..8032d47cd
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtcorefail.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCoreFail
+
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qstring_test)
+{
+ QString dummy;
+
+ BOOST_CHECK_EQUAL(dummy.isEmpty(), true);
+}
diff --git a/src/boost/tools/build/test/qt5/qtdatavisualization.cpp b/src/boost/tools/build/test/qt5/qtdatavisualization.cpp
new file mode 100644
index 000000000..bc35c04c5
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtdatavisualization.cpp
@@ -0,0 +1,31 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtDataVisualization
+
+#include <QtDataVisualization>
+
+#include <QGuiApplication>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_DATAVISUALIZATION_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( datavisualization )
+{
+ QGuiApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+
+ QtDataVisualization::Q3DBars graph;
+
+ graph.setShadowQuality(QtDataVisualization::QAbstract3DGraph::ShadowQualitySoftMedium);
+ graph.activeTheme()->setBackgroundEnabled(false);
+ graph.activeTheme()->setLabelBackgroundEnabled(true);
+}
diff --git a/src/boost/tools/build/test/qt5/qtdeclarative.cpp b/src/boost/tools/build/test/qt5/qtdeclarative.cpp
new file mode 100644
index 000000000..df70f5e46
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtdeclarative.cpp
@@ -0,0 +1,26 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtDeclarative
+
+#include <QCoreApplication>
+#include <QDeclarativeView>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_DECLARATIVE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( declarative )
+{
+ QCoreApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QDeclarativeView view;
+}
diff --git a/src/boost/tools/build/test/qt5/qtgamepad.cpp b/src/boost/tools/build/test/qt5/qtgamepad.cpp
new file mode 100644
index 000000000..c6c6aea50
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtgamepad.cpp
@@ -0,0 +1,29 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtGamepad
+
+#include <QtGamepad>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GAMEPAD_LIB), true);
+}
+
+/*!
+ Try to detect a device
+ */
+BOOST_AUTO_TEST_CASE( gamepad )
+{
+ auto gamepads = QGamepadManager::instance()->connectedGamepads();
+ if (gamepads.isEmpty()) {
+ return;
+ }
+
+ QGamepad gamepad(*gamepads.begin());
+}
diff --git a/src/boost/tools/build/test/qt5/qthelp.cpp b/src/boost/tools/build/test/qt5/qthelp.cpp
new file mode 100644
index 000000000..b0e877a6a
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qthelp.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtHelp
+
+#include <QtHelp>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( empty_engine)
+{
+ QHelpEngine engine(QString());
+}
diff --git a/src/boost/tools/build/test/qt5/qtlocation.cpp b/src/boost/tools/build/test/qt5/qtlocation.cpp
new file mode 100644
index 000000000..9806dca93
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtlocation.cpp
@@ -0,0 +1,30 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtLocation
+
+#include <QGeoAddress>
+#include <QGeoLocation>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_POSITIONING_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NETWORK_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_LOCATION_LIB), true);
+}
+
+BOOST_TEST_DONT_PRINT_LOG_VALUE(QGeoAddress)
+
+BOOST_AUTO_TEST_CASE( geo_location )
+{
+ QGeoLocation geolocation;
+
+ QGeoAddress address;
+
+ BOOST_CHECK_EQUAL(geolocation.address(), address);
+}
diff --git a/src/boost/tools/build/test/qt5/qtmultimedia.cpp b/src/boost/tools/build/test/qt5/qtmultimedia.cpp
new file mode 100644
index 000000000..dc5914aff
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtmultimedia.cpp
@@ -0,0 +1,25 @@
+// (c) Copyright Juergen Hunold 2009
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtMultimedia
+
+#include <QAudioDeviceInfo>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_MULTIMEDIA_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( audiodevices)
+{
+ QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioOutput);
+ for(int i = 0; i < devices.size(); ++i) {
+ BOOST_TEST_MESSAGE(QAudioDeviceInfo(devices.at(i)).deviceName().constData());
+ }
+}
diff --git a/src/boost/tools/build/test/qt5/qtnetwork.cpp b/src/boost/tools/build/test/qt5/qtnetwork.cpp
new file mode 100644
index 000000000..d34246689
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtnetwork.cpp
@@ -0,0 +1,33 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtNetwork
+
+#include <QHostInfo>
+
+#include <QTextStream>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NETWORK_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( hostname )
+{
+ QHostInfo info(QHostInfo::fromName("www.boost.org")); // blocking lookup
+
+ QTextStream stream(stdout, QIODevice::WriteOnly);
+
+ Q_FOREACH(QHostAddress address, info.addresses())
+ {
+ BOOST_CHECK_EQUAL(address.isNull(), false);
+ stream << address.toString() << endl;
+ }
+}
diff --git a/src/boost/tools/build/test/qt5/qtnfc.cpp b/src/boost/tools/build/test/qt5/qtnfc.cpp
new file mode 100644
index 000000000..df3805f67
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtnfc.cpp
@@ -0,0 +1,28 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtNfc
+
+#include <QtNfc>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NFC_LIB), true);
+}
+
+/*!
+ Try to detect a device
+ */
+BOOST_AUTO_TEST_CASE( nfc )
+{
+ QNearFieldManager manager;
+ if (!manager.isAvailable())
+ {
+ BOOST_TEST_MESSAGE("No Nfc");
+ }
+}
diff --git a/src/boost/tools/build/test/qt5/qtpositioning.cpp b/src/boost/tools/build/test/qt5/qtpositioning.cpp
new file mode 100644
index 000000000..427b41ba9
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtpositioning.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtPositioning
+
+#include <QGeoCoordinate>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_POSITIONING_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( geo_coordinate )
+{
+ QGeoCoordinate geocoordinate;
+
+ BOOST_CHECK_EQUAL(geocoordinate.type(), QGeoCoordinate::InvalidCoordinate);
+}
diff --git a/src/boost/tools/build/test/qt5/qtpurchasing.cpp b/src/boost/tools/build/test/qt5/qtpurchasing.cpp
new file mode 100644
index 000000000..9a49ed2cc
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtpurchasing.cpp
@@ -0,0 +1,44 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtPurchasing
+
+#include <QtPurchasing>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_PURCHASING_LIB), true);
+}
+
+class DummyProduct : public QInAppProduct
+{
+public:
+
+ DummyProduct() : QInAppProduct{QStringLiteral("One"),
+ QString{},
+ QString{},
+ Consumable,
+ QStringLiteral("DummyProduct"),
+ nullptr} {};
+ void purchase() override {};
+};
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE (purchase)
+{
+ DummyProduct product;
+
+ BOOST_TEST(product.price() == QLatin1String("One"));
+ BOOST_TEST(product.identifier() == QLatin1String("DummyProduct"));
+}
diff --git a/src/boost/tools/build/test/qt5/qtquick.cpp b/src/boost/tools/build/test/qt5/qtquick.cpp
new file mode 100644
index 000000000..bec0d809c
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtquick.cpp
@@ -0,0 +1,43 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtQuick
+#include <QDir>
+#include <QTimer>
+#include <QGuiApplication>
+#include <QQmlEngine>
+#include <QQuickView>
+#include <QDebug>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_QML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_QUICK_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE (simple_test)
+{
+ QGuiApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QQuickView view;
+
+ QString fileName(boost::unit_test::framework::master_test_suite().argv[1]);
+
+ view.connect(view.engine(), SIGNAL(quit()), &app, SLOT(quit()));
+ view.setSource(QUrl::fromLocalFile(fileName));
+
+ QTimer::singleShot(2000, &app, SLOT(quit())); // Auto-close window
+
+ if (QGuiApplication::platformName() == QLatin1String("qnx") ||
+ QGuiApplication::platformName() == QLatin1String("eglfs")) {
+ view.setResizeMode(QQuickView::SizeRootObjectToView);
+ view.showFullScreen();
+ } else {
+ view.show();
+ }
+ BOOST_CHECK_EQUAL(app.exec(), 0);
+}
diff --git a/src/boost/tools/build/test/qt5/qtquick.qml b/src/boost/tools/build/test/qt5/qtquick.qml
new file mode 100644
index 000000000..26b23eb2a
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtquick.qml
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+import QtQuick 2.0
+
+Rectangle {
+ id: page
+ width: 400; height: 200
+ color: "#d6d6d6"
+ Text {
+ id: helloText
+ text: "B2 built!"
+ color: "darkgray"
+ anchors.horizontalCenter: page.horizontalCenter
+ anchors.verticalCenter: page.verticalCenter
+ font.pointSize: 30; font.italic: true ; font.bold: true
+ }
+}
diff --git a/src/boost/tools/build/test/qt5/qtscript.cpp b/src/boost/tools/build/test/qt5/qtscript.cpp
new file mode 100644
index 000000000..d48c073eb
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtscript.cpp
@@ -0,0 +1,37 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScript
+
+#include <QScriptEngine>
+
+#include <QCoreApplication>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCRIPT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( script )
+{
+ QCoreApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QScriptEngine myEngine;
+ QScriptValue three = myEngine.evaluate("1 + 2");
+
+ BOOST_CHECK_EQUAL(three.toNumber(), 3);
+ BOOST_CHECK_EQUAL(three.toString(), QLatin1String("3"));
+}
diff --git a/src/boost/tools/build/test/qt5/qtscripttools.cpp b/src/boost/tools/build/test/qt5/qtscripttools.cpp
new file mode 100644
index 000000000..002056a0f
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtscripttools.cpp
@@ -0,0 +1,47 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScriptTools
+
+#include <QScriptEngine>
+
+#include <QScriptEngineDebugger>
+
+#include <QCoreApplication>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+namespace utf = boost::unit_test::framework;
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCRIPTTOOLS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( script )
+{
+ QCoreApplication app(utf::master_test_suite().argc,
+ utf::master_test_suite().argv);
+
+ QScriptEngine myEngine;
+ QScriptValue three = myEngine.evaluate("1 + 2");
+
+ QScriptEngineDebugger debugger;
+ debugger.attachTo(&myEngine);
+
+ BOOST_CHECK_EQUAL(three.toNumber(), 3);
+ BOOST_CHECK_EQUAL(three.toString(), QLatin1String("3"));
+
+ debugger.detach();
+}
diff --git a/src/boost/tools/build/test/qt5/qtscxml.cpp b/src/boost/tools/build/test/qt5/qtscxml.cpp
new file mode 100644
index 000000000..9e423a18a
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtscxml.cpp
@@ -0,0 +1,33 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScxml
+
+#include <QtScxml>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCXML_LIB), true);
+}
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+/*!
+ */
+BOOST_AUTO_TEST_CASE( scxml )
+{
+ QString sessionId = QScxmlStateMachine::generateSessionId(QStringLiteral("dummy"));
+
+ BOOST_TEST(sessionId.isEmpty() == false);
+ BOOST_TEST(sessionId == QString{"dummy1"});
+}
diff --git a/src/boost/tools/build/test/qt5/qtserialbus.cpp b/src/boost/tools/build/test/qt5/qtserialbus.cpp
new file mode 100644
index 000000000..5849351d8
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtserialbus.cpp
@@ -0,0 +1,25 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSerialBus
+
+#include <QtSerialBus>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SERIALBUS_LIB), true);
+}
+
+/*!
+ Create a CAN bus instance.
+ */
+BOOST_AUTO_TEST_CASE( serialBus )
+{
+ auto canbus = QCanBus::instance();
+ Q_UNUSED(canbus);
+}
diff --git a/src/boost/tools/build/test/qt5/qtserialport.cpp b/src/boost/tools/build/test/qt5/qtserialport.cpp
new file mode 100644
index 000000000..fd24ed92b
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtserialport.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSerialPort
+
+#include <QtSerialPort>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SERIALPORT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( serialport )
+{
+ QSerialPort serialPort;
+ serialPort.setPortName(QStringLiteral("test serialport"));
+}
diff --git a/src/boost/tools/build/test/qt5/qtsql.cpp b/src/boost/tools/build/test/qt5/qtsql.cpp
new file mode 100644
index 000000000..127c5a3f4
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtsql.cpp
@@ -0,0 +1,37 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSql
+
+#include <QSqlDatabase>
+
+#include <QTextStream>
+#include <QStringList>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SQL_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( drivers )
+{
+ QTextStream stream(stdout, QIODevice::WriteOnly);
+
+ Q_FOREACH(QString it, QSqlDatabase::drivers())
+ {
+ stream << it << endl;
+ }
+}
+
+BOOST_AUTO_TEST_CASE( construct )
+{
+ QSqlDatabase database;
+ BOOST_CHECK_EQUAL(database.isOpen(), false);
+}
diff --git a/src/boost/tools/build/test/qt5/qtsvg.cpp b/src/boost/tools/build/test/qt5/qtsvg.cpp
new file mode 100644
index 000000000..ccfd6b4d1
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtsvg.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSvg
+
+#include <QtSvg>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SVG_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( generator_construct)
+{
+ QSvgGenerator generator;
+}
diff --git a/src/boost/tools/build/test/qt5/qttest.cpp b/src/boost/tools/build/test/qt5/qttest.cpp
new file mode 100644
index 000000000..ddc8f686a
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qttest.cpp
@@ -0,0 +1,30 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtTest>
+
+class QtTest: public QObject
+{
+ /*!
+ Test whether the moc gets the #define.
+ */
+#if defined(TEST_MOCK)
+ Q_OBJECT
+#endif
+
+private Q_SLOTS:
+ void toUpper();
+};
+
+void
+QtTest::toUpper()
+{
+ QString str = "Hello";
+ QCOMPARE(str.toUpper(), QString("HELLO"));
+}
+
+QTEST_MAIN(QtTest)
+#include "qttest.moc"
+
diff --git a/src/boost/tools/build/test/qt5/qtwebchannel.cpp b/src/boost/tools/build/test/qt5/qtwebchannel.cpp
new file mode 100644
index 000000000..e4f05b7f2
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebchannel.cpp
@@ -0,0 +1,29 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebChannel
+
+#include <QtWebChannel>
+
+#include <QGuiApplication>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBCHANNEL_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( webchannel )
+{
+ QGuiApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+
+ QWebChannel channel;
+ QObject dummy;
+ channel.registerObject(QStringLiteral("dummy"), &dummy);
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebengine.cpp b/src/boost/tools/build/test/qt5/qtwebengine.cpp
new file mode 100644
index 000000000..d4c1b0726
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebengine.cpp
@@ -0,0 +1,30 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebEngine
+
+#include <QtWebEngine>
+#include <QGuiApplication>
+
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBENGINE_LIB), true);
+}
+
+/*!
+ Just call the global initialization function
+ */
+BOOST_AUTO_TEST_CASE( webengine )
+{
+ QGuiApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+
+ QtWebEngine::initialize();
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebenginewidgets.cpp b/src/boost/tools/build/test/qt5/qtwebenginewidgets.cpp
new file mode 100644
index 000000000..f0c3c2d8b
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebenginewidgets.cpp
@@ -0,0 +1,40 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebEngineWidgets
+
+#include <QtWebEngineWidgets>
+
+#include <QWebEngineProfile>
+#include <QWebEngineSettings>
+#include <QWebEngineScript>
+
+#include <QApplication>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WIDGETS_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBENGINE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBENGINECORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBENGINEWIDGETS_LIB), true);
+}
+
+/*!
+ Also tests the core library
+ */
+BOOST_AUTO_TEST_CASE( webengine_widgets )
+{
+ QApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+
+ QWebEngineSettings *defaultSettings = QWebEngineSettings::globalSettings();
+ QWebEngineProfile *defaultProfile = QWebEngineProfile::defaultProfile();
+
+ defaultSettings->setAttribute(QWebEngineSettings::FullScreenSupportEnabled, true);
+ defaultProfile->setPersistentCookiesPolicy(QWebEngineProfile::NoPersistentCookies);
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebkit.cpp b/src/boost/tools/build/test/qt5/qtwebkit.cpp
new file mode 100644
index 000000000..aa6fdc92f
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebkit.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebKit
+
+#include <QWebSettings>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBKIT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( webkit )
+{
+ BOOST_CHECK(QWebSettings::globalSettings());
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebkitwidgets.cpp b/src/boost/tools/build/test/qt5/qtwebkitwidgets.cpp
new file mode 100644
index 000000000..52c05c9a9
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebkitwidgets.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebKitWidgets
+
+#include <QWebPage>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBKITWIDGETS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( webkit )
+{
+ QWebPage page;
+ BOOST_CHECK_EQUAL(page.isModified(), false);
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebsocket.cpp b/src/boost/tools/build/test/qt5/qtwebsocket.cpp
new file mode 100644
index 000000000..f46aa58b3
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebsocket.cpp
@@ -0,0 +1,26 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebSockets
+
+#include <QtWebSockets>
+
+#include <QCoreApplication>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBSOCKETS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( websocket )
+{
+ QCoreApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+
+ QWebSocket socket;
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebsockets.cpp b/src/boost/tools/build/test/qt5/qtwebsockets.cpp
new file mode 100644
index 000000000..9829ce916
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebsockets.cpp
@@ -0,0 +1,24 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebSockets
+
+#include <QtWebSockets>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBSOCKETS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( websocket )
+{
+ QWebSocket socket;
+ socket.setPauseMode(QAbstractSocket::PauseNever);
+
+ BOOST_TEST(socket.isValid() == false);
+}
diff --git a/src/boost/tools/build/test/qt5/qtwebview.cpp b/src/boost/tools/build/test/qt5/qtwebview.cpp
new file mode 100644
index 000000000..dfd130f37
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwebview.cpp
@@ -0,0 +1,31 @@
+// (c) Copyright Juergen Hunold 2016
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebView
+
+#include <QtWebView>
+
+#include <QGuiApplication>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBENGINE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBENGINECORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBVIEW_LIB), true);
+}
+
+/*!
+ Just call the global initialization function
+ */
+BOOST_AUTO_TEST_CASE( webview )
+{
+ QGuiApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+
+ QtWebView::initialize();
+}
diff --git a/src/boost/tools/build/test/qt5/qtwidgets.cpp b/src/boost/tools/build/test/qt5/qtwidgets.cpp
new file mode 100644
index 000000000..b868240a5
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtwidgets.cpp
@@ -0,0 +1,43 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtGui
+
+#include <QtWidgets/QApplication>
+
+#include <boost/test/unit_test.hpp>
+
+struct Fixture
+{
+ Fixture()
+ : application(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv,
+ false)
+ {
+ BOOST_TEST_MESSAGE( "setup QApplication fixture" );
+ }
+
+ ~Fixture()
+ {
+ BOOST_TEST_MESSAGE( "teardown QApplication fixture" );
+ }
+
+ QApplication application;
+};
+
+BOOST_GLOBAL_FIXTURE( Fixture );
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WIDGETS_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qtgui_test)
+{
+ BOOST_CHECK_EQUAL(true, true);
+}
diff --git a/src/boost/tools/build/test/qt5/qtxml.cpp b/src/boost/tools/build/test/qt5/qtxml.cpp
new file mode 100644
index 000000000..3df6dd2c1
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtxml.cpp
@@ -0,0 +1,29 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtXml
+
+#include <QtXml>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( reader_construct)
+{
+ QXmlStreamReader reader;
+ BOOST_CHECK_EQUAL(reader.atEnd(), false);
+}
+
+BOOST_AUTO_TEST_CASE( writer_construct)
+{
+ QXmlStreamWriter writer;
+ BOOST_CHECK_EQUAL(writer.device(), static_cast<QIODevice*>(0));
+}
+
diff --git a/src/boost/tools/build/test/qt5/qtxmlpatterns.cpp b/src/boost/tools/build/test/qt5/qtxmlpatterns.cpp
new file mode 100644
index 000000000..d87e3d3fe
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/qtxmlpatterns.cpp
@@ -0,0 +1,76 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtXmlPatterns
+
+#include <QXmlQuery>
+#include <QXmlSerializer>
+
+#include <QCoreApplication>
+#include <QString>
+#include <QTextStream>
+#include <QBuffer>
+
+#include <boost/test/unit_test.hpp>
+
+
+struct Fixture
+{
+ Fixture()
+ : application(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv)
+ {
+ BOOST_TEST_MESSAGE( "setup QCoreApplication fixture" );
+ }
+
+ ~Fixture()
+ {
+ BOOST_TEST_MESSAGE( "teardown QCoreApplication fixture" );
+ }
+
+ QCoreApplication application;
+};
+
+BOOST_GLOBAL_FIXTURE( Fixture );
+
+QByteArray doc("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<html xmlns=\"http://www.w3.org/1999/xhtml/\" xml:lang=\"en\" lang=\"en\">"
+" <head>"
+" <title>Global variables report for globals.gccxml</title>"
+" </head>"
+"<body><p>Some Test text</p></body></html>");
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XMLPATTERNS_LIB), true);
+
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), false);
+}
+
+BOOST_AUTO_TEST_CASE( extract )
+{
+
+ QBuffer buffer(&doc); // This is a QIODevice.
+ buffer.open(QIODevice::ReadOnly);
+ QXmlQuery query;
+ query.bindVariable("myDocument", &buffer);
+ query.setQuery("declare variable $myDocument external;"
+ "doc($myDocument)");///p[1]");
+
+ BOOST_CHECK_EQUAL(query.isValid(), true);
+
+ QByteArray result;
+ QBuffer out(&result);
+ out.open(QIODevice::WriteOnly);
+
+ QXmlSerializer serializer(query, &out);
+ BOOST_CHECK_EQUAL(query.evaluateTo(&serializer), true);
+
+ QTextStream stream(stdout);
+ BOOST_CHECK_EQUAL(result.isEmpty(), false);
+ stream << "hallo" << result << endl;
+}
+
diff --git a/src/boost/tools/build/test/qt5/rcc.cpp b/src/boost/tools/build/test/qt5/rcc.cpp
new file mode 100644
index 000000000..cae553bb1
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/rcc.cpp
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+std::ostream& operator<<(std::ostream& out, QString const& text)
+{
+ out << text.toUtf8().constData();
+ return out;
+}
+
+BOOST_AUTO_TEST_CASE (check_exists)
+{
+ BOOST_CHECK(QFile::exists(":/test/rcc.cpp"));
+}
diff --git a/src/boost/tools/build/test/qt5/rcc.qrc b/src/boost/tools/build/test/qt5/rcc.qrc
new file mode 100644
index 000000000..13ca38a5d
--- /dev/null
+++ b/src/boost/tools/build/test/qt5/rcc.qrc
@@ -0,0 +1,5 @@
+<!DOCTYPE RCC><RCC version="1.0">
+ <qresource prefix="/test/">
+ <file>rcc.cpp</file>
+ </qresource>
+</RCC>
diff --git a/src/boost/tools/build/test/railsys.py b/src/boost/tools/build/test/railsys.py
new file mode 100644
index 000000000..7b7f8bd0b
--- /dev/null
+++ b/src/boost/tools/build/test/railsys.py
@@ -0,0 +1,14 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.set_tree("railsys")
+t.run_build_system("--v2", subdir="program")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/railsys/libx/include/test_libx.h b/src/boost/tools/build/test/railsys/libx/include/test_libx.h
new file mode 100644
index 000000000..fe573fc16
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/libx/include/test_libx.h
@@ -0,0 +1,25 @@
+// Copyright (c) 2003 Institute of Transport,
+// Railway Construction and Operation,
+// University of Hanover, Germany
+//
+// Use, modification and distribution are subject to the
+// Boost Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#ifdef _WIN32
+#ifdef LIBX_SOURCE
+__declspec(dllexport)
+#else
+__declspec(dllimport)
+#endif
+#endif
+class TestLibX
+{
+public:
+
+ TestLibX();
+
+ // Needed to suppress 'unused variable' warning
+ // in some cases.
+ void do_something() {}
+};
diff --git a/src/boost/tools/build/test/railsys/libx/jamroot.jam b/src/boost/tools/build/test/railsys/libx/jamroot.jam
new file mode 100644
index 000000000..d09982dd1
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/libx/jamroot.jam
@@ -0,0 +1,13 @@
+# Copyright (c) 2002 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Specify that Qt3 should be used. QTDIR gives the installation prefix.
+using qt3 ;
+
+
diff --git a/src/boost/tools/build/test/railsys/libx/src/jamfile.jam b/src/boost/tools/build/test/railsys/libx/src/jamfile.jam
new file mode 100644
index 000000000..639e0cc90
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/libx/src/jamfile.jam
@@ -0,0 +1,19 @@
+# Copyright (c) 2003 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+#
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+project libx
+ : requirements
+ <include>../include
+ : usage-requirements
+ <include>../include
+ ;
+
+
+lib libx : test_libx.cpp ;
diff --git a/src/boost/tools/build/test/railsys/libx/src/test_libx.cpp b/src/boost/tools/build/test/railsys/libx/src/test_libx.cpp
new file mode 100644
index 000000000..be1fbc27f
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/libx/src/test_libx.cpp
@@ -0,0 +1,15 @@
+// Copyright (c) 2003 Institute of Transport,
+// Railway Construction and Operation,
+// University of Hanover, Germany
+//
+// Use, modification and distribution are subject to the
+// Boost Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+
+#define LIBX_SOURCE
+#include <test_libx.h>
+
+TestLibX::TestLibX()
+{
+}
diff --git a/src/boost/tools/build/test/railsys/program/include/test_a.h b/src/boost/tools/build/test/railsys/program/include/test_a.h
new file mode 100644
index 000000000..8002859e1
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/include/test_a.h
@@ -0,0 +1,22 @@
+// Copyright (c) 2003 Institute of Transport,
+// Railway Construction and Operation,
+// University of Hanover, Germany
+//
+// Use, modification and distribution are subject to the
+// Boost Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+
+#include <qobject.h>
+
+class TestA : public QObject
+{
+ Q_OBJECT
+
+public:
+
+ TestA();
+
+ // Needed to suppress 'unused variable' warning.
+ void do_something() { }
+};
diff --git a/src/boost/tools/build/test/railsys/program/jamfile.jam b/src/boost/tools/build/test/railsys/program/jamfile.jam
new file mode 100644
index 000000000..9e36f408f
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/jamfile.jam
@@ -0,0 +1,45 @@
+# ================================================================
+#
+# Railsys
+# --------------
+#
+# Copyright (c) 2002 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+# 02/21/02! Jürgen Hunold
+#
+# $Id$
+#
+# ================================================================
+
+local BOOST_ROOT = [ modules.peek : BOOST_ROOT ] ;
+
+use-project /libx : ../libx/src ;
+
+project program
+ : requirements
+ <include>$(BOOST_ROOT)
+ <threading>multi
+ <library>/qt3//qt
+ <hardcode-dll-paths>true
+ <stdlib>stlport
+ <use>/libx
+ <library>/libx//libx
+
+ : usage-requirements
+ <include>$(BOOST_ROOT)
+ :
+ default-build release
+ <threading>multi
+ <library>/qt3//qt
+ <hardcode-dll-paths>true
+ ;
+
+build-project main ;
+
diff --git a/src/boost/tools/build/test/railsys/program/jamroot.jam b/src/boost/tools/build/test/railsys/program/jamroot.jam
new file mode 100644
index 000000000..23d42195f
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/jamroot.jam
@@ -0,0 +1,14 @@
+# Copyright (c) 2002 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Specify that Qt3 should be used. QTDIR gives the installation prefix.
+using qt3 ;
+
+# Not that good, but sufficient for testing
+using stlport : : /path/to/stlport ;
diff --git a/src/boost/tools/build/test/railsys/program/liba/jamfile.jam b/src/boost/tools/build/test/railsys/program/liba/jamfile.jam
new file mode 100644
index 000000000..f74311d0d
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/liba/jamfile.jam
@@ -0,0 +1,14 @@
+# Copyright (c) 2003 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+project liba ;
+
+lib liba : test ../include/test_a.h ;
+
+obj test : test_a.cpp : <optimization>off ;
diff --git a/src/boost/tools/build/test/railsys/program/liba/test_a.cpp b/src/boost/tools/build/test/railsys/program/liba/test_a.cpp
new file mode 100644
index 000000000..f9e538857
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/liba/test_a.cpp
@@ -0,0 +1,17 @@
+// Copyright (c) 2003 Institute of Transport,
+// Railway Construction and Operation,
+// University of Hanover, Germany
+//
+// Use, modification and distribution are subject to the
+// Boost Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "../include/test_a.h"
+
+#include <test_libx.h>
+
+TestA::TestA()
+{
+ TestLibX aTestLibX;
+ aTestLibX.do_something();
+}
diff --git a/src/boost/tools/build/test/railsys/program/main/jamfile.jam b/src/boost/tools/build/test/railsys/program/main/jamfile.jam
new file mode 100644
index 000000000..095978eaf
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/main/jamfile.jam
@@ -0,0 +1,12 @@
+# Copyright (c) 2002 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+project main ;
+
+exe test_a : main.cpp ../liba//liba /libx ;
diff --git a/src/boost/tools/build/test/railsys/program/main/main.cpp b/src/boost/tools/build/test/railsys/program/main/main.cpp
new file mode 100644
index 000000000..3f13f4bfc
--- /dev/null
+++ b/src/boost/tools/build/test/railsys/program/main/main.cpp
@@ -0,0 +1,19 @@
+// Copyright (c) 2002 Institute of Transport,
+// Railway Construction and Operation,
+// University of Hanover, Germany
+//
+// Use, modification and distribution are subject to the
+// Boost Software License, Version 1.0. (See accompanying file
+// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#include "../include/test_a.h"
+
+#include <test_libx.h>
+
+int main()
+{
+ TestLibX stTestLibX;
+ TestA stTestA;
+
+ stTestLibX.do_something();
+}
diff --git a/src/boost/tools/build/test/readme.txt b/src/boost/tools/build/test/readme.txt
new file mode 100644
index 000000000..48459f805
--- /dev/null
+++ b/src/boost/tools/build/test/readme.txt
@@ -0,0 +1,6 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+See test_system.html for detailed information on using the Boost.Build test
+system.
diff --git a/src/boost/tools/build/test/rebuilds.py b/src/boost/tools/build/test/rebuilds.py
new file mode 100644
index 000000000..8242e3ec0
--- /dev/null
+++ b/src/boost/tools/build/test/rebuilds.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+
+# Copyright 2005 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+def wait_for_bar(t):
+ """
+ Wait so that the test system correctly recognizes the 'bar' file as touched
+ after the next build run. Without the wait, the next build run may rebuild
+ the 'bar' file with the new and the old file modification timestamps so
+ close to each other that, depending on the currently supported file
+ modification timestamp resolution, the testing system detects 'no change'.
+
+ """
+ t.wait_for_time_change("bar", touch=False)
+
+
+t = BoostBuild.Tester(["-ffile.jam", "-d+3", "-d+12", "-d+13"],
+ pass_toolset=0)
+
+t.write("file.jam", """\
+rule make
+{
+ DEPENDS $(<) : $(>) ;
+ DEPENDS all : $(<) ;
+}
+actions make
+{
+ echo "******" making $(<) from $(>) "******"
+ echo made from $(>) > $(<)
+}
+
+make aux1 : bar ;
+make foo : bar ;
+REBUILDS foo : bar ;
+make bar : baz ;
+make aux2 : bar ;
+""")
+
+t.write("baz", "nothing")
+
+t.run_build_system(["bar"])
+t.expect_addition("bar")
+t.expect_nothing_more()
+
+wait_for_bar(t)
+t.run_build_system(["foo"])
+t.expect_touch("bar")
+t.expect_addition("foo")
+t.expect_nothing_more()
+
+t.run_build_system()
+t.expect_addition(["aux1", "aux2"])
+t.expect_nothing_more()
+
+t.touch("bar")
+wait_for_bar(t)
+t.run_build_system()
+t.expect_touch(["foo", "bar", "aux1", "aux2"])
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/relative_sources.py b/src/boost/tools/build/test/relative_sources.py
new file mode 100644
index 000000000..29f590fed
--- /dev/null
+++ b/src/boost/tools/build/test/relative_sources.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can specify sources using relative names.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Test that relative path to source, 'src', is preserved.
+t.write("jamroot.jam", "exe a : src/a.cpp ;")
+t.write("src/a.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/src/a.obj")
+
+# Test that the relative path to source is preserved
+# when using 'glob'.
+t.rm("bin")
+t.write("jamroot.jam", "exe a : [ glob src/*.cpp ] ;")
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/src/a.obj")
+
+
+# Test that relative path with ".." is *not* added to
+# target path.
+t.rm(".")
+t.write("jamroot.jam", "")
+t.write("a.cpp", "int main() { return 0; }\n")
+t.write("build/Jamfile", "exe a : ../a.cpp ; ")
+t.run_build_system(subdir="build")
+t.expect_addition("build/bin/$toolset/debug*/a.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/remove_requirement.py b/src/boost/tools/build/test/remove_requirement.py
new file mode 100644
index 000000000..9655ad3ae
--- /dev/null
+++ b/src/boost/tools/build/test/remove_requirement.py
@@ -0,0 +1,91 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+t.write("jamroot.jam", """
+project : requirements <threading>multi <variant>debug:<link>static ;
+# Force link to be relevant
+project : requirements <link>shared:<define>TEST_DLL ;
+
+build-project sub ;
+build-project sub2 ;
+build-project sub3 ;
+build-project sub4 ;
+""")
+
+t.write("sub/jamfile.jam", """
+exe hello : hello.cpp : -<threading>multi ;
+""")
+
+t.write("sub/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub2/jamfile.jam", """
+project : requirements -<threading>multi ;
+exe hello : hello.cpp ;
+""")
+
+t.write("sub2/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub3/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub3/jamfile.jam", """
+exe hello : hello.cpp : "-<variant>debug:<link>static" ;
+""")
+
+t.write("sub4/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub4/jamfile.jam", """
+project : requirements "-<variant>debug:<link>static" ;
+exe hello : hello.cpp ;
+""")
+
+t.run_build_system()
+
+t.expect_addition("sub/bin/$toolset/debug*/link-static*/hello.exe")
+t.expect_addition("sub2/bin/$toolset/debug*/link-static*/hello.exe")
+t.expect_addition("sub3/bin/$toolset/debug*/threading-multi*/hello.exe")
+t.expect_addition("sub4/bin/$toolset/debug*/threading-multi*/hello.exe")
+
+t.rm(".")
+
+# Now test that path requirements can be removed as well.
+t.write("jamroot.jam", """
+build-project sub ;
+""")
+
+t.write("sub/jamfile.jam", """
+project : requirements <include>broken ;
+exe hello : hello.cpp : -<include>broken ;
+""")
+
+t.write("sub/hello.cpp", """
+#include "math.h"
+int main() {}
+""")
+
+t.write("sub/broken/math.h", """
+Broken
+""")
+
+
+t.run_build_system()
+
+t.expect_addition("sub/bin/$toolset/debug*/hello.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/rescan_header.py b/src/boost/tools/build/test/rescan_header.py
new file mode 100755
index 000000000..1257a223c
--- /dev/null
+++ b/src/boost/tools/build/test/rescan_header.py
@@ -0,0 +1,265 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Test a header loop that depends on (but does not contain) a generated header.
+t.write("test.cpp", '#include "header1.h"\n')
+
+t.write("header1.h", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.h", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header1.h"
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", "/* empty file */\n")
+
+t.write("jamroot.jam", """\
+import common ;
+make header3.h : header3.in : @common.copy ;
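+# <implicit-dependency> declares that sources may include the generated
+# header3.h, so it gets created before test.cpp is scanned and compiled.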
+obj test : test.cpp : <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2"])
+t.expect_addition("bin/header3.h")
+t.expect_addition("bin/$toolset/debug*/test.obj")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test a linear sequence of generated headers.
+t.write("test.cpp", '#include "header1.h"\n')
+
+t.write("header1.in", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", "/* empty file */\n")
+
+t.write("jamroot.jam", """\
+import common ;
+make header1.h : header1.in : @common.copy ;
+make header2.h : header2.in : @common.copy ;
+make header3.h : header3.in : @common.copy ;
+obj test : test.cpp :
+ <implicit-dependency>header1.h
+ <implicit-dependency>header2.h
+ <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/header1.h")
+t.expect_addition("bin/header2.h")
+t.expect_addition("bin/header3.h")
+t.expect_addition("bin/$toolset/debug*/test.obj")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test a loop in generated headers.
+t.write("test.cpp", '#include "header1.h"\n')
+
+t.write("header1.in", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header1.h"
+#endif
+""")
+
+t.write("jamroot.jam", """\
+import common ;
+
+actions copy {
+ sleep 1
+ cp $(>) $(<)
+}
+
+make header1.h : header1.in : @common.copy ;
+make header2.h : header2.in : @common.copy ;
+make header3.h : header3.in : @common.copy ;
+obj test : test.cpp :
+ <implicit-dependency>header1.h
+ <implicit-dependency>header2.h
+ <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/header1.h")
+t.expect_addition("bin/header2.h")
+t.expect_addition("bin/header3.h")
+t.expect_addition("bin/$toolset/debug*/test.obj")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test that all the dependencies of a loop are updated before any of the
+# dependents.
+t.write("test1.cpp", '#include "header1.h"\n')
+
+t.write("test2.cpp", """\
+#include "header2.h"
+int main() {}
+""")
+
+t.write("header1.h", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.h", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header1.h"
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", "\n")
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("jamroot.jam", """\
+import common ;
+import os ;
+
+if [ os.name ] = NT
+{
+ SLEEP = call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+rule copy { common.copy $(<) : $(>) ; }
+actions copy { $(SLEEP) 1 }
+
+make header3.h : header3.in : @copy ;
+exe test : test2.cpp test1.cpp : <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/header3.h")
+t.expect_addition("bin/$toolset/debug*/test1.obj")
+t.expect_addition("bin/$toolset/debug*/test2.obj")
+t.expect_addition("bin/$toolset/debug*/test.exe")
+t.expect_nothing_more()
+
+t.touch("header3.in")
+t.run_build_system(["-j2", "test"])
+t.expect_touch("bin/header3.h")
+t.expect_touch("bin/$toolset/debug*/test1.obj")
+t.expect_touch("bin/$toolset/debug*/test2.obj")
+t.expect_touch("bin/$toolset/debug*/test.exe")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test a loop that includes a generated header
+t.write("test1.cpp", '#include "header1.h"\n')
+t.write("test2.cpp", """\
+#include "header2.h"
+int main() {}
+""")
+
+t.write("header1.h", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.h", """\
+#ifndef HEADER3_H
+#define HEADER3_H
+#include "header1.h"
+#endif
+""")
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("jamroot.jam", """\
+import common ;
+import os ;
+
+if [ os.name ] = NT
+{
+ SLEEP = call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+rule copy { common.copy $(<) : $(>) ; }
+actions copy { $(SLEEP) 1 }
+
+make header2.h : header2.in : @copy ;
+exe test : test2.cpp test1.cpp : <implicit-dependency>header2.h <include>. ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/header2.h")
+t.expect_addition("bin/$toolset/debug*/test1.obj")
+t.expect_addition("bin/$toolset/debug*/test2.obj")
+t.expect_addition("bin/$toolset/debug*/test.exe")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/resolution.py b/src/boost/tools/build/test/resolution.py
new file mode 100644
index 000000000..9cde218fa
--- /dev/null
+++ b/src/boost/tools/build/test/resolution.py
@@ -0,0 +1,35 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests for the target id resolution process.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+# Create the needed files
+t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+# This should use the 'hello' target, even if there is a 'hello' file in the
+# current dir.
+install s : hello : <location>. ;
+""")
+
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug*/hello.obj")
+
+t.touch("hello.cpp")
+t.run_build_system(["s"])
+# If 'hello' in the 's' target resolved to file in the current dir, nothing
+# will be rebuilt.
+t.expect_touch("bin/$toolset/debug*/hello.obj")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/results-python.txt b/src/boost/tools/build/test/results-python.txt
new file mode 100644
index 000000000..83b351b2c
--- /dev/null
+++ b/src/boost/tools/build/test/results-python.txt
@@ -0,0 +1,132 @@
+Note: skipping extra tests
+unit_tests : FAILED
+module_actions : PASSED
+startup_v2 : PASSED
+core_d12 : PASSED
+core_typecheck : PASSED
+core_delete_module : PASSED
+core_language : PASSED
+core_arguments : PASSED
+core_varnames : PASSED
+core_import_module : PASSED
+absolute_sources : PASSED
+alias : PASSED
+alternatives : PASSED
+bad_dirname : PASSED
+build_dir : PASSED
+build_file : PASSED
+build_no : PASSED
+builtin_echo : PASSED
+builtin_exit : PASSED
+builtin_split_by_characters : FAILED
+c_file : PASSED
+chain : PASSED
+clean : PASSED
+composite : PASSED
+conditionals : PASSED
+conditionals2 : PASSED
+conditionals3 : PASSED
+conditionals_multiple : PASSED
+configuration : FAILED
+copy_time : PASSED
+core_action_output : PASSED
+core_action_status : PASSED
+core_actions_quietly : PASSED
+core_at_file : PASSED
+core_bindrule : PASSED
+core_multifile_actions : PASSED
+core_nt_cmd_line : PASSED
+core_option_d2 : PASSED
+core_option_l : PASSED
+core_option_n : PASSED
+core_parallel_actions : PASSED
+core_parallel_multifile_actions_1 : PASSED
+core_parallel_multifile_actions_2 : PASSED
+core_source_line_tracking : PASSED
+core_update_now : PASSED
+core_variables_in_actions : PASSED
+custom_generator : PASSED
+default_build : PASSED
+default_features : PASSED
+dependency_property : PASSED
+dependency_test : FAILED
+direct_request_test : PASSED
+disambiguation : PASSED
+dll_path : PASSED
+double_loading : PASSED
+duplicate : PASSED
+example_libraries : PASSED
+example_make : PASSED
+exit_status : PASSED
+expansion : PASSED
+explicit : PASSED
+free_features_request : PASSED
+generator_selection : FAILED
+generators_test : FAILED
+implicit_dependency : PASSED
+indirect_conditional : FAILED
+inherit_toolset : FAILED
+inherited_dependency : PASSED
+inline : PASSED
+lib_source_property : PASSED
+library_chain : PASSED
+library_property : PASSED
+link : FAILED
+load_order : FAILED
+loop : PASSED
+make_rule : PASSED
+message : FAILED
+ndebug : PASSED
+no_type : PASSED
+notfile : PASSED
+ordered_include : PASSED
+out_of_tree : PASSED
+path_features : FAILED
+prebuilt : PASSED
+print : FAILED
+project_dependencies : PASSED
+project_glob : PASSED
+project_id : FAILED
+project_root_constants : PASSED
+project_root_rule : PASSED
+project_test3 : FAILED
+project_test4 : FAILED
+property_expansion : PASSED
+rebuilds : PASSED
+regression : PASSED
+relative_sources : PASSED
+remove_requirement : PASSED
+rescan_header : PASSED
+resolution : PASSED
+scanner_causing_rebuilds : FAILED
+searched_lib : PASSED
+skipping : PASSED
+sort_rule : PASSED
+source_locations : PASSED
+source_order : FAILED
+space_in_path : PASSED
+stage : PASSED
+standalone : PASSED
+static_and_shared_library : PASSED
+suffix : PASSED
+tag : PASSED
+test_result_dumping : PASSED
+test_rc : FAILED
+testing_support : PASSED
+timedata : FAILED
+unit_test : PASSED
+unused : FAILED
+use_requirements : PASSED
+using : PASSED
+wrapper : PASSED
+wrong_project : PASSED
+zlib : PASSED
+symlink : PASSED
+library_order : FAILED
+gcc_runtime : FAILED
+pch : PASSED
+
+ === Test summary ===
+ PASS: 103
+ FAIL: 23
+
diff --git a/src/boost/tools/build/test/rootless.py b/src/boost/tools/build/test/rootless.py
new file mode 100644
index 000000000..3dc2de5bc
--- /dev/null
+++ b/src/boost/tools/build/test/rootless.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2018 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(translate_suffixes=0)
+
+t.set_tree("rootless/test1")
+t.run_build_system(status=1)
+t.expect_output_lines("error: no Jamfile in current directory*")
+
+t.set_tree("rootless/test1")
+t.run_build_system(subdir="sub_root")
+t.expect_addition("sub_root/bin/a.txt")
+
+t.set_tree("rootless/test1")
+t.run_build_system(subdir="sub_root", extra_args=["--build-dir=../bin"])
+t.expect_output_lines("warning: the --build-dir option will be ignored")
+
+t.set_tree("rootless/test2")
+t.run_build_system(subdir="sub_root", extra_args=["--build-dir=../bin"])
+t.expect_addition("bin/foo/a.txt")
+
+t.set_tree("rootless/test3")
+t.run_build_system()
+
+t.set_tree("rootless/test3")
+t.run_build_system(subdir="sub/inner")
+t.expect_addition("bins/sub/inner/a.txt")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/rootless/test1/sub_root/a.cpp b/src/boost/tools/build/test/rootless/test1/sub_root/a.cpp
new file mode 100644
index 000000000..412a2bb4f
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test1/sub_root/a.cpp
@@ -0,0 +1,6 @@
+// Copyright 2018 Rene Rivera
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+int main() {}
diff --git a/src/boost/tools/build/test/rootless/test1/sub_root/jamfile.jam b/src/boost/tools/build/test/rootless/test1/sub_root/jamfile.jam
new file mode 100644
index 000000000..0a17bc400
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test1/sub_root/jamfile.jam
@@ -0,0 +1,10 @@
+# Copyright 2018 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+actions foo
+{
+ echo "$(>)" > "$(<)"
+}
+make a.txt : a.cpp : @foo ;
diff --git a/src/boost/tools/build/test/rootless/test2/sub_root/a.cpp b/src/boost/tools/build/test/rootless/test2/sub_root/a.cpp
new file mode 100644
index 000000000..412a2bb4f
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test2/sub_root/a.cpp
@@ -0,0 +1,6 @@
+// Copyright 2018 Rene Rivera
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+int main() {}
diff --git a/src/boost/tools/build/test/rootless/test2/sub_root/jamfile.jam b/src/boost/tools/build/test/rootless/test2/sub_root/jamfile.jam
new file mode 100644
index 000000000..62416c0b7
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test2/sub_root/jamfile.jam
@@ -0,0 +1,13 @@
+# Copyright 2018 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+project foo ;
+
+actions foo
+{
+ echo "$(>)" > "$(<)"
+}
+
+make a.txt : a.cpp : @foo ;
diff --git a/src/boost/tools/build/test/rootless/test3/jamfile.jam b/src/boost/tools/build/test/rootless/test3/jamfile.jam
new file mode 100644
index 000000000..ab8da6a06
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test3/jamfile.jam
@@ -0,0 +1,6 @@
+# Copyright 2018 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+project root-foo : build-dir bins ;
diff --git a/src/boost/tools/build/test/rootless/test3/sub/inner/a.cpp b/src/boost/tools/build/test/rootless/test3/sub/inner/a.cpp
new file mode 100644
index 000000000..412a2bb4f
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test3/sub/inner/a.cpp
@@ -0,0 +1,6 @@
+// Copyright 2018 Rene Rivera
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+int main() {}
diff --git a/src/boost/tools/build/test/rootless/test3/sub/inner/jamfile.jam b/src/boost/tools/build/test/rootless/test3/sub/inner/jamfile.jam
new file mode 100644
index 000000000..6aeddab3b
--- /dev/null
+++ b/src/boost/tools/build/test/rootless/test3/sub/inner/jamfile.jam
@@ -0,0 +1,11 @@
+# Copyright 2018 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+actions foo
+{
+ echo "$(>)" > "$(<)"
+}
+
+make a.txt : a.cpp : @foo ;
diff --git a/src/boost/tools/build/test/scanner_causing_rebuilds.py b/src/boost/tools/build/test/scanner_causing_rebuilds.py
new file mode 100755
index 000000000..d1ff66bfd
--- /dev/null
+++ b/src/boost/tools/build/test/scanner_causing_rebuilds.py
@@ -0,0 +1,132 @@
+#!/usr/bin/python
+
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests for a bug causing Boost Build's scanner targets to be rebuilt
+# unnecessarily in the following scenario:
+# * We want to build target X requiring target A.
+# * We have a multi-file action generating targets A & B.
+# * Our action generates target B with a more recent timestamp than target A.
+# * Target A includes target B.
+# * Target A has a registered include scanner.
+# Now even if our targets A & B have already been built and are up-to-date
+# (e.g. in a state left by a previous successful build run), our scanner target
+# tasked with scanning target A will be marked for updating, thus causing any
+# targets depending on it to be updated/rebuilt as well.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("foo.jam", r"""
+import common ;
+import generators ;
+import modules ;
+import type ;
+import types/cpp ;
+
+type.register FOO : foo ;
+type.register BAR : bar ;
+generators.register-standard foo.foo : FOO : CPP BAR ;
+
+local rule sleep-cmd ( delay )
+{
+ if [ modules.peek : NT ]
+ {
+ return ping 127.0.0.1 -n $(delay) -w 1000 >NUL ;
+ }
+ else
+ {
+ return sleep $(delay) ;
+ }
+}
+
+.touch = [ common.file-creation-command ] ;
+.sleep = [ sleep-cmd 2 ] ;
+
+rule foo ( cpp bar : foo : properties * )
+{
+ # We add the INCLUDE relationship between our generated CPP & BAR targets
+ # explicitly instead of relying on Boost Jam's internal implementation
+ # detail - automatically adding such relationships between all files
+ # generated by the same action. This way our test will continue to function
+ # correctly even if the related Boost Jam implementation detail changes.
+ # Note that adding this relationship by adding an #include directive in our
+ # generated CPP file is not good enough as such a relationship would get
+ # added only after the scanner target's relationships have already been
+ # established and they (as affected by our initial INCLUDE relationship) are
+ # the original reason for this test failing.
+ INCLUDES $(cpp) : $(bar) ;
+}
+
+actions foo
+{
+ $(.touch) "$(<[1])"
+ $(.sleep)
+ $(.touch) "$(<[2])"
+}
+""")
+
+t.write(
+ 'foo.py',
+"""
+import os
+
+from b2.build import type as type_, generators
+from b2.tools import common
+from b2.manager import get_manager
+
+MANAGER = get_manager()
+ENGINE = MANAGER.engine()
+
+type_.register('FOO', ['foo'])
+type_.register('BAR', ['bar'])
+generators.register_standard('foo.foo', ['FOO'], ['CPP', 'BAR'])
+
+def sleep_cmd(delay):
+ if os.name == 'nt':
+ return 'ping 127.0.0.1 -n {} -w 1000 >NUL'.format(delay)
+ return 'sleep {}'.format(delay)
+
+def foo(targets, sources, properties):
+ cpp, bar = targets
+ foo = sources[0]
+ # We add the INCLUDE relationship between our generated CPP & BAR targets
+ # explicitly instead of relying on Boost Jam's internal implementation
+ # detail - automatically adding such relationships between all files
+ # generated by the same action. This way our test will continue to function
+ # correctly even if the related Boost Jam implementation detail changes.
+ # Note that adding this relationship by adding an #include directive in our
+ # generated CPP file is not good enough as such a relationship would get
+ # added only after the scanner target's relationships have already been
+ # established and they (as affected by our initial INCLUDE relationship) are
+ # the original reason for this test failing.
+ bjam.call('INCLUDES', cpp, bar)
+
+ENGINE.register_action(
+ 'foo.foo',
+ '''
+ {touch} "$(<[1])"
+ {sleep}
+ {touch} "$(<[2])"
+ '''.format(touch=common.file_creation_command(), sleep=sleep_cmd(2))
+)
+"""
+)
+
+t.write("x.foo", "")
+t.write("jamroot.jam", """\
+import foo ;
+lib x : x.foo : <link>static ;
+""")
+
+
+# Get everything built once.
+t.run_build_system()
+
+# Simply rerunning the build without touching any of its source target files
+# should not cause any files to be affected.
+t.run_build_system()
+t.expect_nothing_more()
diff --git a/src/boost/tools/build/test/searched_lib.py b/src/boost/tools/build/test/searched_lib.py
new file mode 100644
index 000000000..2081230a1
--- /dev/null
+++ b/src/boost/tools/build/test/searched_lib.py
@@ -0,0 +1,186 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test usage of searched libs: ones that are found via the -l
+# switch passed to the linker/compiler.
+
+import BoostBuild
+import os
+import string
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# To start with, we have to prepare a library to link with.
+t.write("lib/jamroot.jam", "")
+t.write("lib/jamfile.jam", "lib test_lib : test_lib.cpp ;")
+t.write("lib/test_lib.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""");
+
+t.run_build_system(subdir="lib")
+t.expect_addition("lib/bin/$toolset/debug*/test_lib.dll")
+
+
+# Auto adjusting of suffixes does not work, since we need to
+# change dll to lib.
+if ( ( os.name == "nt" ) or os.uname()[0].lower().startswith("cygwin") ) and \
+ ( BoostBuild.get_toolset() != "gcc" ):
+ t.copy("lib/bin/$toolset/debug*/test_lib.implib", "lib/test_lib.implib")
+ t.copy("lib/bin/$toolset/debug*/test_lib.dll", "lib/test_lib.dll")
+else:
+ t.copy("lib/bin/$toolset/debug*/test_lib.dll", "lib/test_lib.dll")
+
+
+# Test that the simplest usage of searched library works.
+t.write("jamroot.jam", "")
+
+t.write("jamfile.jam", """\
+import path ;
+import project ;
+exe main : main.cpp helper ;
+lib helper : helper.cpp test_lib ;
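+# A searched lib: it has no sources and is located by the linker using the
+# <name> in the directory given by <search>.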
+lib test_lib : : <name>test_lib <search>lib ;
+""")
+
+t.write("main.cpp", """\
+void helper();
+int main() { helper(); }
+""")
+
+t.write("helper.cpp", """\
+void foo();
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+helper() { foo(); }
+""")
+
+t.run_build_system(["-d2"])
+t.expect_addition("bin/$toolset/debug*/main.exe")
+t.rm("bin/$toolset/debug/main.exe")
+t.rm("bin/$toolset/debug/*/main.exe")
+
+
+# Test that 'unit-test' will correctly add runtime paths to searched libraries.
+t.write("jamfile.jam", """\
+import path ;
+import project ;
+import testing ;
+
+project : requirements <hardcode-dll-paths>false ;
+
+unit-test main : main.cpp helper ;
+lib helper : helper.cpp test_lib ;
+lib test_lib : : <name>test_lib <search>lib ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/main.passed")
+t.rm("bin/$toolset/debug/main.exe")
+t.rm("bin/$toolset/debug/*/main.exe")
+
+
+# Now try using a searched lib from a static lib. Request the shared version
+# of the searched lib, since we do not have a static one handy.
+t.write("jamfile.jam", """\
+exe main : main.cpp helper ;
+lib helper : helper.cpp test_lib/<link>shared : <link>static ;
+lib test_lib : : <name>test_lib <search>lib ;
+""")
+
+t.run_build_system(stderr=None)
+t.expect_addition("bin/$toolset/debug*/main.exe")
+t.expect_addition("bin/$toolset/debug/link-static*/helper.lib")
+t.rm("bin/$toolset/debug/main.exe")
+t.rm("bin/$toolset/debug/*/main.exe")
+
+# A regression test: a <library> property referring to a searched lib was
+# being mishandled. As a result, we were putting the target name on the
+# command line!
+# Note that
+# g++ ...... <.>z
+# works nicely in some cases, sending output from compiler to file 'z'. This
+# problem shows up when searched libs are in usage requirements.
+t.write("jamfile.jam", "exe main : main.cpp d/d2//a ;")
+t.write("main.cpp", """\
+void foo();
+int main() { foo(); }
+""")
+
+t.write("d/d2/jamfile.jam", """\
+lib test_lib : : <name>test_lib <search>../../lib ;
+lib a : a.cpp : : : <library>test_lib ;
+""")
+
+t.write("d/d2/a.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport) int force_library_creation_for_a;
+#endif
+""")
+
+t.run_build_system()
+
+
+# A regression test. Searched targets were not associated with any properties.
+# For that reason, if the same searched lib is generated with two different
+# properties, we had an error saying they are actualized to the same Jam target
+# name.
+t.write("jamroot.jam", "")
+
+t.write("a.cpp", "")
+
+# The 'l' library will be built in two variants: 'debug' (directly requested)
+# and 'release' (requested from 'a').
+t.write("jamfile.jam", """\
+exe a : a.cpp l/<variant>release ;
+lib l : : <name>l_d <variant>debug ;
+lib l : : <name>l_r <variant>release ;
+""")
+
+t.run_build_system(["-n"])
+
+
+# A regression test. Two virtual targets with the same properties were created
+# for the 'l' target, which caused an error to be reported when actualizing
+# targets. The final error is correct, but we should not create two duplicate
+# targets. Thanks to Andre Hentz for finding this bug.
+t.write("jamroot.jam", "")
+t.write("a.cpp", "")
+t.write("jamfile.jam", """\
+project a : requirements <runtime-link>static ;
+static-lib a : a.cpp l ;
+lib l : : <name>l_f ;
+""")
+
+t.run_build_system(["-n"])
+
+
+# Make sure plain "lib foobar ; " works.
+t.write("jamfile.jam", """\
+exe a : a.cpp foobar ;
+lib foobar ;
+""")
+
+t.run_build_system(["-n", "-d2"])
+t.fail_test(t.stdout().find("foobar") == -1)
+
+
+# Make sure plain "lib foo bar ; " works.
+t.write("jamfile.jam", """\
+exe a : a.cpp foo bar ;
+lib foo bar ;
+""")
+
+t.run_build_system(["-n", "-d2"])
+t.fail_test(t.stdout().find("foo") == -1)
+t.fail_test(t.stdout().find("bar") == -1)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/skipping.py b/src/boost/tools/build/test/skipping.py
new file mode 100644
index 000000000..a187a4bed
--- /dev/null
+++ b/src/boost/tools/build/test/skipping.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that V2 does not fail gracelessly when any target is skipped.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+t.write("b.cpp", "int main() {}\n")
+t.write("c.cpp", "int main() {}\n")
+t.write("jamroot.jam", """\
+import feature ;
+feature.feature foo : 1 2 : link-incompatible ;
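+# With foo declared link-incompatible, building with foo=1 makes target 'b'
+# (which requires <foo>2) get skipped instead of built.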
+exe a : a.cpp : <foo>1 ;
+exe b : b.cpp : <foo>2 ;
+exe c : c.cpp ;
+""")
+
+t.run_build_system(["foo=1"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/sort_rule.py b/src/boost/tools/build/test/sort_rule.py
new file mode 100755
index 000000000..395373263
--- /dev/null
+++ b/src/boost/tools/build/test/sort_rule.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python
+
+# Copyright (C) 2008. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests for the Boost Jam builtin SORT rule.
+
+from __future__ import print_function
+
+import BoostBuild
+
+
+###############################################################################
+#
+# testSORTCorrectness()
+# ---------------------
+#
+###############################################################################
+
+def testSORTCorrectness():
+ """Testing that Boost Jam's SORT builtin rule actually sorts correctly."""
+ t = BoostBuild.Tester(["-ftest.jam", "-d1"], pass_toolset=False,
+ use_test_config=False)
+
+ t.write("test.jam", """\
+NOCARE all ;
+source-data = 1 8 9 2 7 3 4 7 1 27 27 9 98 98 1 1 4 5 6 2 3 4 8 1 -2 -2 0 0 0 ;
+target-data = -2 -2 0 0 0 1 1 1 1 1 2 2 27 27 3 3 4 4 4 5 6 7 7 8 8 9 9 98 98 ;
+ECHO "starting up" ;
+sorted-data = [ SORT $(source-data) ] ;
+ECHO "done" ;
+if $(sorted-data) != $(target-data)
+{
+ ECHO "Source :" $(source-data) ;
+ ECHO "Expected :" $(target-data) ;
+ ECHO "SORT returned:" $(sorted-data) ;
+ EXIT "SORT error" : -2 ;
+}
+""")
+
+ t.run_build_system()
+ t.expect_output_lines("starting up")
+ t.expect_output_lines("done")
+ t.expect_output_lines("SORT error", False)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# testSORTDuration()
+# ------------------
+#
+###############################################################################
+
+def testSORTDuration():
+ """
+ Regression test making sure Boost Jam's SORT builtin rule does not get
+ quadratic behaviour again in this use case.
+
+ """
+ t = BoostBuild.Tester(["-ftest.jam", "-d1"], pass_toolset=False,
+ use_test_config=False)
+
+ f = open(t.workpath("test.jam"), "w")
+ print("data = ", file=f)
+ for i in range(0, 20000):
+ if i % 2:
+ print('"aaa"', file=f)
+ else:
+ print('"bbb"', file=f)
+ print(""";
+
+ECHO "starting up" ;
+sorted = [ SORT $(data) ] ;
+ECHO "done" ;
+NOCARE all ;
+""", file=f)
+ f.close()
+
+ t.run_build_system(expected_duration=1)
+ t.expect_output_lines("starting up")
+ t.expect_output_lines("done")
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+testSORTCorrectness()
+testSORTDuration()
diff --git a/src/boost/tools/build/test/source_locations.py b/src/boost/tools/build/test/source_locations.py
new file mode 100644
index 000000000..8123a1864
--- /dev/null
+++ b/src/boost/tools/build/test/source_locations.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# Copyright (C) Craig Rodrigues 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that projects with multiple source-location directories are handled OK.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+path-constant SRC1 : "./src1" ;
+path-constant SRC2 : "./src2" ;
+path-constant SRC3 : "./src3" ;
+path-constant BUILD : "build" ;
+
+project : requirements <include>$(SRC1)/include <threading>multi
+ : build-dir $(BUILD) ;
+
+build-project project1 ;
+""")
+
+t.write("project1/jamfile.jam", """
+project project1 : source-location $(SRC1) $(SRC2) $(SRC3) ;
+SRCS = s1.cpp s2.cpp testfoo.cpp ;
+exe test : $(SRCS) ;
+""")
+
+t.write("src1/s1.cpp", "int main() {}\n")
+t.write("src2/s2.cpp", "void hello() {}\n")
+t.write("src3/testfoo.cpp", "void testfoo() {}\n")
+
+# This file should not be picked up, because "src2" is before "src3" in the list
+# of source directories.
+t.write("src3/s2.cpp", "void hello() {}\n")
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/source_order.py b/src/boost/tools/build/test/source_order.py
new file mode 100755
index 000000000..f21710a8c
--- /dev/null
+++ b/src/boost/tools/build/test/source_order.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+# Copyright 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that action sources are not reordered
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("check-order.jam", """\
+import type ;
+import generators ;
+
+type.register ORDER_TEST : order-test ;
+
+SPACE = " " ;
+nl = "\n" ;
+actions check-order
+{
+ echo$(SPACE)$(>[1])>$(<[1])
+ echo$(SPACE)$(>[2-])>>$(<[1])$(nl)
+}
+
+generators.register-composing check-order.check-order : C : ORDER_TEST ;
+""")
+
+t.write(
+ 'check-order.py',
+"""
+import bjam
+
+from b2.build import type as type_, generators
+from b2.tools import common
+from b2.manager import get_manager
+
+MANAGER = get_manager()
+ENGINE = MANAGER.engine()
+
+type_.register('ORDER_TEST', ['order-test'])
+
+generators.register_composing('check-order.check-order', ['C'], ['ORDER_TEST'])
+
+def check_order(targets, sources, properties):
+ ENGINE.set_target_variable(targets, 'SPACE', ' ')
+ ENGINE.set_target_variable(targets, 'nl', '\\n')
+
+ENGINE.register_action(
+ 'check-order.check-order',
+ function=check_order,
+ command='''
+ echo$(SPACE)$(>[1])>$(<[1])
+ echo$(SPACE)$(>[2-])>>$(<[1])$(nl)
+ '''
+)
+"""
+)
+
+# The aliases are necessary for this test, since
+# the targets were sorted by virtual target
+# id, not by file name.
+t.write("jamroot.jam", """\
+import check-order ;
+alias file1 : file1.c ;
+alias file2 : file2.c ;
+alias file3 : file3.c ;
+order-test check : file2 file1 file3 ;
+""")
+
+t.write("file1.c", "")
+t.write("file2.c", "")
+t.write("file3.c", "")
+
+t.run_build_system()
+t.expect_addition("bin/check.order-test")
+t.expect_content("bin/check.order-test", """\
+file2.c
+file1.c
+file3.c
+""", True)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/space_in_path.py b/src/boost/tools/build/test/space_in_path.py
new file mode 100755
index 000000000..7f0c041a3
--- /dev/null
+++ b/src/boost/tools/build/test/space_in_path.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that paths containing spaces are handled correctly by actions.
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("has space/jamroot.jam", """\
+import testing ;
+unit-test test : test.cpp ;
+actions write-file
+{
+ @(STDOUT:E=okay) >"$(<)"
+}
+make test.txt : : @write-file ;
+""")
+t.write("has space/test.cpp", "int main() {}\n")
+
+tmpdir = t.workpath("has space")
+oldtmp = os.environ.get("TMP")
+oldtmpdir = os.environ.get("TMPDIR")
+os.environ["TMP"] = tmpdir  # Windows
+os.environ["TMPDIR"] = tmpdir  # *nix
+
+try:
+ t.run_build_system(["has space"])
+ t.expect_addition("has space/bin/test.txt")
+ t.expect_addition("has space/bin/$toolset/debug*/test.passed")
+finally:
+ if oldtmp is not None:
+ os.environ["TMP"] = oldtmp
+ else:
+ del os.environ["TMP"]
+ if oldtmpdir is not None:
+ os.environ["TMPDIR"] = oldtmpdir
+ else:
+ del os.environ["TMPDIR"]
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/stage.py b/src/boost/tools/build/test/stage.py
new file mode 100644
index 000000000..4dd4e2f94
--- /dev/null
+++ b/src/boost/tools/build/test/stage.py
@@ -0,0 +1,207 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test staging.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """\
+lib a : a.cpp ;
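+# 'stage' copies the listed targets and files into the 'dist' directory.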
+stage dist : a a.h auxilliary/1 ;
+""")
+t.write("a.cpp", """\
+int
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+must_export_something;
+""")
+t.write("a.h", "")
+t.write("auxilliary/1", "")
+
+t.run_build_system()
+t.expect_addition(["dist/a.dll", "dist/a.h", "dist/1"])
+
+
+# Regression test: the following was causing a "duplicate target name" error.
+t.write("jamfile.jam", """\
+project : requirements <hardcode-dll-paths>true ;
+lib a : a.cpp ;
+stage dist : a a.h auxilliary/1 ;
+alias dist-alias : dist ;
+""")
+
+t.run_build_system()
+
+
+# Test the <location> property.
+t.write("jamfile.jam", """\
+lib a : a.cpp ;
+stage dist : a : <variant>debug:<location>ds <variant>release:<location>rs ;
+""")
+
+t.run_build_system()
+t.expect_addition("ds/a.dll")
+
+t.run_build_system(["release"])
+t.expect_addition("rs/a.dll")
+
+
+# Test the <location> property in subprojects. Thanks to Kirill Lapshin for the
+# bug report.
+
+t.write("jamroot.jam", "path-constant DIST : dist ;")
+t.write("jamfile.jam", "build-project d ;")
+t.write("d/jamfile.jam", """\
+exe a : a.cpp ;
+stage dist : a : <location>$(DIST) ;
+""")
+t.write("d/a.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_addition("dist/a.exe")
+
+t.rm("dist")
+
+# Work around a BIG BUG: the response file is not deleted, even if the
+# application *is* deleted. We would otherwise reuse the same response file
+# when building from the subdir, with very bad results.
+t.rm("d/bin")
+t.run_build_system(subdir="d")
+t.expect_addition("dist/a.exe")
+
+
+# Check that 'stage' does not incorrectly reset target suffixes.
+t.write("a.cpp", "int main() {}\n")
+t.write("jamroot.jam", """\
+import type ;
+type.register MYEXE : : EXE ;
+type.set-generated-target-suffix MYEXE : <optimization>off : myexe ;
+""")
+
+# Since <optimization>off is in properties when 'a' is built and staged, its
+# suffix should be "myexe".
+t.write("jamfile.jam", """\
+stage dist : a ;
+myexe a : a.cpp ;
+""")
+
+t.run_build_system()
+t.expect_addition("dist/a.myexe")
+
+# Test 'stage's ability to traverse dependencies.
+t.write("a.cpp", "int main() {}\n")
+t.write("l.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+t.write("jamfile.jam", """\
+lib l : l.cpp ;
+exe a : a.cpp l ;
+stage dist : a : <install-dependencies>on <install-type>EXE <install-type>LIB ;
+""")
+t.write("jamroot.jam", "")
+t.rm("dist")
+
+t.run_build_system()
+t.expect_addition("dist/a.exe")
+t.expect_addition("dist/l.dll")
+
+# Check that <use> properties are ignored when traversing targets for staging.
+t.copy("l.cpp", "l2.cpp")
+t.copy("l.cpp", "l3.cpp")
+t.write("jamfile.jam", """\
+lib l2 : l2.cpp ;
+lib l3 : l3.cpp ;
+lib l : l.cpp : <use>l2 <dependency>l3 ;
+exe a : a.cpp l ;
+stage dist : a : <install-dependencies>on <install-type>EXE <install-type>LIB ;
+""")
+t.rm("dist")
+
+t.run_build_system()
+t.expect_addition("dist/l3.dll")
+t.expect_nothing("dist/l2.dll")
+
+# Check if <dependency> on 'stage' works.
+t.rm(".")
+t.write("jamroot.jam", """\
+stage a1 : a1.txt : <location>dist ;
+stage a2 : a2.txt : <location>dist <dependency>a1 ;
+""")
+t.write("a1.txt", "")
+t.write("a2.txt", "")
+t.run_build_system(["a2"])
+t.expect_addition(["dist/a1.txt", "dist/a2.txt"])
+
+# Regression test: check that <location>. works.
+t.rm(".")
+t.write("jamroot.jam", "stage a1 : d/a1.txt : <location>. ;")
+t.write("d/a1.txt", "")
+
+t.run_build_system()
+t.expect_addition("a1.txt")
+
+# Test that relative paths of sources can be preserved.
+t.rm(".")
+t.write("jamroot.jam", "install dist : a/b/c.h : <install-source-root>. ;")
+t.write("a/b/c.h", "")
+
+t.run_build_system()
+t.expect_addition("dist/a/b/c.h")
+
+t.write("jamroot.jam", "install dist : a/b/c.h : <install-source-root>a ;")
+t.write("a/b/c.h", "")
+
+t.run_build_system()
+t.expect_addition("dist/b/c.h")
+
+t.rm(".")
+t.write("build/jamroot.jam", """\
+install dist : ../a/b/c.h : <location>../dist <install-source-root>../a ;
+""")
+t.write("a/b/c.h", "")
+
+t.run_build_system(subdir="build")
+t.expect_addition("dist/b/c.h")
+
+t.write("jamroot.jam", "install dist2 : a/b/c.h : <install-source-root>a ;")
+t.write("a/b/c.h", "")
+t.write("sub/jamfile.jam", "alias h : ..//dist2 ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("dist2/b/c.h")
+
+# Test that when installing .cpp files, we do not scan include dependencies.
+t.rm(".")
+t.write("jamroot.jam", "install dist : a.cpp ;")
+t.write("a.cpp", '#include "a.h"')
+t.write("a.h", "")
+
+t.run_build_system()
+t.expect_addition("dist/a.cpp")
+
+t.touch("a.h")
+
+t.run_build_system()
+t.expect_nothing("dist/a.cpp")
+
+# Test that <name> property works, when there is just one file in sources.
+t.rm(".")
+t.write("jamroot.jam", "install dist : a.cpp : <name>b.cpp ;")
+t.write("a.cpp", "test file")
+
+t.run_build_system()
+t.expect_addition("dist/b.cpp")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/standalone.py b/src/boost/tools/build/test/standalone.py
new file mode 100644
index 000000000..6d9e9e862
--- /dev/null
+++ b/src/boost/tools/build/test/standalone.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# Regression tests: standalone project were not able to refer to targets
+# declared in themselves.
+
+t.write("a.cpp", "int main() {}\n")
+t.write("jamroot.jam", "import standalone ;")
+t.write("standalone.jam", """\
+import alias ;
+import project ;
+
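+# Register this module as a standalone project so that other projects can
+# refer to its targets as /standalone//<target>.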
+project.initialize $(__name__) ;
+project standalone ;
+
+local pwd = [ PWD ] ;
+
+alias x : $(pwd)/../a.cpp ;
+alias runtime : x ;
+""")
+
+t.write("standalone.py", """\
+from b2.manager import get_manager
+
+# FIXME: this is ugly as death
+get_manager().projects().initialize(__name__)
+
+import os
+
+# This use of list as parameter is also ugly.
+project(['standalone'])
+
+pwd = os.getcwd()
+alias('x', [os.path.join(pwd, '../a.cpp')])
+alias('runtime', ['x'])
+""")
+
+
+t.write("sub/jamfile.jam", "stage bin : /standalone//runtime ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("sub/bin/a.cpp")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/startup/boost-root/boost-build.jam b/src/boost/tools/build/test/startup/boost-root/boost-build.jam
new file mode 100644
index 000000000..098889f7b
--- /dev/null
+++ b/src/boost/tools/build/test/startup/boost-root/boost-build.jam
@@ -0,0 +1,7 @@
+# Copyright 2002 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Emulate v1 behavior; with the boost-build file in the boost root directory.
+boost-build build ;
diff --git a/src/boost/tools/build/test/startup/boost-root/build/boost-build.jam b/src/boost/tools/build/test/startup/boost-root/build/boost-build.jam
new file mode 100644
index 000000000..610ec79ee
--- /dev/null
+++ b/src/boost/tools/build/test/startup/boost-root/build/boost-build.jam
@@ -0,0 +1,6 @@
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# The presence of this file emulates the Boost 1.27.0 release
+include $(BOOST_ROOT)/tools/build/bootstrap.jam ;
diff --git a/src/boost/tools/build/test/startup/boost-root/build/bootstrap.jam b/src/boost/tools/build/test/startup/boost-root/build/bootstrap.jam
new file mode 100644
index 000000000..2ee3507c3
--- /dev/null
+++ b/src/boost/tools/build/test/startup/boost-root/build/bootstrap.jam
@@ -0,0 +1,7 @@
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+ECHO build system bootstrapped ;
+DEPENDS all : nothing ;
+NOTFILE nothing ;
diff --git a/src/boost/tools/build/test/startup/bootstrap-env/boost-build.jam b/src/boost/tools/build/test/startup/bootstrap-env/boost-build.jam
new file mode 100644
index 000000000..67a285e7c
--- /dev/null
+++ b/src/boost/tools/build/test/startup/bootstrap-env/boost-build.jam
@@ -0,0 +1,5 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+boost-build ;
diff --git a/src/boost/tools/build/test/startup/bootstrap-explicit/boost-build.jam b/src/boost/tools/build/test/startup/bootstrap-explicit/boost-build.jam
new file mode 100644
index 000000000..27d9108b7
--- /dev/null
+++ b/src/boost/tools/build/test/startup/bootstrap-explicit/boost-build.jam
@@ -0,0 +1,6 @@
+# Copyright 2002 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+boost-build ../boost-root/build ;
diff --git a/src/boost/tools/build/test/startup/bootstrap-implicit/readme.txt b/src/boost/tools/build/test/startup/bootstrap-implicit/readme.txt
new file mode 100644
index 000000000..0278716e5
--- /dev/null
+++ b/src/boost/tools/build/test/startup/bootstrap-implicit/readme.txt
@@ -0,0 +1,5 @@
+Copyright 2002 Dave Abrahams
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+This file is only here so that cvs update -P won't fail to create a directory
diff --git a/src/boost/tools/build/test/startup/no-bootstrap1/boost-build.jam b/src/boost/tools/build/test/startup/no-bootstrap1/boost-build.jam
new file mode 100644
index 000000000..b1b4dc696
--- /dev/null
+++ b/src/boost/tools/build/test/startup/no-bootstrap1/boost-build.jam
@@ -0,0 +1,6 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Bootstrap file not found via implicit lookup in BOOST_BUILD_PATH
+boost-build ;
diff --git a/src/boost/tools/build/test/startup/no-bootstrap1/subdir/readme.txt b/src/boost/tools/build/test/startup/no-bootstrap1/subdir/readme.txt
new file mode 100644
index 000000000..00f428d44
--- /dev/null
+++ b/src/boost/tools/build/test/startup/no-bootstrap1/subdir/readme.txt
@@ -0,0 +1,5 @@
+Copyright 2002 Dave Abrahams
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+This file is only here so cvs update -P will create the directory.
diff --git a/src/boost/tools/build/test/startup/no-bootstrap2/boost-build.jam b/src/boost/tools/build/test/startup/no-bootstrap2/boost-build.jam
new file mode 100644
index 000000000..505dcd775
--- /dev/null
+++ b/src/boost/tools/build/test/startup/no-bootstrap2/boost-build.jam
@@ -0,0 +1,6 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Bootstrap file not found via explicit lookup in .
+boost-build . ;
diff --git a/src/boost/tools/build/test/startup/no-bootstrap3/boost-build.jam b/src/boost/tools/build/test/startup/no-bootstrap3/boost-build.jam
new file mode 100644
index 000000000..252a3993c
--- /dev/null
+++ b/src/boost/tools/build/test/startup/no-bootstrap3/boost-build.jam
@@ -0,0 +1,5 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Call to boost-build is intentionally missing
diff --git a/src/boost/tools/build/test/startup_v2.py b/src/boost/tools/build/test/startup_v2.py
new file mode 100644
index 000000000..ae7d0da40
--- /dev/null
+++ b/src/boost/tools/build/test/startup_v2.py
@@ -0,0 +1,94 @@
+#!/usr/bin/python
+
+# Copyright 2002 Dave Abrahams
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os.path
+import re
+
+
+def check_for_existing_boost_build_jam(t):
+ """
+ This test depends on no boost-build.jam file existing in any of the
+ folders along the current folder's path. If it does exist, not only would
+ this test fail but it could point to a completely wrong Boost Build
+ installation, thus causing headaches when attempting to diagnose the
+ problem. That is why we explicitly check for this scenario.
+
+ """
+ problem = find_up_to_root(t.workdir, "boost-build.jam")
+ if problem:
+ BoostBuild.annotation("misconfiguration", """\
+This test expects to be run from a folder with no 'boost-build.jam' file in any
+of the folders along its path.
+
+Working folder:
+ '%s'
+
+Problematic boost-build.jam found at:
+ '%s'
+
+Please remove this file or change the test's working folder and rerun the test.
+""" % (t.workdir, problem))
+ t.fail_test(1, dump_stdio=False, dump_stack=False)
+
+
+def find_up_to_root(folder, name):
+ last = ""
+ while last != folder:
+ candidate = os.path.join(folder, name)
+ if os.path.exists(candidate):
+ return candidate
+ last = folder
+ folder = os.path.dirname(folder)
+
+
+def match_re(actual, expected):
+ return re.match(expected, actual, re.DOTALL) != None
+
+
+t = BoostBuild.Tester(match=match_re, boost_build_path="", pass_toolset=0)
+t.set_tree("startup")
+check_for_existing_boost_build_jam(t)
+
+t.run_build_system(status=1, stdout=
+r"""Unable to load B2: could not find "boost-build\.jam"
+.*Attempted search from .* up to the root""")
+
+t.run_build_system(status=1, subdir="no-bootstrap1",
+ stdout=r"Unable to load B2: could not find build system\."
+ r".*attempted to load the build system by invoking"
+ r".*'boost-build ;'"
+ r'.*but we were unable to find "bootstrap\.jam"')
+
+# Descend to a subdirectory which /does not/ contain a boost-build.jam file,
+# and try again to test the crawl-up behavior.
+t.run_build_system(status=1, subdir=os.path.join("no-bootstrap1", "subdir"),
+ stdout=r"Unable to load B2: could not find build system\."
+ r".*attempted to load the build system by invoking"
+ r".*'boost-build ;'"
+ r'.*but we were unable to find "bootstrap\.jam"')
+
+t.run_build_system(status=1, subdir="no-bootstrap2",
+ stdout=r"Unable to load B2: could not find build system\."
+ r".*attempted to load the build system by invoking"
+ r".*'boost-build \. ;'"
+ r'.*but we were unable to find "bootstrap\.jam"')
+
+t.run_build_system(status=1, subdir='no-bootstrap3', stdout=
+r"""Unable to load B2
+.*boost-build\.jam" was found.*
+However, it failed to call the "boost-build" rule""")
+
+# Test bootstrapping based on BOOST_BUILD_PATH.
+t.run_build_system(["-sBOOST_BUILD_PATH=../boost-root/build"],
+ subdir="bootstrap-env", stdout="build system bootstrapped")
+
+# Test bootstrapping based on an explicit path in boost-build.jam.
+t.run_build_system(subdir="bootstrap-explicit",
+ stdout="build system bootstrapped")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/static_and_shared_library.py b/src/boost/tools/build/test/static_and_shared_library.py
new file mode 100755
index 000000000..c3443e927
--- /dev/null
+++ b/src/boost/tools/build/test/static_and_shared_library.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+t.write("jamroot.jam", "")
+t.write("lib/c.cpp", "int bar() { return 0; }\n")
+t.write("lib/jamfile.jam", """\
+static-lib auxilliary1 : c.cpp ;
+lib auxilliary2 : c.cpp ;
+""")
+
+def reset():
+ t.rm("lib/bin")
+
+t.run_build_system(subdir='lib')
+t.expect_addition("lib/bin/$toolset/debug*/" * BoostBuild.List("c.obj "
+ "auxilliary1.lib auxilliary2.dll"))
+
+reset()
+t.run_build_system(["link=shared"], subdir="lib")
+t.expect_addition("lib/bin/$toolset/debug*/" * BoostBuild.List("c.obj "
+ "auxilliary1.lib auxilliary2.dll"))
+
+reset()
+t.run_build_system(["link=static"], subdir="lib")
+t.expect_addition("lib/bin/$toolset/debug*/" * BoostBuild.List(
+ "c.obj auxilliary1.lib auxilliary2.lib"))
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/suffix.py b/src/boost/tools/build/test/suffix.py
new file mode 100644
index 000000000..b31dd1730
--- /dev/null
+++ b/src/boost/tools/build/test/suffix.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+# Regression test: when staging, V2 used to change suffixes on targets
+# corresponding to real files.
+t.write("jamfile.jam", """
+import type : register ;
+register A : a1 a2 a3 ;
+stage a : a.a3 ;
+""")
+
+t.write("jamroot.jam", "")
+t.write("a.a3", "")
+
+t.run_build_system()
+t.expect_addition("a/a.a3");
+
+# Regression test: we should be able to specify an empty suffix for a derived
+# target type, even if the base type has a non-empty suffix.
+t.write("a.cpp", "")
+
+t.write("suffixes.jam", """
+import type ;
+import generators ;
+import common ;
+
+type.register First : first : ;
+type.register Second : "" : First ;
+
+generators.register-standard $(__name__).second : CPP : Second ;
+
+rule second
+{
+ TOUCH on $(<) = [ common.file-creation-command ] ;
+}
+
+actions second
+{
+ $(TOUCH) $(<)
+}
+""")
+
+t.write("suffixes.py", """
+import b2.build.type as type
+import b2.build.generators as generators
+import b2.tools.common as common
+
+from b2.manager import get_manager
+
+type.register("First", ["first"])
+type.register("Second", [""], "First")
+
+generators.register_standard("suffixes.second", ["CPP"], ["Second"])
+
+get_manager().engine().register_action("suffixes.second",
+ "%s $(<)" % common.file_creation_command())
+
+""")
+
+t.write("jamroot.jam", """
+import suffixes ;
+""")
+
+t.write("jamfile.jam", """
+second a : a.cpp ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/a")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/symlink.py b/src/boost/tools/build/test/symlink.py
new file mode 100644
index 000000000..ab02db2f5
--- /dev/null
+++ b/src/boost/tools/build/test/symlink.py
@@ -0,0 +1,43 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'symlink' rule.
+
+from __future__ import print_function
+
+import os
+import BoostBuild
+
+
+if os.name != 'posix':
+ print("The symlink tests can be run on posix only.")
+ import sys
+ sys.exit(1)
+
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "import gcc ;")
+
+t.write("jamfile.jam", """
+exe hello : hello.cpp ;
+symlink hello_release : hello/<variant>release ;
+symlink hello_debug : hello/<variant>debug ;
+symlink links/hello_release : hello/<variant>release ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+""")
+
+t.run_build_system()
+t.expect_addition([
+ 'hello_debug.exe',
+ 'hello_release.exe',
+ 'links/hello_release.exe'])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/tag.py b/src/boost/tools/build/test/tag.py
new file mode 100644
index 000000000..adf2fce6f
--- /dev/null
+++ b/src/boost/tools/build/test/tag.py
@@ -0,0 +1,122 @@
+#!/usr/bin/python
+
+# Copyright (C) 2003. Pedro Ferreira
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_folder_with_dot_in_name()
+# ------------------------------
+#
+###############################################################################
+
+def test_folder_with_dot_in_name(t):
+ """
+ Regression test: the 'tag' feature did not work in directories that had a
+ dot in their name.
+
+ """
+ t.write("version-1.32.0/jamroot.jam", """\
+project test : requirements <tag>@$(__name__).tag ;
+
+rule tag ( name : type ? : property-set )
+{
+ # Do nothing, just make sure the rule is invoked OK.
+ ECHO The tag rule has been invoked. ;
+}
+exe a : a.cpp ;
+""")
+ t.write("version-1.32.0/a.cpp", "int main() {}\n")
+
+ t.run_build_system(subdir="version-1.32.0")
+ t.expect_addition("version-1.32.0/bin/$toolset/debug*/a.exe")
+ t.expect_output_lines("The tag rule has been invoked.")
+
+
+###############################################################################
+#
+# test_tag_property()
+# -------------------
+#
+###############################################################################
+
+def test_tag_property(t):
+ """Basic tag property test."""
+
+ t.write("jamroot.jam", """\
+import virtual-target ;
+
+rule tag ( name : type ? : property-set )
+{
+ local tags ;
+ switch [ $(property-set).get <variant> ]
+ {
+ case debug : tags += d ;
+ case release : tags += r ;
+ }
+ switch [ $(property-set).get <link> ]
+ {
+ case shared : tags += s ;
+ case static : tags += t ;
+ }
+ if $(tags)
+ {
+ return [ virtual-target.add-prefix-and-suffix $(name)_$(tags:J="")
+ : $(type) : $(property-set) ] ;
+ }
+}
+
+# Test both fully-qualified and local name of the rule
+exe a : a.cpp : <tag>@$(__name__).tag ;
+lib b : a.cpp : <tag>@tag ;
+stage c : a ;
+""")
+
+ t.write("a.cpp", """\
+int main() {}
+#ifdef _MSC_VER
+__declspec (dllexport) void x () {}
+#endif
+""")
+
+ file_list = (
+ BoostBuild.List("bin/$toolset/debug*/a_ds.exe") +
+ BoostBuild.List("bin/$toolset/debug*/b_ds.dll") +
+ BoostBuild.List("c/a_ds.exe") +
+ BoostBuild.List("bin/$toolset/release*/a_rs.exe") +
+ BoostBuild.List("bin/$toolset/release*/b_rs.dll") +
+ BoostBuild.List("c/a_rs.exe") +
+ BoostBuild.List("bin/$toolset/debug*/a_dt.exe") +
+ BoostBuild.List("bin/$toolset/debug*/b_dt.lib") +
+ BoostBuild.List("c/a_dt.exe") +
+ BoostBuild.List("bin/$toolset/release*/a_rt.exe") +
+ BoostBuild.List("bin/$toolset/release*/b_rt.lib") +
+ BoostBuild.List("c/a_rt.exe"))
+
+ variants = ["debug", "release", "link=static,shared"]
+
+ t.run_build_system(variants)
+ t.expect_addition(file_list)
+
+ t.run_build_system(variants + ["clean"])
+ t.expect_removal(file_list)
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+t = BoostBuild.Tester(use_test_config=False)
+
+test_tag_property(t)
+test_folder_with_dot_in_name(t)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/template.py b/src/boost/tools/build/test/template.py
new file mode 100644
index 000000000..c5917c2fa
--- /dev/null
+++ b/src/boost/tools/build/test/template.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# Copyright (C) FILL SOMETHING HERE 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is a template for B2 tests. It creates a simple project that
+# builds one exe from one source, and checks that the exe is really created.
+
+import BoostBuild
+
+
+# Create a temporary working directory.
+t = BoostBuild.Tester()
+
+# Create the needed files.
+t.write("jamroot.jam", """
+exe hello : hello.cpp ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+"""
+
+# Run the build.
+t.run_build_system()
+
+# First, create a list of three pathnames.
+file_list = BoostBuild.List("bin/$toolset/debug*/") * \
+ BoostBuild.List("hello.exe hello.obj")
+# Second, assert that those files were added as a result of the last build system
+# invocation.
+t.expect_addition(file_list)
+
+# Invoke the build system once again.
+t.run_build_system("clean")
+# Check if the files added previously were removed.
+t.expect_removal(file_list)
+
+# Remove temporary directories.
+t.cleanup()
diff --git a/src/boost/tools/build/test/test-config-example.jam b/src/boost/tools/build/test/test-config-example.jam
new file mode 100644
index 000000000..6cb813fa0
--- /dev/null
+++ b/src/boost/tools/build/test/test-config-example.jam
@@ -0,0 +1,19 @@
+# Copyright 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+# Skeleton for a test configuration. If your local configuration
+# interferes with testing, rename this file to 'test-config.jam'
+# and tweak it. When tests are run, only this file will be loaded,
+# while site-config.jam and user-config.jam will be ignored.
+
+using gcc ;
+
+using boostbook
+ : /home/ghost/Store/docbook/xsl
+ : /home/ghost/Store/docbook/dtd
+ : /home/ghost/Work/Boost/boost-svn/tools/boostbook
+ ;
+using doxygen ;
+using qt4 : /usr/share/qt4 ;
diff --git a/src/boost/tools/build/test/test.jam b/src/boost/tools/build/test/test.jam
new file mode 100644
index 000000000..1ae1a2059
--- /dev/null
+++ b/src/boost/tools/build/test/test.jam
@@ -0,0 +1,39 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002 Rene Rivera
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# util
+import assert ;
+import container ;
+import indirect ;
+import numbers ;
+import order ;
+import os ;
+import path ;
+import print ;
+import regex ;
+import sequence ;
+import set ;
+import string ;
+import utility ;
+
+# kernel
+import "class" ;
+import errors ;
+import modules ;
+
+# build
+import build-request ;
+import feature ;
+import property ;
+import toolset ;
+import type ;
+import version ;
+
+# tools
+import common ;
+
+actions nothing { }
+nothing all ;
diff --git a/src/boost/tools/build/test/test1.py b/src/boost/tools/build/test/test1.py
new file mode 100644
index 000000000..79d142221
--- /dev/null
+++ b/src/boost/tools/build/test/test1.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("test.jam", """
+actions unbuilt { }
+unbuilt all ;
+ECHO "Hi" ;
+""")
+
+t.run_build_system("-ftest.jam", stdout="Hi\n")
+t.cleanup()
diff --git a/src/boost/tools/build/test/test2.py b/src/boost/tools/build/test/test2.py
new file mode 100644
index 000000000..b7c99be12
--- /dev/null
+++ b/src/boost/tools/build/test/test2.py
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.set_tree("test2")
+
+file_list = 'bin/$toolset/debug*/' * \
+ BoostBuild.List("foo foo.o")
+
+t.run_build_system("-sBOOST_BUILD_PATH=" + t.original_workdir + "/..")
+t.expect_addition(file_list)
+
+
+t.write("foo.cpp", "int main() {}\n")
+t.run_build_system("-d2 -sBOOST_BUILD_PATH=" + t.original_workdir + "/..")
+t.expect_touch(file_list)
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/test2/foo.cpp b/src/boost/tools/build/test/test2/foo.cpp
new file mode 100644
index 000000000..135fa90f6
--- /dev/null
+++ b/src/boost/tools/build/test/test2/foo.cpp
@@ -0,0 +1,10 @@
+// Copyright (c) 2003 Vladimir Prus
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+int main() { return 0; }
diff --git a/src/boost/tools/build/test/test2/jamroot.jam b/src/boost/tools/build/test/test2/jamroot.jam
new file mode 100644
index 000000000..4fb3f2886
--- /dev/null
+++ b/src/boost/tools/build/test/test2/jamroot.jam
@@ -0,0 +1,5 @@
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+exe foo : foo.cpp ;
diff --git a/src/boost/tools/build/test/test_all.py b/src/boost/tools/build/test/test_all.py
new file mode 100644
index 000000000..ebb6a8cf9
--- /dev/null
+++ b/src/boost/tools/build/test/test_all.py
@@ -0,0 +1,347 @@
+#!/usr/bin/python
+
+# Copyright 2002-2005 Dave Abrahams.
+# Copyright 2002-2006 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from __future__ import print_function
+
+import BoostBuild
+
+import os
+import os.path
+import sys
+
+xml = "--xml" in sys.argv
+toolset = BoostBuild.get_toolset()
+
+
+# Clear environment for testing.
+#
+for s in ("BOOST_ROOT", "BOOST_BUILD_PATH", "JAM_TOOLSET", "BCCROOT",
+ "MSVCDir", "MSVC", "MSVCNT", "MINGW", "watcom"):
+ try:
+ del os.environ[s]
+    except KeyError:
+ pass
+
+BoostBuild.set_defer_annotations(1)
+
+
+def run_tests(critical_tests, other_tests):
+ """
+ Runs first the critical_tests and then the other_tests.
+
+ Writes the name of the first failed test to test_results.txt. Critical
+ tests are run in the specified order, other tests are run starting with the
+ one that failed first on the last test run.
+
+ """
+ last_failed = last_failed_test()
+ other_tests = reorder_tests(other_tests, last_failed)
+ all_tests = critical_tests + other_tests
+
+ invocation_dir = os.getcwd()
+ max_test_name_len = 10
+ for x in all_tests:
+ if len(x) > max_test_name_len:
+ max_test_name_len = len(x)
+
+ pass_count = 0
+ failures_count = 0
+
+ for test in all_tests:
+ if not xml:
+ s = "%%-%ds :" % max_test_name_len % test
+ print(s, end='')
+
+ passed = 0
+ try:
+ __import__(test)
+ passed = 1
+ except KeyboardInterrupt:
+ """This allows us to abort the testing manually using Ctrl-C."""
+ raise
+ except SystemExit:
+ """This is the regular way our test scripts are supposed to report
+ test failures."""
+ except:
+ exc_type, exc_value, exc_tb = sys.exc_info()
+ try:
+ BoostBuild.annotation("failure - unhandled exception", "%s - "
+ "%s" % (exc_type.__name__, exc_value))
+ BoostBuild.annotate_stack_trace(exc_tb)
+ finally:
+ # Explicitly clear a hard-to-garbage-collect traceback
+ # related reference cycle as per documented sys.exc_info()
+ # usage suggestion.
+ del exc_tb
+
+ if passed:
+ pass_count += 1
+ else:
+ failures_count += 1
+ if failures_count == 1:
+ f = open(os.path.join(invocation_dir, "test_results.txt"), "w")
+ try:
+ f.write(test)
+ finally:
+ f.close()
+
+ # Restore the current directory, which might have been changed by the
+ # test.
+ os.chdir(invocation_dir)
+
+ if not xml:
+ if passed:
+ print("PASSED")
+ else:
+ print("FAILED")
+ BoostBuild.flush_annotations()
+ else:
+ rs = "succeed"
+ if not passed:
+ rs = "fail"
+ print('''
+<test-log library="build" test-name="%s" test-type="run" toolset="%s" test-program="%s" target-directory="%s">
+<run result="%s">''' % (test, toolset, "tools/build/v2/test/" + test + ".py",
+ "boost/bin.v2/boost.build.tests/" + toolset + "/" + test, rs))
+ if not passed:
+ BoostBuild.flush_annotations(1)
+ print('''
+</run>
+</test-log>
+''')
+ sys.stdout.flush() # Makes testing under emacs more entertaining.
+ BoostBuild.clear_annotations()
+
+ # Erase the file on success.
+ if failures_count == 0:
+ open("test_results.txt", "w").close()
+
+ if not xml:
+ print('''
+ === Test summary ===
+ PASS: %d
+ FAIL: %d
+ ''' % (pass_count, failures_count))
+
+    # Exit with a failure status if any tests failed.
+ if failures_count > 0:
+ sys.exit(1)
+
+def last_failed_test():
+ "Returns the name of the last failed test or None."
+ try:
+ f = open("test_results.txt")
+ try:
+ return f.read().strip()
+ finally:
+ f.close()
+ except Exception:
+ return None
+
+
+def reorder_tests(tests, first_test):
+ try:
+ n = tests.index(first_test)
+ return [first_test] + tests[:n] + tests[n + 1:]
+ except ValueError:
+ return tests
+
+
+critical_tests = ["unit_tests", "module_actions", "startup_v2", "core_d12",
+ "core_typecheck", "core_delete_module", "core_language", "core_arguments",
+ "core_varnames", "core_import_module"]
+
+# We want to collect debug information about the test site before running any
+# of the tests, but only when not running the tests interactively. Then the
+# user can easily run this always-failing test directly to see what it would
+# have returned and there is no need to have it spoil a possible 'all tests
+# passed' result.
+if xml:
+ critical_tests.insert(0, "collect_debug_info")
+
+tests = ["absolute_sources",
+ "alias",
+ "alternatives",
+ "always",
+ "bad_dirname",
+ "build_dir",
+ "build_file",
+ "build_hooks",
+ "build_no",
+ "builtin_echo",
+ "builtin_exit",
+ "builtin_glob",
+ "builtin_split_by_characters",
+ "bzip2",
+ "c_file",
+ "chain",
+ "clean",
+ "command_line_properties",
+ "composite",
+ "conditionals",
+ "conditionals2",
+ "conditionals3",
+ "conditionals_multiple",
+ "configuration",
+ "configure",
+ "copy_time",
+ "core_action_output",
+ "core_action_status",
+ "core_actions_quietly",
+ "core_at_file",
+ "core_bindrule",
+ "core_syntax_error_exit_status",
+ "core_fail_expected",
+ "core_jamshell",
+ "core_multifile_actions",
+ "core_nt_cmd_line",
+ "core_option_d2",
+ "core_option_l",
+ "core_option_n",
+ "core_parallel_actions",
+ "core_parallel_multifile_actions_1",
+ "core_parallel_multifile_actions_2",
+ "core_scanner",
+ "core_source_line_tracking",
+ "core_update_now",
+ "core_variables_in_actions",
+ "custom_generator",
+ "debugger",
+ "debugger-mi",
+ "default_build",
+ "default_features",
+# This test is known to be broken itself.
+# "default_toolset",
+ "dependency_property",
+ "dependency_test",
+ "disambiguation",
+ "dll_path",
+ "double_loading",
+ "duplicate",
+ "example_libraries",
+ "example_make",
+ "exit_status",
+ "expansion",
+ "explicit",
+ "feature_cxxflags",
+ "feature_implicit_dependency",
+ "feature_relevant",
+ "feature_suppress_import_lib",
+ "file_types",
+ "flags",
+ "generator_selection",
+ "generators_test",
+ "implicit_dependency",
+ "indirect_conditional",
+ "inherit_toolset",
+ "inherited_dependency",
+ "inline",
+ "libjpeg",
+ "liblzma",
+ "libzstd",
+ "lib_source_property",
+ "lib_zlib",
+ "library_chain",
+ "library_property",
+ "link",
+ "load_order",
+ "loop",
+ "make_rule",
+ "message",
+ "ndebug",
+ "no_type",
+ "notfile",
+ "ordered_include",
+ "out_of_tree",
+ "package",
+ "param",
+ "path_features",
+ "prebuilt",
+ "print",
+ "project_dependencies",
+ "project_glob",
+ "project_id",
+ "project_root_constants",
+ "project_root_rule",
+ "project_test3",
+ "project_test4",
+ "property_expansion",
+ "rebuilds",
+ "relative_sources",
+ "remove_requirement",
+ "rescan_header",
+ "resolution",
+ "rootless",
+ "scanner_causing_rebuilds",
+ "searched_lib",
+ "skipping",
+ "sort_rule",
+ "source_locations",
+ "source_order",
+ "space_in_path",
+ "stage",
+ "standalone",
+ "static_and_shared_library",
+ "suffix",
+ "tag",
+ "test_rc",
+ "testing",
+ "timedata",
+ "toolset_clang_darwin",
+ "toolset_clang_linux",
+ "toolset_clang_vxworks",
+ "toolset_darwin",
+ "toolset_defaults",
+ "toolset_gcc",
+ "toolset_intel_darwin",
+ "toolset_requirements",
+ "unit_test",
+ "unused",
+ "use_requirements",
+ "using",
+ "wrapper",
+ "wrong_project",
+ ]
+
+if os.name == "posix":
+ tests.append("symlink")
+ # On Windows, library order is not important, so skip this test. Besides,
+ # it fails ;-). Further, the test relies on the fact that on Linux, one can
+ # build a shared library with unresolved symbols. This is not true on
+ # Windows, even with cygwin gcc.
+
+# Disable this test until we figure out how to address failures due to --as-needed being the default now.
+# if "CYGWIN" not in os.uname()[0]:
+# tests.append("library_order")
+
+if toolset.startswith("gcc") and os.name != "nt":
+ # On Windows it's allowed to have a static runtime with gcc. But this test
+ # assumes otherwise. Hence enable it only when not on Windows.
+ tests.append("gcc_runtime")
+
+# PCH test seems broken in strange ways. Disable it.
+# if toolset.startswith("gcc") or toolset.startswith("msvc"):
+# tests.append("pch")
+
+# Disable on OSX as it doesn't seem to work for unknown reasons.
+if sys.platform != 'darwin':
+ tests.append("builtin_glob_archive")
+
+if "--extras" in sys.argv:
+ tests.append("boostbook")
+ tests.append("qt4")
+ tests.append("qt5")
+ tests.append("example_qt4")
+ # Requires ./whatever.py to work, so is not guaranteed to work everywhere.
+ tests.append("example_customization")
+ # Requires gettext tools.
+ tests.append("example_gettext")
+elif not xml:
+ print("Note: skipping extra tests")
+
+run_tests(critical_tests, tests)
diff --git a/src/boost/tools/build/test/test_rc.py b/src/boost/tools/build/test/test_rc.py
new file mode 100755
index 000000000..56c02b7ef
--- /dev/null
+++ b/src/boost/tools/build/test/test_rc.py
@@ -0,0 +1,148 @@
+#!/usr/bin/python
+
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests rc toolset behaviour.
+
+import BoostBuild
+
+
+def included_resource_newer_than_rc_script():
+ """
+    When a .rc script file includes another resource file, the included
+    resource file being newer than the .rc script file should not cause the
+    .rc script file to be considered out of date and force all of its
+    dependents to rebuild.
+
+ """
+ toolsetName = "__myDummyResourceCompilerToolset__"
+
+ # Used options rationale:
+ #
+ # -d4 & --debug-configuration
+ # Display additional information in case of test failure. In the past
+ # we have had testing system issues causing this test to fail
+ # sporadically for which -d+3 output had been instrumental in getting to
+ # the root cause (a touched file's timestamp was not as new as it should
+ # have been).
+ #
+ # --ignore-site-config --user-config=
+ # Disable reading any external Boost Build configuration. This test is
+    #     self-sufficient, so these options protect it from being adversely
+    #     affected by any local (mis)configuration.
+ t = BoostBuild.Tester(["-d4", "--debug-configuration",
+ "--ignore-site-config", "--user-config=", "toolset=%s" % toolsetName],
+ pass_toolset=False, use_test_config=False,
+ translate_suffixes=False)
+
+    # Prepare a dummy toolset so we do not get errors in case the default one
+    # is not found, and so that we can test rc.jam functionality without having
+    # to depend on the externally specified toolset supporting it exactly the
+    # way this test requires; e.g. the gcc toolset, under some circumstances,
+    # uses a quiet action for generating its null RC targets.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+import rc ;
+import type ;
+local toolset-name = "%s" ;
+feature.extend toolset : $(toolset-name) ;
+rule init ( ) { }
+rc.configure dummy-rc-command : <toolset>$(toolset-name) : <rc-type>dummy ;
+module rc
+{
+ rule compile.resource.dummy ( targets * : sources * : properties * )
+ {
+ import common ;
+ .TOUCH on $(targets) = [ common.file-touch-command ] ;
+ }
+ actions compile.resource.dummy { $(.TOUCH) "$(<)" }
+}
+# Make OBJ files generated by our toolset use the "obj" suffix on all
+# platforms. We need to do this explicitly for <target-os> windows & cygwin to
+# override the default OBJ type configuration (otherwise we would get
+# 'ambiguous key' errors on those platforms).
+local rule set-generated-obj-suffix ( target-os ? )
+{
+ type.set-generated-target-suffix OBJ : <toolset>$(toolset-name)
+ <target-os>$(target-os) : obj ;
+}
+set-generated-obj-suffix ;
+set-generated-obj-suffix windows ;
+set-generated-obj-suffix cygwin ;
+""" % toolsetName)
+
+ t.write(
+ toolsetName + '.py',
+"""
+from b2.build import feature, type as type_
+from b2.manager import get_manager
+from b2.tools import rc, common
+
+MANAGER = get_manager()
+ENGINE = MANAGER.engine()
+
+toolset_name = "{0}"
+
+feature.extend('toolset', [toolset_name])
+
+def init(*args):
+ pass
+
+rc.configure(['dummy-rc-command'], ['<toolset>' + toolset_name], ['<rc-type>dummy'])
+
+ENGINE.register_action(
+ 'rc.compile.resource.dummy',
+ '''
+ %s "$(<)"
+ ''' % common.file_creation_command()
+)
+
+def set_generated_obj_suffix(target_os=''):
+ requirements = ['<toolset>' + toolset_name]
+ if target_os:
+ requirements.append('<target-os>' + target_os)
+ type_.set_generated_target_suffix('OBJ', requirements, 'obj')
+
+set_generated_obj_suffix()
+set_generated_obj_suffix('windows')
+set_generated_obj_suffix('cygwin')
+""".format(toolsetName)
+ )
+
+ # Prepare project source files.
+ t.write("jamroot.jam", """\
+ECHO "{{{" [ modules.peek : XXX ] [ modules.peek : NOEXEC ] "}}}" ;
+obj xxx : xxx.rc ;
+""")
+ t.write("xxx.rc", '1 MESSAGETABLE "xxx.bin"\n')
+ t.write("xxx.bin", "foo")
+
+ def test1(n, expect, noexec=False):
+ params = ["-sXXX=%d" % n]
+ if noexec:
+ params.append("-n")
+ params.append("-sNOEXEC=NOEXEC")
+ t.run_build_system(params)
+ t.expect_output_lines("*NOEXEC*", noexec)
+ obj_file = "xxx_res.obj"
+ t.expect_output_lines("compile.resource.dummy *%s" % obj_file, expect)
+ if expect and not noexec:
+ expect("bin/%s/debug/%s" % (toolsetName, obj_file))
+ t.expect_nothing_more()
+
+ def test(n, expect):
+ test1(n, expect, noexec=True)
+ test1(n, expect)
+
+ test(1, t.expect_addition)
+ test(2, None)
+ t.touch("xxx.bin")
+ test(3, t.expect_touch)
+ test(4, None)
+
+ t.cleanup()
+
+
+included_resource_newer_than_rc_script()
diff --git a/src/boost/tools/build/test/test_system.html b/src/boost/tools/build/test/test_system.html
new file mode 100644
index 000000000..be870eaa7
--- /dev/null
+++ b/src/boost/tools/build/test/test_system.html
@@ -0,0 +1,623 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
+"http://www.w3.org/TR/html4/strict.dtd">
+
+<html>
+ <head>
+ <meta name="generator" content=
+ "HTML Tidy for Linux/x86 (vers 1st March 2002), see www.w3.org">
+ <!--tidy options: -i -wrap 78 -->
+ <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+
+ <title>A testing system for B2</title>
+<style type="text/css">
+ hr { color: black }
+ p.revision { text-align: right; font-style: italic }
+ pre.code { margin-left: 2em }
+ pre.example { margin-left: 2em; border: solid black thin }
+ pre.output { margin-left: 2em }
+ img.banner { border: 0; float: left }
+ h1 { text-align: right }
+ br.clear { clear: left }
+ div.attention { color: red }
+
+</style>
+ </head>
+
+ <body>
+ <p><a href="../../../../index.htm"><img class="banner" height="86" width=
+ "277" alt="C++ Boost" src="../../../../boost.png"></a></p>
+
+ <h1>A testing system for B2<br class="clear">
+ </h1>
+ <hr>
+
+ <dl class="page-index">
+ <dt><a href="#sec-intro">Introduction for users</a></dt>
+
+ <dd>
+ <dl class="page-index">
+ <dt><a href="#sec-command-line-options">Command line options</a></dt>
+ </dl>
+ </dd>
+
+ <dt><a href="#sec-developers">Introduction for developers</a></dt>
+
+ <dd>
+ <dl class="page-index">
+ <dt><a href="#sec-intro-changing">Changing the working
+ directory</a></dt>
+
+ <dt><a href="#sec-intro-examining">Examining the working directory and
+ changing it</a></dt>
+
+ <dt><a href="#sec-intro-results">Test result</a></dt>
+ </dl>
+ </dd>
+
+ <dt><a href="#sec-reference">Reference documentation</a></dt>
+
+ <dd>
+ <dl class="page-index">
+ <dt><a href="#method-__init__">Method __init__</a></dt>
+
+ <dt><a href="#method-set_tree">Method <tt>set_tree</tt></a></dt>
+
+ <dt><a href="#method-write">Method <tt>write</tt></a></dt>
+
+ <dt><a href="#method-copy">Method <tt>copy</tt></a></dt>
+
+ <dt><a href="#method-touch">Method <tt>touch</tt></a></dt>
+
+ <dt><a href="#method-run_build_system">Method
+ <tt>run_build_system</tt></a></dt>
+
+ <dt><a href="#method-read">Method <tt>read</tt></a></dt>
+
+ <dt><a href="#method-read_and_strip">Method
+ <tt>read_and_strip</tt></a></dt>
+
+ <dt><a href="#methods-expectations">Methods for declaring
+ expectations</a></dt>
+
+ <dt><a href="#methods-ignoring">Methods for ignoring
+ changes</a></dt>
+
+ <dt><a href="#methods-result">Methods for explicitly specifying
+ results</a></dt>
+
+ <dt><a href="#class-list">Helper class <tt>List</tt></a></dt>
+ </dl>
+ </dd>
+ </dl>
+ <hr>
+
+ <h2><a name="sec-intro">Introduction for users</a></h2>
+
+ <p>The testing system for B2 is a small set of Python modules and
+  scripts for automatically testing user-observable behaviour. It uses
+ components from testing systems of <a href="http://www.scons.org">Scons</a>
+ and <a href="http://subversion.tigris.org">Subversion</a>, together with
+ some additional functionality.</p>
+
+ <p>To run the tests you need to:</p>
+
+ <ol>
+ <li>Get the source tree of B2 (located at <tt>tools/build</tt>
+ in Boost)</li>
+
+ <li>Have <a href="http://www.python.org">Python</a> installed. Version
+ 2.1 is known to work.</li>
+
+ <li>Build Boost.Jam. See <a href=
+ "../engine/index.html">$boost_build_root/engine/index.html</a> for
+ instructions.</li>
+
+ <li>Configure at least one toolset. You can edit <tt>site-config.jam</tt>
+    or <tt>user-config.jam</tt> to add new toolsets. Or you can create the file
+    <tt>test-config.jam</tt> in the <tt>$boost_build_root/test</tt> directory. In
+ this case, <tt>site-config.jam</tt> and <tt>user-config.jam</tt> will be
+ ignored for testing.</li>
+
+    <li>Make sure that the configuration jam file(s) you use generate no
+    console output, e.g. via the Boost Build 'ECHO' rule. Such console output
+    in the configuration jam file(s) will cause a number of tests to fail that
+    would otherwise succeed.</li>
+ </ol>
+
+ <p>When all is set, you can run all the tests using the <tt>test_all.py</tt>
+ script or you can run a specific test by starting its Python script
+ directly.</p>
+
+ <p>Examples:</p>
+
+<pre class="code">
+python test_all.py
+python generators_test.py
+</pre>
+
+ <p>If everything is OK, you will see a list of passed tests. Otherwise, a
+ failure will be reported.</p>
+
+ <h3><a name="sec-command-line-options">Command line options</a></h3>
+
+ <p>Test scripts will use the toolset you configured to be the default or
+ you can specify a specific one on the command line:</p>
+
+<pre class="code">
+python test_all.py borland
+python generators_test.py msvc-7.1
+</pre>
+
+ <p>Other test script flags you can specify on the command line are:</p>
+
+ <ul>
+    <li><tt>--default-bjam</tt> -- By default the test system will use the
+    Boost Jam executable built in its default development build location.
+    This option makes it use the default one available on your system, i.e.
+    the one found in the system path.</li>
+
+    <li><tt>--preserve</tt> -- In case of a failed test, its working
+ directory will be copied to the "failed_test" directory under the
+ current directory.</li>
+
+    <li><tt>--verbose</tt> -- Makes the test system and the build system it runs
+ display additional output. Note though that this may cause tests that
+ check the build system output to fail.</li>
+ </ul>
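+
+  <p>For illustration, these flags can typically be combined with a toolset
+  name on the same command line; the toolsets shown here are only
+  examples:</p>
+
+<pre class="code">
+python test_all.py --preserve msvc-7.1
+python generators_test.py --verbose gcc
+</pre>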
+
+ <h2><a name="sec-developers">Introduction for developers</a></h2>
+
+  <p>It is suggested that every new feature come together with tests, and
+  that bugfixes be accompanied by tests. There's no need to say that
+ tests are good, but two points are extremely important:</p>
+
+ <ul>
+ <li>For an interpreted language like Jam, without any static checks,
+      testing is simply the only safeguard we can have.</li>
+
+ <li>Good tests allow us to change internal design much more safely, and we
+ have not gotten everything nailed down yet.</li>
+ </ul>
+
+ <p>Adding a new test is simple:</p>
+
+ <ol>
+      <li>Go to <tt>$boost_build_root/test/test_all.py</tt> and add the new
+      test name to the list at the end of the file. Suppose the test name is "hello".
+ </li>
+
+ <li>Add a new python module, in this example "hello.py", to do the actual
+ testing.</li>
+ </ol>
+
+  <p>The module, in general, will perform these basic actions:</p>
+
+ <ol>
+ <li>Set up the initial working directory state</li>
+
+ <li>
+ Run the build system and check the results:
+
+ <ol>
+ <li>generated output,</li>
+
+ <li>changes made to the working directory,</li>
+
+ <li>new content of the working directory.</li>
+ </ol>
+ </li>
+
+ <li>Add, remove or touch files or change their content and then repeat
+ the previous step until satisfied.</li>
+
+ <li>Clean up</li>
+ </ol>
+
+ <p>The "hello.py" module might contain:</p>
+<pre class="example">
+import BoostBuild
+from BoostBuild import List
+
+# Create a temporary working directory
+t = BoostBuild.Tester()
+
+# Create the needed files
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """
+exe hello : hello.cpp ;
+""")
+t.write("hello.cpp", """
+int main()
+{
+ return 0;
+}
+
+""")
+
+t.run_build_system()
+
+# First, create a list of three pathnames.
+file_list = List("bin/$toolset/debug/") * List("hello.exe hello.obj")
+# Second, assert that those files were added as a result of the last build system invocation.
+t.expect_addition(file_list)
+
+# Invoke the build system once again.
+t.run_build_system("clean")
+# Check if the files added previously were removed.
+t.expect_removal(file_list)
+
+# Remove temporary directories
+t.cleanup()
+</pre>
+
+ <p>The <tt>test</tt> directory contains a file "template.py" which can be
+ used as a start for your own tests.</p>
+
+ <p>Overview of the most important methods of class <tt>Tester</tt> follows.
+ </p>
+
+ <h3><a name="sec-intro-changing">Changing the working directory</a></h3>
+
+ <p>The class <tt>Tester</tt> creates a temporary directory in its
+ constructor and changes to that directory. It can be modified by calling
+ these methods:</p>
+
+ <ul>
+ <li><tt>set_tree</tt> -- sets the content of the working directory to be
+ equal to the content of the specified directory. This method is
+    preferable when the directory tree for testing is large.</li>
+
+    <li><tt>write</tt> -- sets the content of a file in the working directory.
+ This is optimal if you want to create a directory tree with 3-4 small
+ files.</li>
+
+ <li><tt>touch</tt> -- changes the modification times of a file</li>
+ </ul>
+
+ <h3><a name="sec-intro-examining">Examining the working directory and
+ changing it</a></h3>
+
+ <p>The method <tt>read</tt>, inherited from the <tt>TestCmd</tt> class, can
+ be used to read any file in the working directory and check its content.
+ <tt>Tester</tt> adds another method for tracking changes. Whenever the build
+ system is run (using <a href="#method-run_build_system"><tt>run_build_system
+  </tt></a>), the working directory state before and after running is recorded. In
+  addition, the differences between the two states -- i.e. lists of files that were
+  added, removed, modified or touched -- are stored in two member variables -
+ <tt>tree_difference</tt> and <tt>unexpected_difference</tt>.</p>
+
+  <p>After that, the test author may specify that some change is expected, for
+ example, by calling <tt>expect_addition("foo")</tt>. This call will check if
+ the file was indeed added, and if so, will remove its name from the list of
+ added files in <tt>unexpected_difference</tt>. Likewise, it is possible to
+ specify that some changes are not interesting, for example a call to
+ <tt>ignore("*.obj")</tt> will just remove every file with the ".obj"
+ extension from <tt>unexpected_difference</tt>.</p>
+
+  <p>When the test has finished with expectations and ignoring, the member
+  <tt>unexpected_difference</tt> will contain the list of all changes not yet
+  accounted for. It is possible to assert that this list is empty by calling
+ the <tt>expect_nothing_more</tt> member function.</p>
+
+ <h3><a name="sec-intro-results">Test result</a></h3>
+
+ <p>Any of the <tt>expect*</tt> methods below will fail the test if the
+  expectation is not met. It is also possible to perform arbitrary checks
+  manually and explicitly cause the test to either pass or fail. Ordinary
+ filesystem functions can be used to work with the directory tree. Methods
+ <tt>pass_test</tt> and <tt>fail_test</tt> are used to explicitly give the
+ test outcome.</p>
+
+ <p>Typically, after test termination, the working directory is erased. See
+ the <a href="#sec-command-line-options">"--preserve" command line option</a>
+ for information on how to preserve the working directory content for failed
+ tests for debugging purposes.</p>
+
+ <h2 id="sec-reference">Reference documentation</h2>
+
+  <p>The test system is composed of the class <tt>Tester</tt>, derived from
+ <tt>TestCmd.TestCmd</tt>, and helper class <tt>List</tt>. <tt>Tester</tt>
+ and <tt>List</tt> methods are described below.</p>
+
+ <p>The documentation frequently refers to <tt>filename</tt>. In all cases,
+ files are specified in unix style: a sequence of components, separated by
+ "/". This is true on all platforms. In some contexts a list of files is
+ allowed. In those cases any object with a sequence interface is allowed.</p>
+
+ <h3><a name="method-__init__">Method <tt>__init__(self, arguments="",
+ executable="bjam", match=TestCmd.match_exact, boost_build_path=None,
+ translate_suffixes=True, pass_toolset=True, use_test_config=True,
+ ignore_toolset_requirements=True, workdir="", **keywords)</tt></a></h3>
+
+ <p><b>Optional arguments:</b></p>
+
+ <ul>
+ <li><tt>arguments</tt>
+ -- Arguments passed to the run executable.</li>
+ <li><tt>executable</tt>
+ -- Name of the executable to invoke.</li>
+ <li><tt>match</tt>
+          -- Function to use for comparing actual and expected file contents.
+ </li>
+ <li><tt>boost_build_path</tt>
+ -- Boost build path to be passed to the run executable.</li>
+ <li><tt>translate_suffixes</tt>
+          -- Whether to update suffixes on the file names passed from the
+ test script so they match those actually created by the current
+ toolset. For example, static library files are specified by using
+ the .lib suffix but when the 'gcc' toolset is used it actually
+ creates them using the .a suffix.</li>
+ <li><tt>pass_toolset</tt>
+ -- Whether the test system should pass the specified toolset to the
+ run executable.</li>
+ <li><tt>use_test_config</tt>
+ -- Whether the test system should tell the run executable to read in
+ the test_config.jam configuration file.</li>
+ <li><tt>ignore_toolset_requirements</tt>
+ -- Whether the test system should tell the run executable to ignore
+ toolset requirements.</li>
+ <li><tt>workdir</tt>
+ -- Indicates an absolute directory where the test will be run from.
+ </li>
+ </ul>
+
+ <p><b>Optional arguments inherited from the base class:</b></p>
+
+ <ul>
+ <li><tt>description</tt>
+ -- Test description string displayed in case of a failed test.</li>
+ <li><tt>subdir</tt>
+ -- List of subdirectories to automatically create under the working
+          directory. Each subdirectory needs to be specified separately,
+          with a parent coming before its child.</li>
+ <li><tt>verbose</tt>
+ -- Flag that may be used to enable more verbose test system output.
+ Note that it does not also enable more verbose build system output
+ like the <a href="#sec-command-line-options">"--verbose" command
+ line option</a> does.</li>
+ </ul>
+
+ <p><b>Effects:</b></p>
+
+ <ol>
+ <li>Remembers the current working directory in member
+ <tt>original_workdir</tt>.</li>
+
+ <li>Determines the location of the executable (<code>bjam</code> by
+ default) and build system files, assuming that the current directory is
+ <tt>tools/build/test</tt>. Formulates jam invocation command, which
+ will include explicit setting for the <tt>BOOST_BUILD_PATH</tt> variable
+ and arguments passed to this methods, if any. This command will be used
+ by subsequent invocation of <a href="#method-run_build_system"><tt>
+ run_build_system</tt></a>. Finally, initializes the base class.</li>
+
+ <li>Changes the current working directory to the temporary working
+ directory created by the base constructor.</li>
+
+ <li>If you want to run a test in an existing directory, pass it as
+ <tt>workdir</tt>.</li>
+
+    <li>Most parameters passed to this constructor may be overridden
+    for each specific test system run using <a href=
+    "#method-run_build_system"><tt>run_build_system</tt></a> parameters.</li>
+ </ol>
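+
+  <p>As an illustration (the argument values below are only examples, taken in
+  spirit from the test scripts in this directory), a test may construct the
+  <tt>Tester</tt> like this:</p>
+
+<pre class="example">
+import BoostBuild
+
+# Default setup: temporary working directory, default toolset.
+t = BoostBuild.Tester()
+
+# Pass extra bjam arguments, do not pass a toolset and do not read
+# test-config.jam.
+t = BoostBuild.Tester(["-d4", "--debug-configuration"],
+    pass_toolset=False, use_test_config=False)
+</pre>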
+
+ <h3><a name="method-set_tree">Method <tt>set_tree(self,
+ tree_location)</tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+ <p>Replaces the content of the current working directory with the content
+  of the directory at <tt>tree_location</tt>. If <tt>tree_location</tt> is not
+  an absolute pathname, it will be treated as relative to
+  <tt>self.original_workdir</tt>. This method also explicitly makes the
+ copied files writeable.</p>
+
+ <h3><a name="method-write">Method <tt>write(self, name,
+ content)</tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+ <p>Writes the specified content to the file given by <tt>name</tt> under
+ the temporary working directory. If the file already exists, it is
+ overwritten. Any required directories are automatically created.</p>
+
+ <h3><a name="method-copy">Method <tt>copy(self, src, dst)</tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+  <p>Equivalent to <tt>self.write(dst, self.read(src))</tt>.</p>
+
+ <h3><a name="method-touch">Method <tt>touch(self, names)</tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+ <p>Sets the access and modification times for all files in <tt>names</tt> to
+ the current time. All the elements in <tt>names</tt> should be relative
+ paths.</p>
+
+ <h3><a name="method-run_build_system">Method <tt>run_build_system(self,
+ extra_args="", subdir="", stdout=None, stderr="", status=0, match=None,
+ pass_toolset=None, use_test_config=None, ignore_toolset_requirements=None,
+ expected_duration=None, **kw)</tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+ <ol>
+ <li>Stores the state of the working directory in
+ <tt>self.previous_tree</tt>.</li>
+
+ <li>Changes to <tt>subdir</tt>, if it is specified. It is relative to
+ the <tt>original_workdir</tt> or the workdir specified in
+    <tt>__init__</tt>.</li>
+
+ <li>Invokes the <tt>bjam</tt> executable, passing <tt>extra_args</tt>
+ to it. The binary should be located under
+ <tt>&lt;test_invocation_dir&gt;/../jam/src/bin.&lt;platform&gt;</tt>.
+    This is to make sure tests use the version of jam built from CVS.</li>
+
+    <li>Compares the stdout, stderr and exit status of the build system
+    invocation with the values of the corresponding parameters, if they are not
+ <tt>None</tt>. If any difference is found, the test fails.</li>
+
+ <li>If the <tt>expected_duration</tt> parameter is specified then it
+ represents the maximal allowed time in seconds for the test to run. The
+ test will be marked as failed if its duration is greater than the given
+ <tt>expected_duration</tt> parameter value.</li>
+
+ <li>Stores the new state of the working directory in <tt>self.tree</tt>.
+    Computes the differences between the previous and current trees and stores
+    them in the variables <tt>self.tree_difference</tt> and
+    <tt>self.unexpected_difference</tt>. Both variables are instances of the class
+    <tt>tree.Trees_different</tt>, which has four attributes:
+    <tt>added_files</tt>, <tt>removed_files</tt>, <tt>modified_files</tt> and
+    <tt>touched_files</tt>. Each is a list of strings.</li>
+ </ol>
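+
+  <p>A few typical invocations, shown here only as a sketch (the subdirectory
+  name is an example):</p>
+
+<pre class="example">
+t.run_build_system()                         # plain build
+t.run_build_system("clean")                  # pass extra arguments
+t.run_build_system(status=1)                 # expect a non-zero exit status
+t.run_build_system(subdir="version-1.32.0")  # run in a subdirectory
+</pre>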
+
+ <h3><a name="method-read">Method <tt>read(self, name)</tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+  <p>Reads the specified file and returns its content. Raises an exception if
+  the file is absent.</p>
+
+ <h3><a name="method-read_and_strip">Method <tt>read_and_strip(self, name)
+ </tt></a></h3>
+
+ <p><b>Effects:</b></p>
+
+  <p>Reads the specified file and returns its content, after removing trailing
+  whitespace from every line. Raises an exception if the file is absent.</p>
+
+ <p><b>Rationale:</b></p>
+
+  <p>Although this method is questionable, there are a lot of cases when jam,
+  or the shells it uses, inserts spaces. It seems that introducing this method is
+ much simpler than dealing with all those cases.</p>
+
+ <h3><a name="methods-expectations">Methods for declaring expectations</a>
+ </h3>
+
+  <p>Corresponding to the kinds of changes that are detected, there are
+  four methods by which the test author declares that a specific change is
+  expected to occur. They check <tt>self.unexpected_difference</tt>, and if the change is
+  present there, it is removed. Otherwise, the test fails.</p>
+
+  <p>Each method accepts a list of names. Those names use the <tt>/</tt> path
+  separator on all systems. Additionally, the test system translates suffixes
+  appropriately. For the test to be portable, suffixes should use the Windows
+  convention: <tt>exe</tt> for executables, <tt>dll</tt> for dynamic libraries
+  and <tt>lib</tt> for static libraries. Lastly, the string "$toolset" in file
+  names is replaced by the name of the tested toolset.</p>
+
+ <p><b>Note:</b> The <tt>List</tt> helper class might be useful to create
+ lists of names.</p>
+
+ <p><b>Note:</b> The file content can be examined using the
+ <tt>TestCmd.read</tt> function.</p>
+
+ <p>The members are:</p>
+
+ <ul>
+ <li>expect_addition</li>
+ <li>expect_removal</li>
+ <li>expect_modification</li>
+ <li>expect_nothing</li>
+ </ul>
+
+  <p>Note that <tt>expect_modification</tt> is used to check that either the
+  file content or its timestamp has changed. The rationale is that some compilers
+  change content even if the sources do not change, and it's easier to have a
+ method which checks for both content and time changes.</p>
+
+ <p>There's also a member <tt>expect_nothing_more</tt>, which checks that all
+ the changes are either expected or ignored, in other words that
+ <tt>unexpected_difference</tt> is empty by now.</p>
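+
+  <p>A minimal sketch of how the expectation methods are usually combined;
+  the target names are illustrative only:</p>
+
+<pre class="example">
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/hello.exe")
+t.expect_addition("bin/$toolset/debug/hello.obj")
+t.expect_nothing_more()
+</pre>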
+
+ <p>Lastly, there's a method to compare file content with expected content:
+ </p>
+ <tt>expect_content(self, name, content, exact=0)</tt>
+
+  <p>The method fails the test if the content of the file identified by 'name' is
+ different from 'content'. If 'exact' is true, the file content is used
+ as-is, otherwise, two transformations are applied:</p>
+
+ <ul>
+ <li>The <tt>read_and_strip</tt> method is used to read the file, which
+ removes trailing whitespace</li>
+
+ <li>Each backslash in the file content is converted to forward slash.</li>
+ </ul>
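+
+  <p>For example (the file name and expected content here are hypothetical):</p>
+
+<pre class="example">
+t.expect_content("bin/$toolset/debug/hello.output", """\
+Hello
+EXIT STATUS: 0
+""")
+</pre>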
+
+ <h3><a name="methods-ignoring">Methods for ignoring changes</a></h3>
+
+ <p>There are five methods which ignore changes made to the working tree.
+ They silently remove elements from <tt>self.unexpected_difference</tt>, and
+  do not generate an error if an element is not found. They accept shell-style
+  wildcards.</p>
+
+ <p>The following methods correspond to four kinds of changes:</p>
+
+ <ul>
+ <li>ignore_addition(self, wildcard)</li>
+ <li>ignore_removal(self, wildcard)</li>
+ <li>ignore_modification(self, wildcard)</li>
+ <li>ignore_touch(self, wildcard)</li>
+ </ul>
+
+ <p>The method <tt>ignore(self, wildcard)</tt> ignores all the changes made
+ to files that match a wildcard.</p>
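+
+  <p>For instance, a test that does not care about compiler side products
+  might write something like the following sketch (the wildcards are
+  illustrative):</p>
+
+<pre class="example">
+t.ignore("*.obj")
+t.ignore_addition("bin/*.rsp")
+t.expect_nothing_more()
+</pre>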
+
+ <h3><a name="methods-result">Methods for explicitly specifying results</a>
+ </h3>
+
+ <h4>Method <tt>pass_test(self, condition=1)</tt></h4>
+
+ <div class="attention">
+ At this moment, the method should not be used.
+ </div>
+
+ <h4>Method <tt>fail_test(self, condition=1)</tt></h4>
+
+ <p><b>Effects:</b> Cause the test to fail if <tt>condition</tt> is true.</p>
+
+ <h3><a name="class-list">Helper class <tt>List</tt></a></h3>
+ The class has sequence interface and two additional methods.
+
+ <h4>Method <tt>__init__(self, string)</tt></h4>
+
+ <p><b>Effects:</b> Splits the string on unescaped spaces and tabs. The split
+ components can further be retrieved using standard sequence access.</p>
+
+ <h4>Method <tt>__mul__(self, other)</tt></h4>
+
+  <p><b>Effects:</b> Returns a <tt>List</tt> instance whose elements are all
+  possible concatenations of two strings, the first of which is from <tt>self</tt>
+  and the second of which is from <tt>other</tt>.</p>
+
+ <p>The class also defines <tt>__str__</tt> and <tt>__repr__</tt> methods.
+  Finally, there is a <tt>__coerce__</tt> method which allows converting strings
+ to instances of <tt>List</tt>.</p>
+
+ <p><b>Example:</b></p>
+<pre>
+ l = "a b" * List("c d")
+ for e in l:
+      print(e)
+</pre>
+
+ <p>will output:</p>
+<pre>
+ ac
+ ad
+ bc
+ bd
+
+</pre>
+ <hr>
+ <p class="revision">Last modified: May 02, 2008</p>
+ <p>&copy; Copyright Vladimir Prus 2002, 2003, 2004, 2005.<br>
+ &copy; Copyright Jurko Gospodnetic 2008.<br>
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)</p>
+ </body>
+</html>
diff --git a/src/boost/tools/build/test/testing.py b/src/boost/tools/build/test/testing.py
new file mode 100755
index 000000000..b196b9db7
--- /dev/null
+++ b/src/boost/tools/build/test/testing.py
@@ -0,0 +1,535 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Copyright 2017 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests different aspects of Boost Build's automated testing support.
+
+import BoostBuild
+import TestCmd
+
+def test_run():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+run pass.cpp ;
+run fail-compile.cpp ;
+run fail-link.cpp ;
+run fail-run.cpp ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.exe")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.output")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.run")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.obj")
+
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.obj")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.exe")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.output")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_run_fail():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+run-fail pass.cpp ;
+run-fail fail-compile.cpp ;
+run-fail fail-link.cpp ;
+run-fail fail-run.cpp ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.exe")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.output")
+
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.obj")
+
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.obj")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.exe")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.output")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.run")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.test")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_run_change():
+ """Tests that the test file is removed when a test fails after it
+ previously passed."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() { return 1; }\n")
+ t.write("fail-compile.cpp", "int main() {}\n")
+ t.write("fail-link.cpp", "int main() {}\n")
+ t.write("fail-run.cpp", "int main() {}\n")
+
+ t.write("Jamroot.jam", """import testing ;
+run-fail pass.cpp ;
+run fail-compile.cpp ;
+run fail-link.cpp ;
+run fail-run.cpp ;
+""")
+ t.run_build_system()
+ # Sanity check
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+ t.expect_addition("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.test")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.test")
+ t.expect_output_lines("...failed*", False)
+
+ # Now make them fail
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+ t.run_build_system(status=1)
+
+ t.expect_removal("bin/pass.test/$toolset/debug*/pass.test")
+ t.expect_removal("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+ t.expect_removal("bin/fail-link.test/$toolset/debug*/fail-link.test")
+ t.expect_removal("bin/fail-run.test/$toolset/debug*/fail-run.test")
+
+ t.cleanup()
+
+def test_run_path():
+ """Tests that run can find shared libraries even without
+ hardcode-dll-paths. Important: The library is in neither the
+ current working directory, nor any system path, nor the same
+ directory as the executable, so it should never be found without
+ help from B2."""
+ t = BoostBuild.Tester(["hardcode-dll-paths=false"], use_test_config=False)
+
+ t.write("l.cpp", """
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+f() {}
+""")
+ t.write("pass.cpp", "void f(); int main() { f(); }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+lib l : l.cpp : <link>shared ;
+run pass.cpp l ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/l.obj")
+ t.expect_addition("bin/$toolset/debug*/l.dll")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.exe")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.output")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.run")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+
+ t.cleanup()
+
+def test_run_args():
+ """Tests the handling of args and input-files"""
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("test.cpp", """
+#include <iostream>
+#include <fstream>
+int main(int argc, const char ** argv)
+{
+ for(int i = 1; i < argc; ++i)
+ {
+ if(argv[i][0] == '-')
+ {
+ std::cout << argv[i] << std::endl;
+ }
+ else
+ {
+ std::ifstream ifs(argv[i]);
+ std::cout << ifs.rdbuf();
+ }
+ }
+}
+""")
+ t.write("input1.in", "first input\n")
+ t.write("input2.in", "second input\n")
+ t.write("Jamroot.jam", """import testing ;
+import common ;
+# FIXME: The order actually depends on the lexicographical
+# ordering of the virtual target objects, which is just
+# crazy. Switch the order of input1.txt and input2.txt
+# to make this fail. Joining the arguments with && might
+# work, but might get a bit complicated to implement as
+# dependency properties do not currently support &&.
+make input1.txt : input1.in : @common.copy ;
+make input2.txt : input2.in : @common.copy ;
+run test.cpp : -y -a : input1.txt input2.txt ;
+""")
+ t.run_build_system()
+ t.expect_content("bin/test.test/$toolset/debug*/test.output", """\
+-y
+-a
+first input
+second input
+
+EXIT STATUS: 0
+""")
+ t.cleanup()
+
+def test_link():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+link pass.cpp ;
+link fail-compile.cpp ;
+link fail-link.cpp ;
+link fail-run.cpp ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.exe")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.obj")
+
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.obj")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.exe")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.test")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_link_fail():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+link-fail pass.cpp ;
+link-fail fail-compile.cpp ;
+link-fail fail-link.cpp ;
+link-fail fail-run.cpp ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.obj")
+
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.obj")
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.exe")
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.test")
+
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.obj")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_link_change():
+ """Tests that the test file is removed when a test fails after it
+ previously passed."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-compile.cpp", "int main() {}\n")
+ t.write("fail-link.cpp", "int main() {}\n")
+
+ t.write("Jamroot.jam", """import testing ;
+link-fail pass.cpp ;
+link fail-compile.cpp ;
+link fail-link.cpp ;
+""")
+ t.run_build_system()
+ # Sanity check
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+ t.expect_addition("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.test")
+ t.expect_output_lines("...failed*", False)
+
+ # Now make them fail
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.run_build_system(status=1)
+
+ t.expect_removal("bin/pass.test/$toolset/debug*/pass.test")
+ t.expect_removal("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+ t.expect_removal("bin/fail-link.test/$toolset/debug*/fail-link.test")
+
+ t.cleanup()
+
+def test_compile():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+compile pass.cpp ;
+compile fail-compile.cpp ;
+compile fail-link.cpp ;
+compile fail-run.cpp ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.obj")
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.obj")
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.test")
+
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.obj")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.test")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_compile_fail():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+compile-fail pass.cpp ;
+compile-fail fail-compile.cpp ;
+compile-fail fail-link.cpp ;
+compile-fail fail-run.cpp ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_addition("bin/fail-compile.test/$toolset/debug*/fail-compile.obj")
+ t.expect_addition("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_compile_change():
+ """Tests that the test file is removed when a test fails after it
+ previously passed."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass.cpp", "#error expected to fail\n")
+ t.write("fail-compile.cpp", "int main() {}\n")
+
+ t.write("Jamroot.jam", """import testing ;
+compile-fail pass.cpp ;
+compile fail-compile.cpp ;
+""")
+ t.run_build_system()
+ # Sanity check
+ t.expect_addition("bin/pass.test/$toolset/debug*/pass.test")
+ t.expect_addition("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+ t.expect_output_lines("...failed*", False)
+
+ # Now make them fail
+ t.write("pass.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.run_build_system(status=1)
+
+ t.expect_removal("bin/pass.test/$toolset/debug*/pass.test")
+ t.expect_removal("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+
+ t.cleanup()
+
+def test_remove_test_targets(option):
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass-compile.cpp", "int main() {}\n")
+ t.write("pass-link.cpp", "int main() {}\n")
+ t.write("pass-run.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+ t.write("source.cpp", "int f();\n")
+
+ t.write("Jamroot.jam", """import testing ;
+obj source.o : source.cpp ;
+compile pass-compile.cpp ;
+link pass-link.cpp source.o ;
+run pass-run.cpp source.o ;
+compile-fail fail-compile.cpp ;
+link-fail fail-link.cpp ;
+run-fail fail-run.cpp ;
+""")
+
+ t.run_build_system([option])
+
+ t.expect_addition("bin/$toolset/debug*/source.obj")
+
+ t.expect_addition("bin/pass-compile.test/$toolset/debug*/pass-compile.test")
+
+ t.expect_addition("bin/pass-link.test/$toolset/debug*/pass-link.test")
+
+ t.expect_addition("bin/pass-run.test/$toolset/debug*/pass-run.output")
+ t.expect_addition("bin/pass-run.test/$toolset/debug*/pass-run.run")
+ t.expect_addition("bin/pass-run.test/$toolset/debug*/pass-run.test")
+
+ t.expect_addition("bin/fail-compile.test/$toolset/debug*/fail-compile.test")
+
+ t.expect_addition("bin/fail-link.test/$toolset/debug*/fail-link.test")
+
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.output")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.run")
+ t.expect_addition("bin/fail-run.test/$toolset/debug*/fail-run.test")
+
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_dump_tests():
+ """Tests the output of the --dump-tests option"""
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("pass-compile.cpp", "int main() {}\n")
+ t.write("pass-link.cpp", "int main() {}\n")
+ t.write("pass-run.cpp", "int main() {}\n")
+ t.write("fail-compile.cpp", "#error expected to fail\n")
+ t.write("fail-link.cpp", "int f();\nint main() { return f(); }\n")
+ t.write("fail-run.cpp", "int main() { return 1; }\n")
+
+ t.write("Jamroot.jam", """import testing ;
+run pass-run.cpp ;
+run-fail fail-run.cpp ;
+link pass-link.cpp ;
+link-fail fail-link.cpp ;
+compile pass-compile.cpp ;
+compile-fail fail-compile.cpp ;
+build-project libs/any/test ;
+build-project libs/any/example ;
+build-project libs/any ;
+build-project tools/bcp/test ;
+build-project tools/bcp/example ;
+build-project subdir/test ;
+build-project status ;
+build-project outside/project ;
+""")
+ def write_subdir(dir):
+ t.write(dir + "/test.cpp", "int main() {}\n")
+ t.write(dir + "/Jamfile", "run test.cpp ;")
+ write_subdir("libs/any/test")
+ write_subdir("libs/any/example")
+ write_subdir("libs/any")
+ write_subdir("tools/bcp/test")
+ write_subdir("tools/bcp/example")
+ write_subdir("status")
+ write_subdir("subdir/test")
+ t.write("outside/other/test.cpp", "int main() {}\n")
+ t.write("outside/project/Jamroot", "run ../other/test.cpp ;")
+ t.run_build_system(["--dump-tests", "-n", "-d0"],
+ match=TestCmd.match_re, stdout=
+"""boost-test\(RUN\) ".*/pass-run" : "pass-run\.cpp"
+boost-test\(RUN_FAIL\) ".*/fail-run" : "fail-run\.cpp"
+boost-test\(LINK\) ".*/pass-link" : "pass-link\.cpp"
+boost-test\(LINK_FAIL\) ".*/fail-link" : "fail-link\.cpp"
+boost-test\(COMPILE\) ".*/pass-compile" : "pass-compile\.cpp"
+boost-test\(COMPILE_FAIL\) ".*/fail-compile" : "fail-compile\.cpp"
+boost-test\(RUN\) "any/test" : "libs/any/test\.cpp"
+boost-test\(RUN\) "any/test" : "libs/any/test/test\.cpp"
+boost-test\(RUN\) "any/test" : "libs/any/example/test\.cpp"
+boost-test\(RUN\) "bcp/test" : "tools/bcp/test/test\.cpp"
+boost-test\(RUN\) "bcp/test" : "tools/bcp/example/test\.cpp"
+boost-test\(RUN\) ".*/subdir/test/test" : "subdir/test/test\.cpp"
+boost-test\(RUN\) "test" : "status/test\.cpp"
+boost-test\(RUN\) ".*/outside/project/test" : "../other/test.cpp"
+""")
+ t.cleanup()
+
+################################################################################
+#
+# test_files_with_spaces_in_their_name()
+# --------------------------------------
+#
+################################################################################
+
+def test_files_with_spaces_in_their_name():
+ """Regression test making sure test result files get created correctly when
+ testing files with spaces in their name.
+ """
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("valid source.cpp", "int main() {}\n");
+
+ t.write("invalid source.cpp", "this is not valid source code");
+
+ t.write("jamroot.jam", """
+import testing ;
+testing.compile "valid source.cpp" ;
+testing.compile-fail "invalid source.cpp" ;
+""")
+
+ t.run_build_system(status=0)
+ t.expect_addition("bin/invalid source.test/$toolset/debug*/invalid source.obj")
+ t.expect_addition("bin/invalid source.test/$toolset/debug*/invalid source.test")
+ t.expect_addition("bin/valid source.test/$toolset/debug*/valid source.obj")
+ t.expect_addition("bin/valid source.test/$toolset/debug*/valid source.test")
+
+ t.expect_content("bin/valid source.test/$toolset/debug*/valid source.test", \
+ "passed" )
+ t.expect_content( \
+ "bin/invalid source.test/$toolset/debug*/invalid source.test", \
+ "passed" )
+ t.expect_content( \
+ "bin/invalid source.test/$toolset/debug*/invalid source.obj", \
+ "failed as expected" )
+
+ t.cleanup()
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+test_run()
+test_run_fail()
+test_run_change()
+test_run_path()
+test_run_args()
+test_link()
+test_link_fail()
+test_link_change()
+test_compile()
+test_compile_fail()
+test_compile_change()
+test_remove_test_targets("--remove-test-targets")
+test_remove_test_targets("preserve-test-targets=off")
+test_dump_tests()
+test_files_with_spaces_in_their_name()
diff --git a/src/boost/tools/build/test/timedata.py b/src/boost/tools/build/test/timedata.py
new file mode 100644
index 000000000..32cec265a
--- /dev/null
+++ b/src/boost/tools/build/test/timedata.py
@@ -0,0 +1,178 @@
+#!/usr/bin/python
+
+# Copyright 2005 David Abrahams
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the build step timing facilities.
+
+# TODO: Missing tests:
+# 1. 'time' target with a source target representing more than one virtual
+# target. This happens in practice, e.g. when using the time rule on a msvc
+# exe target whose generator actually constructs an EXE and a PDB target.
+#    When this is done, only the main virtual target's constructing action
+#    should be timed.
+# 2. 'time' target with a source target representing a virtual target that
+# actually gets built by multiple actions run in sequence. In that case a
+# separate timing result should be reported for each of those actions. This
+# happens in practice, e.g. when using the time rule on a msvc exe target
+# which first gets created as a result of some link action and then its
+# manifest gets embedded inside it as a resource using a separate action
+# (assuming an appropriate property has been set for this target - see the
+# msvc module for details).
+
+import BoostBuild
+import re
+
+
+###############################################################################
+#
+# basic_jam_action_test()
+# -----------------------
+#
+###############################################################################
+
+def basic_jam_action_test():
+ """Tests basic Jam action timing support."""
+
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+rule time
+{
+ DEPENDS $(<) : $(>) ;
+ __TIMING_RULE__ on $(>) = record_time $(<) ;
+ DEPENDS all : $(<) ;
+}
+
+actions time
+{
+ echo $(>) user: $(__USER_TIME__) system: $(__SYSTEM_TIME__) clock: $(__CLOCK_TIME__)
+ echo timed from $(>) >> $(<)
+}
+
+rule record_time ( target : source : start end user system clock )
+{
+ __USER_TIME__ on $(target) = $(user) ;
+ __SYSTEM_TIME__ on $(target) = $(system) ;
+ __CLOCK_TIME__ on $(target) = $(clock) ;
+}
+
+rule make
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+actions make
+{
+ echo made from $(>) >> $(<)
+}
+
+time foo : bar ;
+make bar : baz ;
+""")
+
+ t.write("baz", "nothing")
+
+ expected_output = """\
+\.\.\.found 4 targets\.\.\.
+\.\.\.updating 2 targets\.\.\.
+make bar
+time foo
+bar +user: [0-9\.]+ +system: +[0-9\.]+ +clock: +[0-9\.]+ *
+\.\.\.updated 2 targets\.\.\.$
+"""
+
+ t.run_build_system(["-ffile.jam", "-d+1"], stdout=expected_output,
+ match=lambda actual, expected: re.search(expected, actual, re.DOTALL))
+ t.expect_addition("foo")
+ t.expect_addition("bar")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# boost_build_testing_support_timing_rule():
+# ------------------------------------------
+#
+###############################################################################
+
+def boost_build_testing_support_timing_rule():
+ """
+ Tests the target build timing rule provided by the Boost Build testing
+ support system.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("aaa.cpp", "int main() {}\n")
+
+ t.write("jamroot.jam", """\
+import testing ;
+exe my-exe : aaa.cpp ;
+time my-time : my-exe ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/aaa.obj")
+ t.expect_addition("bin/$toolset/debug*/my-exe.exe")
+ t.expect_addition("bin/$toolset/debug*/my-time.time")
+
+ t.expect_content_lines("bin/$toolset/debug*/my-time.time",
+ "user: *[0-9] seconds")
+ t.expect_content_lines("bin/$toolset/debug*/my-time.time",
+ "system: *[0-9] seconds")
+ t.expect_content_lines("bin/$toolset/debug*/my-time.time",
+ "clock: *[0-9] seconds")
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# boost_build_testing_support_timing_rule_with_spaces_in_names()
+# --------------------------------------------------------------
+#
+###############################################################################
+
+def boost_build_testing_support_timing_rule_with_spaces_in_names():
+ """
+ Tests the target build timing rule provided by the Boost Build testing
+    support system when used with targets containing spaces in their names.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("aaa bbb.cpp", "int main() {}\n")
+
+ t.write("jamroot.jam", """\
+import testing ;
+exe "my exe" : "aaa bbb.cpp" ;
+time "my time" : "my exe" ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug*/aaa bbb.obj")
+ t.expect_addition("bin/$toolset/debug*/my exe.exe")
+ t.expect_addition("bin/$toolset/debug*/my time.time")
+
+ t.expect_content_lines("bin/$toolset/debug*/my time.time", "user: *")
+ t.expect_content_lines("bin/$toolset/debug*/my time.time", "system: *")
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+basic_jam_action_test()
+boost_build_testing_support_timing_rule()
+boost_build_testing_support_timing_rule_with_spaces_in_names()
\ No newline at end of file
diff --git a/src/boost/tools/build/test/toolset-mock/Jamroot.jam b/src/boost/tools/build/test/toolset-mock/Jamroot.jam
new file mode 100644
index 000000000..fd5f7907b
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/Jamroot.jam
@@ -0,0 +1,8 @@
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+lib l1 : lib.cpp ;
+exe test : main.cpp l1 ;
diff --git a/src/boost/tools/build/test/toolset-mock/lib.cpp b/src/boost/tools/build/test/toolset-mock/lib.cpp
new file mode 100644
index 000000000..1ba30e32d
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/lib.cpp
@@ -0,0 +1,7 @@
+// Copyright (c) 2017 Steven Watanabe
+//
+// Distributed under the Boost Software License Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+int f() { return 0; }
diff --git a/src/boost/tools/build/test/toolset-mock/main.cpp b/src/boost/tools/build/test/toolset-mock/main.cpp
new file mode 100644
index 000000000..0fc8b9a75
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/main.cpp
@@ -0,0 +1,7 @@
+// Copyright (c) 2017 Steven Watanabe
+//
+// Distributed under the Boost Software License Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+int main() {}
diff --git a/src/boost/tools/build/test/toolset-mock/project-config.jam b/src/boost/tools/build/test/toolset-mock/project-config.jam
new file mode 100644
index 000000000..30a9e9415
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/project-config.jam
@@ -0,0 +1,43 @@
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import os ;
+
+path-constant here : . ;
+
+local PYTHON = [ os.environ PYTHON_CMD ] ;
+
+using gcc : 4.8.3 : $(PYTHON) $(here)/src/gcc-4.8.3-linux.py : : <target-os>linux ;
+using gcc : 4.2.1 : $(PYTHON) $(here)/src/gcc-4.2.1-darwin.py : : <target-os>darwin ;
+
+# hard-code this to make the test work on other platforms
+modules.poke darwin : .host-osx-version : 10.11.0 ;
+using darwin : 4.2.1 : $(PYTHON) $(here)/src/darwin-4.2.1.py
+ : <archiver>$(here)/src/bin/libtool
+ <striper>$(here)/src/bin/strip
+ : <target-os>darwin
+ ;
+
+using clang-darwin : 3.9.0 : $(PYTHON) $(here)/src/clang-3.9.0-darwin.py
+ : <archiver>$(here)/src/bin/ar
+ <ranlib>$(here)/src/bin/ranlib
+ ;
+
+using clang-linux : 3.9.0 : $(PYTHON) $(here)/src/clang-linux-3.9.0.py
+ : <archiver>$(here)/src/bin/ar
+ <ranlib>$(here)/src/bin/ranlib
+ ;
+
+using clang-vxworks : 4.0.1 : $(PYTHON) $(here)/src/clang-vxworks-4.0.1.py
+ : <linker>$(here)/src/bin/ld
+ <archiver>$(here)/src/bin/ar
+ ;
+
+using intel-darwin : 10.2 : $(PYTHON) $(here)/src/intel-darwin-10.2.py
+ : <archiver>$(here)/src/bin/ar
+ <ranlib>$(here)/src/bin/ranlib
+ ;
diff --git a/src/boost/tools/build/test/toolset-mock/src/Jamroot.jam b/src/boost/tools/build/test/toolset-mock/src/Jamroot.jam
new file mode 100644
index 000000000..c8fc0078b
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/Jamroot.jam
@@ -0,0 +1,61 @@
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import os ;
+import print ;
+import regex ;
+import feature ;
+
+.PYTHON = [ os.environ PYTHON_CMD ] ;
+path-constant .AR : ar.py ;
+path-constant .RANLIB : ranlib.py ;
+path-constant .LIBTOOL : libtool.py ;
+path-constant .STRIP : strip.py ;
+path-constant .LD : ld.py ;
+
+rule c-escape ( str )
+{
+ return [ regex.replace $(str) \\\\ \\\\ ] ;
+}
+
+rule cfg-header ( target : : properties * )
+{
+ local PYTHON = [ c-escape $(.PYTHON) ] ;
+ local AR = [ c-escape $(.AR) ] ;
+ local RANLIB = [ c-escape $(.RANLIB) ] ;
+ local LIBTOOL = [ c-escape $(.LIBTOOL) ] ;
+ local STRIP = [ c-escape $(.STRIP) ] ;
+ local LD = [ c-escape $(.LD) ] ;
+ print.output $(target) ;
+ print.text "#define PYTHON_CMD "\"$(PYTHON)\" : true ;
+ print.text "#define AR_CMD "\"$(AR)\" : true ;
+ print.text "#define RANLIB_CMD "\"$(RANLIB)\" : true ;
+ print.text "#define LIBTOOL_CMD "\"$(LIBTOOL)\" : true ;
+ print.text "#define STRIP_CMD "\"$(STRIP)\" : true ;
+ print.text "#define LD_CMD "\"$(LD)\" : true ;
+}
+
+# We can only build one variant at a time and we need to have a fixed path
+project : requirements <location>bin ;
+
+make config.h : : @cfg-header ;
+
+project : requirements <implicit-dependency>config.h ;
+
+rule write-target-os ( target : : properties * )
+{
+ local target-os = [ feature.defaults <target-os> ] ;
+ print.output $(target) ;
+ print.text $(target-os:G=) : true ;
+}
+
+make target-os.txt : : @write-target-os ;
+
+exe ar : [ obj ar.obj : mock-program.cpp : <define>PY_SCRIPT=AR_CMD ] ;
+exe ranlib : [ obj ranlib.obj : mock-program.cpp : <define>PY_SCRIPT=RANLIB_CMD ] ;
+exe libtool : [ obj libtool.obj : mock-program.cpp : <define>PY_SCRIPT=LIBTOOL_CMD ] ;
+exe strip : [ obj strip.obj : mock-program.cpp : <define>PY_SCRIPT=STRIP_CMD ] ;
+exe ld : [ obj ld.obj : mock-program.cpp : <define>PY_SCRIPT=LD_CMD ] ;
diff --git a/src/boost/tools/build/test/toolset-mock/src/MockProgram.py b/src/boost/tools/build/test/toolset-mock/src/MockProgram.py
new file mode 100644
index 000000000..1ada7e2ad
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/MockProgram.py
@@ -0,0 +1,262 @@
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from __future__ import print_function
+
+import sys
+import os
+import re
+
+# Represents a sequence of arguments that must appear
+# in a fixed order.
+class ordered:
+ def __init__(self, *args):
+ self.args = args
+ def match(self, command_line, pos, outputs):
+ for p in self.args:
+ res = try_match(command_line, pos, p, outputs)
+ if res is None:
+ return
+ pos = res
+ return pos
+
+# Represents a sequence of arguments that can appear
+# in any order.
+class unordered:
+ def __init__(self, *args):
+ self.args = list(args)
+ def match(self, command_line, pos, outputs):
+ unmatched = self.args[:]
+ while len(unmatched) > 0:
+ res = try_match_one(command_line, pos, unmatched, outputs)
+ if res is None:
+ return
+ pos = res
+ return pos
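+
+# For instance, a pattern such as
+#   unordered(ordered('-x', 'c++'), '-g', '-c')
+# accepts '-x c++ -g -c' as well as '-g -x c++ -c', but not 'c++ -x -g -c',
+# because the inner ordered() group keeps '-x' and 'c++' adjacent and in order.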
+
+# Represents a single input file.
+# If id is set, then the file must have been created
+# by a prior use of output_file.
+# If source is set, then the file must be that source file.
+class input_file:
+ def __init__(self, id=None, source=None):
+ assert((id is None) ^ (source is None))
+ self.id = id
+ self.source = source
+ def check(self, path):
+ if path.startswith("-"):
+ return
+ if self.id is not None:
+ try:
+ with open(path, "r") as f:
+ data = f.read()
+ if data == make_file_contents(self.id):
+ return True
+ else:
+ return
+ except:
+ return
+ elif self.source is not None:
+ if self.source == path:
+ return True
+ else:
+ return
+ assert(False)
+ def match(self, command_line, pos, outputs):
+ if self.check(command_line[pos]):
+ return pos + 1
+
+# Matches an output file.
+# If the full pattern is matched, the file will be created.
+class output_file:
+ def __init__(self, id):
+ self.id = id
+ def match(self, command_line, pos, outputs):
+ if command_line[pos].startswith("-"):
+ return
+ outputs.append((command_line[pos], self.id))
+ return pos + 1
+
+# Matches the directory containing an input_file
+class target_path(object):
+ def __init__(self, id):
+ self.tester = input_file(id=id)
+ def match(self, command_line, pos, outputs):
+ arg = command_line[pos]
+ if arg.startswith("-"):
+ return
+ try:
+ for path in os.listdir(arg):
+ if self.tester.check(os.path.join(arg, path)):
+ return pos + 1
+ except:
+ return
+
+# Matches a single argument composed of a prefix and a path,
+# for example arguments of the form -ofilename.
+class arg(object):
+ def __init__(self, prefix, a):
+ # The prefix should be a string, a should be target_path or input_file.
+ self.prefix = prefix
+ self.a = a
+ def match(self, command_line, pos, outputs):
+ s = command_line[pos]
+ if s.startswith(self.prefix) and try_match([s[len(self.prefix):]], 0, self.a, outputs) == 1:
+ return pos + 1
+
+# Given a file id, returns a string that will be
+# written to the file to allow it to be recognized.
+def make_file_contents(id):
+ return id
+
+# Matches a single pattern from a list.
+# If it succeeds, the matching pattern is removed from the list.
+# Returns the index after the end of the match.
+def try_match_one(command_line, pos, patterns, outputs):
+ for p in patterns:
+ tmp = outputs[:]
+ res = try_match(command_line, pos, p, tmp)
+ if res is not None:
+ outputs[:] = tmp
+ patterns.remove(p)
+ return res
+
+# Returns the end of the match, if any.
+def try_match(command_line, pos, pattern, outputs):
+ if pos == len(command_line):
+ return
+ elif type(pattern) is str:
+ if pattern == command_line[pos]:
+ return pos + 1
+ else:
+ return pattern.match(command_line, pos, outputs)
+
+known_patterns = []
+program_name = None
+
+# Registers a command.
+# The arguments should be a sequence of:
+# str, ordered, unordered, arg, input_file, output_file, target_path.
+# The stdout keyword argument is text that will be printed on success.
+def command(*args, **kwargs):
+ global known_patterns
+ global program_name
+ stdout = kwargs.get("stdout", None)
+ pattern = ordered(*args)
+ known_patterns += [(pattern, stdout)]
+ if program_name is None:
+ program_name = args[0]
+ else:
+ assert(program_name == args[0])
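+
+# A minimal registration sketch (hypothetical 'cc' tool, not one of the mocks
+# in this directory): a mock script lists the exact command lines it expects
+# and then hands control to main(), e.g.
+#
+#   command('cc', '-c', '-o', output_file('bin/a.o'), input_file(source='a.cpp'))
+#   command('cc', '-o', output_file('bin/a.out'), input_file('bin/a.o'))
+#   main()
+#
+# Any invocation that matches no registered pattern makes the mock program
+# print the offending command line and exit with status 1.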
+
+# Use this to filter the recognized commands, based on the properties
+# passed to b2.
+def allow_properties(*args):
+ try:
+ return all(a in os.environ["B2_PROPERTIES"].split(" ") for a in args)
+ except KeyError:
+ return True
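+
+# For example, a mock toolset script can guard its expected command lines on
+# the properties the test passes to b2 (a sketch mirroring the real mocks in
+# this directory):
+#
+#   if allow_properties('variant=debug', 'link=shared'):
+#       command('cc', '-g', '-fPIC', '-c', '-o', output_file('bin/debug/lib.o'),
+#               input_file(source='lib.cpp'))
+#
+# When B2_PROPERTIES is not set in the environment, allow_properties() returns
+# True, so every registered command is considered.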
+
+# Use this in the stdout argument of command to print the command
+# for running another script.
+def script(name):
+ return os.path.join(os.path.dirname(__file__), "bin", re.sub('\.py$', '', name))
+
+def match(command_line):
+ for (p, stdout) in known_patterns:
+ outputs = []
+ if try_match(command_line, 0, p, outputs) == len(command_line):
+ return (stdout, outputs)
+
+# Every mock program should call this after setting up all the commands.
+def main():
+ command_line = [program_name] + sys.argv[1:]
+ result = match(command_line)
+ if result is not None:
+ (stdout, outputs) = result
+ if stdout is not None:
+ print(stdout)
+ for (file,id) in outputs:
+ with open(file, "w") as f:
+ f.write(make_file_contents(id))
+ exit(0)
+ else:
+ print(command_line)
+ exit(1)
+
+# filename should be the name of a file in the same directory
+# as this script. Must be called after verify_setup().
+def verify_file(filename):
+ global known_files
+ if filename not in known_files:
+ known_files.add(filename)
+ srcdir = os.path.dirname(__file__)
+ execfile(os.path.join(srcdir, filename), {})
+
+def verify_setup():
+ """Override the behavior of most module components
+ in order to detect whether they are being used correctly."""
+ global main
+ global allow_properties
+ global output_file
+ global input_file
+ global target_path
+ global script
+ global command
+ global verify_errors
+ global output_ids
+ global input_ids
+ global known_files
+ def allow_properties(*args):
+ return True
+ def main():
+ pass
+ def output_file(id):
+ global output_ids
+ global verify_error
+ if id in output_ids:
+ verify_error("duplicate output_file: %s" % id)
+ output_ids.add(id)
+ def input_file(id=None, source=None):
+ if id is not None:
+ input_ids.add(id)
+ def target_path(id):
+ input_ids.add(id)
+ def script(filename):
+ verify_file(filename)
+ def command(*args, **kwargs):
+ pass
+ verify_errors = []
+ output_ids = set()
+ input_ids = set()
+ known_files = set()
+
+def verify_error(message):
+ global verify_errors
+ verify_errors += [message]
+
+def verify_finalize():
+ for id in input_ids:
+ if not id in output_ids:
+ verify_error("Input file does not exist: %s" % id)
+ for error in verify_errors:
+ print("error: %s" % error)
+ if len(verify_errors) != 0:
+ return 1
+ else:
+ return 0
+
+def verify():
+ srcdir = os.path.dirname(__file__)
+ if srcdir == '':
+ srcdir = '.'
+ verify_setup()
+ for f in os.listdir(srcdir):
+ if re.match(r"(gcc|clang|darwin|intel)-.*\.py", f):
+ verify_file(f)
+ exit(verify_finalize())
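+
+# A maintenance sketch (assumption, not part of the test run itself): the
+# consistency check above can be driven by hand with something like
+#
+#   python -c "import MockProgram; MockProgram.verify()"
+#
+# which re-reads every gcc-/clang-/darwin-/intel-*.py script with the
+# verification stubs installed and reports any input/output id mismatches.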
diff --git a/src/boost/tools/build/test/toolset-mock/src/ar.py b/src/boost/tools/build/test/toolset-mock/src/ar.py
new file mode 100644
index 000000000..853fe1dd8
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/ar.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+#
+# Copyright 2017-2018 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('ar', 'rc', output_file('bin/gcc-gnu-4.8.3/debug/link-static/libl1.a'), input_file('bin/gcc-gnu-4.8.3/debug/link-static/lib.o'))
+command('ar', 'rc', output_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/libl1.a'), input_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/lib.o'))
+command('ar', 'rc', output_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/libl1.a'), input_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/lib.o'))
+command('ar', 'rc', output_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), input_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/lib.o'))
+command('ar', 'rc', output_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/libl1.a'), input_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/lib.o'))
+command('ar', 'rc', output_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), input_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/lib.o'))
+command('ar', 'rc', output_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/libl1.a'), input_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/lib.o'))
+command('ar', 'rc', output_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), input_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/lib.o'))
+command('ar', 'rc', output_file('bin/clang-linux-3.9.0/debug/link-static/libl1.a'), input_file('bin/clang-linux-3.9.0/debug/link-static/lib.o'))
+command('ar', 'rc', output_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/libl1.a'), input_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/lib.o'))
+command('ar', 'rcu', output_file('bin/clang-vxworks-4.0.1/debug/link-static/libl1.a'), input_file('bin/clang-vxworks-4.0.1/debug/link-static/lib.o'))
+command('ar', 'rcu', output_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/libl1.a'), input_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/lib.o'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/clang-3.9.0-darwin.py b/src/boost/tools/build/test/toolset-mock/src/clang-3.9.0-darwin.py
new file mode 100644
index 000000000..d8c2163ab
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/clang-3.9.0-darwin.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('clang++', '-print-prog-name=ar', stdout=script('ar.py'))
+command('clang++', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
+
+# all builds are multi-threaded for darwin
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared"):
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-fPIC'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', '@rpath/libl1.dylib', input_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/lib.o'), unordered('-g', '-fPIC'))
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-fPIC'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/test'), input_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/main.o'), input_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/libl1.dylib'), unordered('-g', '-fPIC'))
+
+if allow_properties("variant=release", "link=shared", "runtime-link=shared"):
+ command('clang++', '-x', 'c++', unordered('-O3', '-Wno-inline', '-Wall', '-fPIC'), '-DNDEBUG', '-c', '-o', output_file('bin/clang-darwin-3.9.0/release/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-v', '-o', output_file('bin/clang-darwin-3.9.0/release/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', '@rpath/libl1.dylib', input_file('bin/clang-darwin-3.9.0/release/target-os-darwin/lib.o'), '-fPIC')
+ command('clang++', '-x', 'c++', unordered('-O3', '-Wno-inline', '-Wall', '-fPIC'), '-DNDEBUG', '-c', '-o', output_file('bin/clang-darwin-3.9.0/release/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-v', '-o', output_file('bin/clang-darwin-3.9.0/release/target-os-darwin/test'), input_file('bin/clang-darwin-3.9.0/release/target-os-darwin/main.o'), input_file('bin/clang-darwin-3.9.0/release/target-os-darwin/libl1.dylib'), '-fPIC')
+
+if allow_properties("variant=debug", "link=static", "runtime-link=shared"):
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/test'), input_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/main.o'), input_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/libl1.a'), '-g')
+
+if allow_properties("variant=debug", "link=static", "runtime-link=static"):
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/test'), input_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), unordered('-g', '-static'))
+
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared", "architecture=x86", "address-model=32"):
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-fPIC', '-m32'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', '@rpath/libl1.dylib', input_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/lib.o'), unordered('-g', '-march=i686', '-fPIC', '-m32'))
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-fPIC', '-m32'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/test'), input_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/main.o'), input_file('bin/clang-darwin-3.9.0/debug/x86/target-os-darwin/libl1.dylib'), unordered('-g', '-march=i686', '-fPIC', '-m32'))
+
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared", "cxxstd=latest"):
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-std=c++1z'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', '@rpath/libl1.dylib', input_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/lib.o'), unordered('-g', '-fPIC', '-std=c++1z'))
+ command('clang++', '-x', 'c++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-std=c++1z'), '-c', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/test'), input_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/main.o'), input_file('bin/clang-darwin-3.9.0/debug/target-os-darwin/libl1.dylib'), unordered('-g', '-fPIC', '-std=c++1z'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/clang-linux-3.9.0.py b/src/boost/tools/build/test/toolset-mock/src/clang-linux-3.9.0.py
new file mode 100644
index 000000000..242d25624
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/clang-linux-3.9.0.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('clang++', '-print-prog-name=ar', stdout=script('ar.py'))
+command('clang++', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-linux-3.9.0/debug/libl1.so'), '-Wl,-soname', '-Wl,libl1.so', '-shared', '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/lib.o'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', unordered('-g', '-fPIC'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-Wl,-R', arg('-Wl,', target_path('bin/clang-linux-3.9.0/debug/libl1.so')), '-Wl,-rpath-link', arg('-Wl,', target_path('bin/clang-linux-3.9.0/debug/libl1.so')), '-o', output_file('bin/clang-linux-3.9.0/debug/test'), '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/main.o'), input_file('bin/clang-linux-3.9.0/debug/libl1.so'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', unordered('-g', '-fPIC'))
+
+if allow_properties('variant=release', 'link=shared', 'threading=single', 'runtime-link=shared', 'strip=on'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O3', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG', '-c'), '-o', output_file('bin/clang-linux-3.9.0/release/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-linux-3.9.0/release/libl1.so'), '-Wl,-soname', '-Wl,libl1.so', '-shared', '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/release/lib.o'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', unordered('-fPIC', '-Wl,--strip-all'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O3', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG', '-c'), '-o', output_file('bin/clang-linux-3.9.0/release/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-Wl,-R', arg('-Wl,', target_path('bin/clang-linux-3.9.0/release/libl1.so')), '-Wl,-rpath-link', arg('-Wl,', target_path('bin/clang-linux-3.9.0/release/libl1.so')), '-o', output_file('bin/clang-linux-3.9.0/release/test'), '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/release/main.o'), input_file('bin/clang-linux-3.9.0/release/libl1.so'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', unordered('-fPIC', '-Wl,--strip-all'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=multi', 'runtime-link=shared'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-pthread', '-fPIC', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/threading-multi/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-linux-3.9.0/debug/threading-multi/libl1.so'), '-Wl,-soname', '-Wl,libl1.so', '-shared', '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/threading-multi/lib.o'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-lrt', '-Wl,--end-group', unordered('-g', '-pthread', '-fPIC'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-pthread', '-fPIC', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/threading-multi/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-Wl,-R', arg('-Wl,', target_path('bin/clang-linux-3.9.0/debug/threading-multi/libl1.so')), '-Wl,-rpath-link', arg('-Wl,', target_path('bin/clang-linux-3.9.0/debug/threading-multi/libl1.so')), '-o', output_file('bin/clang-linux-3.9.0/debug/threading-multi/test'), '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/threading-multi/main.o'), input_file('bin/clang-linux-3.9.0/debug/threading-multi/libl1.so'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-lrt', '-Wl,--end-group', unordered('-g', '-pthread', '-fPIC'))
+
+if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=shared'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/link-static/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/link-static/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-linux-3.9.0/debug/link-static/test'), '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/link-static/main.o'), input_file('bin/clang-linux-3.9.0/debug/link-static/libl1.a'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', '-g')
+
+if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=static'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-o', output_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/test'), '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/main.o'), input_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/libl1.a'), '-Wl,--end-group', unordered('-g', '-static'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared', 'architecture=x86', 'address-model=32'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-m32', '-fPIC', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', '-o', output_file('bin/clang-linux-3.9.0/debug/libl1.so'), '-Wl,-soname', '-Wl,libl1.so', '-shared', '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/lib.o'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', unordered('-g', '-march=i686', '-fPIC', '-m32'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-m32', '-fPIC', '-c'), '-o', output_file('bin/clang-linux-3.9.0/debug/main.o'), input_file(source='main.cpp'))
+ command('clang++', '-Wl,-R', arg('-Wl,', target_path('bin/clang-linux-3.9.0/debug/libl1.so')), '-Wl,-rpath-link', arg('-Wl,', target_path('bin/clang-linux-3.9.0/debug/libl1.so')), '-o', output_file('bin/clang-linux-3.9.0/debug/test'), '-Wl,--start-group', input_file('bin/clang-linux-3.9.0/debug/main.o'), input_file('bin/clang-linux-3.9.0/debug/libl1.so'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', unordered('-g', '-march=i686', '-fPIC', '-m32'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/clang-vxworks-4.0.1.py b/src/boost/tools/build/test/toolset-mock/src/clang-vxworks-4.0.1.py
new file mode 100644
index 000000000..a1fbedb11
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/clang-vxworks-4.0.1.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+#
+# Copyright 2018 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('clang++', '-print-prog-name=ar', stdout=script('ar.py'))
+command('clang++', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file(source='main.cpp'))
+
+if allow_properties('variant=release', 'link=shared', 'threading=single', 'runtime-link=shared', 'strip=on'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O3', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/release/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O3', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/release/main.o'), input_file(source='main.cpp'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=multi', 'runtime-link=shared'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/threading-multi/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/threading-multi/main.o'), input_file(source='main.cpp'))
+
+if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=shared'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/main.o'), input_file(source='main.cpp'))
+
+if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=static'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/main.o'), input_file(source='main.cpp'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared', 'architecture=x86', 'address-model=32'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-m32', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-m32', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file(source='main.cpp'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared', 'rtti=off', 'exception-handling=off'):
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-fno-rtti', '-fno-exceptions', '-Wall', '-g', '-fPIC', '-D_NO_RTTI', '-D_NO_EX=1', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/lib.o'), input_file(source='lib.cpp'))
+ command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-fno-rtti', '-fno-exceptions', '-Wall', '-g', '-fPIC', '-D_NO_RTTI', '-D_NO_EX=1', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file(source='main.cpp'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/darwin-4.2.1.py b/src/boost/tools/build/test/toolset-mock/src/darwin-4.2.1.py
new file mode 100644
index 000000000..d81359ca5
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/darwin-4.2.1.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+script("libtool.py")
+
+command('g++', '-dumpversion', stdout='4.2.1')
+
+# all builds are multi-threaded for darwin
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-dynamic', '-gdwarf-2', '-fexceptions', '-fPIC'), '-c', '-o', output_file('bin/darwin-4.2.1/debug/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', '-dynamiclib', '-Wl,-single_module', '-install_name', 'libl1.dylib', '-o', output_file('bin/darwin-4.2.1/debug/target-os-darwin/libl1.dylib'), input_file('bin/darwin-4.2.1/debug/target-os-darwin/lib.o'), '-headerpad_max_install_names', unordered('-g', '-fPIC'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-dynamic', '-gdwarf-2', '-fexceptions', '-fPIC'), '-c', '-o', output_file('bin/darwin-4.2.1/debug/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/darwin-4.2.1/debug/target-os-darwin/test'), input_file('bin/darwin-4.2.1/debug/target-os-darwin/main.o'), input_file('bin/darwin-4.2.1/debug/target-os-darwin/libl1.dylib'), unordered('-g', '-fPIC'))
+
+if allow_properties("variant=release", "link=shared", "runtime-link=shared"):
+ command('g++', unordered('-O3', '-Wno-inline', '-Wall', '-dynamic', '-gdwarf-2', '-fexceptions', '-fPIC'), '-DNDEBUG', '-c', '-o', output_file('bin/darwin-4.2.1/release/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', '-dynamiclib', '-Wl,-single_module', '-install_name', 'libl1.dylib', '-o', output_file('bin/darwin-4.2.1/release/target-os-darwin/libl1.dylib'), input_file('bin/darwin-4.2.1/release/target-os-darwin/lib.o'), '-headerpad_max_install_names', unordered(ordered('-Wl,-dead_strip', '-no_dead_strip_inits_and_terms'), '-fPIC'))
+ command('g++', unordered('-O3', '-Wno-inline', '-Wall', '-dynamic', '-gdwarf-2', '-fexceptions', '-fPIC'), '-DNDEBUG', '-c', '-o', output_file('bin/darwin-4.2.1/release/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/darwin-4.2.1/release/target-os-darwin/test'), input_file('bin/darwin-4.2.1/release/target-os-darwin/main.o'), input_file('bin/darwin-4.2.1/release/target-os-darwin/libl1.dylib'), unordered(ordered('-Wl,-dead_strip', '-no_dead_strip_inits_and_terms'), '-fPIC'))
+
+if allow_properties("variant=debug", "link=static", "runtime-link=shared"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-gdwarf-2', '-fexceptions'), '-c', '-o', output_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-gdwarf-2', '-fexceptions'), '-c', '-o', output_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/test'), input_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/main.o'), input_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/libl1.a'), '-g')
+
+if allow_properties("variant=debug", "link=static", "runtime-link=static"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-gdwarf-2', '-fexceptions'), '-c', '-o', output_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-gdwarf-2', '-fexceptions'), '-c', '-o', output_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/test'), input_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), unordered('-g', ordered('-nodefaultlibs', '-shared-libgcc', '-lstdc++-static', '-lgcc_eh', '-lgcc', '-lSystem'), '-static'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/gcc-4.2.1-darwin.py b/src/boost/tools/build/test/toolset-mock/src/gcc-4.2.1-darwin.py
new file mode 100644
index 000000000..76058c320
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/gcc-4.2.1-darwin.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('g++', '-print-prog-name=ar', stdout=script('ar.py'))
+command('g++', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
+
+# all builds are multi-threaded for darwin
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-fPIC'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', '-o', output_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/libl1.dylib'), '-shared', input_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/lib.o'), unordered('-g', '-fPIC'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-fPIC'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-Wl,-rpath', arg('-Wl,', target_path('bin/gcc-darwin-4.2.1/debug/target-os-darwin/libl1.dylib')), '-o', output_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/test'), input_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/main.o'), input_file('bin/gcc-darwin-4.2.1/debug/target-os-darwin/libl1.dylib'), unordered('-g', '-fPIC'))
+
+if allow_properties("variant=release", "link=shared", "runtime-link=shared"):
+ command('g++', unordered('-O3', '-finline-functions', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', '-o', output_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/libl1.dylib'), '-shared', input_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/lib.o'), '-fPIC')
+ command('g++', unordered('-O3', '-finline-functions', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-Wl,-rpath', arg('-Wl,', target_path('bin/gcc-darwin-4.2.1/release/target-os-darwin/libl1.dylib')), '-o', output_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/test'), input_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/main.o'), input_file('bin/gcc-darwin-4.2.1/release/target-os-darwin/libl1.dylib'), '-fPIC')
+
+if allow_properties("variant=debug", "link=static", "runtime-link=shared"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/test'), input_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/main.o'), input_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/libl1.a'), '-g')
+
+if allow_properties("variant=debug", "link=static", "runtime-link=static"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/test'), input_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), unordered('-g', '-static'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/gcc-4.8.3-linux.py b/src/boost/tools/build/test/toolset-mock/src/gcc-4.8.3-linux.py
new file mode 100644
index 000000000..5604ee5d1
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/gcc-4.8.3-linux.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('g++', '-print-prog-name=ar', stdout=script('ar.py'))
+command('g++', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
+
+if allow_properties("variant=debug", "link=shared", "threading=single", "runtime-link=shared"):
+ command("g++", unordered("-O0", "-fno-inline", "-Wall", "-g", "-fPIC"), "-c", "-o", output_file("bin/gcc-gnu-4.8.3/debug/lib.o"), input_file(source="lib.cpp"))
+ command("g++", "-o", output_file("bin/gcc-gnu-4.8.3/debug/libl1.so"), "-Wl,-h", "-Wl,libl1.so", "-shared", "-Wl,--start-group", input_file("bin/gcc-gnu-4.8.3/debug/lib.o"), "-Wl,-Bstatic", "-Wl,-Bdynamic", "-Wl,--end-group", unordered("-g", "-fPIC"))
+ command("g++", unordered("-O0", "-fno-inline", "-Wall", "-g", "-fPIC"), "-c", "-o", output_file("bin/gcc-gnu-4.8.3/debug/main.o"), input_file(source="main.cpp"))
+ command("g++", "-Wl,-rpath", arg("-Wl,", target_path("bin/gcc-gnu-4.8.3/debug/libl1.so")), "-Wl,-rpath-link", arg("-Wl,", target_path("bin/gcc-gnu-4.8.3/debug/libl1.so")), "-o", output_file("bin/gcc-gnu-4.8.3/debug/test"), "-Wl,--start-group", input_file("bin/gcc-gnu-4.8.3/debug/main.o"), input_file("bin/gcc-gnu-4.8.3/debug/libl1.so"), "-Wl,-Bstatic", "-Wl,-Bdynamic", "-Wl,--end-group", unordered("-g", "-fPIC"))
+
+if allow_properties("variant=release", "link=shared", "threading=single", "runtime-link=shared"):
+ command('g++', unordered('-O3', '-finline-functions', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG'), '-c', '-o', output_file('bin/gcc-gnu-4.8.3/release/lib.o'), input_file(source='lib.cpp'))
+ command('g++', '-o', output_file('bin/gcc-gnu-4.8.3/release/libl1.so'), '-Wl,-h', '-Wl,libl1.so', '-shared', '-Wl,--start-group', input_file('bin/gcc-gnu-4.8.3/release/lib.o'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', '-fPIC')
+ command('g++', unordered('-O3', '-finline-functions', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG'), '-c', '-o', output_file('bin/gcc-gnu-4.8.3/release/main.o'), input_file(source='main.cpp'))
+ command('g++', '-Wl,-rpath', arg('-Wl,', target_path('bin/gcc-gnu-4.8.3/release/libl1.so')), '-Wl,-rpath-link', arg('-Wl,', target_path('bin/gcc-gnu-4.8.3/release/libl1.so')), '-o', output_file('bin/gcc-gnu-4.8.3/release/test'), '-Wl,--start-group', input_file('bin/gcc-gnu-4.8.3/release/main.o'), input_file('bin/gcc-gnu-4.8.3/release/libl1.so'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', '-fPIC')
+
+if allow_properties("variant=debug", "link=shared", "threading=multi", "runtime-link=shared"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-pthread', '-fPIC'), '-c', '-o', output_file('bin/gcc-gnu-4.8.3/debug/threading-multi/lib.o'), input_file(source='lib.cpp'))
+ command('g++', '-o', output_file('bin/gcc-gnu-4.8.3/debug/threading-multi/libl1.so'), '-Wl,-h', '-Wl,libl1.so', '-shared', '-Wl,--start-group', input_file('bin/gcc-gnu-4.8.3/debug/threading-multi/lib.o'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-lrt', '-Wl,--end-group', unordered('-g', '-pthread', '-fPIC'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-pthread', '-fPIC'), '-c', '-o', output_file('bin/gcc-gnu-4.8.3/debug/threading-multi/main.o'), input_file(source='main.cpp'))
+ command('g++', '-Wl,-rpath', arg('-Wl,', target_path('bin/gcc-gnu-4.8.3/debug/threading-multi/libl1.so')), '-Wl,-rpath-link', arg('-Wl,', target_path('bin/gcc-gnu-4.8.3/debug/threading-multi/libl1.so')), '-o', output_file('bin/gcc-gnu-4.8.3/debug/threading-multi/test'), '-Wl,--start-group', input_file('bin/gcc-gnu-4.8.3/debug/threading-multi/main.o'), input_file('bin/gcc-gnu-4.8.3/debug/threading-multi/libl1.so'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-lrt', '-Wl,--end-group', unordered('-g', '-pthread', '-fPIC'))
+
+if allow_properties("variant=debug", "link=static", "threading=single", "runtime-link=shared"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/gcc-gnu-4.8.3/debug/link-static/lib.o'), input_file(source='lib.cpp'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g'), '-c', '-o', output_file('bin/gcc-gnu-4.8.3/debug/link-static/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/gcc-gnu-4.8.3/debug/link-static/test'), '-Wl,--start-group', input_file('bin/gcc-gnu-4.8.3/debug/link-static/main.o'), input_file('bin/gcc-gnu-4.8.3/debug/link-static/libl1.a'), '-Wl,-Bstatic', '-Wl,-Bdynamic', '-Wl,--end-group', '-g')
+
+if allow_properties("variant=debug", "link=static", "threading=single", "runtime-link=static"):
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/lib.o'), input_file(source='lib.cpp'))
+ command('g++', unordered('-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/main.o'), input_file(source='main.cpp'))
+ command('g++', '-o', output_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/test'), '-Wl,--start-group', input_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/main.o'), input_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/libl1.a'), '-Wl,--end-group', unordered('-g', '-static'))
+
+
+if allow_properties("variant=debug", "link=shared", "threading=single", "runtime-link=shared"):
+ command("g++", unordered("-O0", "-fno-inline", "-Wall", "-g", "-fPIC", "-std=c++1y"), "-c", "-o", output_file("bin/gcc-gnu-4.8.3/debug/lib.o"), input_file(source="lib.cpp"))
+ command("g++", "-o", output_file("bin/gcc-gnu-4.8.3/debug/libl1.so"), "-Wl,-h", "-Wl,libl1.so", "-shared", "-Wl,--start-group", input_file("bin/gcc-gnu-4.8.3/debug/lib.o"), "-Wl,-Bstatic", "-Wl,-Bdynamic", "-Wl,--end-group", unordered("-g", "-fPIC", "-std=c++1y"))
+ command("g++", unordered("-O0", "-fno-inline", "-Wall", "-g", "-fPIC", "-std=c++1y"), "-c", "-o", output_file("bin/gcc-gnu-4.8.3/debug/main.o"), input_file(source="main.cpp"))
+ command("g++", "-Wl,-rpath", arg("-Wl,", target_path("bin/gcc-gnu-4.8.3/debug/libl1.so")), "-Wl,-rpath-link", arg("-Wl,", target_path("bin/gcc-gnu-4.8.3/debug/libl1.so")), "-o", output_file("bin/gcc-gnu-4.8.3/debug/test"), "-Wl,--start-group", input_file("bin/gcc-gnu-4.8.3/debug/main.o"), input_file("bin/gcc-gnu-4.8.3/debug/libl1.so"), "-Wl,-Bstatic", "-Wl,-Bdynamic", "-Wl,--end-group", unordered("-g", "-fPIC", "-std=c++1y"))
+
+
+main()
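These mock toolset scripts are declarative: each command(...) call registers one expected command line, allow_properties(...) gates a group of expectations on the property set requested by the test, and the trailing main() evidently matches the arguments the mock binary was actually invoked with against the registered expectations. As a rough sketch only, reusing just the helpers imported from MockProgram in the scripts above (command, allow_properties, unordered, input_file, output_file, script, main) and with a hypothetical toolset layout, a new mock script would follow the same skeleton:

from MockProgram import *

# Tool-discovery query whose mocked output points at another helper script.
command('g++', '-print-prog-name=ar', stdout=script('ar.py'))

# Expectations that apply only when the test requests these properties.
if allow_properties('variant=debug', 'link=static'):
    # Hypothetical compile step; flags inside unordered() may appear in any order.
    command('g++', unordered('-O0', '-g'), '-c', '-o',
            output_file('bin/mocktool/debug/link-static/lib.o'),
            input_file(source='lib.cpp'))

# Match the mock program's actual argv against the expectations registered above.
main()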
diff --git a/src/boost/tools/build/test/toolset-mock/src/intel-darwin-10.2.py b/src/boost/tools/build/test/toolset-mock/src/intel-darwin-10.2.py
new file mode 100644
index 000000000..314d6c458
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/intel-darwin-10.2.py
@@ -0,0 +1,43 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('icc', '-print-prog-name=ar', stdout=script('ar.py'))
+command('icc', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
+
+# all builds are multi-threaded for darwin
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared"):
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0', '-fPIC'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/debug/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', 'libl1.dylib', input_file('bin/intel-darwin-10.2/debug/target-os-darwin/lib.o'), unordered('-g', ordered('-shared-intel', '-lstdc++', '-lpthread'), '-fPIC'))
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0', '-fPIC'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/debug/target-os-darwin/test'), input_file('bin/intel-darwin-10.2/debug/target-os-darwin/main.o'), input_file('bin/intel-darwin-10.2/debug/target-os-darwin/libl1.dylib'), unordered('-g', ordered('-shared-intel', '-lstdc++', '-lpthread'), '-fPIC'))
+
+if allow_properties("variant=release", "link=shared", "runtime-link=shared"):
+ command('icc', '-xc++', unordered('-O3', '-inline-level=2', '-w1', '-vec-report0', '-fPIC'), '-DNDEBUG', '-c', '-o', output_file('bin/intel-darwin-10.2/release/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/release/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', 'libl1.dylib', input_file('bin/intel-darwin-10.2/release/target-os-darwin/lib.o'), unordered(ordered('-shared-intel', '-lstdc++', '-lpthread'), '-fPIC'))
+ command('icc', '-xc++', unordered('-O3', '-inline-level=2', '-w1', '-vec-report0', '-fPIC'), '-DNDEBUG', '-c', '-o', output_file('bin/intel-darwin-10.2/release/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/release/target-os-darwin/test'), input_file('bin/intel-darwin-10.2/release/target-os-darwin/main.o'), input_file('bin/intel-darwin-10.2/release/target-os-darwin/libl1.dylib'), unordered(ordered('-shared-intel', '-lstdc++', '-lpthread'), '-fPIC'))
+
+if allow_properties("variant=debug", "link=static", "runtime-link=shared"):
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/test'), input_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/main.o'), input_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/libl1.a'), '-g', ordered('-shared-intel', '-lstdc++', '-lpthread'))
+
+if allow_properties("variant=debug", "link=static", "runtime-link=static"):
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/test'), input_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/main.o'), input_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), unordered('-g', ordered('-static', '-static-intel', '-lstdc++', '-lpthread'), '-static'))
+
+if allow_properties("variant=debug", "link=shared", "runtime-link=shared", "architecture=x86", "address-model=32"):
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0', '-march=i686', '-fPIC', '-m32'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/lib.o'), input_file(source='lib.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/libl1.dylib'), '-single_module', '-dynamiclib', '-install_name', 'libl1.dylib', input_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/lib.o'), unordered('-g', ordered('-shared-intel', '-lstdc++', '-lpthread'), '-march=i686', '-fPIC', '-m32'))
+ command('icc', '-xc++', unordered('-O0', '-inline-level=0', '-w1', '-g', '-vec-report0', '-march=i686', '-fPIC', '-m32'), '-c', '-o', output_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/main.o'), input_file(source='main.cpp'))
+ command('icc', '-o', output_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/test'), input_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/main.o'), input_file('bin/intel-darwin-10.2/debug/x86/target-os-darwin/libl1.dylib'), unordered('-g', ordered('-shared-intel', '-lstdc++', '-lpthread'), '-march=i686', '-fPIC', '-m32'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/ld.py b/src/boost/tools/build/test/toolset-mock/src/ld.py
new file mode 100644
index 000000000..2b644e501
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/ld.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+#
+# Copyright 2018 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared'):
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/libl1.so'), input_file('bin/clang-vxworks-4.0.1/debug/lib.o'), unordered('-g', '-fPIC'), '-fpic', '-shared', '-non-static')
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/test'), input_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file('bin/clang-vxworks-4.0.1/debug/libl1.so'), unordered('-g', '-fPIC'))
+
+if allow_properties('variant=release', 'link=shared', 'threading=single', 'runtime-link=shared', 'strip=on'):
+ command('ld', '-t', '-o', output_file('bin/clang-vxworks-4.0.1/release/libl1.so'), input_file('bin/clang-vxworks-4.0.1/release/lib.o'), unordered('-fPIC', '-Wl,--strip-all'), '-fpic', '-shared', '-non-static')
+ command('ld', '-t', '-o', output_file('bin/clang-vxworks-4.0.1/release/test'), input_file('bin/clang-vxworks-4.0.1/release/main.o'), input_file('bin/clang-vxworks-4.0.1/release/libl1.so'), unordered('-fPIC', '-Wl,--strip-all'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=multi', 'runtime-link=shared'):
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/threading-multi/libl1.so'), input_file('bin/clang-vxworks-4.0.1/debug/threading-multi/lib.o'), unordered('-g', '-fPIC'), '-fpic', '-shared', '-non-static')
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/threading-multi/test'), input_file('bin/clang-vxworks-4.0.1/debug/threading-multi/main.o'), input_file('bin/clang-vxworks-4.0.1/debug/threading-multi/libl1.so'), unordered('-g', '-fPIC'))
+
+if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=shared'):
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/test'), input_file('bin/clang-vxworks-4.0.1/debug/link-static/main.o'), input_file('bin/clang-vxworks-4.0.1/debug/link-static/libl1.a'), '-g')
+
+if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=static'):
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/test'), input_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/main.o'), input_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/libl1.a'), unordered('-g'))
+
+if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared', 'architecture=x86', 'address-model=32'):
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/libl1.so'), input_file('bin/clang-vxworks-4.0.1/debug/lib.o'), unordered('-g', '-march=i686', '-fPIC', '-m32'), '-fpic', '-shared', '-non-static')
+ command('ld', '-o', output_file('bin/clang-vxworks-4.0.1/debug/test'), input_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file('bin/clang-vxworks-4.0.1/debug/libl1.so'), unordered('-g', '-march=i686', '-fPIC', '-m32'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/libtool.py b/src/boost/tools/build/test/toolset-mock/src/libtool.py
new file mode 100644
index 000000000..9f58dc96d
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/libtool.py
@@ -0,0 +1,14 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('libtool', '-static', '-o', output_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/libl1.a'), input_file('bin/darwin-4.2.1/debug/link-static/target-os-darwin/lib.o'))
+command('libtool', '-static', '-o', output_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'), input_file('bin/darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/lib.o'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/mock-program.cpp b/src/boost/tools/build/test/toolset-mock/src/mock-program.cpp
new file mode 100644
index 000000000..62dd4b8a4
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/mock-program.cpp
@@ -0,0 +1,42 @@
+// mock-program.cpp
+//
+// Copyright (c) 2017 Steven Watanabe
+//
+// Distributed under the Boost Software License Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+// This program does nothing except exec a Python script.
+
+#include <vector>
+#include <iostream>
+#include <stdio.h>
+#include "config.h"
+
+#if defined(_WIN32)
+ #include <process.h>
+ #define execv _execv
+#else
+ #include <unistd.h>
+#endif
+
+#ifndef PY_SCRIPT
+#error PY_SCRIPT must be defined to the absolute path to the script to run
+#endif
+
+#ifndef PYTHON_CMD
+#error PYTHON_CMD must be defined to the absolute path to the python interpreter
+#endif
+
+int main(int argc, char ** argv)
+{
+ std::vector<char *> args;
+ char python_cmd[] = PYTHON_CMD;
+ char script[] = PY_SCRIPT;
+ args.push_back(python_cmd);
+ args.push_back(script);
+ args.insert(args.end(), argv + 1, argv + argc);
+ args.push_back(NULL);
+ execv(python_cmd, &args[0]);
+ perror("exec");
+}
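Functionally the wrapper above is just an argv-forwarding exec: PY_SCRIPT and PYTHON_CMD must be defined at compile time as quoted string literals (they initialize char arrays), and everything after argv[0] is passed through to the Python script. A hedged Python equivalent, with the two baked-in paths shown as placeholder assumptions, is:

import os
import sys

# Assumed placeholders for the values baked in via -DPYTHON_CMD / -DPY_SCRIPT.
PYTHON_CMD = "/usr/bin/python"
PY_SCRIPT = "/abs/path/to/toolset-mock/src/gcc-4.8.3-linux.py"

# Replace the current process with the mock script, forwarding all arguments.
os.execv(PYTHON_CMD, [PYTHON_CMD, PY_SCRIPT] + sys.argv[1:])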
diff --git a/src/boost/tools/build/test/toolset-mock/src/project-config.jam b/src/boost/tools/build/test/toolset-mock/src/project-config.jam
new file mode 100644
index 000000000..73dcf42df
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/project-config.jam
@@ -0,0 +1,5 @@
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
diff --git a/src/boost/tools/build/test/toolset-mock/src/ranlib.py b/src/boost/tools/build/test/toolset-mock/src/ranlib.py
new file mode 100644
index 000000000..4abe21ed0
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/ranlib.py
@@ -0,0 +1,22 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('ranlib', input_file('bin/gcc-gnu-4.8.3/debug/link-static/libl1.a'))
+command('ranlib', input_file('bin/gcc-gnu-4.8.3/debug/link-static/runtime-link-static/libl1.a'))
+command('ranlib', input_file('bin/gcc-darwin-4.2.1/debug/link-static/target-os-darwin/libl1.a'))
+command('ranlib', input_file('bin/gcc-darwin-4.2.1/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'))
+command('ranlib', input_file('bin/clang-darwin-3.9.0/debug/link-static/target-os-darwin/libl1.a'))
+command('ranlib', input_file('bin/clang-darwin-3.9.0/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'))
+command('ranlib', '-cs', input_file('bin/intel-darwin-10.2/debug/link-static/target-os-darwin/libl1.a'))
+command('ranlib', '-cs', input_file('bin/intel-darwin-10.2/debug/link-static/runtime-link-static/target-os-darwin/libl1.a'))
+command('ranlib', input_file('bin/clang-linux-3.9.0/debug/link-static/libl1.a'))
+command('ranlib', input_file('bin/clang-linux-3.9.0/debug/link-static/runtime-link-static/libl1.a'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/strip.py b/src/boost/tools/build/test/toolset-mock/src/strip.py
new file mode 100644
index 000000000..6245588bf
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/strip.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+command('strip', '-S', '-x', input_file('bin/darwin-4.2.1/release/target-os-darwin/test'))
+
+main()
diff --git a/src/boost/tools/build/test/toolset-mock/src/verify.py b/src/boost/tools/build/test/toolset-mock/src/verify.py
new file mode 100644
index 000000000..6e5e0ea7b
--- /dev/null
+++ b/src/boost/tools/build/test/toolset-mock/src/verify.py
@@ -0,0 +1,9 @@
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+from MockProgram import *
+
+verify()
diff --git a/src/boost/tools/build/test/toolset_clang_darwin.py b/src/boost/tools/build/test/toolset_clang_darwin.py
new file mode 100644
index 000000000..53c7c07cb
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_clang_darwin.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates the clang-darwin toolset using a mock of clang
+
+from TestToolset import test_toolset
+
+test_toolset("clang-darwin", "3.9.0", [
+ ["target-os=darwin"],
+ ["target-os=darwin", "release", "strip=on", "linkflags=-v"],
+ ["target-os=darwin", "threading=multi"],
+ ["target-os=darwin", "link=static"],
+ ["target-os=darwin", "link=static", "runtime-link=static"],
+ ["target-os=darwin", "architecture=x86", "address-model=32"],
+ ["target-os=darwin", "cxxstd=latest"]])
diff --git a/src/boost/tools/build/test/toolset_clang_linux.py b/src/boost/tools/build/test/toolset_clang_linux.py
new file mode 100644
index 000000000..2fbf84b6d
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_clang_linux.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates the clang-linux toolset using a mock of clang
+
+from TestToolset import test_toolset
+
+test_toolset("clang-linux", "3.9.0", [
+ ["target-os=linux"],
+ ["target-os=linux", "release", "strip=on"],
+ ["target-os=linux", "threading=multi"],
+ ["target-os=linux", "link=static"],
+ ["target-os=linux", "link=static", "runtime-link=static"],
+ ["target-os=linux", "architecture=x86", "address-model=32"]])
diff --git a/src/boost/tools/build/test/toolset_clang_vxworks.py b/src/boost/tools/build/test/toolset_clang_vxworks.py
new file mode 100644
index 000000000..efdc12759
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_clang_vxworks.py
@@ -0,0 +1,20 @@
+#!/usr/bin/python
+#
+# Copyright 2018 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates the clang-vxworks toolset using a mock of clang
+
+from TestToolset import test_toolset
+
+test_toolset("clang-vxworks", "4.0.1", [
+ ["target-os=vxworks"],
+ ["target-os=vxworks", "release", "strip=on", "linkflags=-t"],
+ ["target-os=vxworks", "threading=multi"],
+ ["target-os=vxworks", "link=static"],
+ ["target-os=vxworks", "link=static", "runtime-link=static"],
+ ["target-os=vxworks", "architecture=x86", "address-model=32"],
+ ["target-os=vxworks", "rtti=off", "exception-handling=off"]])
diff --git a/src/boost/tools/build/test/toolset_darwin.py b/src/boost/tools/build/test/toolset_darwin.py
new file mode 100644
index 000000000..58ecc8d2b
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_darwin.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates the darwin toolset using a mock of gcc
+
+from TestToolset import test_toolset
+
+test_toolset("darwin", "4.2.1", [
+ ["target-os=darwin"],
+ ["target-os=darwin", "release", "strip=on"],
+ ["target-os=darwin", "threading=multi"],
+ ["target-os=darwin", "link=static"],
+ ["target-os=darwin", "link=static", "runtime-link=static"],
+# Address-model handling is quite broken
+# ["target-os=darwin", "architecture=x86", "address-model=32"]
+])
diff --git a/src/boost/tools/build/test/toolset_defaults.py b/src/boost/tools/build/test/toolset_defaults.py
new file mode 100644
index 000000000..6d76c10fd
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_defaults.py
@@ -0,0 +1,60 @@
+#!/usr/bin/python
+
+# Copyright 2018 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the handling of toolset.add-defaults
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, ignore_toolset_requirements=False)
+
+t.write('jamroot.jam', '''
+import toolset ;
+import errors ;
+import feature : feature ;
+import set ;
+
+feature f1 : a b ;
+feature f2 : c d ;
+feature f3 : e f ;
+feature f4 : g h ;
+feature f5 : i j ;
+feature f6 : k l m ;
+
+rule test-rule ( properties * )
+{
+ if <f1>a in $(properties)
+ {
+ return <f2>d ;
+ }
+}
+
+toolset.add-defaults
+ <conditional>@test-rule
+ <f3>e:<f4>h
+ <f5>i:<f6>l
+;
+
+rule check-requirements ( target : sources * : properties * )
+{
+ local expected = <f2>d <f4>h <f6>m ;
+ local unexpected = <f2>c <f4>g <f6>k <f6>l ;
+ local missing = [ set.difference $(expected) : $(properties) ] ;
+ if $(missing)
+ {
+ errors.error $(missing) not present ;
+ }
+ local extra = [ set.intersection $(unexpected) : $(properties) ] ;
+ if $(extra)
+ {
+ errors.error $(extra) present ;
+ }
+}
+make test : : @check-requirements : <f6>m ;
+''')
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/toolset_gcc.py b/src/boost/tools/build/test/toolset_gcc.py
new file mode 100644
index 000000000..d3d65fcf9
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_gcc.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates the gcc toolset using a mock of gcc
+
+from TestToolset import test_toolset
+
+test_toolset("gcc", "4.8.3", [
+ ["target-os=linux"],
+ ["target-os=linux", "release"],
+ ["target-os=linux", "threading=multi"],
+ ["target-os=linux", "link=static"],
+ ["target-os=linux", "link=static", "runtime-link=static"],
+ ["target-os=linux", "cxxstd=latest"]])
+
+test_toolset("gcc", "4.2.1", [
+ ["target-os=darwin"],
+ ["target-os=darwin", "release"],
+ ["target-os=darwin", "threading=multi"],
+ ["target-os=darwin", "link=static"],
+ ["target-os=darwin", "link=static", "runtime-link=static"]])
diff --git a/src/boost/tools/build/test/toolset_intel_darwin.py b/src/boost/tools/build/test/toolset_intel_darwin.py
new file mode 100644
index 000000000..db0444900
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_intel_darwin.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+#
+# Copyright 2017 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# validates the intel-darwin toolset using a mock of icc
+
+from TestToolset import test_toolset
+
+test_toolset("intel-darwin", "10.2", [
+ ["target-os=darwin"],
+ ["target-os=darwin", "release", "strip=on"],
+ ["target-os=darwin", "threading=multi"],
+ ["target-os=darwin", "link=static"],
+ ["target-os=darwin", "link=static", "runtime-link=static"],
+ ["target-os=darwin", "architecture=x86", "address-model=32"]])
diff --git a/src/boost/tools/build/test/toolset_requirements.py b/src/boost/tools/build/test/toolset_requirements.py
new file mode 100644
index 000000000..c9a8fa8ee
--- /dev/null
+++ b/src/boost/tools/build/test/toolset_requirements.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the handling of toolset.add-requirements
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, ignore_toolset_requirements=False)
+
+t.write('jamroot.jam', '''
+import toolset ;
+import errors ;
+
+rule test-rule ( properties * )
+{
+ return <define>TEST_INDIRECT_CONDITIONAL ;
+}
+
+toolset.add-requirements
+ <define>TEST_MACRO
+ <conditional>@test-rule
+ <link>shared:<define>TEST_CONDITIONAL
+;
+
+rule check-requirements ( target : sources * : properties * )
+{
+ local macros = TEST_MACRO TEST_CONDITIONAL TEST_INDIRECT_CONDITIONAL ;
+ for local m in $(macros)
+ {
+ if ! <define>$(m) in $(properties)
+ {
+ errors.error $(m) not defined ;
+ }
+ }
+}
+make test : : @check-requirements ;
+''')
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/tree.py b/src/boost/tools/build/test/tree.py
new file mode 100644
index 000000000..11899dd70
--- /dev/null
+++ b/src/boost/tools/build/test/tree.py
@@ -0,0 +1,245 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2001, 2002 Vladimir Prus
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+###############################################################################
+#
+# Based in part on an old Subversion tree.py source file (tools for comparing
+# directory trees). See http://subversion.tigris.org for more information.
+#
+# Copyright (c) 2001 Sam Tobin-Hochstadt. All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which you should
+# have received as part of this distribution. The terms are also available at
+# http://subversion.tigris.org/license-1.html. If newer versions of this
+# license are posted there, you may use a newer version instead, at your
+# option.
+#
+###############################################################################
+
+from __future__ import print_function
+
+import os
+import os.path
+import stat
+import sys
+
+
+class TreeNode:
+ """
+ Fundamental data type used to build file system tree structures.
+
+ If CHILDREN is None, then the node represents a file. Otherwise, CHILDREN
+ is a list of the nodes representing that directory's children.
+
+ NAME is simply the name of the file or directory. CONTENTS is a string
+ holding the file's contents (if a file).
+
+ """
+
+ def __init__(self, name, children=None, contents=None):
+ assert children is None or contents is None
+ self.name = name
+ self.mtime = 0
+ self.children = children
+ self.contents = contents
+ self.path = name
+
+ def add_child(self, newchild):
+ assert not self.is_file()
+ for a in self.children:
+ if a.name == newchild.name:
+ if newchild.is_file():
+ a.contents = newchild.contents
+ a.path = os.path.join(self.path, newchild.name)
+ else:
+ for i in newchild.children:
+ a.add_child(i)
+ break
+ else:
+ self.children.append(newchild)
+ newchild.path = os.path.join(self.path, newchild.name)
+
+ def get_child(self, name):
+ """
+ If the given TreeNode directory NODE contains a child named NAME,
+ return the child; else, return None.
+
+ """
+ for n in self.children:
+ if n.name == name:
+ return n
+
+ def is_file(self):
+ return self.children is None
+
+ def pprint(self):
+ print(" * Node name: %s" % self.name)
+ print(" Path: %s" % self.path)
+ print(" Contents: %s" % self.contents)
+ if self.is_file():
+ print(" Children: is a file.")
+ else:
+ print(" Children: %d" % len(self.children))
+
+
+class TreeDifference:
+ def __init__(self):
+ self.added_files = []
+ self.removed_files = []
+ self.modified_files = []
+ self.touched_files = []
+
+ def append(self, other):
+ self.added_files.extend(other.added_files)
+ self.removed_files.extend(other.removed_files)
+ self.modified_files.extend(other.modified_files)
+ self.touched_files.extend(other.touched_files)
+
+ def ignore_directories(self):
+ """Removes directories from our lists of found differences."""
+ not_dir = lambda x : x[-1] != "/"
+ self.added_files = list(filter(not_dir, self.added_files))
+ self.removed_files = list(filter(not_dir, self.removed_files))
+ self.modified_files = list(filter(not_dir, self.modified_files))
+ self.touched_files = list(filter(not_dir, self.touched_files))
+
+ def pprint(self, file=sys.stdout):
+ file.write("Added files : %s\n" % self.added_files)
+ file.write("Removed files : %s\n" % self.removed_files)
+ file.write("Modified files: %s\n" % self.modified_files)
+ file.write("Touched files : %s\n" % self.touched_files)
+
+ def empty(self):
+ return not (self.added_files or self.removed_files or
+ self.modified_files or self.touched_files)
+
+
+def build_tree(path):
+ """
+ Takes PATH as the folder path, walks the file system below that path, and
+ creates a tree structure based on any files and folders found there.
+ Returns the prepared tree structure plus the maximum file modification
+ timestamp under the given folder.
+
+ """
+ return _handle_dir(os.path.normpath(path))
+
+
+def tree_difference(a, b):
+ """Compare TreeNodes A and B, and create a TreeDifference instance."""
+ return _do_tree_difference(a, b, "", True)
+
+
+def _do_tree_difference(a, b, parent_path, root=False):
+ """Internal recursive worker function for tree_difference()."""
+
+ # We do not want to list root node names.
+ if root:
+ assert not parent_path
+ assert not a.is_file()
+ assert not b.is_file()
+ full_path = ""
+ else:
+ assert a.name == b.name
+ full_path = parent_path + a.name
+ result = TreeDifference()
+
+ # A and B are both files.
+ if a.is_file() and b.is_file():
+ if a.contents != b.contents:
+ result.modified_files.append(full_path)
+ elif a.mtime != b.mtime:
+ result.touched_files.append(full_path)
+ return result
+
+ # Directory converted to file.
+ if not a.is_file() and b.is_file():
+ result.removed_files.extend(_traverse_tree(a, parent_path))
+ result.added_files.append(full_path)
+
+ # File converted to directory.
+ elif a.is_file() and not b.is_file():
+ result.removed_files.append(full_path)
+ result.added_files.extend(_traverse_tree(b, parent_path))
+
+ # A and B are both directories.
+ else:
+ if full_path:
+ full_path += "/"
+ accounted_for = [] # Children present in both trees.
+ for a_child in a.children:
+ b_child = b.get_child(a_child.name)
+ if b_child:
+ accounted_for.append(b_child)
+ result.append(_do_tree_difference(a_child, b_child, full_path))
+ else:
+ result.removed_files.append(full_path + a_child.name)
+ for b_child in b.children:
+ if b_child not in accounted_for:
+ result.added_files.extend(_traverse_tree(b_child, full_path))
+
+ return result
+
+
+def _traverse_tree(t, parent_path):
+ """Returns a list of all names in a tree."""
+ assert not parent_path or parent_path[-1] == "/"
+ full_node_name = parent_path + t.name
+ if t.is_file():
+ result = [full_node_name]
+ else:
+ name_prefix = full_node_name + "/"
+ result = [name_prefix]
+ for i in t.children:
+ result.extend(_traverse_tree(i, name_prefix))
+ return result
+
+
+def _get_text(path):
+ """Return a string with the textual contents of a file at PATH."""
+ fp = open(path, 'rb')
+ try:
+ return fp.read()
+ finally:
+ fp.close()
+
+
+def _handle_dir(path):
+ """
+ Main recursive worker function for build_tree(). Returns a newly created
+ tree node representing the given normalized folder path as well as the
+ maximum file/folder modification time detected under the same path.
+
+ """
+ files = []
+ dirs = []
+ node = TreeNode(os.path.basename(path), children=[])
+ max_mtime = node.mtime = os.stat(path).st_mtime
+
+ # List files & folders.
+ for f in os.listdir(path):
+ f = os.path.join(path, f)
+ if os.path.isdir(f):
+ dirs.append(f)
+ elif os.path.isfile(f):
+ files.append(f)
+
+ # Add a child node for each file.
+ for f in files:
+ fcontents = _get_text(f)
+ new_file_node = TreeNode(os.path.basename(f), contents=fcontents)
+ new_file_node.mtime = os.stat(f).st_mtime
+ max_mtime = max(max_mtime, new_file_node.mtime)
+ node.add_child(new_file_node)
+
+ # For each subdir, create a node, walk its tree, add it as a child.
+ for d in dirs:
+ new_dir_node, new_max_mtime = _handle_dir(d)
+ max_mtime = max(max_mtime, new_max_mtime)
+ node.add_child(new_dir_node)
+
+ return node, max_mtime
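tree.py appears to be used by the test framework to detect what a build run changed on disk. A minimal usage sketch, assuming the module is importable as tree and using only the functions defined above, looks like this:

import tree

# build_tree() returns the root TreeNode plus the newest mtime under the path.
before, _ = tree.build_tree("bin")
# ... run the build step under test here ...
after, _ = tree.build_tree("bin")

diff = tree.tree_difference(before, after)
diff.ignore_directories()   # keep only file-level additions/removals/changes
if not diff.empty():
    diff.pprint()           # prints added/removed/modified/touched file lists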
diff --git a/src/boost/tools/build/test/unit_test.py b/src/boost/tools/build/test/unit_test.py
new file mode 100644
index 000000000..da28503bc
--- /dev/null
+++ b/src/boost/tools/build/test/unit_test.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the unit_test rule.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Create the needed files.
+t.write("jamroot.jam", """
+using testing ;
+lib helper : helper.cpp ;
+unit-test test : test.cpp : <library>helper ;
+""")
+
+t.write("test.cpp", """
+void helper();
+int main() { helper(); }
+""")
+
+t.write("helper.cpp", """
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+helper() {}
+""")
+
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static*/test.passed")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/unit_tests.py b/src/boost/tools/build/test/unit_tests.py
new file mode 100644
index 000000000..705764b6b
--- /dev/null
+++ b/src/boost/tools/build/test/unit_tests.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+t.run_build_system(["--debug", "--build-system=test/test"])
+t.cleanup()
diff --git a/src/boost/tools/build/test/unused.py b/src/boost/tools/build/test/unused.py
new file mode 100644
index 000000000..2b185d0de
--- /dev/null
+++ b/src/boost/tools/build/test/unused.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that unused sources are at least reported.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d+2"], use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+t.write("b.cpp", "\n")
+t.write("b.xyz", "")
+t.write("jamroot.jam", """\
+import "class" : new ;
+import modules ;
+import project ;
+import targets ;
+import type ;
+import virtual-target ;
+
+type.register X : xyz ;
+
+class test-target-class : basic-target
+{
+ rule construct ( name : source-targets * : property-set )
+ {
+ local result = [ property-set.empty ] ;
+ if ! [ modules.peek : GENERATE_NOTHING ]
+ {
+ result += [ virtual-target.from-file b.xyz : . : $(self.project) ] ;
+ if ! [ modules.peek : GENERATE_ONLY_UNUSABLE ]
+ {
+ result += [ virtual-target.from-file b.cpp : . : $(self.project)
+ ] ;
+ }
+ }
+ return $(result) ;
+ }
+
+ rule compute-usage-requirements ( rproperties : targets * )
+ {
+ return [ property-set.create <define>FOO ] ;
+ }
+}
+
+rule make-b-main-target
+{
+ local project = [ project.current ] ;
+ targets.main-target-alternative [ new test-target-class b : $(project) ] ;
+}
+
+exe a : a.cpp b c ;
+make-b-main-target ;
+alias c ; # Expands to nothing, intentionally.
+""")
+
+t.run_build_system()
+
+# The second invocation should do nothing, and produce no warning. The previous
+# invocation might have printed executed actions and other things, so it is not
+# easy to check if a warning was issued or not.
+t.run_build_system(stdout="")
+
+t.run_build_system(["-sGENERATE_ONLY_UNUSABLE=1"], stdout="")
+
+# Check that even if main target generates nothing, its usage requirements are
+# still propagated to dependants.
+t.write("a.cpp", """\
+#ifndef FOO
+ #error We refuse to compile without FOO being defined!
+ We_refuse_to_compile_without_FOO_being_defined
+#endif
+int main() {}
+""")
+t.run_build_system(["-sGENERATE_NOTHING=1"])
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/use_requirements.py b/src/boost/tools/build/test/use_requirements.py
new file mode 100644
index 000000000..366281509
--- /dev/null
+++ b/src/boost/tools/build/test/use_requirements.py
@@ -0,0 +1,283 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# Test that usage requirements on main targets work (and are propagated all the
+# way up, and not only to direct dependants).
+t.write("jamroot.jam", "")
+
+# Note: the library target is named 'cc', not 'c'. With 'lib c : ...' the
+# HP-CXX linker would confuse the target with the system C runtime library.
+t.write("jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : :
+ <define>FOO <link>shared:<define>SHARED_B ;
+lib cc : c.cpp b ;
+exe a : a.cpp cc ;
+""")
+
+t.write("b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.write("c.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+create_lib_please() {}
+""")
+
+t.write("a.cpp", """\
+#ifdef FOO
+void
+# if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+# endif
+foo() {}
+#endif
+int main() { foo(); }
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test that use requirements on a main target work when they are referred to
+# using 'dependency' features.
+
+t.write("jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
+ <link>shared:<define>SHARED_B ;
+exe a : a.cpp : <use>b ;
+""")
+
+t.write("b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.write("a.cpp", """\
+#ifdef FOO
+int main() {}
+#endif
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test that usage requirements on a project work.
+t.write("jamfile.jam", "exe a : a.cpp lib//b ;")
+
+t.write("lib/jamfile.jam", """\
+project
+ : requirements <link>shared:<define>SHARED_B
+ : usage-requirements <define>FOO <link>shared:<define>SHARED_B ;
+lib b : b.cpp ;
+""")
+
+t.write("lib/b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system()
+
+
+# Test that use requirements are inherited correctly.
+t.write("jamfile.jam", "exe a : a.cpp lib/1//b ;")
+
+t.write("a.cpp", """\
+#if defined(FOO) && defined(ZOO)
+void foo() {}
+#endif
+int main() { foo(); }
+""")
+
+t.write("lib/jamfile.jam", """\
+project : requirements : usage-requirements <define>FOO ;
+""")
+
+t.write("lib/1/jamfile.jam", """\
+project
+ : requirements <link>shared:<define>SHARED_B
+ : usage-requirements <define>ZOO <link>shared:<define>SHARED_B ;
+lib b : b.cpp ;
+""")
+
+t.write("lib/1/b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test that we correctly handle dependency features in usage requirements on a
+# target.
+t.write("jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
+ <link>shared:<define>SHARED_B ;
+
+# Here's the test: we should correctly handle dependency feature and get usage
+# requirements from 'b'.
+lib cc : c.cpp : <link>shared:<define>SHARED_C : : <library>b ;
+
+# This will build only if <define>FOO was propagated from 'c'.
+exe a : a.cpp cc ;
+""")
+
+t.write("a.cpp", """\
+#ifdef FOO
+void
+# if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+# endif
+foo();
+#endif
+
+int main() { foo(); }
+""")
+
+t.write("c.cpp", """\
+int
+#if defined(_WIN32) && defined(SHARED_C)
+__declspec(dllexport)
+#endif
+must_export_something;
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test correct handling of dependency features in project requirements.
+t.write("jamfile.jam", "exe a : a.cpp lib1//cc ;")
+
+t.write("lib1/jamfile.jam", """\
+project
+ : requirements <link>shared:<define>SHARED_C
+ : usage-requirements <library>../lib2//b <link>shared:<define>SHARED_C ;
+lib cc : c.cpp ;
+""")
+
+t.write("lib1/c.cpp", """\
+int
+#if defined(_WIN32) && defined(SHARED_C)
+__declspec(dllexport)
+#endif
+must_export_something;
+""")
+
+t.write("lib2/jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
+ <link>shared:<define>SHARED_B ;
+""")
+
+t.copy("b.cpp", "lib2/b.cpp")
+
+t.run_build_system()
+
+
+# Test that targets listed in dependency features in usage requirements are
+# built with the correct properties.
+t.rm(".")
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """\
+lib main : main.cpp : <use>libs//lib1 : : <library>libs//lib1 ;
+exe hello : hello.cpp main : ;
+""")
+
+t.write("main.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_LIB1)
+__declspec(dllimport)
+#endif
+foo();
+
+int main() { foo(); }
+""")
+
+t.write("hello.cpp", "\n")
+t.write("libs/a.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_LIB1)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+
+# This library should be built with the same properties as 'main'. This is a
+# regression test for a bug where it was generated with empty properties,
+# causing ambiguities between variants.
+t.write("libs/jamfile.jam", """\
+lib lib1 : a_d.cpp : <variant>debug <link>shared:<define>SHARED_LIB1 : :
+ <link>shared:<define>SHARED_LIB1 ;
+lib lib1 : a.cpp : <variant>release <link>shared:<define>SHARED_LIB1 : :
+ <link>shared:<define>SHARED_LIB1 ;
+""")
+
+t.write("libs/a_d.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_LIB1)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system(["link=static"])
+t.expect_addition("libs/bin/$toolset/debug/link-static*/a_d.obj")
+
+
+# Test that indirect conditionals are respected in usage requirements.
+t.rm(".")
+
+t.write("jamroot.jam", """\
+rule has-foo ( properties * ) { return <define>HAS_FOO ; }
+exe a : a.cpp b ;
+lib b : b.cpp : <link>static : : <conditional>@has-foo ;
+""")
+
+t.write("a.cpp", """\
+#ifdef HAS_FOO
+void foo();
+int main() { foo(); }
+#endif
+""")
+
+t.write("b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/using.py b/src/boost/tools/build/test/using.py
new file mode 100644
index 000000000..495f412b1
--- /dev/null
+++ b/src/boost/tools/build/test/using.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "using some_tool ;")
+t.write("some_tool.jam", """\
+import project ;
+project.initialize $(__name__) ;
+rule init ( ) { }
+""")
+
+t.write("some_tool.py", """\
+from b2.manager import get_manager
+get_manager().projects().initialize(__name__)
+def init():
+ pass
+""")
+
+t.write("sub/a.cpp", "int main() {}\n")
+t.write("sub/jamfile.jam", "exe a : a.cpp ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("sub/bin/$toolset/debug*/a.exe")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/wrapper.py b/src/boost/tools/build/test/wrapper.py
new file mode 100644
index 000000000..1adeb2c90
--- /dev/null
+++ b/src/boost/tools/build/test/wrapper.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a user-defined rule can call a built-in main target rule and that
+# this works.
+
+import BoostBuild
+
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamfile.jam", """
+my-test : test.cpp ;
+""")
+
+t.write("test.cpp", """
+int main() {}
+""")
+
+t.write("jamroot.jam", """
+using testing ;
+
+rule my-test ( name ? : sources + )
+{
+ name ?= test ;
+ unit-test $(name) : $(sources) ; # /site-config//cppunit /util//testMain ;
+}
+
+IMPORT $(__name__) : my-test : : my-test ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/test.passed")
+
+t.cleanup()
diff --git a/src/boost/tools/build/test/wrong_project.py b/src/boost/tools/build/test/wrong_project.py
new file mode 100644
index 000000000..7183a6062
--- /dev/null
+++ b/src/boost/tools/build/test/wrong_project.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+
+# Copyright Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test. When a Jamfile contained "using whatever ;" and the 'whatever'
+# module declared a project, all targets in that Jamfile were considered to be
+# declared in the project associated with 'whatever' rather than in the Jamfile.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+
+t.write("jamroot.jam", """\
+using some_tool ;
+exe a : a.cpp ;
+""")
+
+t.write("some_tool.jam", """\
+import project ;
+project.initialize $(__name__) ;
+rule init ( ) { }
+""")
+
+t.write("some_tool.py", """\
+from b2.manager import get_manager
+get_manager().projects().initialize(__name__)
+def init():
+ pass
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug*/a.exe")
+
+t.cleanup()