From e6918187568dbd01842d8d1d2c808ce16a894239 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Sun, 21 Apr 2024 13:54:28 +0200
Subject: Adding upstream version 18.2.2.

Signed-off-by: Daniel Baumann
---
 src/boost/tools/build/CONTRIBUTING.adoc | 174 +
 src/boost/tools/build/Jamroot.jam | 267 +
 src/boost/tools/build/LICENSE.txt | 23 +
 src/boost/tools/build/README.adoc | 41 +
 src/boost/tools/build/azure-pipelines.yml | 427 ++
 src/boost/tools/build/boost-build.jam | 8 +
 src/boost/tools/build/bootstrap.bat | 39 +
 src/boost/tools/build/bootstrap.sh | 28 +
 src/boost/tools/build/bootstrap_vms.com | 48 +
 .../tools/build/example/asciidoctor/example.adoc | 3 +
 .../build/example/asciidoctor/example_manpage.adoc | 38 +
 .../tools/build/example/asciidoctor/jamroot.jam | 11 +
 src/boost/tools/build/example/boost-build.jam | 6 +
 .../tools/build/example/built_tool/Jamroot.jam | 8 +
 .../build/example/built_tool/core/Jamfile.jam | 39 +
 src/boost/tools/build/example/built_tool/core/a.td | 0
 .../tools/build/example/built_tool/core/core.cpp | 5 +
 .../tools/build/example/built_tool/readme.txt | 5 +
 .../build/example/built_tool/tblgen/Jamfile.jam | 4 +
 .../build/example/built_tool/tblgen/tblgen.cpp | 9 +
 .../build/example/complex-testing/compile-fail.cpp | 14 +
 .../tools/build/example/complex-testing/fail.cpp | 14 +
 .../build/example/complex-testing/jamroot.jam | 15 +
 .../tools/build/example/complex-testing/post.cpp | 14 +
 .../build/example/complex-testing/success.cpp | 14 +
 .../build/example/customization/class.verbatim | 7 +
 .../tools/build/example/customization/codegen.cpp | 36 +
 .../build/example/customization/inline_file.py | 44 +
 .../tools/build/example/customization/jamroot.jam | 9 +
 .../tools/build/example/customization/readme.txt | 11 +
 .../tools/build/example/customization/t1.verbatim | 2 +
 .../tools/build/example/customization/t2.verbatim | 0
 .../build/example/customization/usage.verbatim | 5 +
 .../tools/build/example/customization/verbatim.jam | 61 +
 .../tools/build/example/customization/verbatim.py | 47 +
 src/boost/tools/build/example/generate/README.txt | 11 +
 src/boost/tools/build/example/generate/a.cpp | 10 +
 src/boost/tools/build/example/generate/gen.jam | 26 +
 src/boost/tools/build/example/generate/gen.py | 16 +
 src/boost/tools/build/example/generate/jamroot.jam | 9 +
 src/boost/tools/build/example/generator/README.txt | 6 +
 src/boost/tools/build/example/generator/foo.gci | 10 +
 .../tools/build/example/generator/jamroot.jam | 6 +
 src/boost/tools/build/example/generator/soap.jam | 86 +
 src/boost/tools/build/example/gettext/jamfile.jam | 26 +
 src/boost/tools/build/example/gettext/jamroot.jam | 6 +
 src/boost/tools/build/example/gettext/main.cpp | 28 +
 src/boost/tools/build/example/gettext/readme.txt | 24 +
 src/boost/tools/build/example/gettext/russian.po | 21 +
 src/boost/tools/build/example/hello/hello.cpp | 14 +
 src/boost/tools/build/example/hello/jamroot.jam | 1 +
 src/boost/tools/build/example/hello/readme.adoc | 46 +
 .../tools/build/example/libraries/app/app.cpp | 12 +
 .../tools/build/example/libraries/app/jamfile.jam | 9 +
 .../tools/build/example/libraries/jamroot.jam | 4 +
 .../tools/build/example/libraries/util/foo/bar.cpp | 10 +
 .../example/libraries/util/foo/include/lib1.h | 7 +
 .../build/example/libraries/util/foo/jamfile.jam | 9 +
 src/boost/tools/build/example/make/foo.py | 2 +
 src/boost/tools/build/example/make/jamroot.jam | 22 +
 src/boost/tools/build/example/make/main_cpp.pro | 1 +
 src/boost/tools/build/example/make/readme.txt | 7 +
 src/boost/tools/build/example/named-install-dirs/a | 0
.../build/example/named-install-dirs/build.jam | 54 + .../build/example/named-install-dirs/x/build.jam | 5 + .../build/example/named-install-dirs/x/y/build.jam | 9 + .../build/example/named-install-dirs/x/z/build.jam | 11 + .../build/example/pch-multi/include/extra/meta.hpp | 17 + .../tools/build/example/pch-multi/include/pch.hpp | 19 + .../tools/build/example/pch-multi/include/std.hpp | 16 + .../tools/build/example/pch-multi/jamroot.jam | 30 + .../build/example/pch-multi/source/hello_world.cpp | 17 + src/boost/tools/build/example/pch/include/pch.hpp | 19 + src/boost/tools/build/example/pch/jamroot.jam | 29 + .../tools/build/example/pch/source/hello_world.cpp | 15 + .../example/pkg-config/debug-packages/debugged.pc | 4 + .../tools/build/example/pkg-config/jamroot.jam | 104 + .../build/example/pkg-config/packages/debugged.pc | 4 + .../build/example/pkg-config/packages/foobar.pc | 4 + .../example/pkg-config/packages/mangled-mt.pc | 4 + .../build/example/pkg-config/packages/mangled.pc | 4 + .../build/example/pkg-config/packages/versioned.pc | 3 + .../build/example/pkg-config/packages/with-var.pc | 4 + src/boost/tools/build/example/pkg-config/test1.cpp | 11 + src/boost/tools/build/example/pkg-config/test2.cpp | 12 + src/boost/tools/build/example/pkg-config/test3.cpp | 12 + src/boost/tools/build/example/pkg-config/test4.cpp | 11 + src/boost/tools/build/example/pkg-config/test5.cpp | 12 + .../tools/build/example/python_modules/jamroot.jam | 8 + .../example/python_modules/python_helpers.jam | 15 + .../build/example/python_modules/python_helpers.py | 18 + .../tools/build/example/python_modules/readme.txt | 16 + src/boost/tools/build/example/qt/README.txt | 20 + .../tools/build/example/qt/qt3/hello/canvas.cpp | 73 + .../tools/build/example/qt/qt3/hello/canvas.h | 35 + .../tools/build/example/qt/qt3/hello/jamroot.jam | 13 + .../tools/build/example/qt/qt3/hello/main.cpp | 36 + .../build/example/qt/qt3/moccable-cpp/jamroot.jam | 11 + .../build/example/qt/qt3/moccable-cpp/main.cpp | 41 + .../build/example/qt/qt3/uic/hello_world_widget.ui | 58 + .../tools/build/example/qt/qt3/uic/jamroot.jam | 15 + src/boost/tools/build/example/qt/qt3/uic/main.cpp | 18 + .../tools/build/example/qt/qt4/hello/arrow.cpp | 158 + src/boost/tools/build/example/qt/qt4/hello/arrow.h | 30 + .../tools/build/example/qt/qt4/hello/jamroot.jam | 14 + .../tools/build/example/qt/qt4/hello/main.cpp | 27 + .../build/example/qt/qt4/moccable-cpp/jamroot.jam | 18 + .../build/example/qt/qt4/moccable-cpp/main.cpp | 39 + .../build/example/qt/qt4/uic/hello_world_widget.ui | 55 + .../tools/build/example/qt/qt4/uic/jamroot.jam | 18 + src/boost/tools/build/example/qt/qt4/uic/main.cpp | 23 + .../tools/build/example/sanitizers/jamroot.jam | 1 + src/boost/tools/build/example/sanitizers/main.cpp | 9 + .../tools/build/example/sanitizers/readme.adoc | 64 + src/boost/tools/build/example/sass/importing.scss | 3 + .../tools/build/example/sass/include/foobar.scss | 3 + src/boost/tools/build/example/sass/jamroot.jam | 15 + src/boost/tools/build/example/sass/singleton.sass | 12 + src/boost/tools/build/example/sass/singleton.scss | 11 + src/boost/tools/build/example/site-config.jam | 4 + .../tools/build/example/testing/compile-fail.cpp | 14 + src/boost/tools/build/example/testing/fail.cpp | 14 + src/boost/tools/build/example/testing/jamroot.jam | 10 + src/boost/tools/build/example/testing/success.cpp | 15 + src/boost/tools/build/example/time/hello.cpp | 12 + src/boost/tools/build/example/time/jamroot.jam | 16 + src/boost/tools/build/example/time/readme.qbk | 
47 + .../tools/build/example/try_compile/Jamroot.jam | 29 + src/boost/tools/build/example/try_compile/foo.cpp | 6 + src/boost/tools/build/example/try_compile/main.cpp | 8 + src/boost/tools/build/example/user-config.jam | 92 + src/boost/tools/build/example/variant/a.cpp | 7 + src/boost/tools/build/example/variant/jamfile.jam | 11 + src/boost/tools/build/example/variant/jamroot.jam | 12 + .../tools/build/example/variant/libs/jamfile.jam | 8 + src/boost/tools/build/example/variant/libs/l.cpp | 9 + src/boost/tools/build/example/variant/readme.qbk | 94 + src/boost/tools/build/index.html | 5 + src/boost/tools/build/notes/README.txt | 8 + src/boost/tools/build/notes/build_dir_option.txt | 77 + src/boost/tools/build/notes/changes.txt | 317 ++ .../tools/build/notes/relative_source_paths.txt | 76 + src/boost/tools/build/notes/release_procedure.txt | 83 + src/boost/tools/build/src/__init__.py | 0 src/boost/tools/build/src/bootstrap.jam | 18 + src/boost/tools/build/src/build-system.jam | 1087 ++++ src/boost/tools/build/src/build/__init__.py | 0 src/boost/tools/build/src/build/ac.jam | 326 ++ src/boost/tools/build/src/build/alias.jam | 82 + src/boost/tools/build/src/build/alias.py | 75 + src/boost/tools/build/src/build/build-request.jam | 418 ++ src/boost/tools/build/src/build/build_request.py | 222 + src/boost/tools/build/src/build/config-cache.jam | 78 + src/boost/tools/build/src/build/configure.jam | 629 +++ src/boost/tools/build/src/build/configure.py | 176 + src/boost/tools/build/src/build/engine.py | 246 + src/boost/tools/build/src/build/errors.py | 135 + src/boost/tools/build/src/build/feature.jam | 1442 +++++ src/boost/tools/build/src/build/feature.py | 914 ++++ src/boost/tools/build/src/build/generators.jam | 1453 +++++ src/boost/tools/build/src/build/generators.py | 1209 +++++ src/boost/tools/build/src/build/project.jam | 1357 +++++ src/boost/tools/build/src/build/project.py | 1285 +++++ src/boost/tools/build/src/build/property-set.jam | 604 +++ src/boost/tools/build/src/build/property.jam | 1005 ++++ src/boost/tools/build/src/build/property.py | 750 +++ src/boost/tools/build/src/build/property_set.py | 498 ++ src/boost/tools/build/src/build/readme.txt | 11 + src/boost/tools/build/src/build/scanner.jam | 163 + src/boost/tools/build/src/build/scanner.py | 167 + src/boost/tools/build/src/build/targets.jam | 1820 +++++++ src/boost/tools/build/src/build/targets.py | 1523 ++++++ src/boost/tools/build/src/build/toolset.jam | 703 +++ src/boost/tools/build/src/build/toolset.py | 417 ++ src/boost/tools/build/src/build/type.jam | 410 ++ src/boost/tools/build/src/build/type.py | 381 ++ src/boost/tools/build/src/build/version.jam | 225 + src/boost/tools/build/src/build/version.py | 38 + src/boost/tools/build/src/build/virtual-target.jam | 1394 +++++ src/boost/tools/build/src/build/virtual_target.py | 1175 +++++ src/boost/tools/build/src/build_system.py | 682 +++ src/boost/tools/build/src/contrib/__init__.py | 0 src/boost/tools/build/src/contrib/boost.jam | 309 ++ src/boost/tools/build/src/contrib/boost.py | 280 + src/boost/tools/build/src/contrib/modular.jam | 288 + src/boost/tools/build/src/contrib/tntnet.jam | 208 + .../tools/build/src/contrib/wxFormBuilder.jam | 195 + src/boost/tools/build/src/engine/boost-jam.spec | 64 + src/boost/tools/build/src/engine/boost-no-inspect | 1 + src/boost/tools/build/src/engine/build.bat | 195 + src/boost/tools/build/src/engine/build.sh | 511 ++ src/boost/tools/build/src/engine/build_vms.com | 153 + src/boost/tools/build/src/engine/builtins.cpp | 2631 +++++++++ 
src/boost/tools/build/src/engine/builtins.h | 74 + src/boost/tools/build/src/engine/bump_version.py | 98 + src/boost/tools/build/src/engine/check_clib.cpp | 19 + src/boost/tools/build/src/engine/check_cxx11.cpp | 31 + src/boost/tools/build/src/engine/class.cpp | 192 + src/boost/tools/build/src/engine/class.h | 15 + src/boost/tools/build/src/engine/command.cpp | 120 + src/boost/tools/build/src/engine/command.h | 108 + src/boost/tools/build/src/engine/compile.cpp | 231 + src/boost/tools/build/src/engine/compile.h | 60 + src/boost/tools/build/src/engine/config.h | 60 + .../tools/build/src/engine/config_toolset.bat | 238 + src/boost/tools/build/src/engine/constants.cpp | 199 + src/boost/tools/build/src/engine/constants.h | 78 + src/boost/tools/build/src/engine/cwd.cpp | 101 + src/boost/tools/build/src/engine/cwd.h | 42 + src/boost/tools/build/src/engine/debian/changelog | 72 + src/boost/tools/build/src/engine/debian/control | 16 + src/boost/tools/build/src/engine/debian/copyright | 25 + .../tools/build/src/engine/debian/jam.man.sgml | 236 + src/boost/tools/build/src/engine/debian/rules | 73 + src/boost/tools/build/src/engine/debug.cpp | 158 + src/boost/tools/build/src/engine/debug.h | 63 + src/boost/tools/build/src/engine/debugger.cpp | 2737 ++++++++++ src/boost/tools/build/src/engine/debugger.h | 64 + src/boost/tools/build/src/engine/execcmd.cpp | 122 + src/boost/tools/build/src/engine/execcmd.h | 119 + src/boost/tools/build/src/engine/execnt.cpp | 1389 +++++ src/boost/tools/build/src/engine/execunix.cpp | 614 +++ src/boost/tools/build/src/engine/execvms.cpp | 426 ++ src/boost/tools/build/src/engine/filent.cpp | 517 ++ src/boost/tools/build/src/engine/filesys.cpp | 708 +++ src/boost/tools/build/src/engine/filesys.h | 125 + src/boost/tools/build/src/engine/fileunix.cpp | 533 ++ src/boost/tools/build/src/engine/filevms.cpp | 440 ++ src/boost/tools/build/src/engine/frames.cpp | 29 + src/boost/tools/build/src/engine/frames.h | 46 + src/boost/tools/build/src/engine/function.cpp | 5560 ++++++++++++++++++++ src/boost/tools/build/src/engine/function.h | 53 + src/boost/tools/build/src/engine/glob.cpp | 152 + src/boost/tools/build/src/engine/guess_toolset.bat | 107 + src/boost/tools/build/src/engine/hash.cpp | 388 ++ src/boost/tools/build/src/engine/hash.h | 99 + src/boost/tools/build/src/engine/hcache.cpp | 534 ++ src/boost/tools/build/src/engine/hcache.h | 20 + src/boost/tools/build/src/engine/hdrmacro.cpp | 146 + src/boost/tools/build/src/engine/hdrmacro.h | 22 + src/boost/tools/build/src/engine/headers.cpp | 207 + src/boost/tools/build/src/engine/headers.h | 26 + src/boost/tools/build/src/engine/jam.cpp | 723 +++ src/boost/tools/build/src/engine/jam.h | 533 ++ src/boost/tools/build/src/engine/jam_strings.cpp | 240 + src/boost/tools/build/src/engine/jam_strings.h | 38 + src/boost/tools/build/src/engine/jamgram.cpp | 2287 ++++++++ src/boost/tools/build/src/engine/jamgram.hpp | 176 + src/boost/tools/build/src/engine/jamgram.y | 386 ++ src/boost/tools/build/src/engine/jamgram.yy | 340 ++ src/boost/tools/build/src/engine/jamgramtab.h | 46 + src/boost/tools/build/src/engine/lists.cpp | 455 ++ src/boost/tools/build/src/engine/lists.h | 182 + src/boost/tools/build/src/engine/make.cpp | 931 ++++ src/boost/tools/build/src/engine/make.h | 45 + src/boost/tools/build/src/engine/make1.cpp | 1515 ++++++ src/boost/tools/build/src/engine/md5.cpp | 381 ++ src/boost/tools/build/src/engine/md5.h | 93 + src/boost/tools/build/src/engine/mem.cpp | 8 + src/boost/tools/build/src/engine/mem.h | 170 + 
src/boost/tools/build/src/engine/modules.cpp | 434 ++ src/boost/tools/build/src/engine/modules.h | 57 + src/boost/tools/build/src/engine/modules/order.cpp | 159 + src/boost/tools/build/src/engine/modules/path.cpp | 25 + .../build/src/engine/modules/property-set.cpp | 334 ++ .../tools/build/src/engine/modules/readme.txt | 3 + src/boost/tools/build/src/engine/modules/regex.cpp | 233 + .../tools/build/src/engine/modules/sequence.cpp | 96 + src/boost/tools/build/src/engine/modules/set.cpp | 43 + src/boost/tools/build/src/engine/native.cpp | 34 + src/boost/tools/build/src/engine/native.h | 35 + src/boost/tools/build/src/engine/object.cpp | 404 ++ src/boost/tools/build/src/engine/object.h | 80 + src/boost/tools/build/src/engine/option.cpp | 94 + src/boost/tools/build/src/engine/option.h | 25 + src/boost/tools/build/src/engine/output.cpp | 192 + src/boost/tools/build/src/engine/output.h | 46 + src/boost/tools/build/src/engine/parse.cpp | 147 + src/boost/tools/build/src/engine/parse.h | 81 + src/boost/tools/build/src/engine/patchlevel.h | 16 + src/boost/tools/build/src/engine/pathnt.cpp | 413 ++ src/boost/tools/build/src/engine/pathsys.cpp | 469 ++ src/boost/tools/build/src/engine/pathsys.h | 123 + src/boost/tools/build/src/engine/pathunix.cpp | 91 + src/boost/tools/build/src/engine/pathvms.cpp | 254 + src/boost/tools/build/src/engine/regexp.cpp | 1330 +++++ src/boost/tools/build/src/engine/regexp.h | 36 + src/boost/tools/build/src/engine/rules.cpp | 735 +++ src/boost/tools/build/src/engine/rules.h | 294 ++ src/boost/tools/build/src/engine/scan.cpp | 738 +++ src/boost/tools/build/src/engine/scan.h | 71 + src/boost/tools/build/src/engine/search.cpp | 275 + src/boost/tools/build/src/engine/search.h | 23 + src/boost/tools/build/src/engine/startup.cpp | 270 + src/boost/tools/build/src/engine/startup.h | 46 + src/boost/tools/build/src/engine/subst.cpp | 116 + src/boost/tools/build/src/engine/subst.h | 15 + src/boost/tools/build/src/engine/sysinfo.cpp | 168 + src/boost/tools/build/src/engine/sysinfo.h | 46 + src/boost/tools/build/src/engine/timestamp.cpp | 231 + src/boost/tools/build/src/engine/timestamp.h | 48 + src/boost/tools/build/src/engine/variable.cpp | 393 ++ src/boost/tools/build/src/engine/variable.h | 111 + .../build/src/engine/vswhere_usability_wrapper.cmd | 73 + src/boost/tools/build/src/engine/w32_getreg.cpp | 201 + src/boost/tools/build/src/engine/yyacc.cpp | 219 + src/boost/tools/build/src/exceptions.py | 55 + src/boost/tools/build/src/kernel/boost-build.jam | 5 + src/boost/tools/build/src/kernel/bootstrap.jam | 265 + src/boost/tools/build/src/kernel/bootstrap.py | 25 + src/boost/tools/build/src/kernel/class.jam | 420 ++ src/boost/tools/build/src/kernel/errors.jam | 287 + src/boost/tools/build/src/kernel/modules.jam | 364 ++ src/boost/tools/build/src/manager.py | 110 + src/boost/tools/build/src/options/help.jam | 222 + src/boost/tools/build/src/tools/__init__.py | 0 src/boost/tools/build/src/tools/acc.jam | 160 + src/boost/tools/build/src/tools/asciidoctor.jam | 212 + src/boost/tools/build/src/tools/auto-index.jam | 204 + src/boost/tools/build/src/tools/bison.jam | 26 + .../tools/build/src/tools/boostbook-config.jam | 13 + src/boost/tools/build/src/tools/boostbook.jam | 740 +++ src/boost/tools/build/src/tools/borland.jam | 322 ++ src/boost/tools/build/src/tools/builtin.jam | 96 + src/boost/tools/build/src/tools/builtin.py | 816 +++ src/boost/tools/build/src/tools/bzip2.jam | 279 + src/boost/tools/build/src/tools/cast.jam | 91 + src/boost/tools/build/src/tools/cast.py | 76 + 
src/boost/tools/build/src/tools/clang-darwin.jam | 141 + src/boost/tools/build/src/tools/clang-linux.jam | 185 + src/boost/tools/build/src/tools/clang-vxworks.jam | 123 + src/boost/tools/build/src/tools/clang-win.jam | 247 + src/boost/tools/build/src/tools/clang.jam | 68 + src/boost/tools/build/src/tools/common.jam | 1196 +++++ src/boost/tools/build/src/tools/common.py | 860 +++ src/boost/tools/build/src/tools/como-linux.jam | 103 + src/boost/tools/build/src/tools/como-win.jam | 117 + src/boost/tools/build/src/tools/como.jam | 75 + src/boost/tools/build/src/tools/convert.jam | 62 + src/boost/tools/build/src/tools/cray.jam | 1158 ++++ src/boost/tools/build/src/tools/cw-config.jam | 34 + src/boost/tools/build/src/tools/cw.jam | 304 ++ src/boost/tools/build/src/tools/cygwin.jam | 12 + src/boost/tools/build/src/tools/darwin.jam | 511 ++ src/boost/tools/build/src/tools/darwin.py | 57 + src/boost/tools/build/src/tools/diab.jam | 131 + src/boost/tools/build/src/tools/dmc.jam | 174 + src/boost/tools/build/src/tools/docutils.jam | 125 + src/boost/tools/build/src/tools/doxproc.py | 859 +++ src/boost/tools/build/src/tools/doxygen-config.jam | 11 + src/boost/tools/build/src/tools/doxygen.jam | 782 +++ .../src/tools/doxygen/windows-paths-check.doxyfile | 3 + .../src/tools/doxygen/windows-paths-check.hpp | 0 src/boost/tools/build/src/tools/embarcadero.jam | 640 +++ src/boost/tools/build/src/tools/emscripten.jam | 105 + .../build/src/tools/features/__init_features__.jam | 23 + .../src/tools/features/address-model-feature.jam | 22 + .../build/src/tools/features/allow-feature.jam | 19 + .../src/tools/features/architecture-feature.jam | 52 + .../src/tools/features/archiveflags-feature.jam | 18 + .../build/src/tools/features/asmflags-feature.jam | 17 + .../build/src/tools/features/build-feature.jam | 22 + .../build/src/tools/features/cflags-feature.jam | 21 + .../src/tools/features/compileflags-feature.jam | 19 + .../src/tools/features/conditional-feature.jam | 31 + .../build/src/tools/features/coverage-feature.jam | 22 + .../tools/features/cxx-template-depth-feature.jam | 39 + .../build/src/tools/features/cxxabi-feature.jam | 18 + .../build/src/tools/features/cxxflags-feature.jam | 17 + .../build/src/tools/features/cxxstd-feature.jam | 50 + .../build/src/tools/features/debug-feature.jam | 34 + .../build/src/tools/features/define-feature.jam | 30 + .../src/tools/features/dependency-feature.jam | 62 + .../tools/build/src/tools/features/dll-feature.jam | 73 + .../build/src/tools/features/exception-feature.jam | 47 + .../build/src/tools/features/fflags-feature.jam | 18 + .../build/src/tools/features/file-feature.jam | 18 + .../build/src/tools/features/find-lib-feature.jam | 42 + .../build/src/tools/features/flags-feature.jam | 19 + .../src/tools/features/force-include-feature.jam | 21 + .../build/src/tools/features/include-feature.jam | 19 + .../src/tools/features/instruction-set-feature.jam | 73 + .../build/src/tools/features/internal-feature.jam | 19 + .../build/src/tools/features/library-feature.jam | 22 + .../build/src/tools/features/link-feature.jam | 19 + .../build/src/tools/features/linkflags-feature.jam | 17 + .../tools/features/local-visibility-feature.jam | 27 + .../build/src/tools/features/location-feature.jam | 18 + .../src/tools/features/location-prefix-feature.jam | 18 + .../tools/build/src/tools/features/lto-feature.jam | 46 + .../build/src/tools/features/name-feature.jam | 22 + .../build/src/tools/features/objcflags-feature.jam | 32 + .../src/tools/features/optimization-feature.jam | 46 + 
.../tools/build/src/tools/features/os-feature.jam | 95 + .../build/src/tools/features/relevant-feature.jam | 48 + .../src/tools/features/response-file-feature.jam | 28 + .../build/src/tools/features/rtti-feature.jam | 19 + .../build/src/tools/features/runtime-feature.jam | 40 + .../src/tools/features/sanitizers-feature.jam | 63 + .../build/src/tools/features/search-feature.jam | 20 + .../build/src/tools/features/source-feature.jam | 22 + .../build/src/tools/features/stdlib-feature.jam | 29 + .../build/src/tools/features/strip-feature.jam | 25 + .../tools/build/src/tools/features/tag-feature.jam | 39 + .../build/src/tools/features/threadapi-feature.jam | 39 + .../build/src/tools/features/threading-feature.jam | 24 + .../build/src/tools/features/toolset-feature.jam | 20 + .../src/tools/features/translate-path-feature.jam | 34 + .../src/tools/features/user-interface-feature.jam | 28 + .../build/src/tools/features/variant-feature.jam | 114 + .../build/src/tools/features/version-feature.jam | 19 + .../src/tools/features/visibility-feature.jam | 46 + .../build/src/tools/features/warnings-feature.jam | 41 + src/boost/tools/build/src/tools/flags.jam | 152 + src/boost/tools/build/src/tools/fop.jam | 69 + src/boost/tools/build/src/tools/fortran.jam | 55 + src/boost/tools/build/src/tools/gcc.jam | 1268 +++++ src/boost/tools/build/src/tools/gcc.py | 875 +++ src/boost/tools/build/src/tools/generate.jam | 111 + .../src/tools/generators/__init_generators__.jam | 23 + .../src/tools/generators/archive-generator.jam | 74 + .../src/tools/generators/c-compiling-generator.jam | 70 + .../build/src/tools/generators/dummy-generator.jam | 20 + .../build/src/tools/generators/lib-generator.jam | 121 + .../src/tools/generators/linking-generator.jam | 179 + .../tools/generators/prebuilt-lib-generator.jam | 29 + .../tools/generators/searched-lib-generator.jam | 97 + src/boost/tools/build/src/tools/gettext.jam | 230 + src/boost/tools/build/src/tools/gfortran.jam | 39 + src/boost/tools/build/src/tools/hp_cxx.jam | 222 + src/boost/tools/build/src/tools/hpfortran.jam | 35 + src/boost/tools/build/src/tools/ifort.jam | 44 + src/boost/tools/build/src/tools/intel-darwin.jam | 188 + src/boost/tools/build/src/tools/intel-linux.jam | 311 ++ src/boost/tools/build/src/tools/intel-vxworks.jam | 192 + src/boost/tools/build/src/tools/intel-win.jam | 544 ++ src/boost/tools/build/src/tools/intel.jam | 85 + src/boost/tools/build/src/tools/lex.jam | 25 + src/boost/tools/build/src/tools/libjpeg.jam | 234 + src/boost/tools/build/src/tools/libpng.jam | 229 + src/boost/tools/build/src/tools/libtiff.jam | 227 + src/boost/tools/build/src/tools/link.jam | 547 ++ src/boost/tools/build/src/tools/lzma.jam | 134 + src/boost/tools/build/src/tools/make.jam | 69 + src/boost/tools/build/src/tools/make.py | 59 + src/boost/tools/build/src/tools/mc.jam | 44 + src/boost/tools/build/src/tools/mc.py | 46 + src/boost/tools/build/src/tools/message.jam | 62 + src/boost/tools/build/src/tools/message.py | 54 + src/boost/tools/build/src/tools/midl.jam | 142 + src/boost/tools/build/src/tools/midl.py | 134 + src/boost/tools/build/src/tools/mipspro.jam | 148 + src/boost/tools/build/src/tools/mpi.jam | 637 +++ src/boost/tools/build/src/tools/msvc-config.jam | 12 + src/boost/tools/build/src/tools/msvc.jam | 2217 ++++++++ src/boost/tools/build/src/tools/msvc.py | 1314 +++++ src/boost/tools/build/src/tools/notfile.jam | 65 + src/boost/tools/build/src/tools/notfile.py | 51 + src/boost/tools/build/src/tools/openssl.jam | 140 + src/boost/tools/build/src/tools/package.jam | 
274 + src/boost/tools/build/src/tools/package.py | 168 + src/boost/tools/build/src/tools/pathscale.jam | 180 + src/boost/tools/build/src/tools/pch.jam | 95 + src/boost/tools/build/src/tools/pch.py | 83 + src/boost/tools/build/src/tools/pgi.jam | 141 + src/boost/tools/build/src/tools/pkg-config.jam | 486 ++ src/boost/tools/build/src/tools/python-config.jam | 27 + src/boost/tools/build/src/tools/python.jam | 1345 +++++ src/boost/tools/build/src/tools/qcc.jam | 299 ++ src/boost/tools/build/src/tools/qt.jam | 17 + src/boost/tools/build/src/tools/qt3.jam | 209 + src/boost/tools/build/src/tools/qt4.jam | 755 +++ src/boost/tools/build/src/tools/qt5.jam | 799 +++ .../tools/build/src/tools/quickbook-config.jam | 44 + src/boost/tools/build/src/tools/quickbook.jam | 363 ++ src/boost/tools/build/src/tools/rc.jam | 155 + src/boost/tools/build/src/tools/rc.py | 197 + src/boost/tools/build/src/tools/sass.jam | 193 + src/boost/tools/build/src/tools/saxonhe.jam | 53 + src/boost/tools/build/src/tools/stage.jam | 905 ++++ src/boost/tools/build/src/tools/stage.py | 350 ++ src/boost/tools/build/src/tools/stlport.jam | 312 ++ src/boost/tools/build/src/tools/sun.jam | 226 + src/boost/tools/build/src/tools/symlink.jam | 140 + src/boost/tools/build/src/tools/symlink.py | 112 + src/boost/tools/build/src/tools/testing-aux.jam | 344 ++ src/boost/tools/build/src/tools/testing.jam | 847 +++ src/boost/tools/build/src/tools/testing.py | 359 ++ src/boost/tools/build/src/tools/types/__init__.py | 19 + src/boost/tools/build/src/tools/types/adoc.jam | 26 + src/boost/tools/build/src/tools/types/asm.jam | 4 + src/boost/tools/build/src/tools/types/asm.py | 33 + src/boost/tools/build/src/tools/types/cpp.jam | 93 + src/boost/tools/build/src/tools/types/cpp.py | 11 + src/boost/tools/build/src/tools/types/css.jam | 10 + src/boost/tools/build/src/tools/types/docbook.jam | 10 + src/boost/tools/build/src/tools/types/exe.jam | 9 + src/boost/tools/build/src/tools/types/exe.py | 11 + src/boost/tools/build/src/tools/types/html.jam | 4 + src/boost/tools/build/src/tools/types/html.py | 10 + src/boost/tools/build/src/tools/types/lib.jam | 74 + src/boost/tools/build/src/tools/types/lib.py | 77 + src/boost/tools/build/src/tools/types/man.jam | 8 + src/boost/tools/build/src/tools/types/markdown.jam | 4 + src/boost/tools/build/src/tools/types/markdown.py | 10 + src/boost/tools/build/src/tools/types/obj.jam | 9 + src/boost/tools/build/src/tools/types/obj.py | 11 + src/boost/tools/build/src/tools/types/objc.jam | 26 + src/boost/tools/build/src/tools/types/pdf.jam | 8 + .../tools/build/src/tools/types/preprocessed.jam | 10 + .../tools/build/src/tools/types/preprocessed.py | 11 + src/boost/tools/build/src/tools/types/qt.jam | 14 + src/boost/tools/build/src/tools/types/register.jam | 39 + src/boost/tools/build/src/tools/types/rsp.jam | 4 + src/boost/tools/build/src/tools/types/rsp.py | 10 + .../tools/build/src/tools/types/sass-type.jam | 49 + src/boost/tools/build/src/tools/types/xml.jam | 49 + src/boost/tools/build/src/tools/unix.jam | 223 + src/boost/tools/build/src/tools/unix.py | 155 + src/boost/tools/build/src/tools/vacpp.jam | 173 + src/boost/tools/build/src/tools/vmsdecc.jam | 580 ++ src/boost/tools/build/src/tools/whale.jam | 116 + src/boost/tools/build/src/tools/xlcpp.jam | 168 + src/boost/tools/build/src/tools/xlf.jam | 39 + .../tools/build/src/tools/xsltproc-config.jam | 36 + src/boost/tools/build/src/tools/xsltproc.jam | 232 + .../tools/build/src/tools/xsltproc/included.xsl | 11 + src/boost/tools/build/src/tools/xsltproc/test.xml | 2 + 
src/boost/tools/build/src/tools/xsltproc/test.xsl | 12 + src/boost/tools/build/src/tools/zlib.jam | 235 + src/boost/tools/build/src/tools/zstd.jam | 100 + src/boost/tools/build/src/util/__init__.py | 321 ++ src/boost/tools/build/src/util/assert.jam | 346 ++ src/boost/tools/build/src/util/container.jam | 339 ++ src/boost/tools/build/src/util/doc.jam | 1076 ++++ src/boost/tools/build/src/util/indirect.jam | 167 + src/boost/tools/build/src/util/indirect.py | 15 + src/boost/tools/build/src/util/logger.py | 46 + src/boost/tools/build/src/util/numbers.jam | 241 + src/boost/tools/build/src/util/option.jam | 109 + src/boost/tools/build/src/util/option.py | 35 + src/boost/tools/build/src/util/order.jam | 173 + src/boost/tools/build/src/util/order.py | 121 + src/boost/tools/build/src/util/os.jam | 208 + src/boost/tools/build/src/util/os_j.py | 24 + src/boost/tools/build/src/util/param.jam | 54 + src/boost/tools/build/src/util/path.jam | 1015 ++++ src/boost/tools/build/src/util/path.py | 937 ++++ src/boost/tools/build/src/util/print.jam | 508 ++ src/boost/tools/build/src/util/regex.jam | 208 + src/boost/tools/build/src/util/regex.py | 63 + src/boost/tools/build/src/util/sequence.jam | 378 ++ src/boost/tools/build/src/util/sequence.py | 58 + src/boost/tools/build/src/util/set.jam | 93 + src/boost/tools/build/src/util/set.py | 48 + src/boost/tools/build/src/util/string.jam | 189 + src/boost/tools/build/src/util/utility.jam | 235 + src/boost/tools/build/src/util/utility.py | 176 + src/boost/tools/build/test/BoostBuild.py | 1381 +++++ src/boost/tools/build/test/Jamfile.jam | 29 + src/boost/tools/build/test/MockToolset.py | 267 + src/boost/tools/build/test/TestCmd.py | 609 +++ src/boost/tools/build/test/TestToolset.py | 134 + src/boost/tools/build/test/abs_workdir.py | 39 + src/boost/tools/build/test/absolute_sources.py | 73 + src/boost/tools/build/test/alias.py | 116 + src/boost/tools/build/test/alternatives.py | 129 + src/boost/tools/build/test/always.py | 34 + src/boost/tools/build/test/bad_dirname.py | 22 + src/boost/tools/build/test/boost-build.jam | 14 + src/boost/tools/build/test/boostbook.py | 23 + src/boost/tools/build/test/boostbook/a.hpp | 16 + src/boost/tools/build/test/boostbook/docs.xml | 36 + src/boost/tools/build/test/boostbook/jamroot.jam | 3 + src/boost/tools/build/test/build_dir.py | 107 + src/boost/tools/build/test/build_file.py | 170 + src/boost/tools/build/test/build_hooks.py | 39 + src/boost/tools/build/test/build_no.py | 23 + src/boost/tools/build/test/builtin_echo.py | 30 + src/boost/tools/build/test/builtin_exit.py | 42 + src/boost/tools/build/test/builtin_glob.py | 87 + src/boost/tools/build/test/builtin_glob_archive.py | 217 + src/boost/tools/build/test/builtin_readlink.py | 31 + .../build/test/builtin_split_by_characters.py | 57 + src/boost/tools/build/test/bzip2.py | 119 + src/boost/tools/build/test/c_file.py | 36 + src/boost/tools/build/test/chain.py | 56 + src/boost/tools/build/test/clean.py | 104 + .../tools/build/test/cli_property_expansion.py | 41 + src/boost/tools/build/test/collect_debug_info.py | 341 ++ .../tools/build/test/command_line_properties.py | 166 + src/boost/tools/build/test/composite.py | 25 + src/boost/tools/build/test/conditionals.py | 48 + src/boost/tools/build/test/conditionals2.py | 43 + src/boost/tools/build/test/conditionals3.py | 30 + src/boost/tools/build/test/conditionals4.py | 45 + .../tools/build/test/conditionals_multiple.py | 312 ++ src/boost/tools/build/test/configuration.py | 397 ++ src/boost/tools/build/test/configure.py | 267 + 
src/boost/tools/build/test/copy_time.py | 69 + src/boost/tools/build/test/core-language/test.jam | 1571 ++++++ src/boost/tools/build/test/core_action_output.py | 62 + src/boost/tools/build/test/core_action_status.py | 27 + src/boost/tools/build/test/core_actions_quietly.py | 61 + src/boost/tools/build/test/core_arguments.py | 105 + src/boost/tools/build/test/core_at_file.py | 64 + src/boost/tools/build/test/core_bindrule.py | 45 + src/boost/tools/build/test/core_d12.py | 32 + src/boost/tools/build/test/core_delete_module.py | 51 + src/boost/tools/build/test/core_dependencies.py | 157 + src/boost/tools/build/test/core_fail_expected.py | 139 + src/boost/tools/build/test/core_import_module.py | 82 + src/boost/tools/build/test/core_jamshell.py | 55 + src/boost/tools/build/test/core_language.py | 12 + src/boost/tools/build/test/core_modifiers.py | 50 + .../tools/build/test/core_multifile_actions.py | 202 + src/boost/tools/build/test/core_nt_cmd_line.py | 266 + src/boost/tools/build/test/core_option_d2.py | 55 + src/boost/tools/build/test/core_option_l.py | 44 + src/boost/tools/build/test/core_option_n.py | 51 + .../tools/build/test/core_parallel_actions.py | 103 + .../test/core_parallel_multifile_actions_1.py | 78 + .../test/core_parallel_multifile_actions_2.py | 71 + src/boost/tools/build/test/core_scanner.py | 36 + .../tools/build/test/core_source_line_tracking.py | 74 + .../build/test/core_syntax_error_exit_status.py | 23 + src/boost/tools/build/test/core_typecheck.py | 47 + src/boost/tools/build/test/core_update_now.py | 377 ++ .../tools/build/test/core_variables_in_actions.py | 39 + src/boost/tools/build/test/core_varnames.py | 38 + src/boost/tools/build/test/custom_generator.py | 66 + src/boost/tools/build/test/debugger-mi.py | 326 ++ src/boost/tools/build/test/debugger.py | 674 +++ src/boost/tools/build/test/default_build.py | 81 + src/boost/tools/build/test/default_features.py | 50 + src/boost/tools/build/test/default_toolset.py | 215 + src/boost/tools/build/test/dependency_property.py | 38 + src/boost/tools/build/test/dependency_test.py | 243 + src/boost/tools/build/test/disambiguation.py | 32 + src/boost/tools/build/test/dll_path.py | 163 + src/boost/tools/build/test/double_loading.py | 31 + src/boost/tools/build/test/duplicate.py | 38 + .../tools/build/test/example_customization.py | 21 + src/boost/tools/build/test/example_gettext.py | 30 + src/boost/tools/build/test/example_libraries.py | 21 + src/boost/tools/build/test/example_make.py | 17 + src/boost/tools/build/test/example_qt4.py | 26 + src/boost/tools/build/test/exit_status.py | 26 + src/boost/tools/build/test/expansion.py | 140 + src/boost/tools/build/test/explicit.py | 59 + src/boost/tools/build/test/feature_cxxflags.py | 37 + .../tools/build/test/feature_force_include.py | 41 + .../build/test/feature_implicit_dependency.py | 113 + src/boost/tools/build/test/feature_relevant.py | 142 + .../build/test/feature_suppress_import_lib.py | 33 + src/boost/tools/build/test/file_types.py | 44 + src/boost/tools/build/test/flags.py | 74 + src/boost/tools/build/test/gcc_runtime.py | 27 + src/boost/tools/build/test/generator_selection.py | 158 + src/boost/tools/build/test/generators_test.py | 433 ++ src/boost/tools/build/test/implicit_dependency.py | 81 + src/boost/tools/build/test/indirect_conditional.py | 150 + src/boost/tools/build/test/inherit_toolset.py | 100 + src/boost/tools/build/test/inherited_dependency.py | 237 + src/boost/tools/build/test/inline.py | 62 + src/boost/tools/build/test/install_build_no.py | 26 + 
src/boost/tools/build/test/lang_objc.py | 33 + src/boost/tools/build/test/lib_source_property.py | 45 + src/boost/tools/build/test/lib_zlib.py | 184 + src/boost/tools/build/test/libjpeg.py | 117 + src/boost/tools/build/test/liblzma.py | 118 + src/boost/tools/build/test/libpng.py | 119 + src/boost/tools/build/test/library_chain.py | 152 + src/boost/tools/build/test/library_order.py | 94 + src/boost/tools/build/test/library_property.py | 56 + src/boost/tools/build/test/libtiff.py | 119 + src/boost/tools/build/test/libzstd.py | 118 + src/boost/tools/build/test/link.py | 350 ++ src/boost/tools/build/test/load_dir.py | 84 + src/boost/tools/build/test/load_order.py | 71 + src/boost/tools/build/test/loop.py | 24 + src/boost/tools/build/test/make_rule.py | 54 + src/boost/tools/build/test/message.py | 38 + src/boost/tools/build/test/module_actions.py | 105 + src/boost/tools/build/test/ndebug.py | 33 + src/boost/tools/build/test/no_type.py | 19 + src/boost/tools/build/test/notfile.py | 36 + src/boost/tools/build/test/ordered_include.py | 251 + src/boost/tools/build/test/ordered_properties.py | 33 + src/boost/tools/build/test/out_of_tree.py | 29 + src/boost/tools/build/test/package.py | 231 + src/boost/tools/build/test/param.py | 61 + src/boost/tools/build/test/path_features.py | 163 + src/boost/tools/build/test/pch.py | 69 + src/boost/tools/build/test/prebuilt.py | 43 + src/boost/tools/build/test/prebuilt/ext/a.cpp | 14 + src/boost/tools/build/test/prebuilt/ext/debug/a.h | 10 + .../tools/build/test/prebuilt/ext/jamfile.jam | 13 + .../tools/build/test/prebuilt/ext/jamfile2.jam | 41 + .../tools/build/test/prebuilt/ext/jamfile3.jam | 48 + .../tools/build/test/prebuilt/ext/jamroot.jam | 5 + .../tools/build/test/prebuilt/ext/release/a.h | 10 + src/boost/tools/build/test/prebuilt/hello.cpp | 17 + src/boost/tools/build/test/prebuilt/jamfile.jam | 13 + src/boost/tools/build/test/prebuilt/jamroot.jam | 4 + src/boost/tools/build/test/preprocessor.py | 53 + src/boost/tools/build/test/print.py | 48 + src/boost/tools/build/test/project-test3/a.cpp | 5 + .../tools/build/test/project-test3/jamfile.jam | 13 + .../tools/build/test/project-test3/jamroot.jam | 67 + src/boost/tools/build/test/project-test3/lib/b.cpp | 5 + .../tools/build/test/project-test3/lib/jamfile.jam | 9 + .../tools/build/test/project-test3/lib2/c.cpp | 5 + .../tools/build/test/project-test3/lib2/d.cpp | 5 + .../build/test/project-test3/lib2/helper/e.cpp | 5 + .../test/project-test3/lib2/helper/jamfile.jam | 9 + .../build/test/project-test3/lib2/jamfile.jam | 11 + .../tools/build/test/project-test3/lib3/f.cpp | 5 + .../build/test/project-test3/lib3/jamfile.jam | 47 + .../build/test/project-test3/lib3/jamroot.jam | 5 + .../tools/build/test/project-test3/readme.txt | 7 + src/boost/tools/build/test/project-test4/a.cpp | 5 + src/boost/tools/build/test/project-test4/a_gcc.cpp | 5 + .../tools/build/test/project-test4/jamfile.jam | 11 + .../tools/build/test/project-test4/jamfile3.jam | 5 + .../tools/build/test/project-test4/jamfile4.jam | 4 + .../tools/build/test/project-test4/jamfile5.jam | 6 + .../tools/build/test/project-test4/jamroot.jam | 68 + src/boost/tools/build/test/project-test4/lib/b.cpp | 5 + .../tools/build/test/project-test4/lib/jamfile.jam | 6 + .../build/test/project-test4/lib/jamfile1.jam | 2 + .../build/test/project-test4/lib/jamfile2.jam | 4 + .../build/test/project-test4/lib/jamfile3.jam | 2 + .../build/test/project-test4/lib2/jamfile.jam | 8 + .../build/test/project-test4/lib2/jamfile2.jam | 4 + 
.../tools/build/test/project-test4/readme.txt | 6 + src/boost/tools/build/test/project_dependencies.py | 51 + src/boost/tools/build/test/project_glob.py | 212 + src/boost/tools/build/test/project_id.py | 414 ++ .../tools/build/test/project_root_constants.py | 62 + src/boost/tools/build/test/project_root_rule.py | 34 + src/boost/tools/build/test/project_test3.py | 135 + src/boost/tools/build/test/project_test4.py | 65 + src/boost/tools/build/test/property_expansion.py | 28 + src/boost/tools/build/test/qt4.py | 19 + src/boost/tools/build/test/qt4/jamroot.jam | 82 + src/boost/tools/build/test/qt4/mock.cpp | 26 + src/boost/tools/build/test/qt4/mock.h | 21 + src/boost/tools/build/test/qt4/phonon.cpp | 23 + src/boost/tools/build/test/qt4/qt3support.cpp | 29 + src/boost/tools/build/test/qt4/qtassistant.cpp | 21 + src/boost/tools/build/test/qt4/qtcore.cpp | 22 + src/boost/tools/build/test/qt4/qtcorefail.cpp | 23 + src/boost/tools/build/test/qt4/qtdeclarative.cpp | 27 + src/boost/tools/build/test/qt4/qtgui.cpp | 42 + src/boost/tools/build/test/qt4/qthelp.cpp | 22 + src/boost/tools/build/test/qt4/qtmultimedia.cpp | 25 + src/boost/tools/build/test/qt4/qtnetwork.cpp | 33 + src/boost/tools/build/test/qt4/qtscript.cpp | 37 + src/boost/tools/build/test/qt4/qtscripttools.cpp | 47 + src/boost/tools/build/test/qt4/qtsql.cpp | 37 + src/boost/tools/build/test/qt4/qtsvg.cpp | 21 + src/boost/tools/build/test/qt4/qttest.cpp | 30 + src/boost/tools/build/test/qt4/qtwebkit.cpp | 24 + src/boost/tools/build/test/qt4/qtxml.cpp | 29 + src/boost/tools/build/test/qt4/qtxmlpatterns.cpp | 76 + src/boost/tools/build/test/qt4/rcc.cpp | 20 + src/boost/tools/build/test/qt4/rcc.qrc | 5 + src/boost/tools/build/test/qt5.py | 20 + src/boost/tools/build/test/qt5/initialization.cpp | 7 + src/boost/tools/build/test/qt5/jamroot.jam | 118 + src/boost/tools/build/test/qt5/mock.cpp | 26 + src/boost/tools/build/test/qt5/mock.h | 21 + src/boost/tools/build/test/qt5/qt3dcore.cpp | 21 + src/boost/tools/build/test/qt5/qt3dinput.cpp | 24 + src/boost/tools/build/test/qt5/qt3dlogic.cpp | 20 + src/boost/tools/build/test/qt5/qt3drender.cpp | 21 + src/boost/tools/build/test/qt5/qtassistant.cpp | 21 + src/boost/tools/build/test/qt5/qtbluetooth.cpp | 34 + src/boost/tools/build/test/qt5/qtcharts.cpp | 15 + src/boost/tools/build/test/qt5/qtcore.cpp | 22 + src/boost/tools/build/test/qt5/qtcorefail.cpp | 23 + .../tools/build/test/qt5/qtdatavisualization.cpp | 31 + src/boost/tools/build/test/qt5/qtdeclarative.cpp | 26 + src/boost/tools/build/test/qt5/qtgamepad.cpp | 29 + src/boost/tools/build/test/qt5/qthelp.cpp | 22 + src/boost/tools/build/test/qt5/qtlocation.cpp | 30 + src/boost/tools/build/test/qt5/qtmultimedia.cpp | 25 + src/boost/tools/build/test/qt5/qtnetwork.cpp | 33 + src/boost/tools/build/test/qt5/qtnfc.cpp | 28 + src/boost/tools/build/test/qt5/qtpositioning.cpp | 23 + src/boost/tools/build/test/qt5/qtpurchasing.cpp | 44 + src/boost/tools/build/test/qt5/qtquick.cpp | 43 + src/boost/tools/build/test/qt5/qtquick.qml | 20 + src/boost/tools/build/test/qt5/qtscript.cpp | 37 + src/boost/tools/build/test/qt5/qtscripttools.cpp | 47 + src/boost/tools/build/test/qt5/qtscxml.cpp | 33 + src/boost/tools/build/test/qt5/qtserialbus.cpp | 25 + src/boost/tools/build/test/qt5/qtserialport.cpp | 22 + src/boost/tools/build/test/qt5/qtsql.cpp | 37 + src/boost/tools/build/test/qt5/qtsvg.cpp | 21 + src/boost/tools/build/test/qt5/qttest.cpp | 30 + src/boost/tools/build/test/qt5/qtwebchannel.cpp | 29 + src/boost/tools/build/test/qt5/qtwebengine.cpp | 30 + 
.../tools/build/test/qt5/qtwebenginewidgets.cpp | 40 + src/boost/tools/build/test/qt5/qtwebkit.cpp | 22 + src/boost/tools/build/test/qt5/qtwebkitwidgets.cpp | 23 + src/boost/tools/build/test/qt5/qtwebsocket.cpp | 26 + src/boost/tools/build/test/qt5/qtwebsockets.cpp | 24 + src/boost/tools/build/test/qt5/qtwebview.cpp | 31 + src/boost/tools/build/test/qt5/qtwidgets.cpp | 43 + src/boost/tools/build/test/qt5/qtxml.cpp | 29 + src/boost/tools/build/test/qt5/qtxmlpatterns.cpp | 76 + src/boost/tools/build/test/qt5/rcc.cpp | 20 + src/boost/tools/build/test/qt5/rcc.qrc | 5 + src/boost/tools/build/test/readme.txt | 6 + src/boost/tools/build/test/rebuilds.py | 68 + src/boost/tools/build/test/relative_sources.py | 38 + src/boost/tools/build/test/remove_requirement.py | 91 + src/boost/tools/build/test/rescan_header.py | 268 + src/boost/tools/build/test/resolution.py | 35 + src/boost/tools/build/test/results-python.txt | 132 + src/boost/tools/build/test/rootless.py | 36 + .../tools/build/test/rootless/test1/sub_root/a.cpp | 6 + .../build/test/rootless/test1/sub_root/jamfile.jam | 10 + .../tools/build/test/rootless/test2/sub_root/a.cpp | 6 + .../build/test/rootless/test2/sub_root/jamfile.jam | 13 + .../tools/build/test/rootless/test3/jamfile.jam | 6 + .../build/test/rootless/test3/sub/inner/a.cpp | 6 + .../test/rootless/test3/sub/inner/jamfile.jam | 11 + .../tools/build/test/scanner_causing_rebuilds.py | 132 + src/boost/tools/build/test/searched_lib.py | 202 + src/boost/tools/build/test/skipping.py | 27 + src/boost/tools/build/test/sort_rule.py | 98 + src/boost/tools/build/test/source_locations.py | 42 + src/boost/tools/build/test/source_order.py | 84 + src/boost/tools/build/test/space_in_path.py | 51 + src/boost/tools/build/test/stage.py | 207 + src/boost/tools/build/test/standalone.py | 53 + .../build/test/startup/boost-root/boost-build.jam | 7 + .../test/startup/boost-root/build/boost-build.jam | 6 + .../test/startup/boost-root/build/bootstrap.jam | 7 + .../test/startup/bootstrap-env/boost-build.jam | 5 + .../startup/bootstrap-explicit/boost-build.jam | 6 + .../test/startup/bootstrap-implicit/readme.txt | 5 + .../test/startup/no-bootstrap1/boost-build.jam | 6 + .../test/startup/no-bootstrap1/subdir/readme.txt | 5 + .../test/startup/no-bootstrap2/boost-build.jam | 6 + .../test/startup/no-bootstrap3/boost-build.jam | 5 + src/boost/tools/build/test/startup_v2.py | 96 + .../tools/build/test/static_and_shared_library.py | 36 + src/boost/tools/build/test/suffix.py | 78 + src/boost/tools/build/test/symlink.py | 43 + src/boost/tools/build/test/tag.py | 122 + src/boost/tools/build/test/template.py | 42 + src/boost/tools/build/test/test-config-example.jam | 19 + src/boost/tools/build/test/test.jam | 39 + src/boost/tools/build/test/test1.py | 18 + src/boost/tools/build/test/test2.py | 25 + src/boost/tools/build/test/test2/foo.cpp | 7 + src/boost/tools/build/test/test2/jamroot.jam | 5 + src/boost/tools/build/test/test_all.py | 373 ++ src/boost/tools/build/test/test_rc.py | 148 + src/boost/tools/build/test/test_system.html | 623 +++ src/boost/tools/build/test/testing.py | 556 ++ src/boost/tools/build/test/timedata.py | 178 + .../tools/build/test/toolset-mock/Jamroot.jam | 8 + src/boost/tools/build/test/toolset-mock/lib.cpp | 7 + src/boost/tools/build/test/toolset-mock/main.cpp | 7 + .../build/test/toolset-mock/project-config.jam | 48 + .../tools/build/test/toolset-mock/src/Jamroot.jam | 57 + .../build/test/toolset-mock/src/MockProgram.py | 287 + src/boost/tools/build/test/toolset-mock/src/ar.py | 27 + 
.../test/toolset-mock/src/clang-3.9.0-darwin.py | 48 + .../test/toolset-mock/src/clang-linux-3.9.0.py | 87 + .../test/toolset-mock/src/clang-vxworks-4.0.1.py | 41 + .../build/test/toolset-mock/src/darwin-4.2.1.py | 38 + .../test/toolset-mock/src/gcc-4.2.1-darwin.py | 36 + .../build/test/toolset-mock/src/gcc-4.8.3-linux.py | 49 + .../test/toolset-mock/src/intel-darwin-10.2.py | 42 + src/boost/tools/build/test/toolset-mock/src/ld.py | 33 + .../tools/build/test/toolset-mock/src/libtool.py | 14 + .../tools/build/test/toolset-mock/src/linkx.py | 33 + .../build/test/toolset-mock/src/mock-program.cpp | 42 + .../tools/build/test/toolset-mock/src/msvc-14.3.py | 33 + .../build/test/toolset-mock/src/project-config.jam | 5 + .../tools/build/test/toolset-mock/src/strip.py | 13 + .../tools/build/test/toolset-mock/src/verify.py | 9 + src/boost/tools/build/test/toolset_clang_darwin.py | 20 + src/boost/tools/build/test/toolset_clang_linux.py | 29 + .../tools/build/test/toolset_clang_vxworks.py | 20 + src/boost/tools/build/test/toolset_darwin.py | 21 + src/boost/tools/build/test/toolset_defaults.py | 60 + src/boost/tools/build/test/toolset_gcc.py | 26 + src/boost/tools/build/test/toolset_intel_darwin.py | 19 + src/boost/tools/build/test/toolset_msvc.py | 19 + src/boost/tools/build/test/toolset_requirements.py | 44 + src/boost/tools/build/test/transitive_skip.py | 30 + src/boost/tools/build/test/tree.py | 245 + src/boost/tools/build/test/unit_test.py | 36 + src/boost/tools/build/test/unit_tests.py | 11 + src/boost/tools/build/test/unused.py | 81 + src/boost/tools/build/test/use_requirements.py | 283 + src/boost/tools/build/test/using.py | 32 + src/boost/tools/build/test/wrapper.py | 38 + src/boost/tools/build/test/wrong_project.py | 39 + 906 files changed, 139935 insertions(+) create mode 100644 src/boost/tools/build/CONTRIBUTING.adoc create mode 100644 src/boost/tools/build/Jamroot.jam create mode 100644 src/boost/tools/build/LICENSE.txt create mode 100644 src/boost/tools/build/README.adoc create mode 100644 src/boost/tools/build/azure-pipelines.yml create mode 100644 src/boost/tools/build/boost-build.jam create mode 100644 src/boost/tools/build/bootstrap.bat create mode 100755 src/boost/tools/build/bootstrap.sh create mode 100644 src/boost/tools/build/bootstrap_vms.com create mode 100644 src/boost/tools/build/example/asciidoctor/example.adoc create mode 100644 src/boost/tools/build/example/asciidoctor/example_manpage.adoc create mode 100644 src/boost/tools/build/example/asciidoctor/jamroot.jam create mode 100644 src/boost/tools/build/example/boost-build.jam create mode 100644 src/boost/tools/build/example/built_tool/Jamroot.jam create mode 100644 src/boost/tools/build/example/built_tool/core/Jamfile.jam create mode 100644 src/boost/tools/build/example/built_tool/core/a.td create mode 100644 src/boost/tools/build/example/built_tool/core/core.cpp create mode 100644 src/boost/tools/build/example/built_tool/readme.txt create mode 100644 src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam create mode 100644 src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp create mode 100644 src/boost/tools/build/example/complex-testing/compile-fail.cpp create mode 100644 src/boost/tools/build/example/complex-testing/fail.cpp create mode 100644 src/boost/tools/build/example/complex-testing/jamroot.jam create mode 100644 src/boost/tools/build/example/complex-testing/post.cpp create mode 100644 src/boost/tools/build/example/complex-testing/success.cpp create mode 100644 
src/boost/tools/build/example/customization/class.verbatim create mode 100644 src/boost/tools/build/example/customization/codegen.cpp create mode 100644 src/boost/tools/build/example/customization/inline_file.py create mode 100644 src/boost/tools/build/example/customization/jamroot.jam create mode 100644 src/boost/tools/build/example/customization/readme.txt create mode 100644 src/boost/tools/build/example/customization/t1.verbatim create mode 100644 src/boost/tools/build/example/customization/t2.verbatim create mode 100644 src/boost/tools/build/example/customization/usage.verbatim create mode 100644 src/boost/tools/build/example/customization/verbatim.jam create mode 100644 src/boost/tools/build/example/customization/verbatim.py create mode 100644 src/boost/tools/build/example/generate/README.txt create mode 100644 src/boost/tools/build/example/generate/a.cpp create mode 100644 src/boost/tools/build/example/generate/gen.jam create mode 100644 src/boost/tools/build/example/generate/gen.py create mode 100644 src/boost/tools/build/example/generate/jamroot.jam create mode 100644 src/boost/tools/build/example/generator/README.txt create mode 100644 src/boost/tools/build/example/generator/foo.gci create mode 100644 src/boost/tools/build/example/generator/jamroot.jam create mode 100644 src/boost/tools/build/example/generator/soap.jam create mode 100644 src/boost/tools/build/example/gettext/jamfile.jam create mode 100644 src/boost/tools/build/example/gettext/jamroot.jam create mode 100644 src/boost/tools/build/example/gettext/main.cpp create mode 100644 src/boost/tools/build/example/gettext/readme.txt create mode 100644 src/boost/tools/build/example/gettext/russian.po create mode 100644 src/boost/tools/build/example/hello/hello.cpp create mode 100644 src/boost/tools/build/example/hello/jamroot.jam create mode 100644 src/boost/tools/build/example/hello/readme.adoc create mode 100644 src/boost/tools/build/example/libraries/app/app.cpp create mode 100644 src/boost/tools/build/example/libraries/app/jamfile.jam create mode 100644 src/boost/tools/build/example/libraries/jamroot.jam create mode 100644 src/boost/tools/build/example/libraries/util/foo/bar.cpp create mode 100644 src/boost/tools/build/example/libraries/util/foo/include/lib1.h create mode 100644 src/boost/tools/build/example/libraries/util/foo/jamfile.jam create mode 100644 src/boost/tools/build/example/make/foo.py create mode 100644 src/boost/tools/build/example/make/jamroot.jam create mode 100644 src/boost/tools/build/example/make/main_cpp.pro create mode 100644 src/boost/tools/build/example/make/readme.txt create mode 100644 src/boost/tools/build/example/named-install-dirs/a create mode 100644 src/boost/tools/build/example/named-install-dirs/build.jam create mode 100644 src/boost/tools/build/example/named-install-dirs/x/build.jam create mode 100644 src/boost/tools/build/example/named-install-dirs/x/y/build.jam create mode 100644 src/boost/tools/build/example/named-install-dirs/x/z/build.jam create mode 100644 src/boost/tools/build/example/pch-multi/include/extra/meta.hpp create mode 100644 src/boost/tools/build/example/pch-multi/include/pch.hpp create mode 100644 src/boost/tools/build/example/pch-multi/include/std.hpp create mode 100644 src/boost/tools/build/example/pch-multi/jamroot.jam create mode 100644 src/boost/tools/build/example/pch-multi/source/hello_world.cpp create mode 100644 src/boost/tools/build/example/pch/include/pch.hpp create mode 100644 src/boost/tools/build/example/pch/jamroot.jam create mode 100644 
src/boost/tools/build/example/pch/source/hello_world.cpp create mode 100644 src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc create mode 100644 src/boost/tools/build/example/pkg-config/jamroot.jam create mode 100644 src/boost/tools/build/example/pkg-config/packages/debugged.pc create mode 100644 src/boost/tools/build/example/pkg-config/packages/foobar.pc create mode 100644 src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc create mode 100644 src/boost/tools/build/example/pkg-config/packages/mangled.pc create mode 100644 src/boost/tools/build/example/pkg-config/packages/versioned.pc create mode 100644 src/boost/tools/build/example/pkg-config/packages/with-var.pc create mode 100644 src/boost/tools/build/example/pkg-config/test1.cpp create mode 100644 src/boost/tools/build/example/pkg-config/test2.cpp create mode 100644 src/boost/tools/build/example/pkg-config/test3.cpp create mode 100644 src/boost/tools/build/example/pkg-config/test4.cpp create mode 100644 src/boost/tools/build/example/pkg-config/test5.cpp create mode 100644 src/boost/tools/build/example/python_modules/jamroot.jam create mode 100644 src/boost/tools/build/example/python_modules/python_helpers.jam create mode 100644 src/boost/tools/build/example/python_modules/python_helpers.py create mode 100644 src/boost/tools/build/example/python_modules/readme.txt create mode 100644 src/boost/tools/build/example/qt/README.txt create mode 100644 src/boost/tools/build/example/qt/qt3/hello/canvas.cpp create mode 100644 src/boost/tools/build/example/qt/qt3/hello/canvas.h create mode 100644 src/boost/tools/build/example/qt/qt3/hello/jamroot.jam create mode 100644 src/boost/tools/build/example/qt/qt3/hello/main.cpp create mode 100644 src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam create mode 100644 src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp create mode 100644 src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui create mode 100644 src/boost/tools/build/example/qt/qt3/uic/jamroot.jam create mode 100644 src/boost/tools/build/example/qt/qt3/uic/main.cpp create mode 100644 src/boost/tools/build/example/qt/qt4/hello/arrow.cpp create mode 100644 src/boost/tools/build/example/qt/qt4/hello/arrow.h create mode 100644 src/boost/tools/build/example/qt/qt4/hello/jamroot.jam create mode 100644 src/boost/tools/build/example/qt/qt4/hello/main.cpp create mode 100644 src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam create mode 100644 src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp create mode 100644 src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui create mode 100644 src/boost/tools/build/example/qt/qt4/uic/jamroot.jam create mode 100644 src/boost/tools/build/example/qt/qt4/uic/main.cpp create mode 100644 src/boost/tools/build/example/sanitizers/jamroot.jam create mode 100644 src/boost/tools/build/example/sanitizers/main.cpp create mode 100644 src/boost/tools/build/example/sanitizers/readme.adoc create mode 100644 src/boost/tools/build/example/sass/importing.scss create mode 100644 src/boost/tools/build/example/sass/include/foobar.scss create mode 100644 src/boost/tools/build/example/sass/jamroot.jam create mode 100644 src/boost/tools/build/example/sass/singleton.sass create mode 100644 src/boost/tools/build/example/sass/singleton.scss create mode 100644 src/boost/tools/build/example/site-config.jam create mode 100644 src/boost/tools/build/example/testing/compile-fail.cpp create mode 100644 src/boost/tools/build/example/testing/fail.cpp create mode 100644 
src/boost/tools/build/example/testing/jamroot.jam create mode 100644 src/boost/tools/build/example/testing/success.cpp create mode 100644 src/boost/tools/build/example/time/hello.cpp create mode 100644 src/boost/tools/build/example/time/jamroot.jam create mode 100644 src/boost/tools/build/example/time/readme.qbk create mode 100644 src/boost/tools/build/example/try_compile/Jamroot.jam create mode 100644 src/boost/tools/build/example/try_compile/foo.cpp create mode 100644 src/boost/tools/build/example/try_compile/main.cpp create mode 100644 src/boost/tools/build/example/user-config.jam create mode 100644 src/boost/tools/build/example/variant/a.cpp create mode 100644 src/boost/tools/build/example/variant/jamfile.jam create mode 100644 src/boost/tools/build/example/variant/jamroot.jam create mode 100644 src/boost/tools/build/example/variant/libs/jamfile.jam create mode 100644 src/boost/tools/build/example/variant/libs/l.cpp create mode 100644 src/boost/tools/build/example/variant/readme.qbk create mode 100644 src/boost/tools/build/index.html create mode 100644 src/boost/tools/build/notes/README.txt create mode 100644 src/boost/tools/build/notes/build_dir_option.txt create mode 100644 src/boost/tools/build/notes/changes.txt create mode 100644 src/boost/tools/build/notes/relative_source_paths.txt create mode 100644 src/boost/tools/build/notes/release_procedure.txt create mode 100644 src/boost/tools/build/src/__init__.py create mode 100644 src/boost/tools/build/src/bootstrap.jam create mode 100644 src/boost/tools/build/src/build-system.jam create mode 100644 src/boost/tools/build/src/build/__init__.py create mode 100644 src/boost/tools/build/src/build/ac.jam create mode 100644 src/boost/tools/build/src/build/alias.jam create mode 100755 src/boost/tools/build/src/build/alias.py create mode 100644 src/boost/tools/build/src/build/build-request.jam create mode 100644 src/boost/tools/build/src/build/build_request.py create mode 100644 src/boost/tools/build/src/build/config-cache.jam create mode 100644 src/boost/tools/build/src/build/configure.jam create mode 100644 src/boost/tools/build/src/build/configure.py create mode 100644 src/boost/tools/build/src/build/engine.py create mode 100644 src/boost/tools/build/src/build/errors.py create mode 100644 src/boost/tools/build/src/build/feature.jam create mode 100644 src/boost/tools/build/src/build/feature.py create mode 100644 src/boost/tools/build/src/build/generators.jam create mode 100644 src/boost/tools/build/src/build/generators.py create mode 100644 src/boost/tools/build/src/build/project.jam create mode 100644 src/boost/tools/build/src/build/project.py create mode 100644 src/boost/tools/build/src/build/property-set.jam create mode 100644 src/boost/tools/build/src/build/property.jam create mode 100644 src/boost/tools/build/src/build/property.py create mode 100644 src/boost/tools/build/src/build/property_set.py create mode 100644 src/boost/tools/build/src/build/readme.txt create mode 100644 src/boost/tools/build/src/build/scanner.jam create mode 100644 src/boost/tools/build/src/build/scanner.py create mode 100644 src/boost/tools/build/src/build/targets.jam create mode 100644 src/boost/tools/build/src/build/targets.py create mode 100644 src/boost/tools/build/src/build/toolset.jam create mode 100644 src/boost/tools/build/src/build/toolset.py create mode 100644 src/boost/tools/build/src/build/type.jam create mode 100644 src/boost/tools/build/src/build/type.py create mode 100644 src/boost/tools/build/src/build/version.jam create mode 100644 
src/boost/tools/build/src/build/version.py create mode 100644 src/boost/tools/build/src/build/virtual-target.jam create mode 100644 src/boost/tools/build/src/build/virtual_target.py create mode 100644 src/boost/tools/build/src/build_system.py create mode 100644 src/boost/tools/build/src/contrib/__init__.py create mode 100644 src/boost/tools/build/src/contrib/boost.jam create mode 100644 src/boost/tools/build/src/contrib/boost.py create mode 100644 src/boost/tools/build/src/contrib/modular.jam create mode 100644 src/boost/tools/build/src/contrib/tntnet.jam create mode 100644 src/boost/tools/build/src/contrib/wxFormBuilder.jam create mode 100644 src/boost/tools/build/src/engine/boost-jam.spec create mode 100644 src/boost/tools/build/src/engine/boost-no-inspect create mode 100644 src/boost/tools/build/src/engine/build.bat create mode 100755 src/boost/tools/build/src/engine/build.sh create mode 100644 src/boost/tools/build/src/engine/build_vms.com create mode 100644 src/boost/tools/build/src/engine/builtins.cpp create mode 100644 src/boost/tools/build/src/engine/builtins.h create mode 100644 src/boost/tools/build/src/engine/bump_version.py create mode 100644 src/boost/tools/build/src/engine/check_clib.cpp create mode 100644 src/boost/tools/build/src/engine/check_cxx11.cpp create mode 100644 src/boost/tools/build/src/engine/class.cpp create mode 100644 src/boost/tools/build/src/engine/class.h create mode 100644 src/boost/tools/build/src/engine/command.cpp create mode 100644 src/boost/tools/build/src/engine/command.h create mode 100644 src/boost/tools/build/src/engine/compile.cpp create mode 100644 src/boost/tools/build/src/engine/compile.h create mode 100644 src/boost/tools/build/src/engine/config.h create mode 100644 src/boost/tools/build/src/engine/config_toolset.bat create mode 100644 src/boost/tools/build/src/engine/constants.cpp create mode 100644 src/boost/tools/build/src/engine/constants.h create mode 100644 src/boost/tools/build/src/engine/cwd.cpp create mode 100644 src/boost/tools/build/src/engine/cwd.h create mode 100644 src/boost/tools/build/src/engine/debian/changelog create mode 100644 src/boost/tools/build/src/engine/debian/control create mode 100644 src/boost/tools/build/src/engine/debian/copyright create mode 100644 src/boost/tools/build/src/engine/debian/jam.man.sgml create mode 100755 src/boost/tools/build/src/engine/debian/rules create mode 100644 src/boost/tools/build/src/engine/debug.cpp create mode 100644 src/boost/tools/build/src/engine/debug.h create mode 100644 src/boost/tools/build/src/engine/debugger.cpp create mode 100644 src/boost/tools/build/src/engine/debugger.h create mode 100644 src/boost/tools/build/src/engine/execcmd.cpp create mode 100644 src/boost/tools/build/src/engine/execcmd.h create mode 100644 src/boost/tools/build/src/engine/execnt.cpp create mode 100644 src/boost/tools/build/src/engine/execunix.cpp create mode 100644 src/boost/tools/build/src/engine/execvms.cpp create mode 100644 src/boost/tools/build/src/engine/filent.cpp create mode 100644 src/boost/tools/build/src/engine/filesys.cpp create mode 100644 src/boost/tools/build/src/engine/filesys.h create mode 100644 src/boost/tools/build/src/engine/fileunix.cpp create mode 100644 src/boost/tools/build/src/engine/filevms.cpp create mode 100644 src/boost/tools/build/src/engine/frames.cpp create mode 100644 src/boost/tools/build/src/engine/frames.h create mode 100644 src/boost/tools/build/src/engine/function.cpp create mode 100644 src/boost/tools/build/src/engine/function.h create mode 100644 
src/boost/tools/build/src/engine/glob.cpp create mode 100644 src/boost/tools/build/src/engine/guess_toolset.bat create mode 100644 src/boost/tools/build/src/engine/hash.cpp create mode 100644 src/boost/tools/build/src/engine/hash.h create mode 100644 src/boost/tools/build/src/engine/hcache.cpp create mode 100644 src/boost/tools/build/src/engine/hcache.h create mode 100644 src/boost/tools/build/src/engine/hdrmacro.cpp create mode 100644 src/boost/tools/build/src/engine/hdrmacro.h create mode 100644 src/boost/tools/build/src/engine/headers.cpp create mode 100644 src/boost/tools/build/src/engine/headers.h create mode 100644 src/boost/tools/build/src/engine/jam.cpp create mode 100644 src/boost/tools/build/src/engine/jam.h create mode 100644 src/boost/tools/build/src/engine/jam_strings.cpp create mode 100644 src/boost/tools/build/src/engine/jam_strings.h create mode 100644 src/boost/tools/build/src/engine/jamgram.cpp create mode 100644 src/boost/tools/build/src/engine/jamgram.hpp create mode 100644 src/boost/tools/build/src/engine/jamgram.y create mode 100644 src/boost/tools/build/src/engine/jamgram.yy create mode 100644 src/boost/tools/build/src/engine/jamgramtab.h create mode 100644 src/boost/tools/build/src/engine/lists.cpp create mode 100644 src/boost/tools/build/src/engine/lists.h create mode 100644 src/boost/tools/build/src/engine/make.cpp create mode 100644 src/boost/tools/build/src/engine/make.h create mode 100644 src/boost/tools/build/src/engine/make1.cpp create mode 100644 src/boost/tools/build/src/engine/md5.cpp create mode 100644 src/boost/tools/build/src/engine/md5.h create mode 100644 src/boost/tools/build/src/engine/mem.cpp create mode 100644 src/boost/tools/build/src/engine/mem.h create mode 100644 src/boost/tools/build/src/engine/modules.cpp create mode 100644 src/boost/tools/build/src/engine/modules.h create mode 100644 src/boost/tools/build/src/engine/modules/order.cpp create mode 100644 src/boost/tools/build/src/engine/modules/path.cpp create mode 100644 src/boost/tools/build/src/engine/modules/property-set.cpp create mode 100644 src/boost/tools/build/src/engine/modules/readme.txt create mode 100644 src/boost/tools/build/src/engine/modules/regex.cpp create mode 100644 src/boost/tools/build/src/engine/modules/sequence.cpp create mode 100644 src/boost/tools/build/src/engine/modules/set.cpp create mode 100644 src/boost/tools/build/src/engine/native.cpp create mode 100644 src/boost/tools/build/src/engine/native.h create mode 100644 src/boost/tools/build/src/engine/object.cpp create mode 100644 src/boost/tools/build/src/engine/object.h create mode 100644 src/boost/tools/build/src/engine/option.cpp create mode 100644 src/boost/tools/build/src/engine/option.h create mode 100644 src/boost/tools/build/src/engine/output.cpp create mode 100644 src/boost/tools/build/src/engine/output.h create mode 100644 src/boost/tools/build/src/engine/parse.cpp create mode 100644 src/boost/tools/build/src/engine/parse.h create mode 100644 src/boost/tools/build/src/engine/patchlevel.h create mode 100644 src/boost/tools/build/src/engine/pathnt.cpp create mode 100644 src/boost/tools/build/src/engine/pathsys.cpp create mode 100644 src/boost/tools/build/src/engine/pathsys.h create mode 100644 src/boost/tools/build/src/engine/pathunix.cpp create mode 100644 src/boost/tools/build/src/engine/pathvms.cpp create mode 100644 src/boost/tools/build/src/engine/regexp.cpp create mode 100644 src/boost/tools/build/src/engine/regexp.h create mode 100644 src/boost/tools/build/src/engine/rules.cpp create mode 100644 
src/boost/tools/build/src/engine/rules.h create mode 100644 src/boost/tools/build/src/engine/scan.cpp create mode 100644 src/boost/tools/build/src/engine/scan.h create mode 100644 src/boost/tools/build/src/engine/search.cpp create mode 100644 src/boost/tools/build/src/engine/search.h create mode 100644 src/boost/tools/build/src/engine/startup.cpp create mode 100644 src/boost/tools/build/src/engine/startup.h create mode 100644 src/boost/tools/build/src/engine/subst.cpp create mode 100644 src/boost/tools/build/src/engine/subst.h create mode 100644 src/boost/tools/build/src/engine/sysinfo.cpp create mode 100644 src/boost/tools/build/src/engine/sysinfo.h create mode 100644 src/boost/tools/build/src/engine/timestamp.cpp create mode 100644 src/boost/tools/build/src/engine/timestamp.h create mode 100644 src/boost/tools/build/src/engine/variable.cpp create mode 100644 src/boost/tools/build/src/engine/variable.h create mode 100644 src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd create mode 100644 src/boost/tools/build/src/engine/w32_getreg.cpp create mode 100644 src/boost/tools/build/src/engine/yyacc.cpp create mode 100644 src/boost/tools/build/src/exceptions.py create mode 100644 src/boost/tools/build/src/kernel/boost-build.jam create mode 100644 src/boost/tools/build/src/kernel/bootstrap.jam create mode 100644 src/boost/tools/build/src/kernel/bootstrap.py create mode 100644 src/boost/tools/build/src/kernel/class.jam create mode 100644 src/boost/tools/build/src/kernel/errors.jam create mode 100644 src/boost/tools/build/src/kernel/modules.jam create mode 100644 src/boost/tools/build/src/manager.py create mode 100644 src/boost/tools/build/src/options/help.jam create mode 100644 src/boost/tools/build/src/tools/__init__.py create mode 100644 src/boost/tools/build/src/tools/acc.jam create mode 100644 src/boost/tools/build/src/tools/asciidoctor.jam create mode 100644 src/boost/tools/build/src/tools/auto-index.jam create mode 100644 src/boost/tools/build/src/tools/bison.jam create mode 100644 src/boost/tools/build/src/tools/boostbook-config.jam create mode 100644 src/boost/tools/build/src/tools/boostbook.jam create mode 100644 src/boost/tools/build/src/tools/borland.jam create mode 100644 src/boost/tools/build/src/tools/builtin.jam create mode 100644 src/boost/tools/build/src/tools/builtin.py create mode 100644 src/boost/tools/build/src/tools/bzip2.jam create mode 100644 src/boost/tools/build/src/tools/cast.jam create mode 100644 src/boost/tools/build/src/tools/cast.py create mode 100644 src/boost/tools/build/src/tools/clang-darwin.jam create mode 100644 src/boost/tools/build/src/tools/clang-linux.jam create mode 100644 src/boost/tools/build/src/tools/clang-vxworks.jam create mode 100644 src/boost/tools/build/src/tools/clang-win.jam create mode 100644 src/boost/tools/build/src/tools/clang.jam create mode 100644 src/boost/tools/build/src/tools/common.jam create mode 100644 src/boost/tools/build/src/tools/common.py create mode 100644 src/boost/tools/build/src/tools/como-linux.jam create mode 100644 src/boost/tools/build/src/tools/como-win.jam create mode 100644 src/boost/tools/build/src/tools/como.jam create mode 100644 src/boost/tools/build/src/tools/convert.jam create mode 100644 src/boost/tools/build/src/tools/cray.jam create mode 100644 src/boost/tools/build/src/tools/cw-config.jam create mode 100644 src/boost/tools/build/src/tools/cw.jam create mode 100644 src/boost/tools/build/src/tools/cygwin.jam create mode 100644 src/boost/tools/build/src/tools/darwin.jam create mode 100644 
src/boost/tools/build/src/tools/darwin.py create mode 100644 src/boost/tools/build/src/tools/diab.jam create mode 100644 src/boost/tools/build/src/tools/dmc.jam create mode 100644 src/boost/tools/build/src/tools/docutils.jam create mode 100644 src/boost/tools/build/src/tools/doxproc.py create mode 100644 src/boost/tools/build/src/tools/doxygen-config.jam create mode 100644 src/boost/tools/build/src/tools/doxygen.jam create mode 100644 src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile create mode 100644 src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp create mode 100644 src/boost/tools/build/src/tools/embarcadero.jam create mode 100644 src/boost/tools/build/src/tools/emscripten.jam create mode 100644 src/boost/tools/build/src/tools/features/__init_features__.jam create mode 100644 src/boost/tools/build/src/tools/features/address-model-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/allow-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/architecture-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/archiveflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/asmflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/build-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/cflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/compileflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/conditional-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/coverage-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/cxxabi-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/cxxflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/cxxstd-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/debug-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/define-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/dependency-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/dll-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/exception-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/fflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/file-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/find-lib-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/flags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/force-include-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/include-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/instruction-set-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/internal-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/library-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/link-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/linkflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/local-visibility-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/location-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/location-prefix-feature.jam create mode 100644 
src/boost/tools/build/src/tools/features/lto-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/name-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/objcflags-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/optimization-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/os-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/relevant-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/response-file-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/rtti-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/runtime-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/sanitizers-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/search-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/source-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/stdlib-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/strip-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/tag-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/threadapi-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/threading-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/toolset-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/translate-path-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/user-interface-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/variant-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/version-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/visibility-feature.jam create mode 100644 src/boost/tools/build/src/tools/features/warnings-feature.jam create mode 100644 src/boost/tools/build/src/tools/flags.jam create mode 100644 src/boost/tools/build/src/tools/fop.jam create mode 100644 src/boost/tools/build/src/tools/fortran.jam create mode 100644 src/boost/tools/build/src/tools/gcc.jam create mode 100644 src/boost/tools/build/src/tools/gcc.py create mode 100644 src/boost/tools/build/src/tools/generate.jam create mode 100644 src/boost/tools/build/src/tools/generators/__init_generators__.jam create mode 100644 src/boost/tools/build/src/tools/generators/archive-generator.jam create mode 100644 src/boost/tools/build/src/tools/generators/c-compiling-generator.jam create mode 100644 src/boost/tools/build/src/tools/generators/dummy-generator.jam create mode 100644 src/boost/tools/build/src/tools/generators/lib-generator.jam create mode 100644 src/boost/tools/build/src/tools/generators/linking-generator.jam create mode 100644 src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam create mode 100644 src/boost/tools/build/src/tools/generators/searched-lib-generator.jam create mode 100644 src/boost/tools/build/src/tools/gettext.jam create mode 100644 src/boost/tools/build/src/tools/gfortran.jam create mode 100644 src/boost/tools/build/src/tools/hp_cxx.jam create mode 100644 src/boost/tools/build/src/tools/hpfortran.jam create mode 100644 src/boost/tools/build/src/tools/ifort.jam create mode 100644 src/boost/tools/build/src/tools/intel-darwin.jam create mode 100644 src/boost/tools/build/src/tools/intel-linux.jam create mode 100644 src/boost/tools/build/src/tools/intel-vxworks.jam create mode 100644 src/boost/tools/build/src/tools/intel-win.jam create 
mode 100644 src/boost/tools/build/src/tools/intel.jam create mode 100644 src/boost/tools/build/src/tools/lex.jam create mode 100644 src/boost/tools/build/src/tools/libjpeg.jam create mode 100644 src/boost/tools/build/src/tools/libpng.jam create mode 100644 src/boost/tools/build/src/tools/libtiff.jam create mode 100644 src/boost/tools/build/src/tools/link.jam create mode 100644 src/boost/tools/build/src/tools/lzma.jam create mode 100644 src/boost/tools/build/src/tools/make.jam create mode 100644 src/boost/tools/build/src/tools/make.py create mode 100644 src/boost/tools/build/src/tools/mc.jam create mode 100644 src/boost/tools/build/src/tools/mc.py create mode 100644 src/boost/tools/build/src/tools/message.jam create mode 100644 src/boost/tools/build/src/tools/message.py create mode 100644 src/boost/tools/build/src/tools/midl.jam create mode 100644 src/boost/tools/build/src/tools/midl.py create mode 100644 src/boost/tools/build/src/tools/mipspro.jam create mode 100644 src/boost/tools/build/src/tools/mpi.jam create mode 100644 src/boost/tools/build/src/tools/msvc-config.jam create mode 100644 src/boost/tools/build/src/tools/msvc.jam create mode 100644 src/boost/tools/build/src/tools/msvc.py create mode 100644 src/boost/tools/build/src/tools/notfile.jam create mode 100644 src/boost/tools/build/src/tools/notfile.py create mode 100644 src/boost/tools/build/src/tools/openssl.jam create mode 100644 src/boost/tools/build/src/tools/package.jam create mode 100644 src/boost/tools/build/src/tools/package.py create mode 100644 src/boost/tools/build/src/tools/pathscale.jam create mode 100644 src/boost/tools/build/src/tools/pch.jam create mode 100644 src/boost/tools/build/src/tools/pch.py create mode 100644 src/boost/tools/build/src/tools/pgi.jam create mode 100644 src/boost/tools/build/src/tools/pkg-config.jam create mode 100644 src/boost/tools/build/src/tools/python-config.jam create mode 100644 src/boost/tools/build/src/tools/python.jam create mode 100644 src/boost/tools/build/src/tools/qcc.jam create mode 100644 src/boost/tools/build/src/tools/qt.jam create mode 100644 src/boost/tools/build/src/tools/qt3.jam create mode 100644 src/boost/tools/build/src/tools/qt4.jam create mode 100644 src/boost/tools/build/src/tools/qt5.jam create mode 100644 src/boost/tools/build/src/tools/quickbook-config.jam create mode 100644 src/boost/tools/build/src/tools/quickbook.jam create mode 100644 src/boost/tools/build/src/tools/rc.jam create mode 100644 src/boost/tools/build/src/tools/rc.py create mode 100644 src/boost/tools/build/src/tools/sass.jam create mode 100644 src/boost/tools/build/src/tools/saxonhe.jam create mode 100644 src/boost/tools/build/src/tools/stage.jam create mode 100644 src/boost/tools/build/src/tools/stage.py create mode 100644 src/boost/tools/build/src/tools/stlport.jam create mode 100644 src/boost/tools/build/src/tools/sun.jam create mode 100644 src/boost/tools/build/src/tools/symlink.jam create mode 100644 src/boost/tools/build/src/tools/symlink.py create mode 100644 src/boost/tools/build/src/tools/testing-aux.jam create mode 100644 src/boost/tools/build/src/tools/testing.jam create mode 100644 src/boost/tools/build/src/tools/testing.py create mode 100644 src/boost/tools/build/src/tools/types/__init__.py create mode 100644 src/boost/tools/build/src/tools/types/adoc.jam create mode 100644 src/boost/tools/build/src/tools/types/asm.jam create mode 100644 src/boost/tools/build/src/tools/types/asm.py create mode 100644 src/boost/tools/build/src/tools/types/cpp.jam create mode 100644 
src/boost/tools/build/src/tools/types/cpp.py create mode 100644 src/boost/tools/build/src/tools/types/css.jam create mode 100644 src/boost/tools/build/src/tools/types/docbook.jam create mode 100644 src/boost/tools/build/src/tools/types/exe.jam create mode 100644 src/boost/tools/build/src/tools/types/exe.py create mode 100644 src/boost/tools/build/src/tools/types/html.jam create mode 100644 src/boost/tools/build/src/tools/types/html.py create mode 100644 src/boost/tools/build/src/tools/types/lib.jam create mode 100644 src/boost/tools/build/src/tools/types/lib.py create mode 100644 src/boost/tools/build/src/tools/types/man.jam create mode 100644 src/boost/tools/build/src/tools/types/markdown.jam create mode 100644 src/boost/tools/build/src/tools/types/markdown.py create mode 100644 src/boost/tools/build/src/tools/types/obj.jam create mode 100644 src/boost/tools/build/src/tools/types/obj.py create mode 100644 src/boost/tools/build/src/tools/types/objc.jam create mode 100644 src/boost/tools/build/src/tools/types/pdf.jam create mode 100644 src/boost/tools/build/src/tools/types/preprocessed.jam create mode 100644 src/boost/tools/build/src/tools/types/preprocessed.py create mode 100644 src/boost/tools/build/src/tools/types/qt.jam create mode 100644 src/boost/tools/build/src/tools/types/register.jam create mode 100644 src/boost/tools/build/src/tools/types/rsp.jam create mode 100644 src/boost/tools/build/src/tools/types/rsp.py create mode 100644 src/boost/tools/build/src/tools/types/sass-type.jam create mode 100644 src/boost/tools/build/src/tools/types/xml.jam create mode 100644 src/boost/tools/build/src/tools/unix.jam create mode 100644 src/boost/tools/build/src/tools/unix.py create mode 100644 src/boost/tools/build/src/tools/vacpp.jam create mode 100644 src/boost/tools/build/src/tools/vmsdecc.jam create mode 100644 src/boost/tools/build/src/tools/whale.jam create mode 100644 src/boost/tools/build/src/tools/xlcpp.jam create mode 100644 src/boost/tools/build/src/tools/xlf.jam create mode 100644 src/boost/tools/build/src/tools/xsltproc-config.jam create mode 100644 src/boost/tools/build/src/tools/xsltproc.jam create mode 100644 src/boost/tools/build/src/tools/xsltproc/included.xsl create mode 100644 src/boost/tools/build/src/tools/xsltproc/test.xml create mode 100644 src/boost/tools/build/src/tools/xsltproc/test.xsl create mode 100644 src/boost/tools/build/src/tools/zlib.jam create mode 100644 src/boost/tools/build/src/tools/zstd.jam create mode 100644 src/boost/tools/build/src/util/__init__.py create mode 100644 src/boost/tools/build/src/util/assert.jam create mode 100644 src/boost/tools/build/src/util/container.jam create mode 100644 src/boost/tools/build/src/util/doc.jam create mode 100644 src/boost/tools/build/src/util/indirect.jam create mode 100644 src/boost/tools/build/src/util/indirect.py create mode 100644 src/boost/tools/build/src/util/logger.py create mode 100644 src/boost/tools/build/src/util/numbers.jam create mode 100644 src/boost/tools/build/src/util/option.jam create mode 100644 src/boost/tools/build/src/util/option.py create mode 100644 src/boost/tools/build/src/util/order.jam create mode 100644 src/boost/tools/build/src/util/order.py create mode 100644 src/boost/tools/build/src/util/os.jam create mode 100644 src/boost/tools/build/src/util/os_j.py create mode 100644 src/boost/tools/build/src/util/param.jam create mode 100644 src/boost/tools/build/src/util/path.jam create mode 100644 src/boost/tools/build/src/util/path.py create mode 100644 src/boost/tools/build/src/util/print.jam 
create mode 100644 src/boost/tools/build/src/util/regex.jam create mode 100644 src/boost/tools/build/src/util/regex.py create mode 100644 src/boost/tools/build/src/util/sequence.jam create mode 100644 src/boost/tools/build/src/util/sequence.py create mode 100644 src/boost/tools/build/src/util/set.jam create mode 100644 src/boost/tools/build/src/util/set.py create mode 100644 src/boost/tools/build/src/util/string.jam create mode 100644 src/boost/tools/build/src/util/utility.jam create mode 100644 src/boost/tools/build/src/util/utility.py create mode 100644 src/boost/tools/build/test/BoostBuild.py create mode 100644 src/boost/tools/build/test/Jamfile.jam create mode 100755 src/boost/tools/build/test/MockToolset.py create mode 100644 src/boost/tools/build/test/TestCmd.py create mode 100644 src/boost/tools/build/test/TestToolset.py create mode 100644 src/boost/tools/build/test/abs_workdir.py create mode 100644 src/boost/tools/build/test/absolute_sources.py create mode 100644 src/boost/tools/build/test/alias.py create mode 100644 src/boost/tools/build/test/alternatives.py create mode 100644 src/boost/tools/build/test/always.py create mode 100644 src/boost/tools/build/test/bad_dirname.py create mode 100644 src/boost/tools/build/test/boost-build.jam create mode 100644 src/boost/tools/build/test/boostbook.py create mode 100644 src/boost/tools/build/test/boostbook/a.hpp create mode 100644 src/boost/tools/build/test/boostbook/docs.xml create mode 100644 src/boost/tools/build/test/boostbook/jamroot.jam create mode 100644 src/boost/tools/build/test/build_dir.py create mode 100644 src/boost/tools/build/test/build_file.py create mode 100644 src/boost/tools/build/test/build_hooks.py create mode 100644 src/boost/tools/build/test/build_no.py create mode 100755 src/boost/tools/build/test/builtin_echo.py create mode 100755 src/boost/tools/build/test/builtin_exit.py create mode 100755 src/boost/tools/build/test/builtin_glob.py create mode 100644 src/boost/tools/build/test/builtin_glob_archive.py create mode 100755 src/boost/tools/build/test/builtin_readlink.py create mode 100755 src/boost/tools/build/test/builtin_split_by_characters.py create mode 100755 src/boost/tools/build/test/bzip2.py create mode 100644 src/boost/tools/build/test/c_file.py create mode 100644 src/boost/tools/build/test/chain.py create mode 100644 src/boost/tools/build/test/clean.py create mode 100644 src/boost/tools/build/test/cli_property_expansion.py create mode 100755 src/boost/tools/build/test/collect_debug_info.py create mode 100644 src/boost/tools/build/test/command_line_properties.py create mode 100644 src/boost/tools/build/test/composite.py create mode 100644 src/boost/tools/build/test/conditionals.py create mode 100644 src/boost/tools/build/test/conditionals2.py create mode 100644 src/boost/tools/build/test/conditionals3.py create mode 100644 src/boost/tools/build/test/conditionals4.py create mode 100755 src/boost/tools/build/test/conditionals_multiple.py create mode 100755 src/boost/tools/build/test/configuration.py create mode 100644 src/boost/tools/build/test/configure.py create mode 100755 src/boost/tools/build/test/copy_time.py create mode 100644 src/boost/tools/build/test/core-language/test.jam create mode 100755 src/boost/tools/build/test/core_action_output.py create mode 100755 src/boost/tools/build/test/core_action_status.py create mode 100755 src/boost/tools/build/test/core_actions_quietly.py create mode 100755 src/boost/tools/build/test/core_arguments.py create mode 100755 src/boost/tools/build/test/core_at_file.py 
create mode 100755 src/boost/tools/build/test/core_bindrule.py create mode 100644 src/boost/tools/build/test/core_d12.py create mode 100644 src/boost/tools/build/test/core_delete_module.py create mode 100644 src/boost/tools/build/test/core_dependencies.py create mode 100644 src/boost/tools/build/test/core_fail_expected.py create mode 100644 src/boost/tools/build/test/core_import_module.py create mode 100644 src/boost/tools/build/test/core_jamshell.py create mode 100755 src/boost/tools/build/test/core_language.py create mode 100644 src/boost/tools/build/test/core_modifiers.py create mode 100755 src/boost/tools/build/test/core_multifile_actions.py create mode 100755 src/boost/tools/build/test/core_nt_cmd_line.py create mode 100755 src/boost/tools/build/test/core_option_d2.py create mode 100755 src/boost/tools/build/test/core_option_l.py create mode 100755 src/boost/tools/build/test/core_option_n.py create mode 100755 src/boost/tools/build/test/core_parallel_actions.py create mode 100755 src/boost/tools/build/test/core_parallel_multifile_actions_1.py create mode 100755 src/boost/tools/build/test/core_parallel_multifile_actions_2.py create mode 100644 src/boost/tools/build/test/core_scanner.py create mode 100755 src/boost/tools/build/test/core_source_line_tracking.py create mode 100644 src/boost/tools/build/test/core_syntax_error_exit_status.py create mode 100644 src/boost/tools/build/test/core_typecheck.py create mode 100755 src/boost/tools/build/test/core_update_now.py create mode 100755 src/boost/tools/build/test/core_variables_in_actions.py create mode 100644 src/boost/tools/build/test/core_varnames.py create mode 100644 src/boost/tools/build/test/custom_generator.py create mode 100644 src/boost/tools/build/test/debugger-mi.py create mode 100644 src/boost/tools/build/test/debugger.py create mode 100644 src/boost/tools/build/test/default_build.py create mode 100644 src/boost/tools/build/test/default_features.py create mode 100755 src/boost/tools/build/test/default_toolset.py create mode 100644 src/boost/tools/build/test/dependency_property.py create mode 100644 src/boost/tools/build/test/dependency_test.py create mode 100644 src/boost/tools/build/test/disambiguation.py create mode 100644 src/boost/tools/build/test/dll_path.py create mode 100644 src/boost/tools/build/test/double_loading.py create mode 100644 src/boost/tools/build/test/duplicate.py create mode 100644 src/boost/tools/build/test/example_customization.py create mode 100644 src/boost/tools/build/test/example_gettext.py create mode 100644 src/boost/tools/build/test/example_libraries.py create mode 100644 src/boost/tools/build/test/example_make.py create mode 100644 src/boost/tools/build/test/example_qt4.py create mode 100755 src/boost/tools/build/test/exit_status.py create mode 100644 src/boost/tools/build/test/expansion.py create mode 100644 src/boost/tools/build/test/explicit.py create mode 100755 src/boost/tools/build/test/feature_cxxflags.py create mode 100644 src/boost/tools/build/test/feature_force_include.py create mode 100644 src/boost/tools/build/test/feature_implicit_dependency.py create mode 100644 src/boost/tools/build/test/feature_relevant.py create mode 100644 src/boost/tools/build/test/feature_suppress_import_lib.py create mode 100644 src/boost/tools/build/test/file_types.py create mode 100644 src/boost/tools/build/test/flags.py create mode 100644 src/boost/tools/build/test/gcc_runtime.py create mode 100755 src/boost/tools/build/test/generator_selection.py create mode 100644 
src/boost/tools/build/test/generators_test.py create mode 100644 src/boost/tools/build/test/implicit_dependency.py create mode 100644 src/boost/tools/build/test/indirect_conditional.py create mode 100644 src/boost/tools/build/test/inherit_toolset.py create mode 100755 src/boost/tools/build/test/inherited_dependency.py create mode 100644 src/boost/tools/build/test/inline.py create mode 100755 src/boost/tools/build/test/install_build_no.py create mode 100644 src/boost/tools/build/test/lang_objc.py create mode 100644 src/boost/tools/build/test/lib_source_property.py create mode 100755 src/boost/tools/build/test/lib_zlib.py create mode 100755 src/boost/tools/build/test/libjpeg.py create mode 100755 src/boost/tools/build/test/liblzma.py create mode 100755 src/boost/tools/build/test/libpng.py create mode 100644 src/boost/tools/build/test/library_chain.py create mode 100644 src/boost/tools/build/test/library_order.py create mode 100644 src/boost/tools/build/test/library_property.py create mode 100755 src/boost/tools/build/test/libtiff.py create mode 100755 src/boost/tools/build/test/libzstd.py create mode 100755 src/boost/tools/build/test/link.py create mode 100644 src/boost/tools/build/test/load_dir.py create mode 100644 src/boost/tools/build/test/load_order.py create mode 100644 src/boost/tools/build/test/loop.py create mode 100644 src/boost/tools/build/test/make_rule.py create mode 100755 src/boost/tools/build/test/message.py create mode 100644 src/boost/tools/build/test/module_actions.py create mode 100644 src/boost/tools/build/test/ndebug.py create mode 100644 src/boost/tools/build/test/no_type.py create mode 100644 src/boost/tools/build/test/notfile.py create mode 100644 src/boost/tools/build/test/ordered_include.py create mode 100644 src/boost/tools/build/test/ordered_properties.py create mode 100644 src/boost/tools/build/test/out_of_tree.py create mode 100644 src/boost/tools/build/test/package.py create mode 100644 src/boost/tools/build/test/param.py create mode 100644 src/boost/tools/build/test/path_features.py create mode 100644 src/boost/tools/build/test/pch.py create mode 100644 src/boost/tools/build/test/prebuilt.py create mode 100644 src/boost/tools/build/test/prebuilt/ext/a.cpp create mode 100644 src/boost/tools/build/test/prebuilt/ext/debug/a.h create mode 100644 src/boost/tools/build/test/prebuilt/ext/jamfile.jam create mode 100644 src/boost/tools/build/test/prebuilt/ext/jamfile2.jam create mode 100644 src/boost/tools/build/test/prebuilt/ext/jamfile3.jam create mode 100644 src/boost/tools/build/test/prebuilt/ext/jamroot.jam create mode 100644 src/boost/tools/build/test/prebuilt/ext/release/a.h create mode 100644 src/boost/tools/build/test/prebuilt/hello.cpp create mode 100644 src/boost/tools/build/test/prebuilt/jamfile.jam create mode 100644 src/boost/tools/build/test/prebuilt/jamroot.jam create mode 100755 src/boost/tools/build/test/preprocessor.py create mode 100644 src/boost/tools/build/test/print.py create mode 100644 src/boost/tools/build/test/project-test3/a.cpp create mode 100644 src/boost/tools/build/test/project-test3/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test3/jamroot.jam create mode 100644 src/boost/tools/build/test/project-test3/lib/b.cpp create mode 100644 src/boost/tools/build/test/project-test3/lib/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test3/lib2/c.cpp create mode 100644 src/boost/tools/build/test/project-test3/lib2/d.cpp create mode 100644 src/boost/tools/build/test/project-test3/lib2/helper/e.cpp create mode 
100644 src/boost/tools/build/test/project-test3/lib2/helper/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test3/lib2/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test3/lib3/f.cpp create mode 100644 src/boost/tools/build/test/project-test3/lib3/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test3/lib3/jamroot.jam create mode 100644 src/boost/tools/build/test/project-test3/readme.txt create mode 100644 src/boost/tools/build/test/project-test4/a.cpp create mode 100644 src/boost/tools/build/test/project-test4/a_gcc.cpp create mode 100644 src/boost/tools/build/test/project-test4/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test4/jamfile3.jam create mode 100644 src/boost/tools/build/test/project-test4/jamfile4.jam create mode 100644 src/boost/tools/build/test/project-test4/jamfile5.jam create mode 100644 src/boost/tools/build/test/project-test4/jamroot.jam create mode 100644 src/boost/tools/build/test/project-test4/lib/b.cpp create mode 100644 src/boost/tools/build/test/project-test4/lib/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test4/lib/jamfile1.jam create mode 100644 src/boost/tools/build/test/project-test4/lib/jamfile2.jam create mode 100644 src/boost/tools/build/test/project-test4/lib/jamfile3.jam create mode 100644 src/boost/tools/build/test/project-test4/lib2/jamfile.jam create mode 100644 src/boost/tools/build/test/project-test4/lib2/jamfile2.jam create mode 100644 src/boost/tools/build/test/project-test4/readme.txt create mode 100644 src/boost/tools/build/test/project_dependencies.py create mode 100644 src/boost/tools/build/test/project_glob.py create mode 100755 src/boost/tools/build/test/project_id.py create mode 100644 src/boost/tools/build/test/project_root_constants.py create mode 100644 src/boost/tools/build/test/project_root_rule.py create mode 100644 src/boost/tools/build/test/project_test3.py create mode 100644 src/boost/tools/build/test/project_test4.py create mode 100644 src/boost/tools/build/test/property_expansion.py create mode 100755 src/boost/tools/build/test/qt4.py create mode 100644 src/boost/tools/build/test/qt4/jamroot.jam create mode 100644 src/boost/tools/build/test/qt4/mock.cpp create mode 100644 src/boost/tools/build/test/qt4/mock.h create mode 100644 src/boost/tools/build/test/qt4/phonon.cpp create mode 100644 src/boost/tools/build/test/qt4/qt3support.cpp create mode 100644 src/boost/tools/build/test/qt4/qtassistant.cpp create mode 100644 src/boost/tools/build/test/qt4/qtcore.cpp create mode 100644 src/boost/tools/build/test/qt4/qtcorefail.cpp create mode 100644 src/boost/tools/build/test/qt4/qtdeclarative.cpp create mode 100644 src/boost/tools/build/test/qt4/qtgui.cpp create mode 100644 src/boost/tools/build/test/qt4/qthelp.cpp create mode 100644 src/boost/tools/build/test/qt4/qtmultimedia.cpp create mode 100644 src/boost/tools/build/test/qt4/qtnetwork.cpp create mode 100644 src/boost/tools/build/test/qt4/qtscript.cpp create mode 100644 src/boost/tools/build/test/qt4/qtscripttools.cpp create mode 100644 src/boost/tools/build/test/qt4/qtsql.cpp create mode 100644 src/boost/tools/build/test/qt4/qtsvg.cpp create mode 100644 src/boost/tools/build/test/qt4/qttest.cpp create mode 100644 src/boost/tools/build/test/qt4/qtwebkit.cpp create mode 100644 src/boost/tools/build/test/qt4/qtxml.cpp create mode 100644 src/boost/tools/build/test/qt4/qtxmlpatterns.cpp create mode 100644 src/boost/tools/build/test/qt4/rcc.cpp create mode 100644 
src/boost/tools/build/test/qt4/rcc.qrc create mode 100755 src/boost/tools/build/test/qt5.py create mode 100644 src/boost/tools/build/test/qt5/initialization.cpp create mode 100644 src/boost/tools/build/test/qt5/jamroot.jam create mode 100644 src/boost/tools/build/test/qt5/mock.cpp create mode 100644 src/boost/tools/build/test/qt5/mock.h create mode 100644 src/boost/tools/build/test/qt5/qt3dcore.cpp create mode 100644 src/boost/tools/build/test/qt5/qt3dinput.cpp create mode 100644 src/boost/tools/build/test/qt5/qt3dlogic.cpp create mode 100644 src/boost/tools/build/test/qt5/qt3drender.cpp create mode 100644 src/boost/tools/build/test/qt5/qtassistant.cpp create mode 100644 src/boost/tools/build/test/qt5/qtbluetooth.cpp create mode 100644 src/boost/tools/build/test/qt5/qtcharts.cpp create mode 100644 src/boost/tools/build/test/qt5/qtcore.cpp create mode 100644 src/boost/tools/build/test/qt5/qtcorefail.cpp create mode 100644 src/boost/tools/build/test/qt5/qtdatavisualization.cpp create mode 100644 src/boost/tools/build/test/qt5/qtdeclarative.cpp create mode 100644 src/boost/tools/build/test/qt5/qtgamepad.cpp create mode 100644 src/boost/tools/build/test/qt5/qthelp.cpp create mode 100644 src/boost/tools/build/test/qt5/qtlocation.cpp create mode 100644 src/boost/tools/build/test/qt5/qtmultimedia.cpp create mode 100644 src/boost/tools/build/test/qt5/qtnetwork.cpp create mode 100644 src/boost/tools/build/test/qt5/qtnfc.cpp create mode 100644 src/boost/tools/build/test/qt5/qtpositioning.cpp create mode 100644 src/boost/tools/build/test/qt5/qtpurchasing.cpp create mode 100644 src/boost/tools/build/test/qt5/qtquick.cpp create mode 100644 src/boost/tools/build/test/qt5/qtquick.qml create mode 100644 src/boost/tools/build/test/qt5/qtscript.cpp create mode 100644 src/boost/tools/build/test/qt5/qtscripttools.cpp create mode 100644 src/boost/tools/build/test/qt5/qtscxml.cpp create mode 100644 src/boost/tools/build/test/qt5/qtserialbus.cpp create mode 100644 src/boost/tools/build/test/qt5/qtserialport.cpp create mode 100644 src/boost/tools/build/test/qt5/qtsql.cpp create mode 100644 src/boost/tools/build/test/qt5/qtsvg.cpp create mode 100644 src/boost/tools/build/test/qt5/qttest.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebchannel.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebengine.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebenginewidgets.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebkit.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebkitwidgets.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebsocket.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebsockets.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwebview.cpp create mode 100644 src/boost/tools/build/test/qt5/qtwidgets.cpp create mode 100644 src/boost/tools/build/test/qt5/qtxml.cpp create mode 100644 src/boost/tools/build/test/qt5/qtxmlpatterns.cpp create mode 100644 src/boost/tools/build/test/qt5/rcc.cpp create mode 100644 src/boost/tools/build/test/qt5/rcc.qrc create mode 100644 src/boost/tools/build/test/readme.txt create mode 100644 src/boost/tools/build/test/rebuilds.py create mode 100644 src/boost/tools/build/test/relative_sources.py create mode 100644 src/boost/tools/build/test/remove_requirement.py create mode 100755 src/boost/tools/build/test/rescan_header.py create mode 100644 src/boost/tools/build/test/resolution.py create mode 100644 src/boost/tools/build/test/results-python.txt create mode 100644 src/boost/tools/build/test/rootless.py create 
mode 100644 src/boost/tools/build/test/rootless/test1/sub_root/a.cpp create mode 100644 src/boost/tools/build/test/rootless/test1/sub_root/jamfile.jam create mode 100644 src/boost/tools/build/test/rootless/test2/sub_root/a.cpp create mode 100644 src/boost/tools/build/test/rootless/test2/sub_root/jamfile.jam create mode 100644 src/boost/tools/build/test/rootless/test3/jamfile.jam create mode 100644 src/boost/tools/build/test/rootless/test3/sub/inner/a.cpp create mode 100644 src/boost/tools/build/test/rootless/test3/sub/inner/jamfile.jam create mode 100755 src/boost/tools/build/test/scanner_causing_rebuilds.py create mode 100644 src/boost/tools/build/test/searched_lib.py create mode 100644 src/boost/tools/build/test/skipping.py create mode 100755 src/boost/tools/build/test/sort_rule.py create mode 100644 src/boost/tools/build/test/source_locations.py create mode 100755 src/boost/tools/build/test/source_order.py create mode 100755 src/boost/tools/build/test/space_in_path.py create mode 100644 src/boost/tools/build/test/stage.py create mode 100644 src/boost/tools/build/test/standalone.py create mode 100644 src/boost/tools/build/test/startup/boost-root/boost-build.jam create mode 100644 src/boost/tools/build/test/startup/boost-root/build/boost-build.jam create mode 100644 src/boost/tools/build/test/startup/boost-root/build/bootstrap.jam create mode 100644 src/boost/tools/build/test/startup/bootstrap-env/boost-build.jam create mode 100644 src/boost/tools/build/test/startup/bootstrap-explicit/boost-build.jam create mode 100644 src/boost/tools/build/test/startup/bootstrap-implicit/readme.txt create mode 100644 src/boost/tools/build/test/startup/no-bootstrap1/boost-build.jam create mode 100644 src/boost/tools/build/test/startup/no-bootstrap1/subdir/readme.txt create mode 100644 src/boost/tools/build/test/startup/no-bootstrap2/boost-build.jam create mode 100644 src/boost/tools/build/test/startup/no-bootstrap3/boost-build.jam create mode 100644 src/boost/tools/build/test/startup_v2.py create mode 100755 src/boost/tools/build/test/static_and_shared_library.py create mode 100644 src/boost/tools/build/test/suffix.py create mode 100644 src/boost/tools/build/test/symlink.py create mode 100644 src/boost/tools/build/test/tag.py create mode 100644 src/boost/tools/build/test/template.py create mode 100644 src/boost/tools/build/test/test-config-example.jam create mode 100644 src/boost/tools/build/test/test.jam create mode 100644 src/boost/tools/build/test/test1.py create mode 100644 src/boost/tools/build/test/test2.py create mode 100644 src/boost/tools/build/test/test2/foo.cpp create mode 100644 src/boost/tools/build/test/test2/jamroot.jam create mode 100644 src/boost/tools/build/test/test_all.py create mode 100755 src/boost/tools/build/test/test_rc.py create mode 100644 src/boost/tools/build/test/test_system.html create mode 100755 src/boost/tools/build/test/testing.py create mode 100644 src/boost/tools/build/test/timedata.py create mode 100644 src/boost/tools/build/test/toolset-mock/Jamroot.jam create mode 100644 src/boost/tools/build/test/toolset-mock/lib.cpp create mode 100644 src/boost/tools/build/test/toolset-mock/main.cpp create mode 100644 src/boost/tools/build/test/toolset-mock/project-config.jam create mode 100644 src/boost/tools/build/test/toolset-mock/src/Jamroot.jam create mode 100644 src/boost/tools/build/test/toolset-mock/src/MockProgram.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/ar.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/clang-3.9.0-darwin.py 
create mode 100644 src/boost/tools/build/test/toolset-mock/src/clang-linux-3.9.0.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/clang-vxworks-4.0.1.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/darwin-4.2.1.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/gcc-4.2.1-darwin.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/gcc-4.8.3-linux.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/intel-darwin-10.2.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/ld.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/libtool.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/linkx.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/mock-program.cpp create mode 100644 src/boost/tools/build/test/toolset-mock/src/msvc-14.3.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/project-config.jam create mode 100644 src/boost/tools/build/test/toolset-mock/src/strip.py create mode 100644 src/boost/tools/build/test/toolset-mock/src/verify.py create mode 100644 src/boost/tools/build/test/toolset_clang_darwin.py create mode 100644 src/boost/tools/build/test/toolset_clang_linux.py create mode 100644 src/boost/tools/build/test/toolset_clang_vxworks.py create mode 100644 src/boost/tools/build/test/toolset_darwin.py create mode 100644 src/boost/tools/build/test/toolset_defaults.py create mode 100644 src/boost/tools/build/test/toolset_gcc.py create mode 100644 src/boost/tools/build/test/toolset_intel_darwin.py create mode 100644 src/boost/tools/build/test/toolset_msvc.py create mode 100644 src/boost/tools/build/test/toolset_requirements.py create mode 100644 src/boost/tools/build/test/transitive_skip.py create mode 100644 src/boost/tools/build/test/tree.py create mode 100644 src/boost/tools/build/test/unit_test.py create mode 100644 src/boost/tools/build/test/unit_tests.py create mode 100644 src/boost/tools/build/test/unused.py create mode 100644 src/boost/tools/build/test/use_requirements.py create mode 100644 src/boost/tools/build/test/using.py create mode 100644 src/boost/tools/build/test/wrapper.py create mode 100644 src/boost/tools/build/test/wrong_project.py (limited to 'src/boost/tools/build') diff --git a/src/boost/tools/build/CONTRIBUTING.adoc b/src/boost/tools/build/CONTRIBUTING.adoc new file mode 100644 index 000000000..da590db5e --- /dev/null +++ b/src/boost/tools/build/CONTRIBUTING.adoc @@ -0,0 +1,174 @@ +// Copyright 2019-2020 Rene Rivera +// Copyright 2003, 2006 Vladimir Prus +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + += B2 contributor guidelines + +B2 is an open-source project. This means that we welcome and appreciate +all contributions -- be it ideas, bug reports, or patches. This document +contains guidelines which helps to assure that development goes on smoothly, and +changes are made quickly. + +The guidelines are not mandatory, and you can decide for yourself which one to +follow. But note, the 10 mins that you spare writing a comment, for example, +might lead to significantly longer delay for everyone. + +== Additional resources include + +=== The issue tracker + +https://github.com/bfgroup/b2/issues + +=== Discussion forums + +https://github.com/bfgroup/b2/discussions + +== BUGS and PATCHES + +Both bugs and patches can be submitted to the GitHub tracker. 
+
+When reporting a bug, please try to provide the following information:
+
+* What you did.
+  * A minimal reproducible test case is very much appreciated.
+  * A shell script with some annotations is much better than a verbose
+    description of the problem.
+  * A regression test is best (see test/test_system.html).
+
+* What you got.
+
+* What you expected.
+
+* What version of B2 you used. If possible, please try to test with the
+  current state of the main branch.
+
+When submitting a patch, please:
+
+* Make a single patch for a single logical change
+* Follow the policies and coding conventions below
+* Send patches as pull requests to the main branch
+* Provide a good PR message together with the patch
+
+The purpose of the message is to communicate what was changed, and *why*.
+Without a good message, you might spend a lot of time later wondering where
+a strange piece of code came from and why it was necessary.
+
+A good message mentions each changed file and each rule/method, saying
+what happened to it, and why. Consider the following log message:
+
+----
+Better direct request handling.
+
+* new/build-request.jam
+  (directly-requested-properties-adjuster): Redo.
+
+* new/targets.jam
+  (main-target.generate-really): Adjust properties here.
+
+* new/virtual-target.jam
+  (register-actual-name): New rule.
+  (virtual-target.actualize-no-scanner): Call the above, to detect bugs
+  where two virtual targets correspond to one Jam target name.
+----
+
+The messages for the last two files are good. They tell what was changed.
+The change to the first file is clearly under-commented.
+
+It's okay to use terse messages for uninteresting changes, like ones induced
+by interface changes elsewhere.
+
+== POLICIES
+
+=== Testing
+
+All serious changes must be tested. New rules must be tested in the module where
+they are declared. The test system (link:test/test_system.html[test/test_system.html])
+should be used to verify user-observable behavior.
+
+=== Documentation
+
+It turns out that it's hard to have too many comments, but it's easy to have too
+few. Please prepend each rule with a comment saying what the rule does and
+what its arguments mean. Stop for a minute and consider whether the comment
+makes sense to anybody else and completely describes what the rule does.
+Generic phrases like "adjusts properties" are really not enough.
+
+When applicable, make changes to the user documentation as well.
+
+== CODING CONVENTIONS
+
+1. All names of rules and variables are lowercase with "-" to separate
+   words.
++
+----
+rule call-me-ishmael ( ) ...
+----
+
+2. Names with dots in them are "intended globals". Ordinary globals use a
+   dot prefix:
++
+----
+.foobar
+$(.foobar)
+----
+
+3. Pseudofunctions or associations use a "." between the value and the
+   attribute name:
++
+----
+$(argument).name = hello ;
+$($(argument).name)
+----
+
+4. Class attribute names are prefixed with "self.":
++
+----
+self.x
+$(self.x)
+----
+
+5. Builtin rules are called via their ALL_UPPERCASE_NAMES:
++
+----
+DEPENDS $(target) : $(sources) ;
+----
+
+6. Opening and closing braces go on separate lines:
++
+----
+if $(a)
+{
+    #
+}
+else
+{
+    #
+}
+----
+
+== ENGINE
+
+Developing the `b2` engine, the C++ part, requires two steps to be
+effective: building the "stable" engine, and developing the
+"in-progress" engine.
+
+What counts as the "stable" engine is up to you. It simply refers to a build
+of the engine that you know is in a good working state. When the source is at
+a stable point, you can run `bootstrap.sh/bat` from the root.
That will +create the `b2` executable at the root. You can then use this version to run +regular B2 builds as needed both within the B2 tree and in other projects. + +The "in-progress" engine is whatever build you happen to be testing at the +moment. There are two ways to build this be engine. You can either +(a) run `b2 b2` at the root, or (b) run `build.sh/bat` in `src/engine`. + +Using (a) will place, by default, a fully debuggable `b2` in the `.build` +directories. You can run that one from a debugger with full symbols and +stepping features. This should be the first choice in developing in the +engine. + +After using (a) to implement functionality you can use (b) to fully test +that functionality. The engine built from (b) is fully optimized and +is the one used, by default, by the test system when running in the `test` +directory. Before submitting patches it's required to build this way and +run the tests in at least one toolset version (but preferably at least two). diff --git a/src/boost/tools/build/Jamroot.jam b/src/boost/tools/build/Jamroot.jam new file mode 100644 index 000000000..cd6a49e86 --- /dev/null +++ b/src/boost/tools/build/Jamroot.jam @@ -0,0 +1,267 @@ +# Copyright 2019 Rene Rivera +# Copyright 2017 Steven Watanabe +# Copyright 2016 Vladimir Prus +# Copyright 2017 Edward Diener +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import bison ; +import errors ; +import feature ; +import indirect ; +import os ; +# import package ; +import path ; +import set ; +import stage : add-install-dir ; +import toolset ; +import type ; +import virtual-target ; + +path-constant SELF : . ; + +project b2 + : build-dir .build + : requirements + 11 + msvc:_CRT_SECURE_NO_WARNINGS=1 + msvc:_CRT_NONSTDC_NO_DEPRECATE=1 + ; + +#| +Build the engine and its dependencies outside of the simple core build scripts. +This allows us to keep the core build scripts as simple as possible. And lets +us support more functionality for development of the engine core. +|# + +#| +Define custom yyacc tool. +|# + +feature.feature yyacc : : dependency free ; +toolset.flags yyacc TOOL ; + +exe yyacc + : src/engine/yyacc.cpp + : ; +explicit yyacc ; + +rule yyacc-gen ( project name : property-set : sources * ) +{ + local relevant = [ toolset.relevant $(__name__).yyacc ] ; + local a = [ new action $(sources[1]) : $(__name__).yyacc : [ $(property-set).add $(relevant) ] ] ; + local targets ; + for local n in $(name:S=).y $(name:S=)tab.h + { + targets += [ virtual-target.register + [ new file-target $(n) exact : [ type.type $(n) ] + : $(project) : $(a) + ] ] ; + } + return $(targets) ; +} + +actions yyacc bind TOOL +{ + "$(TOOL)" "$(<)" "$(>)" +} + +generate jamgram.y + : src/engine/jamgram.yy + : @yyacc-gen + src/engine + yyacc yyacc + ; +explicit jamgram.y ; + +#| +Define grammar translation with Bison. +|# + +BISON = [ os.environ BISON ] ; +BISON ?= bison ; +local BISON_IN_PATH = [ path.glob [ os.executable-path ] : $(BISON[1]) $(BISON[1]).* ] ; + +rule grammar ( target : source : properties * ) +{ + # LOCATE on $(target) = $(source:D) ; + BISON on $(target) = $(BISON) ; +} + +actions grammar +{ + "$(BISON)" --yacc --defines -o "$(<[1])" "$(>)" +} + +if $(BISON_IN_PATH) +{ + make jamgram.cpp + : src/engine/jamgram.y + : @grammar + : jamgram.y + src/engine ; +} +else +{ + errors.warning "Bison generator program '$(BISON:J= )' not found. Skipping grammar build." 
; + alias jamgram.cpp + : src/engine/jamgram.cpp ; +} +explicit jamgram.cpp ; + +#| +Define the b2 executable. Sources are based on platform. +TODO: Make platform specific source be no-ops when not needed. +|# + +local python-exe = [ MATCH --with-python=(.*) : [ modules.peek : ARGV ] ] ; +local python-include ; +local python-ldlib ; +if $(python-exe) +{ + python-include = [ SHELL + "$(python-exe) -c \"import sysconfig; print(sysconfig.get_path('include'));\"" + : strip-eol ] ; + python-libdir = [ SHELL + "$(python-exe) -c \"import sysconfig; import os.path; print(sysconfig.get_config_var('LIBDIR'));\"" + : strip-eol ] ; + python-ldlib = [ MATCH ".*(python.*)" : [ SHELL + "$(python-exe) -c \"import sysconfig; import os.path; print(sysconfig.get_config_var('LIBRARY'));\"" + : strip-eol ] ] ; + python-ldlib = $(python-ldlib:S=) ; + + lib python + : + : $(python-ldlib) $(python-libdir) + : + : $(python-include) HAVE_PYTHON ; +} +else +{ + alias python ; +} + +obj jamgram.obj + : jamgram.cpp + : gcc:-Wno-free-nonheap-object + ; +explicit jamgram.obj ; + +local b2_src = + [ glob src/engine/*.cpp src/engine/modules/*.cpp : + src/engine/*nt.cpp src/engine/*unix.cpp src/engine/*vms.cpp + src/engine/yyacc.cpp src/engine/mkjambase.cpp + src/engine/check_*.cpp + src/engine/jamgram.cpp + ] ; +local b2_src_nt = [ glob src/engine/*nt.cpp ] ; +local b2_src_unix = [ glob src/engine/*unix.cpp ] ; +local b2_src_vms = [ glob src/engine/*vms.cpp ] ; +local unix_os = [ set.difference [ feature.values ] : windows vms ] ; + +exe b2 + : $(b2_src) + jamgram.obj + python + : windows:$(b2_src_nt) + vms:$(b2_src_vms) + $(unix_os):$(b2_src_unix) + msvc:kernel32 + msvc:advapi32 + msvc:user32 + ; +explicit b2 ; + +#| +Installation of the engine, build, and example files. +|# + +feature.feature b2-install-layout : standard portable : incidental propagated ; + +add-install-dir b2prefix-standard : : prefix ; +add-install-dir b2bindir-standard : : bindir ; +add-install-dir b2coredir-standard : boost-build/src : datarootdir ; +add-install-dir b2examplesdir-standard : boost-build/examples : datarootdir ; + +add-install-dir b2prefix-portable : : prefix ; +add-install-dir b2bindir-portable : : b2prefix-portable ; +add-install-dir b2coredir-portable : .b2 : b2prefix-portable ; +add-install-dir b2examplesdir-portable : .b2/examples : b2prefix-portable ; + +local ext = "" ; +if [ os.on-windows ] || [ os.on-vms ] +{ + ext = ".exe" ; +} + +install b2-engine + : $(SELF)/src/engine/b2$(ext) + : standard:(b2bindir-standard) + portable:(b2bindir-portable) + ; +explicit b2-engine ; + +local examples ; +for local e in [ glob-tree-ex $(SELF)/example : * : . 
.svn ] +{ + if [ CHECK_IF_FILE [ path.native $(e) ] ] + { + examples += $(e) ; + } +} +install b2-examples + : # What to install + $(examples) + : # What is the root of the directory + example + # Which subdir of $prefix/share + standard:(b2examplesdir-standard) + portable:(b2examplesdir-portable) + ; +explicit b2-examples ; + +local .core-sources = + $(SELF)/src/build-system.jam + [ path.glob-tree $(SELF)/src/build : *.jam ] + [ path.glob-tree $(SELF)/src/contrib : *.jam ] + [ path.glob-tree $(SELF)/src/kernel : *.jam ] + [ path.glob-tree $(SELF)/src/options : *.jam ] + [ path.glob-tree $(SELF)/src/util : *.jam ] + [ path.glob-tree $(SELF)/src/tools : *.jam *.xml *.xsl *.doxyfile *.hpp doxproc.py ] + ; +if $(python-exe) +{ + .core-sources += + [ path.glob-tree $(SELF)/src/build : *.py ] + [ path.glob-tree $(SELF)/src/contrib : *.py ] + [ path.glob-tree $(SELF)/src/kernel : *.py ] + [ path.glob-tree $(SELF)/src/options : *.py ] + [ path.glob-tree $(SELF)/src/util : *.py ] + [ path.glob-tree $(SELF)/src/tools : *.py : doxproc.py ] + ; +} + +install b2-core + : # What to install + $(.core-sources) + : # What is the root of the directory + src + # Which subdir of $prefix/share + standard:(b2coredir-standard) + portable:(b2coredir-portable) + ; +explicit b2-core ; + +#| +Only install example files when requested to avoid bloating install footprint. +|# +if --with-examples in [ modules.peek : ARGV ] +{ + alias install : b2-engine b2-core b2-examples ; +} +else +{ + alias install : b2-engine b2-core ; +} +explicit install ; diff --git a/src/boost/tools/build/LICENSE.txt b/src/boost/tools/build/LICENSE.txt new file mode 100644 index 000000000..36b7cd93c --- /dev/null +++ b/src/boost/tools/build/LICENSE.txt @@ -0,0 +1,23 @@ +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/src/boost/tools/build/README.adoc b/src/boost/tools/build/README.adoc new file mode 100644 index 000000000..25bb454ad --- /dev/null +++ b/src/boost/tools/build/README.adoc @@ -0,0 +1,41 @@ += B2 + +B2 makes it easy to build C++ projects, everywhere. 
+ +image:https://img.shields.io/badge/license-BSL%201.0-blue.svg["Boost Software License 1.0", link="LICENSE.txt"] +image:https://img.shields.io/github/languages/code-size/bfgroup/b2.svg["GitHub code size in bytes", link="https://github.com/bfgroup/b2"] + +== License + +Distributed under the Boost Software License, Version 1.0. (See accompanying +file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +== Testing + +Continuously tested on: + +* FreeBSD Clang 7, 8, 9, 10, 11, 12 +* FreeBSD GCC 8, 9, 10, 11 +* Linux Clang 4, 5, 6, 7, 8, 9, 10, 11, 12, 13 +* Linux GCC 5, 6, 7, 8, 9, 10, 11 +* macOS Xcode 11.2.1, 11.3, 11.3.1, 11.4.1, 11.5, 11.6, 11.7, 12.0.1, 12.1.1, 12.2, 12.3, 12.4, 12.5.1, 13.0, 13.1, 13.2.1 +* Windows MinGW 8.1.0 +* Windows VS 2013, 2015, 2017, 2019, 2022 +* Cygwin 3.1.7 x64 +* Ubuntu 20.04 GCC 9 (armhf, arm64, ppc64el) +* Debian 11 GCC 10 (armhf) + +image:https://img.shields.io/azure-devops/build/bfgroup/3a4e7a7e-c1b4-4e2f-9199-f52918ea06c6/3/main.svg?label=main&logo=azuredevops["Linux/Windows/macOS: main", link="https://dev.azure.com/bfgroup/B2"] +image:https://img.shields.io/appveyor/build/bfgroup/b2?logo=appveyor["Windows", link="https://ci.appveyor.com/project/bfgroup/b2"] +image:https://api.cirrus-ci.com/github/bfgroup/b2.svg?branch=release["FreeBSD: release", link="https://cirrus-ci.com/github/bfgroup/b2/release"] +image:https://api.cirrus-ci.com/github/bfgroup/b2.svg?branch=main["FreeBSD: main", link="https://cirrus-ci.com/github/bfgroup/b2/main"] + +NOTE: A C+\+11 capable compiler is needed to build the `b2` engine. But using +the `b2` engine and build system does not require C++11. + +== More + +See the link:https://www.bfgroup.xyz/b2/[website] for more information. + +See the link:CONTRIBUTING.adoc[guidelines for contributing] if you would like +to get involved in the development. diff --git a/src/boost/tools/build/azure-pipelines.yml b/src/boost/tools/build/azure-pipelines.yml new file mode 100644 index 000000000..66e888eff --- /dev/null +++ b/src/boost/tools/build/azure-pipelines.yml @@ -0,0 +1,427 @@ +# Use, modification, and distribution are +# subject to the Boost Software License, Version 1.0. (See accompanying +# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) +# +# Copyright Rene Ferdinand Rivera Morell 2015-2021. 
+ +trigger: + branches: + include: + - main + - release + - feature/* + paths: + exclude: + - .circleci/* + - .cirrus.yml + - .drone.star + - .github/workflows/* + - .semaphore/* + - .travis.yml + - appveyor.yml +pr: + branches: + include: + - main + paths: + exclude: + - appveyor.yml + +variables: +- { name: linux_latest_vm, value: 'ubuntu-20.04' } +- { name: linux_latest_os, value: 'focal' } +- { name: windows_latest_vm, value: 'windows-2019' } +- { name: clang_latest, value: '13' } +- { name: gcc_latest, value: '11' } +- { name: vc_latest, value: 'vc142' } +- { name: vs_latest, value: '2019' } +- { name: xc_latest, value: '13.2.1' } +- { name: macos_latest_vm, value: 'macOS-11' } + + +stages: + +- stage: Core + jobs: + + - job: 'Linux_Default_Build' + strategy: + matrix: + Clang ${{variables.clang_latest}}: {TOOLSET: "clang-${{variables.clang_latest}}", PACKAGES: "clang-${{variables.clang_latest}}", LLVM_OS: "${{variables.linux_latest_os}}", LLVM_VER: "${{variables.clang_latest}}", VM_IMAGE: "${{variables.linux_latest_vm}}"} + pool: + vmImage: $(VM_IMAGE) + steps: + - bash: | + set -e + uname -a + ./.ci/linux-cxx-install.sh + displayName: Install + - bash: | + set -e + ./src/engine/build.sh --verbose + displayName: Build + + - job: 'Linux_Clang_Only_Build' + strategy: + matrix: + Clang ${{variables.clang_latest}}: {TOOLSET: "clang-${{variables.clang_latest}}", PACKAGES: "clang-${{variables.clang_latest}}", LLVM_OS: "${{variables.linux_latest_os}}", LLVM_VER: "${{variables.clang_latest}}", VM_IMAGE: "${{variables.linux_latest_vm}}"} + pool: + vmImage: $(VM_IMAGE) + steps: + - bash: | + set -e + uname -a + ./.ci/linux-cxx-install.sh + sudo apt remove gcc g++ + displayName: Install + - bash: | + set -e + ./src/engine/build.sh --verbose + displayName: Build + + - job: 'Linux_Latest' + strategy: + matrix: + GCC ${{variables.gcc_latest}}: {TOOLSET: "gcc-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}", VM_IMAGE: "${{variables.linux_latest_vm}}"} + Clang ${{variables.clang_latest}}: {TOOLSET: "clang-${{variables.clang_latest}}", PACKAGES: "clang-${{variables.clang_latest}}", LLVM_OS: "${{variables.linux_latest_os}}", LLVM_VER: 13, VM_IMAGE: "${{variables.linux_latest_vm}}"} + pool: + vmImage: $(VM_IMAGE) + steps: + - template: .ci/azp-linux-test.yml + + - job: 'Windows_Latest' + strategy: + matrix: + VS ${{variables.vs_latest}}: {TOOLSET: "${{variables.vc_latest}}", TEST_TOOLSET: msvc, VM_IMAGE: "${{variables.windows_latest_vm}}"} + pool: + vmImage: $(VM_IMAGE) + steps: + - template: .ci/azp-windows-test.yml + + - job: 'macOS' + strategy: + matrix: + Xcode ${{variables.xc_latest}}: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app", VM_IMAGE: "${{variables.macos_latest_vm}}"} + pool: + vmImage: $(VM_IMAGE) + steps: + - template: .ci/azp-macos-test.yml + + - job: 'Linux_ASAN' + strategy: + matrix: + Clang ${{variables.clang_latest}}: {TOOLSET: "clang-${{variables.clang_latest}}", PACKAGES: "clang-${{variables.clang_latest}}", LLVM_OS: "${{variables.linux_latest_os}}", LLVM_VER: 13, VM_IMAGE: "${{variables.linux_latest_vm}}"} + pool: + vmImage: $(VM_IMAGE) + continueOnError: 'true' + steps: + - template: .ci/azp-linux-asan-test.yml + +- stage: Compilers + dependsOn: [Core] + jobs: + + - job: 'Linux' + strategy: + matrix: + GCC 10: {TOOLSET: gcc-10, PACKAGES: g++-10, VM_IMAGE: 'ubuntu-20.04'} + GCC 9: {TOOLSET: gcc-9, PACKAGES: g++-9, VM_IMAGE: 'ubuntu-18.04'} + GCC 8: {TOOLSET: gcc-8, PACKAGES: g++-8, VM_IMAGE: 
'ubuntu-18.04'} + GCC 7: {TOOLSET: gcc-7, PACKAGES: g++-7, VM_IMAGE: 'ubuntu-18.04'} + GCC 6: {TOOLSET: gcc-6, PACKAGES: g++-6, VM_IMAGE: 'ubuntu-18.04'} + GCC 5: {TOOLSET: gcc-5, PACKAGES: g++-5, VM_IMAGE: 'ubuntu-18.04'} + Clang 12: {TOOLSET: clang-12, PACKAGES: clang-12, LLVM_OS: focal, LLVM_VER: 12, VM_IMAGE: 'ubuntu-20.04'} + Clang 11: {TOOLSET: clang-11, PACKAGES: clang-11, LLVM_OS: focal, LLVM_VER: 11, VM_IMAGE: 'ubuntu-20.04'} + Clang 10: {TOOLSET: clang-10, PACKAGES: clang-10, LLVM_OS: bionic, LLVM_VER: 10, VM_IMAGE: 'ubuntu-18.04'} + Clang 9: {TOOLSET: clang-9, PACKAGES: clang-9, LLVM_OS: bionic, LLVM_VER: 9, VM_IMAGE: 'ubuntu-18.04'} + Clang 8: {TOOLSET: clang-8, PACKAGES: clang-8, LLVM_OS: bionic, LLVM_VER: 8, VM_IMAGE: 'ubuntu-18.04'} + Clang 7: {TOOLSET: clang-7, PACKAGES: clang-7, LLVM_OS: bionic, LLVM_VER: 7, VM_IMAGE: 'ubuntu-18.04'} + Clang 6: {TOOLSET: clang-6.0, PACKAGES: clang-6.0, LLVM_OS: bionic, LLVM_VER: 6.0, VM_IMAGE: 'ubuntu-18.04'} + Clang 5: {TOOLSET: clang-5.0, PACKAGES: clang-5.0, LLVM_OS: bionic, LLVM_VER: 5.0, VM_IMAGE: 'ubuntu-18.04'} + Clang 4: {TOOLSET: clang-4.0, PACKAGES: clang-4.0, LLVM_OS: xenial, LLVM_VER: 4.0, VM_IMAGE: 'ubuntu-18.04'} + pool: + vmImage: $(VM_IMAGE) + steps: + - template: .ci/azp-linux-test.yml + + - job: 'Windows' + strategy: + matrix: + VS 2022: {TOOLSET: vc143, TEST_TOOLSET: msvc, VM_IMAGE: 'windows-2022'} + MinGW 8.1.0: {TOOLSET: mingw, TEST_TOOLSET: gcc, VM_IMAGE: 'windows-2019'} + pool: + vmImage: $(VM_IMAGE) + steps: + - template: .ci/azp-windows-test.yml + + - job: 'macOS' + strategy: + matrix: + Xcode 13.1: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_13.1.app, VM_IMAGE: 'macOS-11'} + Xcode 13.0: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_13.0.app, VM_IMAGE: 'macOS-11'} + Xcode 12.4: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_12.4.app, VM_IMAGE: 'macOS-11'} + Xcode 12.3: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_12.3.app, VM_IMAGE: 'macOS-10.15'} + Xcode 12.2: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_12.2.app, VM_IMAGE: 'macOS-10.15'} + Xcode 12.1.1: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_12.1.1.app, VM_IMAGE: 'macOS-10.15'} + Xcode 12.0.1: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_12.0.1.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.7: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.7.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.6: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.6.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.5: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.5.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.4.1: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.4.1.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.3.1: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.3.1.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.3: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.3.app, VM_IMAGE: 'macOS-10.15'} + Xcode 11.2.1: {TOOLSET: clang, TEST_TOOLSET: clang, CXX: clang++, XCODE_APP: /Applications/Xcode_11.2.1.app, VM_IMAGE: 'macOS-10.15'} + pool: + vmImage: $(VM_IMAGE) + steps: + - template: .ci/azp-macos-test.yml + +- stage: Boost_Dev + 
dependsOn: [Core] + jobs: + + - job: 'Dev_Linux' + displayName: 'Dev Linux' + pool: + vmImage: 'ubuntu-latest' + strategy: + matrix: + Master .. GCC ${{variables.gcc_latest}}: {BOOST_BRANCH: master, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + Master .. Clang ${{variables.clang_latest}}: {BOOST_BRANCH: master, TOOLSET: clang, CXX: "clang++-${{variables.clang_latest}}", PACKAGES: "clang-${{variables.clang_latest}}", LLVM_OS: "${{variables.linux_latest_os}}", LLVM_VER: "${{variables.clang_latest}}"} + Develop .. GCC ${{variables.gcc_latest}}: {BOOST_BRANCH: develop, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + Develop .. Clang ${{variables.clang_latest}}: {BOOST_BRANCH: develop, TOOLSET: clang, CXX: "clang++-${{variables.clang_latest}}", PACKAGES: "clang-${{variables.clang_latest}}", LLVM_OS: "${{variables.linux_latest_os}}", LLVM_VER: "${{variables.clang_latest}}"} + steps: + - bash: | + set -e + uname -a + ./.ci/linux-cxx-install.sh + displayName: Install + - bash: | + set -e + cd src/engine + ./build.sh ${TOOLSET} --cxx=${CXX} + ./b2 -v + displayName: Build + - bash: | + set -e + pushd ${HOME} + git clone --recursive https://github.com/boostorg/boost.git + cd boost + git checkout ${BOOST_BRANCH} + CXX_PATH=`which ${CXX}` + echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam + "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install + popd + displayName: Test + + - job: 'Dev_macOS' + displayName: 'Dev macOS' + pool: + vmImage: "${{variables.macos_latest_vm}}" + strategy: + matrix: + Master .. Xcode ${{variables.xc_latest}}: {BOOST_BRANCH: master, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + Develop .. Xcode ${{variables.xc_latest}}: {BOOST_BRANCH: develop, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + steps: + - bash: | + set -e + uname -a + sudo xcode-select -switch ${XCODE_APP} + which clang++ + displayName: Install + - bash: | + set -e + cd src/engine + ./build.sh ${TOOLSET} --cxx=${CXX} + ./b2 -v + displayName: Build + - bash: | + set -e + pushd ${HOME} + git clone --recursive https://github.com/boostorg/boost.git + cd boost + git checkout ${BOOST_BRANCH} + CXX_PATH=`which ${CXX}` + echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam + "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install + popd + displayName: Test + + - job: 'Dev_Windows' + displayName: 'Dev Windows' + pool: + vmImage: "${{variables.windows_latest_vm}}" + strategy: + matrix: + Master .. VS ${{variables.vs_latest}}: {BOOST_BRANCH: master, TOOLSET: "${{variables.vc_latest}}"} + Develop .. VS ${{variables.vs_latest}}: {BOOST_BRANCH: develop, TOOLSET: "${{variables.vc_latest}}"} + steps: + - powershell: | + cd src/engine + $env:path += ';' + ${env:CXX_PATH} + cmd /c build.bat ${env:TOOLSET} + ./b2.exe -v + cd ../.. 
+ displayName: Build + - powershell: | + $env:HOME = "$env:HOMEDRIVE" + "$env:HOMEPATH" + cd "${env:HOME}" + git clone --recursive https://github.com/boostorg/boost.git + cd boost + $OriginalErrorActionPreference = $ErrorActionPreference + $ErrorActionPreference= 'silentlycontinue' + git checkout "${env:BOOST_BRANCH}" + $ErrorActionPreference = $OriginalErrorActionPreference + echo "using" "msvc" ";" > "${env:HOME}/user-config.jam" + & "${env:BUILD_SOURCESDIRECTORY}\src\engine\b2.exe" "--boost-build=${env:BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=msvc install + displayName: Test + +- stage: Boost_Release + dependsOn: [Boost_Dev] + jobs: + + - job: 'Release_Linux' + displayName: 'Release Linux' + pool: + vmImage: 'ubuntu-latest' + strategy: + matrix: + 1.78.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.78.0, BOOST_VERSION_U: 1_78_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.77.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.77.0, BOOST_VERSION_U: 1_77_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.76.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.76.0, BOOST_VERSION_U: 1_76_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.75.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.75.0, BOOST_VERSION_U: 1_75_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.74.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.74.0, BOOST_VERSION_U: 1_74_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.73.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.73.0, BOOST_VERSION_U: 1_73_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.72.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.72.0, BOOST_VERSION_U: 1_72_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.71.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.71.0, BOOST_VERSION_U: 1_71_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.70.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.70.0, BOOST_VERSION_U: 1_70_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.69.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.69.0, BOOST_VERSION_U: 1_69_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.68.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.68.0, BOOST_VERSION_U: 1_68_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.67.0 .. GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.67.0, BOOST_VERSION_U: 1_67_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + 1.66.0 .. 
GCC ${{variables.gcc_latest}}: {BOOST_VERSION: 1.66.0, BOOST_VERSION_U: 1_66_0, TOOLSET: gcc, CXX: "g++-${{variables.gcc_latest}}", PACKAGES: "g++-${{variables.gcc_latest}}"} + steps: + - bash: | + set -e + uname -a + ./.ci/linux-cxx-install.sh + displayName: Install + - bash: | + set -e + cd src/engine + ./build.sh ${TOOLSET} --cxx=${CXX} + ./b2 -v + displayName: Build + - bash: | + set -e + pushd ${HOME} + git clone -b boost-${BOOST_VERSION} --single-branch --recurse-submodules https://github.com/boostorg/boost.git boost_${BOOST_VERSION_U} + cd boost_${BOOST_VERSION_U} + CXX_PATH=`which ${CXX}` + echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam + "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install + popd + displayName: Test + + - job: 'Release_macOS' + displayName: 'Release macOS' + pool: + vmImage: "${{variables.macos_latest_vm}}" + strategy: + matrix: + 1.77.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.77.0, BOOST_VERSION_U: 1_77_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.76.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.76.0, BOOST_VERSION_U: 1_76_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.75.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.75.0, BOOST_VERSION_U: 1_75_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.74.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.74.0, BOOST_VERSION_U: 1_74_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.73.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.73.0, BOOST_VERSION_U: 1_73_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.72.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.72.0, BOOST_VERSION_U: 1_72_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.71.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.71.0, BOOST_VERSION_U: 1_71_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.70.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.70.0, BOOST_VERSION_U: 1_70_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.69.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.69.0, BOOST_VERSION_U: 1_69_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.68.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.68.0, BOOST_VERSION_U: 1_68_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.67.0 .. Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.67.0, BOOST_VERSION_U: 1_67_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + 1.66.0 .. 
Xcode ${{variables.xc_latest}}: {BOOST_VERSION: 1.66.0, BOOST_VERSION_U: 1_66_0, TOOLSET: clang, CXX: clang++, XCODE_APP: "/Applications/Xcode_${{variables.xc_latest}}.app"} + steps: + - bash: | + set -e + uname -a + sudo xcode-select -switch ${XCODE_APP} + which clang++ + displayName: Install + - bash: | + set -e + cd src/engine + ./build.sh ${TOOLSET} --cxx=${CXX} + ./b2 -v + displayName: Build + - bash: | + set -e + pushd ${HOME} + git clone -b boost-${BOOST_VERSION} --single-branch --recurse-submodules https://github.com/boostorg/boost.git boost_${BOOST_VERSION_U} + cd boost_${BOOST_VERSION_U} + CXX_PATH=`which ${CXX}` + echo "using ${TOOLSET} : : ${CXX_PATH} ;" > ${HOME}/user-config.jam + "${BUILD_SOURCESDIRECTORY}/src/engine/b2" "--boost-build=${BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=${TOOLSET} install + popd + displayName: Test + + - job: 'Release_Windows' + displayName: 'Release Windows' + pool: + vmImage: "${{variables.windows_latest_vm}}" + strategy: + matrix: + 1.77.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.77.0, BOOST_VERSION_U: 1_77_0, TOOLSET: "${{variables.vc_latest}}"} + 1.76.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.76.0, BOOST_VERSION_U: 1_76_0, TOOLSET: "${{variables.vc_latest}}"} + 1.75.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.75.0, BOOST_VERSION_U: 1_75_0, TOOLSET: "${{variables.vc_latest}}"} + 1.74.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.74.0, BOOST_VERSION_U: 1_74_0, TOOLSET: "${{variables.vc_latest}}"} + 1.73.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.73.0, BOOST_VERSION_U: 1_73_0, TOOLSET: "${{variables.vc_latest}}"} + 1.72.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.72.0, BOOST_VERSION_U: 1_72_0, TOOLSET: "${{variables.vc_latest}}"} + 1.71.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.71.0, BOOST_VERSION_U: 1_71_0, TOOLSET: "${{variables.vc_latest}}"} + 1.70.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.70.0, BOOST_VERSION_U: 1_70_0, TOOLSET: "${{variables.vc_latest}}"} + 1.69.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.69.0, BOOST_VERSION_U: 1_69_0, TOOLSET: "${{variables.vc_latest}}"} + 1.68.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.68.0, BOOST_VERSION_U: 1_68_0, TOOLSET: "${{variables.vc_latest}}"} + 1.67.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.67.0, BOOST_VERSION_U: 1_67_0, TOOLSET: "${{variables.vc_latest}}"} + 1.66.0 .. VS ${{variables.vs_latest}}: {BOOST_VERSION: 1.66.0, BOOST_VERSION_U: 1_66_0, TOOLSET: "${{variables.vc_latest}}"} + steps: + - powershell: | + cd src/engine + $env:path += ';' + ${env:CXX_PATH} + cmd /c build.bat ${env:TOOLSET} + ./b2.exe -v + cd ../.. 
+ displayName: Build + - powershell: | + $env:HOME = "$env:HOMEDRIVE" + "$env:HOMEPATH" + cd "${env:HOME}" + git clone -b boost-${env:BOOST_VERSION} --single-branch --recurse-submodules https://github.com/boostorg/boost.git boost_${env:BOOST_VERSION_U} + cd "boost_${env:BOOST_VERSION_U}" + echo "using" "msvc" ";" > "${env:HOME}/user-config.jam" + & "${env:BUILD_SOURCESDIRECTORY}\src\engine\b2.exe" "--boost-build=${env:BUILD_SOURCESDIRECTORY}/src" --debug-configuration --build-type=complete --layout=versioned -n -d1 toolset=msvc install + displayName: Test + +- stage: Website_Update + dependsOn: [Core] + displayName: 'Website Update' + condition: in(variables['Build.SourceBranch'], 'refs/heads/main', 'refs/heads/release') + jobs: + + - job: Documentation + pool: + vmImage: 'ubuntu-latest' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.x' + - task: UseRubyVersion@0 + - bash: | + pip install --user Pygments + pip install --user "https://github.com/bfgroup/jam_pygments/archive/master.zip" + gem install asciidoctor + gem install pygments.rb + echo "using asciidoctor ;" >> project-config.jam + ./bootstrap.sh + pushd doc + ../b2 --website-doc-dir=manual/$(Build.SourceBranchName) website + displayName: 'Build & Publish' + env: + GH_TOKEN: $(GitHubToken) diff --git a/src/boost/tools/build/boost-build.jam b/src/boost/tools/build/boost-build.jam new file mode 100644 index 000000000..d62860b6c --- /dev/null +++ b/src/boost/tools/build/boost-build.jam @@ -0,0 +1,8 @@ +# Copyright 2001, 2002 Dave Abrahams +# Copyright 2002 Rene Rivera +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +boost-build src/kernel ; diff --git a/src/boost/tools/build/bootstrap.bat b/src/boost/tools/build/bootstrap.bat new file mode 100644 index 000000000..2d51bdcd4 --- /dev/null +++ b/src/boost/tools/build/bootstrap.bat @@ -0,0 +1,39 @@ +@ECHO OFF + +REM Copyright (C) 2009 Vladimir Prus +REM Copyright 2019-2020 Rene Rivera +REM +REM Distributed under the Boost Software License, Version 1.0. +REM (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +:b2_build +ECHO Building the B2 engine.. +pushd src\engine +call .\build.bat %* +@ECHO OFF +popd +if exist ".\src\engine\b2.exe" ( + copy .\src\engine\b2.exe . > nul + goto :b2_built) +goto :b2_failure + + +:b2_built +ECHO. +ECHO Building is done. To install, run: +ECHO. +ECHO .\b2 --prefix=DIR install +ECHO. +goto :end + + +:b2_failure +ECHO. +ECHO Failed to build the B2 engine. +ECHO. +goto :end + + +:end +exit /b %ERRORLEVEL% diff --git a/src/boost/tools/build/bootstrap.sh b/src/boost/tools/build/bootstrap.sh new file mode 100755 index 000000000..fbf577e62 --- /dev/null +++ b/src/boost/tools/build/bootstrap.sh @@ -0,0 +1,28 @@ +#!/bin/sh +# Copyright (C) 2005, 2006 Douglas Gregor. +# Copyright (C) 2006 The Trustees of Indiana University +# Copyright (C) 2010 Bryce Lelbach +# Copyright 2018-2020 Rene Rivera +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +# Build b2 +echo "Building the B2 engine.." +pwd=`pwd` +"${pwd}/src/engine/build.sh" "$@" +if [ $? -ne 0 ]; then + echo + echo "Failed to build the B2 engine." 1>&2 + exit 1 +fi +cd "$pwd" +cp "./src/engine/b2" . + +cat << EOF + +Building is done. 
To install, run: + + ./b2 install --prefix= + +EOF diff --git a/src/boost/tools/build/bootstrap_vms.com b/src/boost/tools/build/bootstrap_vms.com new file mode 100644 index 000000000..3d8afaab9 --- /dev/null +++ b/src/boost/tools/build/bootstrap_vms.com @@ -0,0 +1,48 @@ +$! Copyright 2015 Artur Shepilko. +$! +$! Distributed under the Boost Software License, Version 1.0. +$! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +$! +$ THIS_FACILITY = "BOOSTBUILD" +$ +$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'") +$ save_verify = f$verify(verify) +$ save_default = f$env("DEFAULT") +$ +$ SAY := WRITE SYS$OUTPUT +$ +$ ON WARNING THEN CONTINUE +$ ON ERROR THEN GOTO ERROR +$ +$ SAY "I|Bootstrapping the build engine..." +$ +$ set def [.src.engine] +$ @build_vms /out=[--]bootstrap.log +$ +$ set def 'save_default' +$ +$ if f$search("[.src.engine.bin_vms]b2.exe") .eqs. "" then goto ERROR +$ copy [.src.engine.bin_vms]b2.exe [] +$ copy [.src.engine.bin_vms]bjam.exe [] +$ +$ SAY "I|Bootstrapping is done, B2.EXE created." +$ type sys$input +$DECK + + To build and install under ROOT: directory, run: + MC []B2 --prefix="/root" install + + Set B2 command: + B2 :== $ROOT:[BIN]B2.EXE + +$EOD +$ sts = 1 +$ +$EXIT: +$ set def 'save_default' +$ exit 'sts' + (0 * f$verify(save_verify)) + +$ERROR: +$ SAY "E|Failed to bootstrap build engine, see BOOTSTRAP.LOG for details." +$ sts = 4 +$ goto EXIT diff --git a/src/boost/tools/build/example/asciidoctor/example.adoc b/src/boost/tools/build/example/asciidoctor/example.adoc new file mode 100644 index 000000000..1a7675c12 --- /dev/null +++ b/src/boost/tools/build/example/asciidoctor/example.adoc @@ -0,0 +1,3 @@ += The Dangerous and Thrilling Documentation Chronicles + +This journey begins on a bleary Monday morning. diff --git a/src/boost/tools/build/example/asciidoctor/example_manpage.adoc b/src/boost/tools/build/example/asciidoctor/example_manpage.adoc new file mode 100644 index 000000000..88c535831 --- /dev/null +++ b/src/boost/tools/build/example/asciidoctor/example_manpage.adoc @@ -0,0 +1,38 @@ += b2(1) +Rene Rivera +v0.0.0 +:doctype: manpage +:manmanual: B2 +:mansource: B2 +:man-linkstyle: pass:[blue R < >] + +== NAME + +b2 - Boost Build + +== SYNOPSIS + +*b2* ['OPTION']... 'TARGET'... + +== OPTIONS + +*-n*:: + Print out what would get built. + +== EXIT STATUS + +*0*:: + Success. + +*1*:: + Failure. + +== RESOURCES + +*Project web site:* http://boost.org + +== COPYING + +Copyright \(C) 2017 {author}. + +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) \ No newline at end of file diff --git a/src/boost/tools/build/example/asciidoctor/jamroot.jam b/src/boost/tools/build/example/asciidoctor/jamroot.jam new file mode 100644 index 000000000..6b521acc5 --- /dev/null +++ b/src/boost/tools/build/example/asciidoctor/jamroot.jam @@ -0,0 +1,11 @@ +#| +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. 
(See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +html example_html : example.adoc ; +manpage example_1 : example_manpage.adoc ; +pdf example_pdf : example.adoc ; +docbook example_docbook : example.adoc ; diff --git a/src/boost/tools/build/example/boost-build.jam b/src/boost/tools/build/example/boost-build.jam new file mode 100644 index 000000000..21fe9782b --- /dev/null +++ b/src/boost/tools/build/example/boost-build.jam @@ -0,0 +1,6 @@ +# Copyright 2002, 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +boost-build ../src/kernel ; diff --git a/src/boost/tools/build/example/built_tool/Jamroot.jam b/src/boost/tools/build/example/built_tool/Jamroot.jam new file mode 100644 index 000000000..c458650e8 --- /dev/null +++ b/src/boost/tools/build/example/built_tool/Jamroot.jam @@ -0,0 +1,8 @@ + +import feature ; + +feature.feature tblgen : : dependency free ; + +project built_tool ; + +build-project core ; \ No newline at end of file diff --git a/src/boost/tools/build/example/built_tool/core/Jamfile.jam b/src/boost/tools/build/example/built_tool/core/Jamfile.jam new file mode 100644 index 000000000..d4ec62382 --- /dev/null +++ b/src/boost/tools/build/example/built_tool/core/Jamfile.jam @@ -0,0 +1,39 @@ + +import toolset ; +import os ; + +project : requirements ../tblgen//tblgen ; + + +# Create a.c using a custom action defined below. +make a.c : a.td : @tblgen ; + +# Use a.c in executable. +exe core : core.cpp a.c ; + +# The action has to invoke the tool built in other +# parts of the project. The feature is used +# to specify the location of the tool, and the flags +# statement below make the full path to the tool +# available inside the action. +toolset.flags tblgen COMMAND ; + +# We generally want a.c to be rebuilt when the tool changes. +rule tblgen ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(COMMAND) ] ; +} + +# The action that invokes the tool +actions tblgen bind COMMAND +{ + $(COMMAND:E=tblgen) > $(<) +} + +if [ os.name ] = VMS +{ + actions tblgen bind COMMAND + { + PIPE MCR $(COMMAND:WE=tblgen) > $(<:W) + } +} diff --git a/src/boost/tools/build/example/built_tool/core/a.td b/src/boost/tools/build/example/built_tool/core/a.td new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/example/built_tool/core/core.cpp b/src/boost/tools/build/example/built_tool/core/core.cpp new file mode 100644 index 000000000..31a133726 --- /dev/null +++ b/src/boost/tools/build/example/built_tool/core/core.cpp @@ -0,0 +1,5 @@ + +int main() +{ + return 0; +} diff --git a/src/boost/tools/build/example/built_tool/readme.txt b/src/boost/tools/build/example/built_tool/readme.txt new file mode 100644 index 000000000..bbb9f9b3a --- /dev/null +++ b/src/boost/tools/build/example/built_tool/readme.txt @@ -0,0 +1,5 @@ + +This example shows how to build an executable and then use it +for generating other targets. The 'tblgen' subdirectory builds +a tool, while the 'core' subdirectory uses that tool. Refer +to core/Jamfile.jam for detailed comments. 
\ No newline at end of file diff --git a/src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam b/src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam new file mode 100644 index 000000000..af4906278 --- /dev/null +++ b/src/boost/tools/build/example/built_tool/tblgen/Jamfile.jam @@ -0,0 +1,4 @@ + +project : requirements -tblgen//tblgen ; + +exe tblgen : tblgen.cpp ; \ No newline at end of file diff --git a/src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp b/src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp new file mode 100644 index 000000000..fbd058133 --- /dev/null +++ b/src/boost/tools/build/example/built_tool/tblgen/tblgen.cpp @@ -0,0 +1,9 @@ + +#include + +int main() +{ + std::cout << "int foo;\n"; + return 0; +} + diff --git a/src/boost/tools/build/example/complex-testing/compile-fail.cpp b/src/boost/tools/build/example/complex-testing/compile-fail.cpp new file mode 100644 index 000000000..aa07d4e67 --- /dev/null +++ b/src/boost/tools/build/example/complex-testing/compile-fail.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include + +int main() +{ + std::cout << "Bye!\n"; + return EXIT_FAILURE +} diff --git a/src/boost/tools/build/example/complex-testing/fail.cpp b/src/boost/tools/build/example/complex-testing/fail.cpp new file mode 100644 index 000000000..ce8e2a189 --- /dev/null +++ b/src/boost/tools/build/example/complex-testing/fail.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include + +int main() +{ + std::cout << "Bye!\n"; + return EXIT_FAILURE; +} diff --git a/src/boost/tools/build/example/complex-testing/jamroot.jam b/src/boost/tools/build/example/complex-testing/jamroot.jam new file mode 100644 index 000000000..c180a2f6c --- /dev/null +++ b/src/boost/tools/build/example/complex-testing/jamroot.jam @@ -0,0 +1,15 @@ +# Copyright 2016 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +using testing ; +import property-set ; +import path ; + +exe success : success.cpp ; + +run success : arg1 arg2 : : : success-a ; +run success : arg3 arg4 : : : success-b ; + +run post.cpp : : success-a : : post-a ; +run post.cpp : : success-b : : post-b ; diff --git a/src/boost/tools/build/example/complex-testing/post.cpp b/src/boost/tools/build/example/complex-testing/post.cpp new file mode 100644 index 000000000..d4af2afb4 --- /dev/null +++ b/src/boost/tools/build/example/complex-testing/post.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include + +int main(int argc, char *argv[]) +{ + std::cout << argv[1] << "\n"; + return EXIT_SUCCESS; +} diff --git a/src/boost/tools/build/example/complex-testing/success.cpp b/src/boost/tools/build/example/complex-testing/success.cpp new file mode 100644 index 000000000..30df302c4 --- /dev/null +++ b/src/boost/tools/build/example/complex-testing/success.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. 
(See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include + +int main(int argc, char *argv[]) +{ + std::cout << "Hi!\n"; + return EXIT_SUCCESS; +} diff --git a/src/boost/tools/build/example/customization/class.verbatim b/src/boost/tools/build/example/customization/class.verbatim new file mode 100644 index 000000000..5c0d7b803 --- /dev/null +++ b/src/boost/tools/build/example/customization/class.verbatim @@ -0,0 +1,7 @@ +class_template + +class %class_name% { +public: + %class_name%() {} + ~%class_name%() {} +}; \ No newline at end of file diff --git a/src/boost/tools/build/example/customization/codegen.cpp b/src/boost/tools/build/example/customization/codegen.cpp new file mode 100644 index 000000000..2c632e2d1 --- /dev/null +++ b/src/boost/tools/build/example/customization/codegen.cpp @@ -0,0 +1,36 @@ +// (C) Copyright Vladimir Prus, 2003 +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +// Please see 'usage.verbatim' file for usage notes. + +#include +#include +#include +using std::cout; +using std::string; +using std::strlen; + +extern const char class_template[]; +extern const char usage[]; + +int main(int ac, char* av[]) +{ + if (av[1]) { + + string class_name = av[1]; + string s = class_template; + + string::size_type n; + while((n = s.find("%class_name%")) != string::npos) { + s.replace(n, strlen("%class_name%"), class_name); + } + std::cout << "Output is:\n"; + std::cout << s << "\n"; + return 0; + } else { + std::cout << usage << "\n"; + return 1; + } +} diff --git a/src/boost/tools/build/example/customization/inline_file.py b/src/boost/tools/build/example/customization/inline_file.py new file mode 100644 index 000000000..77cd8cb29 --- /dev/null +++ b/src/boost/tools/build/example/customization/inline_file.py @@ -0,0 +1,44 @@ +#!/usr/bin/python + +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import sys +from string import strip + +def quote_line(line): + + result = "" + + for i in line: + if (i == '\\'): + result = result + '\\\\' + elif (i == '\"'): + result = result + '\\\"' + elif (i != '\r' and i != '\n'): + result = result + i; + + return '\"' + result + '\\n\"' + +def quote_file(file): + result = "" + + for i in file.readlines(): + result = result + quote_line(i) + "\n" + + return result + +if len(sys.argv) < 3: + print "Usage: inline_file.py output_c_file file_to_include" +else: + output_c_file = sys.argv[1] + out_file = open(output_c_file, "w"); + + file_to_include = sys.argv[2] + + in_file = open(file_to_include, "r"); + variable_name = strip(in_file.readline()) + out_file.write("extern const char %s[] = {\n%s};\n\n" % (variable_name, quote_file(in_file))) + in_file.close() + out_file.close() diff --git a/src/boost/tools/build/example/customization/jamroot.jam b/src/boost/tools/build/example/customization/jamroot.jam new file mode 100644 index 000000000..3568213c7 --- /dev/null +++ b/src/boost/tools/build/example/customization/jamroot.jam @@ -0,0 +1,9 @@ +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import verbatim ; + +exe codegen : codegen.cpp class.verbatim usage.verbatim + t1.verbatim ; + diff --git a/src/boost/tools/build/example/customization/readme.txt b/src/boost/tools/build/example/customization/readme.txt new file mode 100644 index 000000000..6a799277a --- /dev/null +++ b/src/boost/tools/build/example/customization/readme.txt @@ -0,0 +1,11 @@ +Copyright 2003 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +This example show how to add a new target type and a new tool support to +B2. Please refer to extender manual for a complete description of this +example. + +Note that this example requires Python. If cygwin Python on Windows is to be +used, please go to "verbatim.jam" and follow instructions there. diff --git a/src/boost/tools/build/example/customization/t1.verbatim b/src/boost/tools/build/example/customization/t1.verbatim new file mode 100644 index 000000000..144540f29 --- /dev/null +++ b/src/boost/tools/build/example/customization/t1.verbatim @@ -0,0 +1,2 @@ +t1 +//###include "t2.verbatim" \ No newline at end of file diff --git a/src/boost/tools/build/example/customization/t2.verbatim b/src/boost/tools/build/example/customization/t2.verbatim new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/example/customization/usage.verbatim b/src/boost/tools/build/example/customization/usage.verbatim new file mode 100644 index 000000000..0fc4b4a37 --- /dev/null +++ b/src/boost/tools/build/example/customization/usage.verbatim @@ -0,0 +1,5 @@ +usage +Usage: codegen class_name + +This program takes a template of C++ code and replaces of all occurrences of +%class_name% with the passed 'class_name' parameter. \ No newline at end of file diff --git a/src/boost/tools/build/example/customization/verbatim.jam b/src/boost/tools/build/example/customization/verbatim.jam new file mode 100644 index 000000000..c529845df --- /dev/null +++ b/src/boost/tools/build/example/customization/verbatim.jam @@ -0,0 +1,61 @@ +# Copyright 2003, 2004 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file shows some of the primary customization mechanisms in B2 V2 +# and should serve as a basic for your own customization. +# Each part has a comment describing its purpose, and you can pick the parts +# which are relevant to your case, remove everything else, and then change names +# and actions to taste. + +import os ; + +# Declare a new target type. This allows B2 to do something sensible +# when targets with the .verbatim extension are found in sources. +import type ; +type.register VERBATIM : verbatim ; + +# Declare a dependency scanner for the new target type. The +# 'inline-file.py' script does not handle includes, so this is +# only for illustraction. +import scanner ; +# First, define a new class, derived from 'common-scanner', +# that class has all the interesting logic, and we only need +# to override the 'pattern' method which return regular +# expression to use when scanning. +class verbatim-scanner : common-scanner +{ + rule pattern ( ) + { + return "//###include[ ]*\"([^\"]*)\"" ; + } +} + +# Register the scanner class. The 'include' is +# the property which specifies the search path +# for includes. 
+scanner.register verbatim-scanner : include ; +# Assign the scanner class to the target type. +# Now, all .verbatim sources will be scanned. +# To test this, build the project, touch the +# t2.verbatim file and build again. +type.set-scanner VERBATIM : verbatim-scanner ; + +import generators ; +generators.register-standard verbatim.inline-file : VERBATIM : CPP ; + +# Note: To use Cygwin Python on Windows change the following line +# to "python inline_file.py $(<) $(>)" +# Also, make sure that "python" in in PATH. +actions inline-file +{ + "./inline_file.py" $(<) $(>) +} + +if [ os.name ] = VMS +{ + actions inline-file + { + python inline_file.py $(<:W) $(>:W) + } +} diff --git a/src/boost/tools/build/example/customization/verbatim.py b/src/boost/tools/build/example/customization/verbatim.py new file mode 100644 index 000000000..996577054 --- /dev/null +++ b/src/boost/tools/build/example/customization/verbatim.py @@ -0,0 +1,47 @@ +# Copyright 2010 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file is only used with Python port of Boost.Build + +# This file shows some of the primary customization mechanisms in Boost.Build V2 +# and should serve as a basic for your own customization. +# Each part has a comment describing its purpose, and you can pick the parts +# which are relevant to your case, remove everything else, and then change names +# and actions to taste. + +# Declare a new target type. This allows Boost.Build to do something sensible +# when targets with the .verbatim extension are found in sources. +import b2.build.type as type +type.register("VERBATIM", ["verbatim"]) + +# Declare a dependency scanner for the new target type. The +# 'inline-file.py' script does not handle includes, so this is +# only for illustraction. +import b2.build.scanner as scanner; +# First, define a new class, derived from 'common-scanner', +# that class has all the interesting logic, and we only need +# to override the 'pattern' method which return regular +# expression to use when scanning. +class VerbatimScanner(scanner.CommonScanner): + + def pattern(self): + return "//###include[ ]*\"([^\"]*)\"" + +scanner.register(VerbatimScanner, ["include"]) +type.set_scanner("VERBATIM", VerbatimScanner) + +import b2.build.generators as generators + +generators.register_standard("verbatim.inline-file", + ["VERBATIM"], ["CPP"]) + +from b2.manager import get_manager + +get_manager().engine().register_action("verbatim.inline-file", +""" +./inline_file.py $(<) $(>) +""") + + + diff --git a/src/boost/tools/build/example/generate/README.txt b/src/boost/tools/build/example/generate/README.txt new file mode 100644 index 000000000..018cbb564 --- /dev/null +++ b/src/boost/tools/build/example/generate/README.txt @@ -0,0 +1,11 @@ +# Copyright 2007 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +This example shows the 'generate' rule, that allows you to construct target +using any arbitrary set of transformation and commands. + +The rule is similar to 'make' and 'notfile', but unlike those, you can operate +in terms of B2 'virtual targets', which is more flexible. + +Please consult the docs for more explanations. 
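
For orientation, a generating function used with the 'generate' rule receives the project, a target name, a property set, and the source virtual targets, and returns the virtual targets it constructs. The minimal Jam sketch below only restates the shape of the gen.jam and jamroot.jam files that follow in this patch; the name my-gen is illustrative, not an additional API:

    # Generating function for use with 'generate' (sketch; gen.jam below is the real example).
    import "class" : new ;
    import common ;

    rule my-gen ( project name : property-set : sources * )
    {
        local result ;
        for local s in $(sources)
        {
            # Wrap each source in a copy action and return a file target of the same type.
            local a = [ new non-scanning-action $(s) : common.copy : $(property-set) ] ;
            result += [ new file-target $(name) : [ $(s).type ] : $(project) : $(a) ] ;
        }
        return $(result) ;
    }

A jamfile then delegates construction to such a function through the generate rule's generating-rule property, as the jamroot.jam below does.
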
diff --git a/src/boost/tools/build/example/generate/a.cpp b/src/boost/tools/build/example/generate/a.cpp new file mode 100644 index 000000000..90dc5cc9b --- /dev/null +++ b/src/boost/tools/build/example/generate/a.cpp @@ -0,0 +1,10 @@ + +int main() +{ +} + +/* +Copyright 2007 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ diff --git a/src/boost/tools/build/example/generate/gen.jam b/src/boost/tools/build/example/generate/gen.jam new file mode 100644 index 000000000..73232aab5 --- /dev/null +++ b/src/boost/tools/build/example/generate/gen.jam @@ -0,0 +1,26 @@ + +import "class" : new ; +import common ; + +rule generate-example ( project name : property-set : sources * ) +{ + local result ; + for local s in $(sources) + { + #local source-name = [ $(s).name ] ; + #local source-action = [ $(s).action ] ; + #local source-properties = [ $(source-action).properties ] ; + + # Create a new action, that takes the source target and runs the + # 'common.copy' command on it. + local a = [ new non-scanning-action $(s) : common.copy : $(property-set) + ] ; + + # Create a target to represent the action result. Uses the target name + # passed here via the 'name' parameter and the same type and project as + # the source. + result += [ new file-target $(name) : [ $(s).type ] : $(project) : $(a) + ] ; + } + return $(result) ; +} \ No newline at end of file diff --git a/src/boost/tools/build/example/generate/gen.py b/src/boost/tools/build/example/generate/gen.py new file mode 100644 index 000000000..09ee15b43 --- /dev/null +++ b/src/boost/tools/build/example/generate/gen.py @@ -0,0 +1,16 @@ + +from b2.build.virtual_target import NonScanningAction, FileTarget + +def generate_example(project, name, ps, sources): + + result = [] + for s in sources: + + a = NonScanningAction([s], "common.copy", ps) + + # Create a target to represent the action result. Uses the target name + # passed here via the 'name' parameter and the same type and project as + # the source. + result.append(FileTarget(name, s.type(), project, a)) + + return result diff --git a/src/boost/tools/build/example/generate/jamroot.jam b/src/boost/tools/build/example/generate/jamroot.jam new file mode 100644 index 000000000..b190b322a --- /dev/null +++ b/src/boost/tools/build/example/generate/jamroot.jam @@ -0,0 +1,9 @@ +# Copyright 2007 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import generate ; + +import gen ; + +generate a2 : a.cpp : @gen.generate-example ; diff --git a/src/boost/tools/build/example/generator/README.txt b/src/boost/tools/build/example/generator/README.txt new file mode 100644 index 000000000..f26a856a5 --- /dev/null +++ b/src/boost/tools/build/example/generator/README.txt @@ -0,0 +1,6 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +This example shows how to declare a new generator class. It is necessary when +generator's logic is more complex that just running a single tool. 
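
In Jam, declaring a new generator class means deriving from 'generator', overriding its run rule to build the virtual targets, and registering an instance for a source/target type pair. The sketch below shows only that shape, with illustrative names (my-generator, mytool.gen); soap.jam below is the complete working version, including the __init__ forwarder and the actual target construction:

    # Shape of a custom generator (sketch; see soap.jam below for the full example).
    import generators ;
    import "class" : new ;

    class my-generator : generator
    {
        rule run ( project name ? : property-set : sources * )
        {
            # Inspect $(sources), create actions and file targets,
            # and return the registered virtual targets.
        }
    }

    # Instances of the class convert one GCI source into a CPP target.
    generators.register [ new my-generator mytool.gen : GCI : CPP ] ;
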
diff --git a/src/boost/tools/build/example/generator/foo.gci b/src/boost/tools/build/example/generator/foo.gci new file mode 100644 index 000000000..2ccc45c6c --- /dev/null +++ b/src/boost/tools/build/example/generator/foo.gci @@ -0,0 +1,10 @@ + +int main() +{ + return 0; +} +/* +Copyright 2006 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + */ diff --git a/src/boost/tools/build/example/generator/jamroot.jam b/src/boost/tools/build/example/generator/jamroot.jam new file mode 100644 index 000000000..b77c63ddb --- /dev/null +++ b/src/boost/tools/build/example/generator/jamroot.jam @@ -0,0 +1,6 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import soap ; +exe foo : foo.gci : on ; diff --git a/src/boost/tools/build/example/generator/soap.jam b/src/boost/tools/build/example/generator/soap.jam new file mode 100644 index 000000000..d53f3a54d --- /dev/null +++ b/src/boost/tools/build/example/generator/soap.jam @@ -0,0 +1,86 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This is example of a fictional code generator tool. +# It accepts a single input of type '.gci' and produces +# either one or two outputs of type .cpp, depending +# on the value of the feature +# +# This example is loosely based on gSOAP code generator. + +import type ; +import generators ; +import feature ; +import common ; +import "class" : new ; +import os ; + +type.register GCI : gci ; + +feature.feature server : off on : incidental ; + +class soap-generator : generator +{ + import "class" : new ; + + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(sources[2]) + { + # Accept only single source. + local t = [ $(sources[1]).type ] ; + if $(t) = GCI + { + # The type is correct. + + # If no output name is specified, guess it from sources. + if ! $(name) + { + name = [ generator.determine-output-name $(sources) ] ; + } + + # Produce one output, using just copy. + local a = [ new action $(sources[1]) + : common.copy : $(property-set) ] ; + local t = [ new file-target $(name) : CPP : $(project) + : $(a) ] ; + + # If in server mode, create another output -- an + # empty file. If this were a real SOAP generator, we + # might have created a single action, and two targets + # both using that action. 
+ local t2 ; + if [ $(property-set).get ] = "on" + { + local a = [ new action : soap.touch : $(property-set) ] ; + t2 = [ new file-target $(name)_server : CPP : $(project) + : $(a) ] ; + } + return [ virtual-target.register $(t) ] + [ virtual-target.register $(t2) ] ; + } + } + } +} + +generators.register [ new soap-generator soap.soap : GCI : CPP ] ; + +TOUCH = [ common.file-touch-command ] ; +actions touch +{ + $(TOUCH) $(<) +} + +if [ os.name ] = VMS +{ + actions touch + { + $(TOUCH) $(<:W) + } +} diff --git a/src/boost/tools/build/example/gettext/jamfile.jam b/src/boost/tools/build/example/gettext/jamfile.jam new file mode 100644 index 000000000..c2a5e9bef --- /dev/null +++ b/src/boost/tools/build/example/gettext/jamfile.jam @@ -0,0 +1,26 @@ +# Copyright 2003, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +# Declare a main target. +exe main : main.cpp ; + +# Declare an action for updating translations +# After changing main.cpp, invocation of +# +# bjam update-russian +# +# will update translations in russian.po +gettext.update update-russian : russian.po main ; + +# Compiled message catalog. +gettext.catalog russian : russian.po ; + +# A stage rule which installs message catalog to the +# location gettext expects. +stage messages-russian : russian + : messages/ru_RU.KOI8-R/LC_MESSAGES + main.mo + ; + diff --git a/src/boost/tools/build/example/gettext/jamroot.jam b/src/boost/tools/build/example/gettext/jamroot.jam new file mode 100644 index 000000000..14f87bb78 --- /dev/null +++ b/src/boost/tools/build/example/gettext/jamroot.jam @@ -0,0 +1,6 @@ +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +using gettext ; diff --git a/src/boost/tools/build/example/gettext/main.cpp b/src/boost/tools/build/example/gettext/main.cpp new file mode 100644 index 000000000..c44d02a81 --- /dev/null +++ b/src/boost/tools/build/example/gettext/main.cpp @@ -0,0 +1,28 @@ +// Copyright Vladimir Prus 2003. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#include +#include +#define i18n(s) gettext(s) + +#include +using namespace std; + +int main() +{ + // Specify that translations are stored in directory + // "messages". + bindtextdomain("main", "messages"); + textdomain("main"); + + // Switch to russian locale. + setlocale(LC_MESSAGES, "ru_RU.KOI8-R"); + + // Output localized message. + std::cout << i18n("hello") << "\n"; + + return 0; +} diff --git a/src/boost/tools/build/example/gettext/readme.txt b/src/boost/tools/build/example/gettext/readme.txt new file mode 100644 index 000000000..097e49b69 --- /dev/null +++ b/src/boost/tools/build/example/gettext/readme.txt @@ -0,0 +1,24 @@ +Copyright 2003 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +This example shows how it is possible to use GNU gettext utilities with +B2. + +A simple translation file is compiled and installed as message catalog for +russian. The main application explicitly switches to russian locale and outputs +the translation of "hello". 
+ +To test: + + bjam + bin/gcc/debug/main + +To test even more: + + - add more localized strings to "main.cpp" + - run "bjam update-russian" + - edit "russian.po" + - run bjam + - run "main" diff --git a/src/boost/tools/build/example/gettext/russian.po b/src/boost/tools/build/example/gettext/russian.po new file mode 100644 index 000000000..daa7121c3 --- /dev/null +++ b/src/boost/tools/build/example/gettext/russian.po @@ -0,0 +1,21 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2003-07-01 15:45+0400\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=CHARSET\n" +"Content-Transfer-Encoding: 8bit\n" + +#: main.cpp:16 +msgid "hello" +msgstr "international hello" diff --git a/src/boost/tools/build/example/hello/hello.cpp b/src/boost/tools/build/example/hello/hello.cpp new file mode 100644 index 000000000..888e78b1c --- /dev/null +++ b/src/boost/tools/build/example/hello/hello.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2003 Vladimir Prus +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +// tag::source[] +#include + +int main() +{ + std::cout << "Hello!\n"; +} +// end::source[] diff --git a/src/boost/tools/build/example/hello/jamroot.jam b/src/boost/tools/build/example/hello/jamroot.jam new file mode 100644 index 000000000..672ec02e9 --- /dev/null +++ b/src/boost/tools/build/example/hello/jamroot.jam @@ -0,0 +1 @@ +exe hello : hello.cpp ; diff --git a/src/boost/tools/build/example/hello/readme.adoc b/src/boost/tools/build/example/hello/readme.adoc new file mode 100644 index 000000000..d69f074e6 --- /dev/null +++ b/src/boost/tools/build/example/hello/readme.adoc @@ -0,0 +1,46 @@ +//// +Copyright 2008 Jurko Gospodnetic +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +//// + += Hello + +This example shows a very basic Boost Build project set up so it compiles a +single executable from a single source file: + +.`hello.cpp` +[source,cpp] +---- +include::../../example/hello/hello.cpp[tag=source] +---- + +Our `jamroot.jam` is minimal and only specifies one `exe` target for the +program: + +.`jamroot.jam` +[source,jam] +---- +include::jamroot.jam[] +---- + +Building the example yields: + +[source,bash] +---- +> cd /example/hello +> b2 +...found 8 targets... +...updating 4 targets... +common.mkdir bin/clang-darwin-4.2.1 +common.mkdir bin/clang-darwin-4.2.1/debug +clang-darwin.compile.c++ bin/clang-darwin-4.2.1/debug/hello.o +clang-darwin.link bin/clang-darwin-4.2.1/debug/hello +...updated 4 targets... +> bin/clang-darwin-4.2.1/debug/hello +Hello! +---- + +NOTE: The actual paths in the `bin` sub-directory will depend on your +toolset. diff --git a/src/boost/tools/build/example/libraries/app/app.cpp b/src/boost/tools/build/example/libraries/app/app.cpp new file mode 100644 index 000000000..5e5d49fc0 --- /dev/null +++ b/src/boost/tools/build/example/libraries/app/app.cpp @@ -0,0 +1,12 @@ +// Copyright (c) 2003 Vladimir Prus +// +// Distributed under the Boost Software License, Version 1.0. 
(See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include + +int main() +{ + foo(); +} diff --git a/src/boost/tools/build/example/libraries/app/jamfile.jam b/src/boost/tools/build/example/libraries/app/jamfile.jam new file mode 100644 index 000000000..f91cd05d0 --- /dev/null +++ b/src/boost/tools/build/example/libraries/app/jamfile.jam @@ -0,0 +1,9 @@ +# Copyright 2002, 2003, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +# Declare a executable file, which uses a library. Note that +# includes that for library will be automatically used +# when compiling 'app.cpp' +exe app : app.cpp /library-example/foo//bar ; diff --git a/src/boost/tools/build/example/libraries/jamroot.jam b/src/boost/tools/build/example/libraries/jamroot.jam new file mode 100644 index 000000000..5e0dc4814 --- /dev/null +++ b/src/boost/tools/build/example/libraries/jamroot.jam @@ -0,0 +1,4 @@ + +use-project /library-example/foo : util/foo ; + +build-project app ; diff --git a/src/boost/tools/build/example/libraries/util/foo/bar.cpp b/src/boost/tools/build/example/libraries/util/foo/bar.cpp new file mode 100644 index 000000000..2d6eb730f --- /dev/null +++ b/src/boost/tools/build/example/libraries/util/foo/bar.cpp @@ -0,0 +1,10 @@ +// Copyright (c) 2003 Vladimir Prus +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#ifdef _WIN32 +__declspec(dllexport) +#endif +void foo() {} diff --git a/src/boost/tools/build/example/libraries/util/foo/include/lib1.h b/src/boost/tools/build/example/libraries/util/foo/include/lib1.h new file mode 100644 index 000000000..9b0f98a5f --- /dev/null +++ b/src/boost/tools/build/example/libraries/util/foo/include/lib1.h @@ -0,0 +1,7 @@ +// Copyright (c) 2003 Vladimir Prus +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +void foo(); diff --git a/src/boost/tools/build/example/libraries/util/foo/jamfile.jam b/src/boost/tools/build/example/libraries/util/foo/jamfile.jam new file mode 100644 index 000000000..6741b1ca3 --- /dev/null +++ b/src/boost/tools/build/example/libraries/util/foo/jamfile.jam @@ -0,0 +1,9 @@ +# Copyright 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +project + : usage-requirements include ; + +lib bar : bar.cpp ; diff --git a/src/boost/tools/build/example/make/foo.py b/src/boost/tools/build/example/make/foo.py new file mode 100644 index 000000000..e4c0b824a --- /dev/null +++ b/src/boost/tools/build/example/make/foo.py @@ -0,0 +1,2 @@ +import sys +open(sys.argv[2], "w").write(open(sys.argv[1]).read()) diff --git a/src/boost/tools/build/example/make/jamroot.jam b/src/boost/tools/build/example/make/jamroot.jam new file mode 100644 index 000000000..3f5ec5b56 --- /dev/null +++ b/src/boost/tools/build/example/make/jamroot.jam @@ -0,0 +1,22 @@ +import feature ; +import toolset ; +import os ; + +path-constant HERE : . 
; +make main.cpp : main_cpp.pro : @do-something ; + +feature.feature example.python.interpreter : : free ; + +toolset.flags do-something PYTHON : ; +actions do-something +{ + "$(PYTHON:E=python)" "$(HERE)/foo.py" "$(>)" "$(<)" +} + +if [ os.name ] = VMS +{ + actions do-something + { + $(PYTHON:E=python) $(HERE:W)foo.py $(>:W) $(<:W) + } +} diff --git a/src/boost/tools/build/example/make/main_cpp.pro b/src/boost/tools/build/example/make/main_cpp.pro new file mode 100644 index 000000000..237c8ce18 --- /dev/null +++ b/src/boost/tools/build/example/make/main_cpp.pro @@ -0,0 +1 @@ +int main() {} diff --git a/src/boost/tools/build/example/make/readme.txt b/src/boost/tools/build/example/make/readme.txt new file mode 100644 index 000000000..333c55a71 --- /dev/null +++ b/src/boost/tools/build/example/make/readme.txt @@ -0,0 +1,7 @@ +Copyright 2002, 2005 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +Example of using custom command to create one file from another, using the +built-in 'make' rule. diff --git a/src/boost/tools/build/example/named-install-dirs/a b/src/boost/tools/build/example/named-install-dirs/a new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/example/named-install-dirs/build.jam b/src/boost/tools/build/example/named-install-dirs/build.jam new file mode 100644 index 000000000..00fe90128 --- /dev/null +++ b/src/boost/tools/build/example/named-install-dirs/build.jam @@ -0,0 +1,54 @@ +# showcasing several default install directories +install a1 : a : (bindir) ; +install a2 : a : (libdir)/a2 ; +install (sbindir)/a3 : a ; + +# using a custom prefix; the file will be installed into foo/bar/bin/a4 +install (bindir)/a4 : a : foo/bar ; + +# this one deduces installed package name to be the basename of the project +# directory, so e.g. on Linux the file will be installed installed into +# /usr/local/share/doc//a5 +install (docdir)/a5 : a : bar/baz ; + +# use a custom named directory; its default on Linux is /usr/local/share/xyz/ +import stage ; +stage.add-install-dir foodir : xyz : datadir ; +install (foodir)/a6 : a ; + + +# another custom named directory, this one appends package name like docdir; +# so, e.g. on Linux it defaults to /usr/local/lib/named-install-dirs +stage.add-install-dir privatedir : "" : libdir : package-suffix ; +install (privatedir)/a7 : a ; + +# using stage.get-package-name +make a8 : a : @write-dirs : p/q/r /bin ; + +rule write-dirs ( target : sources * : properties * ) +{ + import property-set ; + import print ; + local ps = [ property-set.create $(properties) ] ; + local pn = [ stage.get-package-name $(ps) ] ; + print.output $(target) ; + print.text + [ stage.get-dir docdir : $(ps) : $(pn) ] + [ stage.get-dir docdir : $(ps) : $(pn) : staged ] + [ stage.get-dir docdir : $(ps) : $(pn) : relative ] + [ stage.get-dir docdir : $(ps) : $(pn) : relative staged ] + [ stage.get-dir bindir : $(ps) : $(pn) : relative ] + : overwrite + ; +} + +# using staging prefix; on Linux installs into q/r/s/share/a9 +install (datarootdir)/a9 : a : q/r/s ; + + +build-project x ; + +# Copyright 2020 Dmitry Arkhipov +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) diff --git a/src/boost/tools/build/example/named-install-dirs/x/build.jam b/src/boost/tools/build/example/named-install-dirs/x/build.jam new file mode 100644 index 000000000..ceb409242 --- /dev/null +++ b/src/boost/tools/build/example/named-install-dirs/x/build.jam @@ -0,0 +1,5 @@ +# this subproject showcases installed package name deduction + +project subx ; +build-project y ; +build-project z ; diff --git a/src/boost/tools/build/example/named-install-dirs/x/y/build.jam b/src/boost/tools/build/example/named-install-dirs/x/y/build.jam new file mode 100644 index 000000000..69e52d1dc --- /dev/null +++ b/src/boost/tools/build/example/named-install-dirs/x/y/build.jam @@ -0,0 +1,9 @@ +# this subproject doesn't have a name, so its default package name is deduced +# from its parent + +install (docdir)/y1 : ../../a ; + +# Copyright 2020 Dmitry Arkhipov +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) diff --git a/src/boost/tools/build/example/named-install-dirs/x/z/build.jam b/src/boost/tools/build/example/named-install-dirs/x/z/build.jam new file mode 100644 index 000000000..5eaf76a5a --- /dev/null +++ b/src/boost/tools/build/example/named-install-dirs/x/z/build.jam @@ -0,0 +1,11 @@ +# this subproject does have a name, so its name is used as its default package +# name + +project subz ; + +install (docdir)/z1 : ../../a ; + +# Copyright 2020 Dmitry Arkhipov +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) diff --git a/src/boost/tools/build/example/pch-multi/include/extra/meta.hpp b/src/boost/tools/build/example/pch-multi/include/extra/meta.hpp new file mode 100644 index 000000000..6a85b0886 --- /dev/null +++ b/src/boost/tools/build/example/pch-multi/include/extra/meta.hpp @@ -0,0 +1,17 @@ +/* + Copyright Rene Rivera 2019 + + Distributed under the Boost Software License, Version 1.0. (See + accompanying file LICENSE_1_0.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ + +#ifndef B2_EXAMPLE_PCH_MULTI_EXTRA_META_HPP +#define B2_EXAMPLE_PCH_MULTI_EXTRA_META_HPP + +#include +#include +#include +#include + +#endif diff --git a/src/boost/tools/build/example/pch-multi/include/pch.hpp b/src/boost/tools/build/example/pch-multi/include/pch.hpp new file mode 100644 index 000000000..8f05cc43d --- /dev/null +++ b/src/boost/tools/build/example/pch-multi/include/pch.hpp @@ -0,0 +1,19 @@ +/* Copyright 2006 Vladimir Prus + + Distributed under the Boost Software License, Version 1.0. (See + accompanying file LICENSE_1_0.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ + +#ifdef BOOST_BUILD_PCH_ENABLED + +#ifdef FOO2 +int bar(); +#endif + +class TestClass { +public: + TestClass(int, int) {} +}; + +#endif diff --git a/src/boost/tools/build/example/pch-multi/include/std.hpp b/src/boost/tools/build/example/pch-multi/include/std.hpp new file mode 100644 index 000000000..89e76bf6a --- /dev/null +++ b/src/boost/tools/build/example/pch-multi/include/std.hpp @@ -0,0 +1,16 @@ +/* + Copyright Rene Rivera 2019 + + Distributed under the Boost Software License, Version 1.0. 
(See + accompanying file LICENSE_1_0.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ + +#ifndef B2_EXAMPLE_PCH_MULTI_STD_HPP +#define B2_EXAMPLE_PCH_MULTI_STD_HPP + +#include +#include +#include + +#endif diff --git a/src/boost/tools/build/example/pch-multi/jamroot.jam b/src/boost/tools/build/example/pch-multi/jamroot.jam new file mode 100644 index 000000000..6af21ff12 --- /dev/null +++ b/src/boost/tools/build/example/pch-multi/jamroot.jam @@ -0,0 +1,30 @@ +# Copyright 2006 Ilya Sokolov +# +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# pch ########################################################################## + +import pch ; + +local pchs ; +for local hpp in [ glob-tree *.hpp ] +{ + cpp-pch $(hpp:B) : $(hpp) : include ; + explicit $(hpp:B) ; + pchs += $(hpp:B) ; +} +alias headers : $(pchs) ; + +# exe ########################################################################## + +exe hello_world + : # sources + headers + source/hello_world.cpp + : # requirements + include + : # default build + : # usage requirements + ; diff --git a/src/boost/tools/build/example/pch-multi/source/hello_world.cpp b/src/boost/tools/build/example/pch-multi/source/hello_world.cpp new file mode 100644 index 000000000..885208c1a --- /dev/null +++ b/src/boost/tools/build/example/pch-multi/source/hello_world.cpp @@ -0,0 +1,17 @@ +/* Copyright 2006 Ilya Sokolov + Copyright 2006 Vladimir Prus + + Distributed under the Boost Software License, Version 1.0. (See + accompanying file LICENSE.txt or copy at + https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#include +#include +#include + +int main() +{ + TestClass c(1, 2); + return 0; +} diff --git a/src/boost/tools/build/example/pch/include/pch.hpp b/src/boost/tools/build/example/pch/include/pch.hpp new file mode 100644 index 000000000..8f05cc43d --- /dev/null +++ b/src/boost/tools/build/example/pch/include/pch.hpp @@ -0,0 +1,19 @@ +/* Copyright 2006 Vladimir Prus + + Distributed under the Boost Software License, Version 1.0. (See + accompanying file LICENSE_1_0.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ + +#ifdef BOOST_BUILD_PCH_ENABLED + +#ifdef FOO2 +int bar(); +#endif + +class TestClass { +public: + TestClass(int, int) {} +}; + +#endif diff --git a/src/boost/tools/build/example/pch/jamroot.jam b/src/boost/tools/build/example/pch/jamroot.jam new file mode 100644 index 000000000..ad6595567 --- /dev/null +++ b/src/boost/tools/build/example/pch/jamroot.jam @@ -0,0 +1,29 @@ +# Copyright 2006 Ilya Sokolov +# +# Distributed under the Boost Software License, Version 1.0. 
(See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# pch ########################################################################## + +import pch ; + +cpp-pch pch + : # sources + include/pch.hpp + : # requirements + include + ; +explicit pch ; + +# exe ########################################################################## + +exe hello_world + : # sources + pch + source/hello_world.cpp + : # requirements + include + : # default build + : # usage requirements + ; diff --git a/src/boost/tools/build/example/pch/source/hello_world.cpp b/src/boost/tools/build/example/pch/source/hello_world.cpp new file mode 100644 index 000000000..db8b91f2d --- /dev/null +++ b/src/boost/tools/build/example/pch/source/hello_world.cpp @@ -0,0 +1,15 @@ +/* Copyright 2006 Ilya Sokolov + Copyright 2006 Vladimir Prus + + Distributed under the Boost Software License, Version 1.0. (See + accompanying file LICENSE.txt or copy at + https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#include + +int main() +{ + TestClass c(1, 2); + return 0; +} diff --git a/src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc b/src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc new file mode 100644 index 000000000..8b2d744ef --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/debug-packages/debugged.pc @@ -0,0 +1,4 @@ +Name: debugged +Version: 0.1 +Description: A package with separate debug version (debug version) +Cflags: -DVARIANT=\"DEBUG\" diff --git a/src/boost/tools/build/example/pkg-config/jamroot.jam b/src/boost/tools/build/example/pkg-config/jamroot.jam new file mode 100644 index 000000000..ca945f9e3 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/jamroot.jam @@ -0,0 +1,104 @@ +#| +Copyright 2019 Dmitry Arkhipov +Distributed under the Boost Software License, Version 1.0. 
(See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + + +using pkg-config : : : packages ; +using pkg-config : debug : : packages debug-packages ; + +import common ; +import pkg-config ; +import property-set ; +import testing ; +import version ; + + +project : requirements debug:debug ; + + +pkg-config.import debugged ; +pkg-config.import foobar : requirements >=0.3 ; +pkg-config.import mangled : requirements @mangle-name ; + +versioned = + [ pkg-config.import versioned + : usage-requirements @versioned-api + ] ; + +with-var = + [ pkg-config.import with-var + : usage-requirements @var-to-define + ] ; + + +# test if a package is found at all +run test1.cpp foobar ; + +# test if conditional requirement is applied +run test2.cpp mangled + : target-name test2-1 + : requirements single + : args SINGLE + ; + +run test2.cpp mangled + : target-name test2-2 + : requirements multi + : args MULTI + ; + +# test if pkg-config configuration is properly inferred from property set +run test3.cpp debugged + : target-name test3-1 + : requirements release + : args RELEASE + ; + +run test3.cpp debugged + : target-name test3-2 + : requirements debug + : args DEBUG + ; + +# test use of version method of pkg-config targets +run test4.cpp versioned ; + +# test use of variable method of pkg-config targets +run test5.cpp with-var ; + + +rule mangle-name ( props * ) { + import feature ; + local name = + [ common.format-name + + : mangled + : "" + : [ property-set.create $(props) ] + ] ; + return $(name) ; +} + + +rule versioned-api ( props * ) { + local ps = [ property-set.create $(props) ] ; + local version = [ $(versioned).version $(ps) ] ; + if [ version.version-less $(version) : 2 ] + { + return VERSIONED_API=1 ; + } + else + { + return VERSIONED_API=2 ; + } +} + + +rule var-to-define ( props * ) { + local ps = [ property-set.create $(props) ] ; + local qwerty = [ $(with-var).variable qwerty : $(ps) ] ; + return QWERTY=\\\"$(qwerty)\\\" ; +} diff --git a/src/boost/tools/build/example/pkg-config/packages/debugged.pc b/src/boost/tools/build/example/pkg-config/packages/debugged.pc new file mode 100644 index 000000000..b22e10d8b --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/packages/debugged.pc @@ -0,0 +1,4 @@ +Name: debugged +Version: 0.1 +Description: A package with separate debug version (release version) +Cflags: -DVARIANT=\"RELEASE\" diff --git a/src/boost/tools/build/example/pkg-config/packages/foobar.pc b/src/boost/tools/build/example/pkg-config/packages/foobar.pc new file mode 100644 index 000000000..f62cfc820 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/packages/foobar.pc @@ -0,0 +1,4 @@ +Name: foobar +Version: 0.3 +Description: The bar for your foo +Cflags: -DQWERTY=\"uiop\" diff --git a/src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc b/src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc new file mode 100644 index 000000000..107b4d3d3 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/packages/mangled-mt.pc @@ -0,0 +1,4 @@ +Name: mangled +Version: 0.1 +Description: A package with mangled name (multi-threaded version) +Cflags: -DTHREADING=\"MULTI\" diff --git a/src/boost/tools/build/example/pkg-config/packages/mangled.pc b/src/boost/tools/build/example/pkg-config/packages/mangled.pc new file mode 100644 index 000000000..76976ecc1 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/packages/mangled.pc @@ -0,0 +1,4 @@ +Name: mangled +Version: 0.1 +Description: A package with mangled name 
(single-threaded version) +Cflags: -DTHREADING=\"SINGLE\" diff --git a/src/boost/tools/build/example/pkg-config/packages/versioned.pc b/src/boost/tools/build/example/pkg-config/packages/versioned.pc new file mode 100644 index 000000000..701f35146 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/packages/versioned.pc @@ -0,0 +1,3 @@ +Name: versioned +Version: 4.2 +Description: A package with versioned API diff --git a/src/boost/tools/build/example/pkg-config/packages/with-var.pc b/src/boost/tools/build/example/pkg-config/packages/with-var.pc new file mode 100644 index 000000000..4b3e2e558 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/packages/with-var.pc @@ -0,0 +1,4 @@ +qwerty=UIOP +Name: with-var +Version: 0.1 +Description: A package that defines a custom variable diff --git a/src/boost/tools/build/example/pkg-config/test1.cpp b/src/boost/tools/build/example/pkg-config/test1.cpp new file mode 100644 index 000000000..c554928c0 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/test1.cpp @@ -0,0 +1,11 @@ +// Copyright 2019 Dmitry Arkhipov +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#include + +int main() { + return QWERTY == std::string("uiop") ? EXIT_SUCCESS : EXIT_FAILURE ; +} diff --git a/src/boost/tools/build/example/pkg-config/test2.cpp b/src/boost/tools/build/example/pkg-config/test2.cpp new file mode 100644 index 000000000..d8a987f0d --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/test2.cpp @@ -0,0 +1,12 @@ +// Copyright 2019 Dmitry Arkhipov +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#include +#include + +int main(int, char const** argv) { + return THREADING == std::string(argv[1]) ? EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/src/boost/tools/build/example/pkg-config/test3.cpp b/src/boost/tools/build/example/pkg-config/test3.cpp new file mode 100644 index 000000000..e161cd00f --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/test3.cpp @@ -0,0 +1,12 @@ +// Copyright 2019 Dmitry Arkhipov +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#include +#include + +int main(int, char const** argv) { + return VARIANT == std::string(argv[1]) ? EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/src/boost/tools/build/example/pkg-config/test4.cpp b/src/boost/tools/build/example/pkg-config/test4.cpp new file mode 100644 index 000000000..c8828b7f6 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/test4.cpp @@ -0,0 +1,11 @@ +// Copyright 2019 Dmitry Arkhipov +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#if VERSIONED_API < 2 +# error "API is too old" +#endif + +int main() {} diff --git a/src/boost/tools/build/example/pkg-config/test5.cpp b/src/boost/tools/build/example/pkg-config/test5.cpp new file mode 100644 index 000000000..7b7f974d2 --- /dev/null +++ b/src/boost/tools/build/example/pkg-config/test5.cpp @@ -0,0 +1,12 @@ +// Copyright 2019 Dmitry Arkhipov +// Distributed under the Boost Software License, Version 1.0. 
(See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#include +#include + +int main(int, char const** argv) { + return QWERTY == std::string("UIOP") ? EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/src/boost/tools/build/example/python_modules/jamroot.jam b/src/boost/tools/build/example/python_modules/jamroot.jam new file mode 100644 index 000000000..29972a480 --- /dev/null +++ b/src/boost/tools/build/example/python_modules/jamroot.jam @@ -0,0 +1,8 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import python_helpers ; + +ECHO "test1:" [ python_helpers.test1 ] ; +ECHO "test2:" [ python_helpers.test2 1234 : 5678 ] ; diff --git a/src/boost/tools/build/example/python_modules/python_helpers.jam b/src/boost/tools/build/example/python_modules/python_helpers.jam new file mode 100644 index 000000000..5519515f2 --- /dev/null +++ b/src/boost/tools/build/example/python_modules/python_helpers.jam @@ -0,0 +1,15 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import modules ; +local here = [ modules.binding $(__name__) ] ; +here = $(here:D) ; +modules.poke : EXTRA_PYTHONPATH : $(here) ; + +# Import the Python rules to B2 +PYTHON_IMPORT_RULE python_helpers : test1 : python_helpers : test1 ; +PYTHON_IMPORT_RULE python_helpers : test2 : python_helpers : test2 ; + +# Make the new rules accessible to everybody who imports us. +EXPORT python_helpers : test1 test2 ; diff --git a/src/boost/tools/build/example/python_modules/python_helpers.py b/src/boost/tools/build/example/python_modules/python_helpers.py new file mode 100644 index 000000000..6fccd7e8c --- /dev/null +++ b/src/boost/tools/build/example/python_modules/python_helpers.py @@ -0,0 +1,18 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Declare a couple of functions called from B2 +# +# Each function will receive as many arguments as there ":"-separated +# arguments in bjam call. Each argument is a list of strings. +# As a special exception (aka bug), if no arguments are passed in bjam, +# Python function will be passed a single empty list. +# +# All Python functions must return a list of strings, which may be empty. + +def test1(l): + return ["foo", "bar"] + +def test2(l, l2): + return [l[0], l2[0]] \ No newline at end of file diff --git a/src/boost/tools/build/example/python_modules/readme.txt b/src/boost/tools/build/example/python_modules/readme.txt new file mode 100644 index 000000000..76b219117 --- /dev/null +++ b/src/boost/tools/build/example/python_modules/readme.txt @@ -0,0 +1,16 @@ +Copyright 2006 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +This example shows how you can use Python modules from B2. + +In order to do this, you need to build bjam with Python support, by running: + + ./build.sh --with-python=/usr + +in the jam/src directory (replace /usr with the root of your Python +installation). + +The integration between Python and bjam is very basic now, but enough to be +useful. 
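
As a sketch of what the integration gives you (this mirrors python_helpers.jam and
jamroot.jam from this example; the ECHO line below is only illustrative):

  import python_helpers ;
  ECHO [ python_helpers.test2 abc : def ] ;  # prints "abc def"

With the helpers above, running bjam in this directory should echo something like
"test1: foo bar" and "test2: 1234 5678", matching the values returned by the
functions in python_helpers.py.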
diff --git a/src/boost/tools/build/example/qt/README.txt b/src/boost/tools/build/example/qt/README.txt new file mode 100644 index 000000000..d6977b584 --- /dev/null +++ b/src/boost/tools/build/example/qt/README.txt @@ -0,0 +1,20 @@ +Copyright 2005 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +This directory contains B2 examples for the Qt library +(http://www.trolltech.com/products/qt/index.html). + +The current examples are: + 1. Basic setup -- application with several sources and moccable header. + 2. Using of .ui source file. + 3. Running .cpp files via the moc tool. + +For convenience, there are examples both for 3.* and 4.* version of Qt, they are +mostly identical and differ only in source code. + +All examples assumes that you just installed B2 and that QTDIR +environment variables is set (typical values can be /usr/share/qt3 and +/usr/share/qt4). After adding "using qt ..." to your user-config.jam, you would +have to remove "using qt ; " statements from example Jamroot files. diff --git a/src/boost/tools/build/example/qt/qt3/hello/canvas.cpp b/src/boost/tools/build/example/qt/qt3/hello/canvas.cpp new file mode 100644 index 000000000..823e827a4 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/hello/canvas.cpp @@ -0,0 +1,73 @@ +// Copyright Vladimir Prus 2004. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include "canvas.h" + +#include +#include +#include + +Canvas::Canvas(QWidget* parent) +: QWidget(parent) +{ + m_pen = QPen(QColor(255, 128, 128)); + m_brushes = new QBrush[2]; + m_brushes[0] = QBrush(QColor(255, 0, 0)); + m_brushes[1] = QBrush(QColor(0, 255, 0)); + m_current_brush = 0; + + m_canvas = new QCanvas(this); + m_canvas->resize(4*1600, 600); + + redraw(); + + QVBoxLayout* l = new QVBoxLayout(this); + + m_canvas_view = new QCanvasView(m_canvas, this); + l->addWidget(m_canvas_view); + m_canvas_view->resize(rect().size()); + m_canvas_view->show(); +} + +Canvas::~Canvas() +{ + delete m_brushes; +} + +void Canvas::redraw() +{ + QCanvasItemList l = m_canvas->allItems(); + for(QCanvasItemList::iterator i = l.begin(), + e = l.end(); i != e; ++i) + { + delete *i; + } + + unsigned count = 0; + for (unsigned x = 10; x < 4*1600; x += 20) + for (unsigned y = 10; y < 600; y += 20) { + QCanvasRectangle* r = new QCanvasRectangle(x, y, 10, 10, m_canvas); + r->setPen(m_pen); + r->setBrush(m_brushes[m_current_brush]); + r->show(); + ++count; + QCanvasText* t = new QCanvasText("D", m_canvas); + t->move(x, y); + t->show(); + ++count; + } + + (new QCanvasText(QString::number(count), m_canvas))->show(); + m_canvas->setAllChanged(); + +} + +void Canvas::change_color() +{ + m_current_brush = (m_current_brush + 1)%2; + redraw(); + m_canvas->update(); +} + diff --git a/src/boost/tools/build/example/qt/qt3/hello/canvas.h b/src/boost/tools/build/example/qt/qt3/hello/canvas.h new file mode 100644 index 000000000..865fc6549 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/hello/canvas.h @@ -0,0 +1,35 @@ +// Copyright Vladimir Prus 2004. +// Distributed under the Boost Software License, Version 1.0. 
+// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#ifndef CANVAS_VP_2004_08_31 +#define CANVAS_VP_2004_08_31 + +#include +#include +#include + +class Canvas : public QWidget +{ + Q_OBJECT +public: + Canvas(QWidget* parent); + + virtual ~Canvas(); + +public slots: + void change_color(); + +private: + void redraw(); + class QCanvas* m_canvas; + class QCanvasView* m_canvas_view; + class QPen m_pen; + class QBrush* m_brushes; + int m_current_brush; +}; + +#endif + diff --git a/src/boost/tools/build/example/qt/qt3/hello/jamroot.jam b/src/boost/tools/build/example/qt/qt3/hello/jamroot.jam new file mode 100644 index 000000000..09b3fef0c --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/hello/jamroot.jam @@ -0,0 +1,13 @@ +# Copyright Vladimir Prus 2004. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +using qt ; + +project + # built MT version, unless asked otherwise. + : default-build multi + ; + +exe canvas : main.cpp canvas.cpp canvas.h : /qt//qt ; \ No newline at end of file diff --git a/src/boost/tools/build/example/qt/qt3/hello/main.cpp b/src/boost/tools/build/example/qt/qt3/hello/main.cpp new file mode 100644 index 000000000..0f1c8c3f9 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/hello/main.cpp @@ -0,0 +1,36 @@ +// Copyright Vladimir Prus 2004. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include "canvas.h" +#include +#include +#include + +class Window : public QMainWindow +{ +public: + Window() + { + setCaption("QCanvas test"); + QVBox* vb = new QVBox(this); + setCentralWidget(vb); + + Canvas* c = new Canvas(vb); + QPushButton* b = new QPushButton("Change color", vb); + connect(b, SIGNAL(clicked()), c, SLOT(change_color())); + } +}; + +int main(int argc, char **argv) +{ + QApplication app(argc, argv); + Window *w = new Window(); + + app.setMainWidget(w); + w->show(); + + return app.exec(); +} + diff --git a/src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam b/src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam new file mode 100644 index 000000000..85778da20 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam @@ -0,0 +1,11 @@ + +using qt ; +import cast ; + +project + : default-build multi + ; + +exe main : main.cpp [ cast _ moccable-cpp : main.cpp ] + /qt//qt + ; diff --git a/src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp b/src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp new file mode 100644 index 000000000..63533ba58 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/moccable-cpp/main.cpp @@ -0,0 +1,41 @@ +// Copyright Vladimir Prus 2005. +// Distributed under the Boost Software License, Version 1.0. 
+// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + + +#include +#include +#include + +#include + +class My_widget : public QWidget +{ + Q_OBJECT +public: + My_widget() : QWidget() + { + QPushButton* b = new QPushButton("Push me", this); + + connect(b, SIGNAL(clicked()), this, SLOT(theSlot())); + } + +private slots: + void theSlot() + { + std::cout << "Clicked\n"; + } + +}; + +int main(int ac, char* av[]) +{ + QApplication app(ac, av); + My_widget mw; + mw.show(); + app.setMainWidget(&mw); + app.exec(); +} + +#include "main.moc" diff --git a/src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui b/src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui new file mode 100644 index 000000000..26cc73487 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/uic/hello_world_widget.ui @@ -0,0 +1,58 @@ + +HelloWorldWidget + + + + + + HelloWorldWidget + + + + 0 + 0 + 124 + 63 + + + + Hello World! + + + + unnamed + + + 11 + + + 6 + + + + TextLabel2 + + + Hello World! + + + AlignCenter + + + + + OkButton + + + OK + + + + + + diff --git a/src/boost/tools/build/example/qt/qt3/uic/jamroot.jam b/src/boost/tools/build/example/qt/qt3/uic/jamroot.jam new file mode 100644 index 000000000..d53df7a58 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/uic/jamroot.jam @@ -0,0 +1,15 @@ +# Copyright Felix E. Klee, 2003 +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Tell that QT should be used. QTDIR will give installation +# prefix. +using qt ; + +project + : default-build multi + ; + +exe hello : main.cpp hello_world_widget.ui : /qt//qt ; + diff --git a/src/boost/tools/build/example/qt/qt3/uic/main.cpp b/src/boost/tools/build/example/qt/qt3/uic/main.cpp new file mode 100644 index 000000000..c48605574 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt3/uic/main.cpp @@ -0,0 +1,18 @@ +// Copyright Felix E. Klee, 2003 +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include "hello_world_widget.h" +#include + +#include + +int main(int argc, char **argv) { + QApplication a(argc, argv); + HelloWorldWidget w; + QObject::connect(static_cast(w.OkButton), SIGNAL(clicked()), &w, SLOT(close())); + a.setMainWidget(&w); + w.show(); + return a.exec(); +} diff --git a/src/boost/tools/build/example/qt/qt4/hello/arrow.cpp b/src/boost/tools/build/example/qt/qt4/hello/arrow.cpp new file mode 100644 index 000000000..5f36ed5a8 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/hello/arrow.cpp @@ -0,0 +1,158 @@ +// Copyright Vladimir Prus 2005. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include "arrow.h" + +#include + +#include +#include +#include + +#include +#include + +Arrow_widget::Arrow_widget(QWidget* parent) : QWidget(parent), color_(0) +{ + QPalette pal = palette(); + pal.setBrush(backgroundRole(), QBrush(Qt::white)); + setPalette(pal); +} + +void Arrow_widget::slotChangeColor() +{ + color_ = (color_ + 1) % 3; + update(); +} + +void +Arrow_widget::draw_arrow(int x1, int y1, int x2, int y2, QPainter& painter) +{ + // The length of the from the tip of the arrow to the point + // where line starts. 
+ const int arrowhead_length = 16; + + QPainterPath arrow; + arrow.moveTo(x1, y1); + + // Determine the angle of the straight line. + double a1 = (x2-x1); + double a2 = (y2-y1); + double b1 = 1; + double b2 = 0; + + double straight_length = sqrt(a1*a1 + a2*a2); + + double dot_product = a1*b1 + a2*b2; + double cosine = dot_product/ + (sqrt(pow(a1, 2) + pow(a2, 2))*sqrt(b1 + b2)); + double angle = acos(cosine); + if (y1 < y2) + { + angle = -angle; + } + double straight_angle = angle*180/M_PI; + + double limit = 10; + + double angle_to_vertical; + if (fabs(straight_angle) < 90) + angle_to_vertical = fabs(straight_angle); + else if (straight_angle > 0) + angle_to_vertical = 180-straight_angle; + else + angle_to_vertical = 180-(-straight_angle); + + double angle_delta = 0; + if (angle_to_vertical > limit) + angle_delta = 30 * (angle_to_vertical - limit)/90; + double start_angle = straight_angle > 0 + ? straight_angle - angle_delta : + straight_angle + angle_delta; + + + QMatrix m1; + m1.translate(x1, y1); + m1.rotate(-start_angle); + + double end_angle = straight_angle > 0 + ? (straight_angle + 180 + angle_delta) : + (straight_angle + 180 - angle_delta); + + QMatrix m2; + m2.reset(); + m2.translate(x2, y2); + m2.rotate(-end_angle); + + arrow.cubicTo(m1.map(QPointF(straight_length/2, 0)), + m2.map(QPointF(straight_length/2, 0)), + m2.map(QPointF(arrowhead_length, 0))); + + painter.save(); + painter.setBrush(Qt::NoBrush); + painter.drawPath(arrow); + painter.restore(); + + painter.save(); + painter.translate(x2, y2); + + painter.rotate(-90); + painter.rotate(-end_angle); + painter.rotate(180); + + QPolygon arrowhead(4); + arrowhead.setPoint(0, 0, 0); + arrowhead.setPoint(1, arrowhead_length/3, -arrowhead_length*5/4); + arrowhead.setPoint(2, 0, -arrowhead_length); + arrowhead.setPoint(3, -arrowhead_length/3, -arrowhead_length*5/4); + + painter.drawPolygon(arrowhead); + + painter.restore(); + +} + + +void Arrow_widget::paintEvent(QPaintEvent*) +{ + QPainter p(this); + + p.setRenderHint(QPainter::Antialiasing); + + int base_x = 550; + int base_y = 200; + + if (color_ == 0) + p.setBrush(Qt::black); + else if (color_ == 1) + p.setBrush(Qt::green); + else if (color_ == 2) + p.setBrush(Qt::yellow); + else + p.setBrush(Qt::black); + + for (int x_step = 0; x_step < 6; ++x_step) + { + for (int y_step = 1; y_step <= 3; ++y_step) + { + draw_arrow(base_x, base_y, base_x+x_step*100, + base_y - y_step*50, p); + + draw_arrow(base_x, base_y, base_x+x_step*100, + base_y + y_step*50, p); + + draw_arrow(base_x, base_y, base_x-x_step*100, + base_y + y_step*50, p); + + draw_arrow(base_x, base_y, base_x-x_step*100, + base_y - y_step*50, p); + } + } + + draw_arrow(50, 400, 1000, 450, p); + draw_arrow(1000, 400, 50, 450, p); + +} + diff --git a/src/boost/tools/build/example/qt/qt4/hello/arrow.h b/src/boost/tools/build/example/qt/qt4/hello/arrow.h new file mode 100644 index 000000000..8375d13c9 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/hello/arrow.h @@ -0,0 +1,30 @@ +// Copyright Vladimir Prus 2005. +// Distributed under the Boost Software License, Version 1.0. 
+// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include + +#include +#include +#include + +#include +#include + +class Arrow_widget : public QWidget +{ + Q_OBJECT +public: + Arrow_widget(QWidget* parent = 0); + +public slots: + void slotChangeColor(); + +private: + void draw_arrow(int x1, int y1, int x2, int y2, QPainter& painter); + void paintEvent(QPaintEvent*); + +private: + int color_; +}; diff --git a/src/boost/tools/build/example/qt/qt4/hello/jamroot.jam b/src/boost/tools/build/example/qt/qt4/hello/jamroot.jam new file mode 100644 index 000000000..83952f17b --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/hello/jamroot.jam @@ -0,0 +1,14 @@ + +import qt4 ; + +if ! [ qt4.initialized ] +{ + ECHO "Warning: Qt4 not initialized in user-config.jam" ; + ECHO "Assuming /space/p2/ghost/build/Qt4 as location." ; + ECHO "This is very likely won't work for you. " ; + using qt4 : /space/p2/ghost/build/Qt4 ; +} + +project : requirements multi ; + +exe arrow : main.cpp arrow.cpp arrow.h /qt//QtGui ; \ No newline at end of file diff --git a/src/boost/tools/build/example/qt/qt4/hello/main.cpp b/src/boost/tools/build/example/qt/qt4/hello/main.cpp new file mode 100644 index 000000000..bf4913666 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/hello/main.cpp @@ -0,0 +1,27 @@ +// Copyright Vladimir Prus 2005. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include "arrow.h" + +#include +#include + +int main(int ac, char* av[]) +{ + QApplication app(ac, av); + Arrow_widget* w = new Arrow_widget; + w->resize(1100, 480); + + QTimer timer; + QObject::connect(&timer, SIGNAL(timeout()), + w, SLOT(slotChangeColor())); + + timer.start(2000); + + w->show(); + app.exec(); + return 0; +} + diff --git a/src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam b/src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam new file mode 100644 index 000000000..d07b9c7d3 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam @@ -0,0 +1,18 @@ + +import qt4 ; +if ! [ qt4.initialized ] +{ + ECHO "Warning: Qt4 not initialized in user-config.jam" ; + ECHO "Assuming /space/p2/ghost/build/Qt4 as location." ; + ECHO "This is very likely won't work for you. " ; + using qt4 : /space/p2/ghost/build/Qt4 ; +} + +import cast ; +exe main : main.cpp + [ cast _ moccable-cpp : main.cpp ] + /qt//QtGui + : multi + ; + + diff --git a/src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp b/src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp new file mode 100644 index 000000000..f8d4a43e2 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/moccable-cpp/main.cpp @@ -0,0 +1,39 @@ +// Copyright Vladimir Prus 2005. +// Distributed under the Boost Software License, Version 1.0. 
+// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include +#include + +#include + +class My_widget : public QWidget +{ + Q_OBJECT +public: + My_widget() : QWidget() + { + QPushButton* b = new QPushButton("Push me", this); + + connect(b, SIGNAL(clicked()), this, SLOT(theSlot())); + } + +private slots: + void theSlot() + { + std::cout << "Clicked\n"; + } + +}; + +int main(int ac, char* av[]) +{ + QApplication app(ac, av); + My_widget mw; + mw.show(); + app.exec(); +} + +#include "main.moc" diff --git a/src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui b/src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui new file mode 100644 index 000000000..67060b336 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/uic/hello_world_widget.ui @@ -0,0 +1,55 @@ + + + + + + + HelloWorldWidget + + + + 0 + 0 + 124 + 63 + + + + Hello World! + + + + 11 + + + 6 + + + + + Hello World! + + + Qt::AlignCenter + + + + + + + OK + + + + + + + qPixmapFromMimeSource + + + diff --git a/src/boost/tools/build/example/qt/qt4/uic/jamroot.jam b/src/boost/tools/build/example/qt/qt4/uic/jamroot.jam new file mode 100644 index 000000000..5af938c95 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/uic/jamroot.jam @@ -0,0 +1,18 @@ +# Copyright Felix E. Klee, 2003 +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import qt4 ; +if ! [ qt4.initialized ] +{ + ECHO "Warning: Qt4 not initialized in user-config.jam" ; + ECHO "Assuming /space/p2/ghost/build/Qt4 as location." ; + ECHO "This is very likely won't work for you. " ; + using qt4 : /space/p2/ghost/build/Qt4 ; +} + +project : requirements multi + ; + +exe hello : main.cpp hello_world_widget.ui : /qt//QtGui ; diff --git a/src/boost/tools/build/example/qt/qt4/uic/main.cpp b/src/boost/tools/build/example/qt/qt4/uic/main.cpp new file mode 100644 index 000000000..6b217ec19 --- /dev/null +++ b/src/boost/tools/build/example/qt/qt4/uic/main.cpp @@ -0,0 +1,23 @@ +// Copyright Felix E. Klee, 2003 +// Distributed under the Boost Software License, Version 1.0. 
+// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include "ui_hello_world_widget.h" +#include +#include + +#include + +int main(int argc, char **argv) { + QApplication a(argc, argv); + + QWidget w; + Ui::HelloWorldWidget wm; + wm.setupUi(&w); + + QObject::connect(wm.OkButton, SIGNAL(clicked()), &w, SLOT(close())); + + w.show(); + return a.exec(); +} diff --git a/src/boost/tools/build/example/sanitizers/jamroot.jam b/src/boost/tools/build/example/sanitizers/jamroot.jam new file mode 100644 index 000000000..4b3bda916 --- /dev/null +++ b/src/boost/tools/build/example/sanitizers/jamroot.jam @@ -0,0 +1 @@ +exe main : main.cpp ; diff --git a/src/boost/tools/build/example/sanitizers/main.cpp b/src/boost/tools/build/example/sanitizers/main.cpp new file mode 100644 index 000000000..62e6f3b60 --- /dev/null +++ b/src/boost/tools/build/example/sanitizers/main.cpp @@ -0,0 +1,9 @@ +#include + +// tag::source[] +int main() +{ + char* c = nullptr; + std::cout << "Hello sanitizers\n " << *c; +} +// end::source[] diff --git a/src/boost/tools/build/example/sanitizers/readme.adoc b/src/boost/tools/build/example/sanitizers/readme.adoc new file mode 100644 index 000000000..efcc8c3a9 --- /dev/null +++ b/src/boost/tools/build/example/sanitizers/readme.adoc @@ -0,0 +1,64 @@ +//// +Copyright 2019 Damian Jarek +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +//// + += Sanitizers + +This example shows how to enable sanitizers when using a clang or gcc toolset + +.`main.cpp` +[source,cpp] +---- +include::../../example/sanitizers/main.cpp[tag=source] +---- + +Our `jamroot.jam` is minimal and only specifies one `exe` target for the +program: + +.`jamroot.jam` +[source,jam] +---- +include::jamroot.jam[] +---- + +Sanitizers can be enabled by passing `on` or `norecover` to the appropriate sanitizer feature +(e.g. `thread-sanitizer=on`). The `norecover` option causes the program to terminate after +the first sanitizer issue is detected. The following example shows how to enable `address` and `undefined` +sanitizers in a simple program: + +[source,bash] +---- +> cd /example/sanitizers +> b2 toolset=gcc address-sanitizer=norecover undefined-sanitizer=on +...found 10 targets... +...updating 7 targets... +gcc.compile.c++ bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main.o +gcc.link bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main +...updated 7 targets... +---- + +Running the produced program may produce an output simillar to the following: + +[source,bash] +---- +> ./bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main +Hello sanitizers +main.cpp:6:43: runtime error: load of null pointer of type 'char' +ASAN:DEADLYSIGNAL +================================================================= +==29767==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000000 (pc 0x55ba7988af1b bp 0x7ffdf3d76560 sp 0x7ffdf3d76530 T0) +==29767==The signal is caused by a READ memory access. +==29767==Hint: address points to the zero page. 
+ #0 0x55ba7988af1a in main /home/damian/projects/boost/tools/build/example/sanitizers/main.cpp:6 + #1 0x7f42f2ba1b96 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x21b96) + #2 0x55ba7988adb9 in _start (/home/damian/projects/boost/tools/build/example/sanitizers/bin/gcc-7.3.0/debug/address-sanitizer-norecover/undefined-sanitizer-on/main+0xdb9) + +AddressSanitizer can not provide additional info. +SUMMARY: AddressSanitizer: SEGV /home/damian/projects/boost/tools/build/example/sanitizers/main.cpp:6 in main +==29767==ABORTING +---- + +NOTE: The actual paths in the `bin` sub-directory will depend on your +toolset and configuration. The presented output may vary depending on your compiler version. diff --git a/src/boost/tools/build/example/sass/importing.scss b/src/boost/tools/build/example/sass/importing.scss new file mode 100644 index 000000000..0c3586afc --- /dev/null +++ b/src/boost/tools/build/example/sass/importing.scss @@ -0,0 +1,3 @@ +@import "foobar"; + +body { color: red; } diff --git a/src/boost/tools/build/example/sass/include/foobar.scss b/src/boost/tools/build/example/sass/include/foobar.scss new file mode 100644 index 000000000..2c77cef1f --- /dev/null +++ b/src/boost/tools/build/example/sass/include/foobar.scss @@ -0,0 +1,3 @@ +body { + border: { color: red; } +} diff --git a/src/boost/tools/build/example/sass/jamroot.jam b/src/boost/tools/build/example/sass/jamroot.jam new file mode 100644 index 000000000..837477225 --- /dev/null +++ b/src/boost/tools/build/example/sass/jamroot.jam @@ -0,0 +1,15 @@ +#| +Copyright 2017 Dmitry Arkhipov +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +css stylesheet1 : singleton.scss : "--precision 1" ; +css stylesheet2 : singleton.sass ; +css stylesheet3 : importing.scss : include ; +css stylesheet4 + : singleton.scss + : expanded + off + ; diff --git a/src/boost/tools/build/example/sass/singleton.sass b/src/boost/tools/build/example/sass/singleton.sass new file mode 100644 index 000000000..455fefdd1 --- /dev/null +++ b/src/boost/tools/build/example/sass/singleton.sass @@ -0,0 +1,12 @@ +body + p + line-height: 1.5em + + span + font-weight: 700 + a + text-decoration: none + + &:hover + text-decoration: underline + font-size: (10px/3) diff --git a/src/boost/tools/build/example/sass/singleton.scss b/src/boost/tools/build/example/sass/singleton.scss new file mode 100644 index 000000000..afe15e9c4 --- /dev/null +++ b/src/boost/tools/build/example/sass/singleton.scss @@ -0,0 +1,11 @@ +body { + p { line-height: 1.5em; } + span { font-weight: 700; } + a { + text-decoration: none; + &:hover { + text-decoration: underline; + font-size: (10px/3); + } + } +} diff --git a/src/boost/tools/build/example/site-config.jam b/src/boost/tools/build/example/site-config.jam new file mode 100644 index 000000000..2bf5e5700 --- /dev/null +++ b/src/boost/tools/build/example/site-config.jam @@ -0,0 +1,4 @@ +# Copyright 2002, 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + diff --git a/src/boost/tools/build/example/testing/compile-fail.cpp b/src/boost/tools/build/example/testing/compile-fail.cpp new file mode 100644 index 000000000..aa07d4e67 --- /dev/null +++ b/src/boost/tools/build/example/testing/compile-fail.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. 
(See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include + +int main() +{ + std::cout << "Bye!\n"; + return EXIT_FAILURE +} diff --git a/src/boost/tools/build/example/testing/fail.cpp b/src/boost/tools/build/example/testing/fail.cpp new file mode 100644 index 000000000..ce8e2a189 --- /dev/null +++ b/src/boost/tools/build/example/testing/fail.cpp @@ -0,0 +1,14 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include +#include + +int main() +{ + std::cout << "Bye!\n"; + return EXIT_FAILURE; +} diff --git a/src/boost/tools/build/example/testing/jamroot.jam b/src/boost/tools/build/example/testing/jamroot.jam new file mode 100644 index 000000000..7bbb2e619 --- /dev/null +++ b/src/boost/tools/build/example/testing/jamroot.jam @@ -0,0 +1,10 @@ +# Copyright 2014 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +using testing ; + +run success.cpp : : ; +run-fail fail.cpp : : ; +compile success.cpp : : success-compile ; +compile-fail compile-fail.cpp ; diff --git a/src/boost/tools/build/example/testing/success.cpp b/src/boost/tools/build/example/testing/success.cpp new file mode 100644 index 000000000..0b75980ab --- /dev/null +++ b/src/boost/tools/build/example/testing/success.cpp @@ -0,0 +1,15 @@ +// Copyright (c) 2014 Rene Rivera +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) +// + +#include +#include + +int main() +{ + std::cout << "Hi!\n"; + return EXIT_SUCCESS; +} diff --git a/src/boost/tools/build/example/time/hello.cpp b/src/boost/tools/build/example/time/hello.cpp new file mode 100644 index 000000000..b3392a3b3 --- /dev/null +++ b/src/boost/tools/build/example/time/hello.cpp @@ -0,0 +1,12 @@ +// Copyright (c) 2003 Vladimir Prus +// +// Distributed under the Boost Software License, Version 1.0. (See +// accompanying file LICENSE.txt or copy at +// https://www.bfgroup.xyz/b2/LICENSE.txt) + +#include + +int main() +{ + std::cout << "Hello!\n"; +} diff --git a/src/boost/tools/build/example/time/jamroot.jam b/src/boost/tools/build/example/time/jamroot.jam new file mode 100644 index 000000000..52fc1b980 --- /dev/null +++ b/src/boost/tools/build/example/time/jamroot.jam @@ -0,0 +1,16 @@ +#| +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +#[jamroot +#<< Import the time rule from the testing module. +import testing ; + +#<< The target we are timing just builds a hello program. +exe hello : hello.cpp ; + +#<< This target records the time to build the `hello` target. +time hello.time : hello ; +#] diff --git a/src/boost/tools/build/example/time/readme.qbk b/src/boost/tools/build/example/time/readme.qbk new file mode 100644 index 000000000..808a2ceec --- /dev/null +++ b/src/boost/tools/build/example/time/readme.qbk @@ -0,0 +1,47 @@ +[/ +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +] + +[section Time Action] + +This example shows how to use the `testing.time` utility to show time +information for building a target. 
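
In essence (the complete `jamroot.jam` is imported further below) the timing setup is just
the `time` declaration applied to an ordinary target:

```
import testing ;
exe hello : hello.cpp ;
time hello.time : hello ;
```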
+ +Files: + +* [@../../example/time/jamroot.jam jamroot.jam] +* [@../../example/time/hello.cpp hello.cpp] + +Our `jamroot.jam` specifies the target we build and the `time` +declaration to time the target we build: + +[import jamroot.jam] + +[jamroot] + +Building the example yields: + +[teletype] +``` +> cd /example/time +> b2 +...found 9 targets... +...updating 6 targets... +common.mkdir bin +common.mkdir bin/clang-darwin-4.2.1 +common.mkdir bin/clang-darwin-4.2.1/debug +clang-darwin.compile.c++ bin/clang-darwin-4.2.1/debug/hello.o +clang-darwin.link bin/clang-darwin-4.2.1/debug/hello +testing.time bin/clang-darwin-4.2.1/debug/hello.time +user: [hello] 0.013509 +system: [hello] 0.045641 +clock: [hello] 0.000000 +...updated 6 targets... +``` + +[note The actual paths in the `bin` sub-directory will depend on your +toolset.] + +[endsect] diff --git a/src/boost/tools/build/example/try_compile/Jamroot.jam b/src/boost/tools/build/example/try_compile/Jamroot.jam new file mode 100644 index 000000000..3131e722e --- /dev/null +++ b/src/boost/tools/build/example/try_compile/Jamroot.jam @@ -0,0 +1,29 @@ + +# This example shows performing configure checks in B2, +# e.g. to check for some system function or compiler quirk. + +# First, declare a metatarget that we'll try to build. +obj foo : foo.cpp ; +# Make it explicit so that it's only built if used by a configure check +explicit foo ; + +# Declare a target that depends on configure check result. +exe main + : main.cpp + # The check-target-builds invocation in requirements section will + # - build the specified metatarget + # - if it builds OK, add the properties in the second parameter + # - otherwise, add the properties in the third parameter + : [ check-target-builds foo : FOO=1 : FOO=0 ] + ; + +# To test this: +# +# 1. Build with "b2". You should see a "foo builds: yes" message, and running +# the produced executable will show that FOO is set to 1. +# 2. Modify foo.cpp to contain a compile error, rebuild with +# "b2 -a --reconfigure". You should see a "foo builds: no" message, and running +# the produced executable should show that FOO is now set to 0. +# +# The output from the check is not shown on the console, instead it is +# redirected to the bin/config.log file diff --git a/src/boost/tools/build/example/try_compile/foo.cpp b/src/boost/tools/build/example/try_compile/foo.cpp new file mode 100644 index 000000000..c9107f937 --- /dev/null +++ b/src/boost/tools/build/example/try_compile/foo.cpp @@ -0,0 +1,6 @@ + + +int foo() +{ + return 0; +} \ No newline at end of file diff --git a/src/boost/tools/build/example/try_compile/main.cpp b/src/boost/tools/build/example/try_compile/main.cpp new file mode 100644 index 000000000..12f64995b --- /dev/null +++ b/src/boost/tools/build/example/try_compile/main.cpp @@ -0,0 +1,8 @@ + +#include +using namespace std; + +int main() +{ + std::cout << "Foo: " << FOO << "\n"; +} \ No newline at end of file diff --git a/src/boost/tools/build/example/user-config.jam b/src/boost/tools/build/example/user-config.jam new file mode 100644 index 000000000..05d823da0 --- /dev/null +++ b/src/boost/tools/build/example/user-config.jam @@ -0,0 +1,92 @@ +# Copyright 2003, 2005 Douglas Gregor +# Copyright 2004 John Maddock +# Copyright 2002, 2003, 2004, 2007 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file is used to configure your B2 installation. 
You can modify +# this file in place, or you can place it in a permanent location so that it +# does not get overwritten should you get a new version of B2. See: +# +# https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.overview.configuration +# +# for documentation about possible permanent locations. + +# This file specifies which toolsets (C++ compilers), libraries, and other +# tools are available. Often, you should be able to just uncomment existing +# example lines and adjust them to taste. The complete list of supported tools, +# and configuration instructions can be found at: +# +# https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.reference.tools +# + +# This file uses Jam language syntax to describe available tools. Mostly, +# there are 'using' lines, that contain the name of the used tools, and +# parameters to pass to those tools -- where parameters are separated by +# semicolons. Important syntax notes: +# +# - Both ':' and ';' must be separated from other tokens by whitespace +# - The '\' symbol is a quote character, so when specifying Windows paths you +# should use '/' or '\\' instead. +# +# More details about the syntax can be found at: +# +# https://www.bfgroup.xyz/b2/manual/release/index.html#jam.language +# + +# ------------------ +# GCC configuration. +# ------------------ + +# Configure gcc (default version). +# using gcc ; + +# Configure specific gcc version, giving alternative name to use. +# using gcc : 3.2 : g++-3.2 ; + + +# ------------------- +# MSVC configuration. +# ------------------- + +# Configure msvc (default version, searched for in standard locations and PATH). +# using msvc ; + +# Configure specific msvc version (searched for in standard locations and PATH). +# using msvc : 8.0 ; + + +# ---------------------- +# Borland configuration. +# ---------------------- +# using borland ; + + +# ---------------------- +# STLPort configuration. +# ---------------------- + +# Configure specifying location of STLPort headers. Libraries must be either +# not needed or available to the compiler by default. +# using stlport : : /usr/include/stlport ; + +# Configure specifying location of both headers and libraries explicitly. +# using stlport : : /usr/include/stlport /usr/lib ; + + +# ----------------- +# QT configuration. +# ----------------- + +# Configure assuming QTDIR gives the installation prefix. +# using qt ; + +# Configure with an explicit installation prefix. +# using qt : /usr/opt/qt ; + +# --------------------- +# Python configuration. +# --------------------- + +# Configure specific Python version. +# using python : 3.1 : /usr/bin/python3 : /usr/include/python3.1 : /usr/lib ; diff --git a/src/boost/tools/build/example/variant/a.cpp b/src/boost/tools/build/example/variant/a.cpp new file mode 100644 index 000000000..e6abcf9c0 --- /dev/null +++ b/src/boost/tools/build/example/variant/a.cpp @@ -0,0 +1,7 @@ +// Copyright Vladimir Prus 2004. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +void l(); +int main() { l(); return 0; } diff --git a/src/boost/tools/build/example/variant/jamfile.jam b/src/boost/tools/build/example/variant/jamfile.jam new file mode 100644 index 000000000..6fd72adbd --- /dev/null +++ b/src/boost/tools/build/example/variant/jamfile.jam @@ -0,0 +1,11 @@ +# Copyright 2004 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +#[jamfile +#<< By default, build the project with the two variants we have defined in jamroot.jam. +project : default-build crazy super_release ; + +#<< We build an `a` exe target that links a built library. The library builds with the propagated properties of the exe. +exe a : a.cpp libs//l ; +#] \ No newline at end of file diff --git a/src/boost/tools/build/example/variant/jamroot.jam b/src/boost/tools/build/example/variant/jamroot.jam new file mode 100644 index 000000000..d20669789 --- /dev/null +++ b/src/boost/tools/build/example/variant/jamroot.jam @@ -0,0 +1,12 @@ +# Copyright 2004 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +#[jamroot +#<< Define a build variant which is just combination of four properties. +variant crazy : speed off + on on ; + +#<< Define a built variant inherited from 'release'. It defines one new property and gets all properties from the parent `release` variant. +variant super_release : release : USE_ASM ; +#] diff --git a/src/boost/tools/build/example/variant/libs/jamfile.jam b/src/boost/tools/build/example/variant/libs/jamfile.jam new file mode 100644 index 000000000..5d6d42dbd --- /dev/null +++ b/src/boost/tools/build/example/variant/libs/jamfile.jam @@ -0,0 +1,8 @@ +# Copyright 2004 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +#[libs_jamfile +#<< The library `l` just needs the sources. By default it will be a shared library. +lib l : l.cpp ; +#] \ No newline at end of file diff --git a/src/boost/tools/build/example/variant/libs/l.cpp b/src/boost/tools/build/example/variant/libs/l.cpp new file mode 100644 index 000000000..be3254ca8 --- /dev/null +++ b/src/boost/tools/build/example/variant/libs/l.cpp @@ -0,0 +1,9 @@ +// Copyright Vladimir Prus 2002-2004. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE.txt +// or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#ifdef _WIN32 +__declspec(dllexport) +#endif +void l() {} diff --git a/src/boost/tools/build/example/variant/readme.qbk b/src/boost/tools/build/example/variant/readme.qbk new file mode 100644 index 000000000..663219e34 --- /dev/null +++ b/src/boost/tools/build/example/variant/readme.qbk @@ -0,0 +1,94 @@ +[/ +Copyright 2004 Vladimir Prus +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +/] + +[section Build Variants] + +This example shows how user can create his own build variants. Two variants are +defined: "crazy", which is just a random combination of properties, and +"super-release", which is inherited from "release", and differs by a single +define. + +Files: + +* [@../../example/variant/a.cpp a.cpp] +* [@../../example/variant/jamroot.jam jamroot.jam] +* [@../../example/variant/jamfile.jam jamfile.jam] +* [@../../example/variant/libs/jamfile.jam libs/jamfile.jam] +* [@../../example/variant/libs/l.cpp libs/l.cpp] + +[import jamroot.jam] +[import jamfile.jam] +[import libs/jamfile.jam] + +In this project the `jamroot.jam` specifies the custom build variants and the +targets are specified in the two `jamfile.jam` files. 
+ +[jamroot] + +The top-level `jamfile.jam`: + +[jamfile] + +And the library `jamfile.jam` that the top-level `jamfile.jam` refers to: + +[libs_jamfile] + +Building the example yields: + +[teletype] +``` +> cd /example/variant +> b2 +...found 20 targets... +...updating 16 targets... +common.mkdir bin +common.mkdir bin/clang-darwin-4.2.1 +common.mkdir bin/clang-darwin-4.2.1/crazy +clang-darwin.compile.c++ bin/clang-darwin-4.2.1/crazy/a.o +common.mkdir libs/bin +common.mkdir libs/bin/clang-darwin-4.2.1 +common.mkdir libs/bin/clang-darwin-4.2.1/crazy +clang-darwin.compile.c++ libs/bin/clang-darwin-4.2.1/crazy/l.o +clang-darwin.link.dll libs/bin/clang-darwin-4.2.1/crazy/libl.dylib +clang-darwin.link bin/clang-darwin-4.2.1/crazy/a +common.mkdir bin/clang-darwin-4.2.1/super_release +clang-darwin.compile.c++ bin/clang-darwin-4.2.1/super_release/a.o +common.mkdir libs/bin/clang-darwin-4.2.1/super_release +clang-darwin.compile.c++ libs/bin/clang-darwin-4.2.1/super_release/l.o +clang-darwin.link.dll libs/bin/clang-darwin-4.2.1/super_release/libl.dylib +clang-darwin.link bin/clang-darwin-4.2.1/super_release/a +...updated 16 targets... +``` + +As specified in the top-level `jamfile.jam` both custom variants where built +by default. Once can override that by specifying the variant one wants to +build directly on the command line with a `variant=super_release`. Or just +with a `super_release` as variants can be referred to by their name only. +For example using that argument yields: + +``` +> cd /example/variant +> b2 super_release +...found 14 targets... +...updating 10 targets... +common.mkdir bin +common.mkdir bin/clang-darwin-4.2.1 +common.mkdir bin/clang-darwin-4.2.1/super_release +clang-darwin.compile.c++ bin/clang-darwin-4.2.1/super_release/a.o +common.mkdir libs/bin +common.mkdir libs/bin/clang-darwin-4.2.1 +common.mkdir libs/bin/clang-darwin-4.2.1/super_release +clang-darwin.compile.c++ libs/bin/clang-darwin-4.2.1/super_release/l.o +clang-darwin.link.dll libs/bin/clang-darwin-4.2.1/super_release/libl.dylib +clang-darwin.link bin/clang-darwin-4.2.1/super_release/a +...updated 10 targets... +``` + +[note The actual paths in the `bin` sub-directory will depend on your +toolset.] + +[endsect] diff --git a/src/boost/tools/build/index.html b/src/boost/tools/build/index.html new file mode 100644 index 000000000..afe4356ae --- /dev/null +++ b/src/boost/tools/build/index.html @@ -0,0 +1,5 @@ + + +Automatic redirection failed, please go to doc/html/index.html. + + diff --git a/src/boost/tools/build/notes/README.txt b/src/boost/tools/build/notes/README.txt new file mode 100644 index 000000000..96ef0c3aa --- /dev/null +++ b/src/boost/tools/build/notes/README.txt @@ -0,0 +1,8 @@ +Copyright 2005 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +This directory contains various development notes. Some of them +may eventually find the way into documentation, so are purely +implementation comments. diff --git a/src/boost/tools/build/notes/build_dir_option.txt b/src/boost/tools/build/notes/build_dir_option.txt new file mode 100644 index 000000000..0ebd3bef7 --- /dev/null +++ b/src/boost/tools/build/notes/build_dir_option.txt @@ -0,0 +1,77 @@ +Copyright 2005 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. 
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +Summary +------- + +We need a --build-dir option that users building from read-only +medium can use to force building to some other location. Pretty much +every project need this functionality, so it's desirable to have it +out-of-the box, without explicit setup. + +Design +------ + +We can achieve the desired effect manually by adding something like this +to Jamroot: + + project .... : build-dir [ my-rule-to-compute-build-dir ] ; + +Where 'my-rule-to-compute-build-dir' would look at the --build-dir option. + +We need to automate this, but essentially, --build-dir will only affect +the 'build-dir' attribute of Jamroots. + +If Jamroot contains: + + project foo ; + +and --build-dir options' value if /tmp/build, then we'll act as if Jamroot +contained: + + project foo : build-dir /tmp/build/foo ; + +If the 'project' rule has explicit 'build-dir': + + project foo : build-dir bin.v2 ; + +then with the same value of --build-dir we'd act as if Jamroot contained: + + project foo : build-dir /tmp/build/foo/bin.v2 ; + +We can't drop "bin.v2" because it's quite possible that the name of build dir +have specific meaning. For example, it can be used to separate B2 V1 +and V2 build results. + +The --build-dir option has no effect if Jamroot does not define any project id. +Doing otherwise can lead to nasty problems if we're building two distinct +projects (that is with two different Jamroot). They'll get the same build +directory. Most likely, user will see the "duplicate target" error, which is +generally confusing. + +It is expected that any non-trivial project will have top-level "project" +invocation with non empty id, so the above limitation is not so drastic. +We'll emit a warning if Jamroot does not define project id, and --build-dir +is specified. + +Here's the exact behavior of the --build-dir option. If we're loading a +Jamfile (either root or non-root), that declare some project id and some +build-dir attribute, the following table gives the value of build-dir +that will actually be used. + +------------------------------------------------------------------------------- +Root? Id Build-dir attribute Resulting build dir +------------------------------------------------------------------------------- +yes none * --build-dir is ignored, with warning +yes 'foo' none /tmp/build/foo +yes 'foo' 'bin.v2' /tmp/build/foo/bin.v2 +yes 'foo' '/tmp/bar' Error [1] +no * none --build-dir has no effect, inherited + build dir is used +no * non-empty Error [2] +------------------------------------------------------------------------------- +[1] -- not clear what to do +[2] -- can be made to work, but non-empty build-dir +attribute in non-root Jamfile does not make much sense even without --build-dir diff --git a/src/boost/tools/build/notes/changes.txt b/src/boost/tools/build/notes/changes.txt new file mode 100644 index 000000000..bb98661f1 --- /dev/null +++ b/src/boost/tools/build/notes/changes.txt @@ -0,0 +1,317 @@ +Copyright 2004-2007 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +Milestone 13 (in development) + +Changes in this release: + +The following bugs were fixed: + + - gcc support did not work on HP-UX systems + +Milestone 12 (Oct 1, 2007) + +Changes in this release: + + - The Pathscale, PGI and mipspro compilers are now supported. 
+ - Support for autoconfiguration of toolset based on command-line + toolset=xxxx request, and for default toolset + configuration as a fallback. + - Support for precompiled headers for gcc toolset, + and improvements for msvc. + - Mechanism for removing inherited requirements. + - The 'make' rule support specifying usage-requirements. + - New 'project.extension' rule for declaring standalone + projects. + - New 'conditional' convenience rule. + - New 'glob-tree' rule. + - The 'glob' rule accepts patterns to exclude. + - Inline targets are now marked explicit automatically. + - Toolsets can now implicitly add requirements to + all targets. + - New 'generate' rule. + - The executables produced by the 'run' rule are automatically + removed after run. + - The gcc toolset uses the version obtained by running + the compiler, if no explicit one is provided. + - The sun toolset now supports the 'address-model' feature, + and uses -KPIC for shared libraries. + - Free features on command line affect all targets, not + just 'directly requested' ones. + + +Documentation changes: + + - Installation instructions for Linux distributors. + - Configuration options for all supported C++ compilers + are now documented. + +The following bugs were fixed: + + - The 'cflags' and 'linkflags' now work on Darwin.o + - The intel toolset now works on Windows. + - Fix library search options for CodeWarriour toolset. + - The could cause duplicate + mkdir commands. + - Numerious fixes in Boost autolink support + - Numerious fixes in Boost.Python support. + - Indirect properties not evaluated in usage requirements. + - Generator that returns a property set but not target is + considered successful. + - On Darwin, when several compiler versions + are configured, -fcoalesce-templates is applied only to + versions that need it. + + +Milestone 11 (Jule 20, 2006) + +Changes in this release: + + - New C++ compilers: IBM xlf, HP aCC, HP CXX, Intel fortran compiler. + - New tools: Qt4 support, MS message compiler and IDL compiler. + - New main targets: 'notfile' and 'cast'. + + - Core changes: + + - Only one file required at top level of a project, named Jamroot. + - Jamfiles can now contain project-specific help messages. + - "Indirect conditional requirements" introduced + (http://tinyurl.com/mn3jp) + - Strip suffix in main target names when computing names of generated + files (URL) + - The 'source-location' project attribute can contain + several directories. + - Usage requirements are propagated not only direct dependents, + but to indirect dependents. + + - Command line option changes (see http://tinyurl.com/zbycz) + - New option --build-dir + - The --clean option cleans only target below the current directory, + not globally. + - New --clean-all option was added. + - New option --debug-building + - Running "bjam some_directory" works even if there's no Jamfile + in the current directory. + + - Toolset improvements: + - Assembling support with gcc, borland and msvc. + - Support amd64/ia64 cross-compiling with msvc. + - Improved, registry-based autodetection for msvc. + - Serialize execution of gcc.link actions + - Precompiled headers supported on MSVC + (Need documentation) + + - New features and + - The 'glob' rule accepts wildcards in directory names. + - The 'stage' rule was renamed to 'install' + (the old name still available for compatibility) + - The feature can accept user-defined function as value + (URL) + - The 'install' rule can install a directory hierarchy preserving relative + paths. 
+ - The 'install' rule no longer allows to change library + name during install. + - The Jamfile referred via 'use-project' may declare project id different + from the one in 'use-project'. + - The 'using' rule now searches the directory of containing Jamfile. + + +The following bugs were fixed: + + - The feature was ignored for static linking + - Fix #include scanning for C files. + - Child projects were sometimes loaded before parent projects. + - Fix project references with absolute paths on Windows. + - The feature was ignored for 'install' targets. + - A generator having the same type in sources and targets was causing hang. + - Use 'icpc' command for Intel, fixing errors with 8.1 and higher. + - Generation of PS files with the FOP tool really produces .PS files. + - No dependency scanning was done for C files. + - The 'constant' and 'path-constant' rules did not accept multi-element + value. + - Don't pass -fcoalesce-templates to gcc on OSX 10.4 + - Fix static lib suffix on OSX. + - Fix rpath setting on Intel/Linux. + - The 'install' rule don't unnecessary scans #includes in installed + headers. + + +Developer visible changes: + + - Ability to customize type's prefix depending on build properties. + - Generator's 'run' method can return usage-requirements. + - Main target rule is automatically declared for each new target type. + - 'Link incompatible' feature attribute was removed + - Generators no longer bypass unhandled sources, they just ignore them. + - If there are several applicable generators, immediately report ambiguity. + Provide a way to explicitly resolve conflicts between generators. + - The 'flags' rule can match absence of feature. + - Great improvement in response files handling + - The 'toolset.flags' rules allows value-less feature to signify + absence of this feature (fix hack-hack). + - Automatically declare main target rule for each declared target type. + - When inheriting types, inherit generators for the base type, as opposed + to using various hacks to invoke base generators when needed. + - Improve diagnostic for "duplicate actual target" and generator ambiguity. + + +Milestone 10 (October 29, 2004) + +Changes in this release: + + Many toolsets were added: Intel, Metrowerks, Comeau, aCC, vacpp. + Documentation was converted to BoostBook and improved. + Performance was improved. + + - Toolsets initialization syntax is much more uniform. Compiler and linker + flags can now be specified. + - The algorithm for computing build properties was improved. Conditional + requirements can be chained, and a number of bugs were fixed. + - Specific order of properties can be specified. + - The main target rules can be called from everywhere, not necessary from + Jamfile. + - Check for "unused sources" removed. + - The feature affects only linking now. + - The feature now works only for libraries. + - Simpler syntax for "searched" libraries was added. + - New feature. + + + Unix: + The right order of static libraries on Unix is automatically + computed. + The feature is the default. + gcc: + The -fPIC option is passed when creating shared libraries. + Problems with distcc were solved. + Sun: + It's now possible to use the sun linker (as opposed to gnu), and + to compile C files. + Darwin: + Shared libraries are now supported. + MSVC: Before resource files compilation, the setup script is invoked. + Options deprecated in 8.0 are not longer used. + + +The following bugs were fixed: + + - The rule did not handle the property (!!!!!!) 
+ - Don't add "bin" to the build directory explicitly specified by the user. + - Allow to select staged targets, + even with off. + - Includes for the form '# include " did not work. + - (Qt) Add paths to all dependent libs to uic command + line, which helps if the UI files uses plugins. + - Using xxx in requirements was broken. + - Error message printed when target can be found is much more clear. + - Inline targets in sources of 'stage' did not work. + - Don't produce 'independent target' warnings on Windows + - (gcc) The static did not work. + - (gcc) Suppress warnings from the 'ar' tool on some systems. + - (gcc) Don't try to set soname on NT. + +Developer visible changes: + + - Generator priorities are gone, explicit overrides are used. + - 'Active' features were removed + - Support for VMS paths was added. + +Thanks to Christopher Currie, Pedro Ferreira, Philipp Frauenfelder, +Andre Hentz, Jurgen Hunold, Toon Knapen, Johan Nilsson, Alexey Pakhunov, +Brock Peabody, Michael Stevens and Zbynek Winkler who contributed +code to this release. + + +Milestone 9.1 (Nov 6, 2003) + +The following bugs were fixed: + + - The 'unit-test' rule used to ignore properties. + - The gcc toolset used to ignore property. + +Milestone 9 (Nov 6, 2003) + +Changes in this release + + - Putting library in sources of other library now works even for static + linking, which makes expressing library->library dependency much + simpler. + - Performance was considerably improved. + - Regression testing framework now works on windows. + - The "alias" rule can have usage requirements and passes on usage + requirements of sources. + - The "stage" rule can traverse dependencies. + - Support for "def files" was implemented. + - Targets paths are now shorter. + - Darwin toolset was improved. + +The following bugs were fixed: + + - It was not possible to specify empty suffix for a target type derived + from other type. + - The stage rules used to generate incorrect suffix in some cases. + - It was possible to load Jamfile twice. + - The 'use-project' rule was broken when referring to a child project. + - Use of composite properties in requirements did not work. + +Developer visible changes: + + - New CALC builtin, which considerable improves performance. + - Source layout was reorganized. + - Handling of response file was simplified. + +Thanks to Pedro Ferreira, Kirill Lapshin, Andre Hentz, Paul Lin, +Jurgen Hunold, Christopher Currie, and Brock Peabody, who contributed to +this release. + +Milestone 8 (Oct 15, 2003) + +Changes in this release: + + - A regression testing framework was implemented. + - New feature was added for better handling + of dependencies to generated headers. + - The link-compatibility checks not longer cause projects to be skipped, + and issue warning, not error, for main targets. + - Algorithm for selecting main target alternative was improved. + - The feature was renamed to . + - Project root constants were made available in project root itself. + +The following bugs were fixed: + + - failure to recognize shared libraries with version as such + - the 'path-constant' rule was mishandling absolute paths on Windows. + + +Milestone 7 (Sep 11, 2003) + +Changes in this release: + + - Performance was improved. + - Support for Sun and Darwin toolsets was added. + - feature, which changes the name of target depending of build + variant, was implemented. + - Old-style targets-ids are no longer supported. + - New 'glob' rule allows to easily perform wildcard matching in Jamfile. 
+ - Improve bison/flex support to understand C++. + +The following bugs were fixed: + + - bogus error on use of project default-build attribute with several + main target alternatives. + - broken toolset inheritance + - hard error after skipping a target due to incompatible requirements + - incorrect behaviour of a generator when producing several targets of + the same type + - errors on use of the 'project-root' rule in Jamfile context + - inability to require specific compiler version for a main target. + - incorrect behaviour of "bjam msvc" when msvc is configured with explicit + version. + +Thanks to Christopher Currie, Pedro Ferreira and Michael Stevens, who +contributed to this release. + + + + diff --git a/src/boost/tools/build/notes/relative_source_paths.txt b/src/boost/tools/build/notes/relative_source_paths.txt new file mode 100644 index 000000000..2f0557893 --- /dev/null +++ b/src/boost/tools/build/notes/relative_source_paths.txt @@ -0,0 +1,76 @@ +Copyright 2005 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + +Hi, +recently, we had a couple of problems caused by using relative file paths, and +I'd like to discuss what to do. + +Let's use the case from CÊdric. Simplified version is: + + exe a : a.cpp dir1/qt_file.h ; + exe b : a.cpp dir2/qt_file.h ; + +Both exes have the same source cpp file but different *.h files -- which are +processed by Qt tools. V2 currently strips directory name from all targets, +so it tries to + + - create "bin/mvsc/debug/moc_qt_file.cpp" from dir1/qt_file.h + - create "bin/msvc/debug/moc_qt_file.cpp" from dir2/qt_file.h + +There are two solutions that I see: + + 1. Rewrite the code like: + + lib aux : a.cpp + exe a : aux dir1/qt_file.h : a ; + exe b : aux dir2/qt_file.h : b ; + + This way, two version of moc_qt_file.cpp will be generated to different + places. + + 2. Rewrite the code like: + + obj a_moc : dir1/qt_file.h : /qt//qt ; + exe a : a.cpp a_moc ; + obj b_moc : dir2/qt_file.h : /qt//qt ; + exe b : a.cpp b_moc ; + + Explicitly changing name for the problematic files. + + 3. Generally change V2 so that directory part of source is preserved. This + will generate targets: + "bin/msvc/debug/dir1/moc_qt_file.cpp" and + "bin/msvc/debug/dir2/moc_qt_file.cpp". No problems. + + However, there are some additional questions: + + - What if source has absolute file name? + - What if source is "../../include/qt_file.h"? + + We can ignore directory names in those cases (i.e. use the current + behaviour) but that would be a bit inconsistent. + +Any opinions? + +Pedro Ferreira: + +I think this is a corner case and BB should not try to solve everything +automatically - otherwise it will become really complex. +I don't see a problem in requiring the user to help the build system by +using solutions 1 or 2. +Of course, the better the error reporting, the easier it will be to +find the cause and the cure of the problem. + +TEMPLIE Cedric: + +I agree with Pedro. Solution 1 or 2 is the best way to deal with this +problem. Of course I have a preference for the solution 1, but the +solution 2 has the advantage to work without any modification... + +Toon Knapen: + +I agree. 
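A sketch of the two rewrites discussed above, with the requirement properties
spelled out (the <location-prefix> and <library> properties are assumptions
about the intended requirements):

    # Solution 1: give each exe its own location prefix, so the two
    # generated moc_qt_file.cpp files land in different directories.
    lib aux : a.cpp ;
    exe a : aux dir1/qt_file.h : <location-prefix>a ;
    exe b : aux dir2/qt_file.h : <location-prefix>b ;

    # Solution 2: wrap each header in an explicitly named obj target, so
    # the generated files get distinct names.
    obj a_moc : dir1/qt_file.h : <library>/qt//qt ;
    exe a : a.cpp a_moc ;
    obj b_moc : dir2/qt_file.h : <library>/qt//qt ;
    exe b : a.cpp b_moc ;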
+ + diff --git a/src/boost/tools/build/notes/release_procedure.txt b/src/boost/tools/build/notes/release_procedure.txt new file mode 100644 index 000000000..9ed95f1e6 --- /dev/null +++ b/src/boost/tools/build/notes/release_procedure.txt @@ -0,0 +1,83 @@ +Copyright 2003, 2005, 2006 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + + + B2 V2 release procedure. + +[ Must be done from a Unix shell ] + +0. Look for all issues for current milestone in the tracker. Close the fixed one, + if not already closed. Move to a later milestone, or fix all the unfixed + ones. + + Make sure that "bjam --version" output is correct. Update version string if + needed. Update bjam version and the version check is necessary. + Check the download locations in "index.html". Check that "last modified" + string in index.html is correct. + +1. Make sure you don't have any local modification, and create SVN directory + + https://svn.boost.org/svn/boost/branches/build/Milestone_X + + Then, copy: + + https://svn.boost.org/svn/boost/trunk/tools/build + https://svn.boost.org/svn/boost/trunk/tools/jam + + to that directory. + +2. Run + + svn co https://svn.boost.org/svn/boost/branches/build/Milestone_X boost-build + +3. Go to "boost-build/build/v2" directory. + +4. Run "./roll.sh". This will create "boost-build.zip" and + "boost-build.tar.bz2" in parent directory, and also upload + new docs to sourceforge. + +5. Unpack "boost-build.tar.bz2", and build jam. + +6. Go to "test" and copy "test-config-example.jam" to "test-config.jam". + If you're not ghost, edit test-config.jam to specify all the right paths. + Run gcc tests: + + python test_all.py gcc --extras + +7. Build all projects in examples-v2, using the bjam binary created at step 4. + Note: "threading=multi" might be needed to build QT examples. + +8. Make SF release: + + - Go to + https://sourceforge.net/project/admin/editpackages.php?group_id=7586 + + - Create new B2 release. Name it 2.0-mXX + + - Upload the changelog. Be sure to turn the "Preserve my pre-formatted + text" checkbox. + + - Rename previously built packages to boost-build-2.0-mXX.tar.bz2 + and boost-build-2.0-mXX.zip. Upload them to the + /incoming directory on ftp://upload.sourceforge.net + + - Add those file to release, edit file properties. + + - In a separate browser, verify changelog is not damaged. + + - In a separate browser, Download the files and verify checksums. + + - In SF file release interface, send email notice. + +9. Announce the release, etc. + +10. Login to SF and update the current-release redirects in + /home/groups/b/bo/boost/htdocs/boost-build2/.htaccess. + +11. If any issues were found during release in this document or in + test-config-example.jam, commit those changes. The release need + not be redone, but changes must be committed. + +12. Set release date in changes.txt and commit. + diff --git a/src/boost/tools/build/src/__init__.py b/src/boost/tools/build/src/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/src/bootstrap.jam b/src/boost/tools/build/src/bootstrap.jam new file mode 100644 index 000000000..98f04896d --- /dev/null +++ b/src/boost/tools/build/src/bootstrap.jam @@ -0,0 +1,18 @@ +# Copyright (c) 2003 Vladimir Prus. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. 
(See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file handles initial phase of B2 loading. +# Boost.Jam has already figured out where B2 is +# and loads this file, which is responsible for initialization +# of basic facilities such a module system and loading the +# main B2 module, build-system.jam. +# +# Exact operation of this module is not interesting, it makes +# sense to look at build-system.jam right away. + +# Load the kernel/bootstrap.jam, which does all the work. +.bootstrap-file = $(.bootstrap-file:D)/kernel/bootstrap.jam ; +include $(.bootstrap-file) ; \ No newline at end of file diff --git a/src/boost/tools/build/src/build-system.jam b/src/boost/tools/build/src/build-system.jam new file mode 100644 index 000000000..8f45391bd --- /dev/null +++ b/src/boost/tools/build/src/build-system.jam @@ -0,0 +1,1087 @@ +# Copyright 2003, 2005, 2007 Dave Abrahams +# Copyright 2006, 2007 Rene Rivera +# Copyright 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file is part of Boost Build version 2. You can think of it as forming the +# main() routine. It is invoked by the bootstrapping code in bootstrap.jam. + +import build-request ; +import builtin ; +import "class" : new ; +import configure ; +import config-cache ; +import feature ; +import generators ; +import indirect ; +import make ; +import modules ; +import os ; +import path ; +import project ; +import property ; +import property-set ; +import regex ; +import sequence ; +import targets ; +import toolset ; +import utility ; +import version ; +import virtual-target ; + + +################################################################################ +# +# Module global data. +# +################################################################################ + +# Shortcut used in this module for accessing used command-line parameters. +.argv = [ modules.peek : ARGV ] ; + +# Flag indicating we should display additional debugging information related to +# locating and loading Boost Build configuration files. +.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ; + +# Virtual targets obtained when building main targets references on the command +# line. When running 'bjam --clean main_target' we want to clean only files +# belonging to that main target so we need to record which targets are produced +# for it. +.results-of-main-targets = ; + +# Was an XML dump requested? +.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ; + +# Default toolset & version to be used in case no other toolset has been used +# explicitly by either the loaded configuration files, the loaded project build +# scripts or an explicit toolset request on the command line. If not specified, +# an arbitrary default will be used based on the current host OS. This value, +# while not strictly necessary, has been added to allow testing Boost-Build's +# default toolset usage functionality. +.default-toolset = ; +.default-toolset-version = ; + + +################################################################################ +# +# Public rules. +# +################################################################################ + +# Returns the property set with the free features from the currently processed +# build request. +# +rule command-line-free-features ( ) +{ + return $(.command-line-free-features) ; +} + + +# Returns the location of the build system. 
The primary use case is building +# Boost where it is sometimes needed to get the location of other components +# (e.g. BoostBook files) and it is convenient to use locations relative to the +# Boost Build path. +# +rule location ( ) +{ + local r = [ modules.binding build-system ] ; + return $(r:P) ; +} + + +# Sets the default toolset & version to be used in case no other toolset has +# been used explicitly by either the loaded configuration files, the loaded +# project build scripts or an explicit toolset request on the command line. For +# more detailed information see the comment related to used global variables. +# +rule set-default-toolset ( toolset : version ? ) +{ + .default-toolset = $(toolset) ; + .default-toolset-version = $(version) ; +} + +rule add-pre-build-hook ( function ) +{ + .pre-build-hook += [ indirect.make $(function) : [ CALLER_MODULE ] ] ; +} + +rule add-post-build-hook ( function ) +{ + .post-build-hook += [ indirect.make $(function) : [ CALLER_MODULE ] ] ; +} + +# Old names for backwards compatibility +IMPORT build-system : add-pre-build-hook : build-system : set-pre-build-hook ; +IMPORT build-system : add-post-build-hook : build-system : set-post-build-hook ; +EXPORT build-system : set-pre-build-hook set-post-build-hook ; + +################################################################################ +# +# Local rules. +# +################################################################################ + +# Returns actual Jam targets to be used for executing a clean request. +# +local rule actual-clean-targets ( ) +{ + # The cleaning is tricky. Say, if user says 'bjam --clean foo' where 'foo' + # is a directory, then we want to clean targets which are in 'foo' as well + # as those in any children Jamfiles under foo but not in any unrelated + # Jamfiles. To achieve this we first mark all projects explicitly detected + # as targets for this build system run as needing to be cleaned. + for local t in $(targets) + { + if [ class.is-a $(t) : project-target ] + { + local project = [ $(t).project-module ] ; + .should-clean-project.$(project) = true ; + } + } + + # Construct a list of targets explicitly detected on this build system run + # as a result of building main targets. + local targets-to-clean ; + for local t in $(.results-of-main-targets) + { + # Do not include roots or sources. + targets-to-clean += [ virtual-target.traverse $(t) ] ; + } + targets-to-clean = [ sequence.unique $(targets-to-clean) ] ; + + local to-clean ; + for local t in [ virtual-target.all-targets ] + { + # Remove only derived targets and only those asked to be cleaned, + # whether directly or by belonging to one of the removed projects. + local p = [ $(t).project ] ; + if [ $(t).action ] && ( $(t) in $(targets-to-clean) || + [ should-clean-project [ $(p).project-module ] ] ) + { + to-clean += $(t) ; + } + } + + local to-clean-actual ; + for local t in $(to-clean) + { + to-clean-actual += [ $(t).actualize ] ; + } + return $(to-clean-actual) ; +} + + +# Given a target id, try to find and return the corresponding target. This is +# only invoked when there is no Jamfile in ".". This code somewhat duplicates +# code in project-target.find but we can not reuse that code without a +# project-targets instance. +# +local rule find-target ( target-id ) +{ + local split = [ MATCH (.*)//(.*) : $(target-id) ] ; + + local pm ; + if $(split) + { + pm = [ project.find $(split[1]) : "." ] ; + } + else + { + pm = [ project.find $(target-id) : "." 
] ; + } + + local result ; + if $(pm) + { + result = [ project.target $(pm) ] ; + } + + if $(split) + { + result = [ $(result).find $(split[2]) ] ; + } + + return $(result) ; +} + + +# Initializes a new configuration module. +# +local rule initialize-config-module ( module-name : location ? ) +{ + project.initialize $(module-name) : $(location) ; + if USER_MODULE in [ RULENAMES ] + { + USER_MODULE $(module-name) ; + } +} + + +# Helper rule used to load configuration files. Loads the first configuration +# file with the given 'filename' at 'path' into module with name 'module-name'. +# Not finding the requested file may or may not be treated as an error depending +# on the must-find parameter. Returns a normalized path to the loaded +# configuration file or nothing if no file was loaded. +# +local rule load-config ( module-name : filename : path + : must-find ? ) +{ + if $(.debug-config) + { + local path-string = $(path) ; + if $(path-string) = "" { path-string = . ; } + ECHO "notice:" Searching '$(path-string)' for $(module-name) + configuration file '$(filename)'. ; + } + local where = [ GLOB $(path) : $(filename) ] ; + if $(where) + { + where = [ NORMALIZE_PATH $(where[1]) ] ; + if $(.debug-config) + { + local where-string = $(where:D) ; + if $(where-string) = "" { where-string = . ; } + where-string = '$(where-string)' ; + ECHO "notice:" Loading $(module-name) configuration file '$(filename)' + from $(where-string:J=" "). ; + } + + # Set source location so that path-constant in config files with + # relative paths work. This is of most importance for + # project-config.jam, but may be used in other config files as well. + local attributes = [ project.attributes $(module-name) ] ; + $(attributes).set source-location : $(where:D) : exact ; + modules.load $(module-name) : $(filename) : $(path) ; + project.load-used-projects $(module-name) ; + } + else if $(must-find) || $(.debug-config) + { + local path-string = $(path) ; + if $(path-string) = "" { path-string = . ; } + path-string = '$(path-string)' ; + path-string = $(path-string:J=" ") ; + if $(must-find) + { + import errors ; + errors.user-error Configuration file '$(filename)' not found "in" + $(path-string). ; + } + ECHO "notice:" Configuration file '$(filename)' not found "in" + $(path-string). ; + } + return $(where) ; +} + +# Parses options of the form --xxx-config=path/to/config.jam +# and environmental variables of the form BOOST_BUILD_XXX_CONFIG. +# If not found, returns an empty list. The option may be +# explicitly set to the empty string, in which case, handle-config-option +# will return "". +# +local rule handle-config-option ( name : env ? ) +{ + local result = [ MATCH ^--$(name)=(.*)$ : $(.argv) ] ; + if ! $(result)-is-defined && $(env) + { + result = [ os.environ $(env) ] ; + } + # Special handling for the case when the OS does not strip the quotes + # around the file name, as is the case when using Cygwin bash. + result = [ utility.unquote $(result[-1]) ] ; + if ! $(result) + { + return $(result) ; + } + # Treat explicitly entered user paths as native OS path + # references and, if non-absolute, root them at the current + # working directory. + result = [ path.make $(result) ] ; + result = [ path.root $(result) [ path.pwd ] ] ; + result = [ path.native $(result) ] ; + return $(result) ; +} + + +# Loads all the configuration files used by Boost Build in the following order: +# +# -- test-config -- +# Loaded only if specified on the command-line using the --test-config +# command-line parameter. 
It is ok for this file not to exist even if specified. +# If this configuration file is loaded, regular site and user configuration +# files will not be. If a relative path is specified, file is searched for in +# the current folder. +# +# -- all-config -- +# Loaded only if specified on the command-line using the --config command +# line option. If a file name is specified, it must exist and replaces all +# other configuration files. If an empty file name is passed, no configuration +# files will be loaded. +# +# -- site-config -- +# Named site-config.jam by default or may be named explicitly using the +# --site-config command-line option. If named explicitly, the file is found +# relative to the current working directory and must exist. If the default one +# is used then it is searched for in the system root path (Windows), +# /etc (non-Windows), user's home folder or the Boost Build path, in that +# order. Not loaded in case the test-config configuration file is loaded, +# the file is explicitly set to the empty string or the --ignore-site-config +# command-line option is specified. +# +# -- user-config -- +# Named user-config.jam by default or may be named explicitly using the +# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment +# variable. If named explicitly the file is looked for from the current working +# directory and if the default one is used then it is searched for in the +# user's home directory and the Boost Build path, in that order. Not loaded in +# case either the test-config configuration file is loaded or an empty file name +# is explicitly specified. If the file name has been given explicitly then the +# file must exist. +# +# -- project-config -- +# Named project-config.jam. Looked up in the current working folder and +# then upwards through its parents up to the root folder. It may also be +# named explicitly using the --project-config command-line option. If a file +# is specified explicitly, it is found relative to the current working +# directory and must exist. If an empty file name is passed, project-config +# will not be loaded. +# +# Test configurations have been added primarily for use by Boost Build's +# internal unit testing system but may be used freely in other places as well. +# +local rule load-configuration-files +{ + # Flag indicating that site configuration should not be loaded. + local ignore-site-config = + [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ; + local ignore-user-config ; + local ignore-project-config ; + + initialize-config-module test-config ; + local test-config = [ handle-config-option test-config ] ; + if $(test-config) + { + local where = [ load-config test-config : $(test-config:BS) : + $(test-config:D) ] ; + if $(where) + { + if $(.debug-config) + { + ECHO "notice: Regular site and user configuration files will" ; + ECHO "notice: be ignored due to the test configuration being" + "loaded." ; + } + ignore-site-config = true ; + ignore-user-config = true ; + } + } + + initialize-config-module all-config ; + local all-config = [ handle-config-option config ] ; + if $(all-config) + { + load-config all-config : $(all-config:D=) : $(all-config:D) : required ; + if $(.debug-config) + { + ECHO "notice: Regular configuration files will be ignored due" ; + ECHO "notice: to the global configuration being loaded." ; + } + } + if $(all-config)-is-defined + { + if $(.debug-config) && ! $(all-config) + { + ECHO "notice: Configuration file loading explicitly disabled." 
; + } + ignore-site-config = true ; + ignore-user-config = true ; + ignore-project-config = true ; + } + + local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ; + local site-path = /etc $(user-path) ; + if [ os.name ] in NT CYGWIN + { + site-path = [ modules.peek : SystemRoot ] $(user-path) ; + } + + if $(.debug-config) && $(ignore-site-config) = --ignore-site-config + { + ECHO "notice: Site configuration files will be ignored due to the" ; + ECHO "notice: --ignore-site-config command-line option." ; + } + + initialize-config-module site-config ; + if ! $(ignore-site-config) + { + local site-config = [ handle-config-option site-config ] ; + if $(site-config) + { + load-config site-config : $(site-config:D=) : $(site-config:D) + : must-exist ; + } + else if ! $(site-config)-is-defined + { + load-config site-config : site-config.jam : $(site-path) ; + } + else if $(.debug-config) + { + ECHO "notice:" Site configuration file loading explicitly disabled. ; + } + } + + initialize-config-module user-config ; + if ! $(ignore-user-config) + { + local user-config = + [ handle-config-option user-config : BOOST_BUILD_USER_CONFIG ] ; + + if $(user-config) + { + if $(.debug-config) + { + ECHO "notice:" Loading explicitly specified user configuration + "file:" ; + ECHO " $(user-config)" ; + } + + load-config user-config : $(user-config:D=) : $(user-config:D) + : must-exist ; + } + else if ! $(user-config)-is-defined + { + load-config user-config : user-config.jam : $(user-path) ; + } + else if $(.debug-config) + { + ECHO "notice:" User configuration file loading explicitly disabled. ; + } + } + + # We look for project-config.jam from "." upward. I am not sure this is 100% + # right decision, we might as well check for it only alongside the Jamroot + # file. However: + # - We need to load project-config.jam before Jamroot + # - We probably need to load project-config.jam even if there is no Jamroot + # - e.g. to implement automake-style out-of-tree builds. + if ! $(ignore-project-config) + { + local project-config = [ handle-config-option project-config ] ; + if $(project-config) + { + initialize-config-module project-config : $(project-config:D=) ; + load-config project-config : $(project-config:D=) + : $(project-config:D) : must-exist ; + } + else if ! $(project-config)-is-defined + { + local file = [ path.glob "." : project-config.jam ] ; + if ! $(file) + { + file = [ path.glob-in-parents "." : project-config.jam ] ; + } + if $(file) + { + initialize-config-module project-config : $(file:D) ; + load-config project-config : project-config.jam : $(file:D) ; + } + } + else if $(.debug-config) + { + ECHO "notice:" Project configuration file loading explicitly + disabled. ; + } + } + + project.end-load ; +} + + +# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or +# toolset=xx,yy,...zz in the command line. May return additional properties to +# be processed as if they had been specified by the user. +# +local rule process-explicit-toolset-requests +{ + local extra-properties ; + + local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ; + local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ; + + for local t in $(option-toolsets) $(feature-toolsets) + { + # Parse toolset-version/properties. 
+ local toolset = [ MATCH "([^/]+)/?.*" : $(t) ] ; + local properties = [ feature.expand-subfeatures $(toolset) : true ] ; + local toolset-property = [ property.select : $(properties) ] ; + local known ; + if $(toolset-property:G=) in [ feature.values ] + { + known = true ; + } + + # If the toolset is not known, configure it now. + + # TODO: we should do 'using $(toolset)' in case no version has been + # specified and there are no versions defined for the given toolset to + # allow the toolset to configure its default version. For this we need + # to know how to detect whether a given toolset has any versions + # defined. An alternative would be to do this whenever version is not + # specified but that would require that toolsets correctly handle the + # case when their default version is configured multiple times which + # should be checked for all existing toolsets first. + + if ! $(known) + { + if $(.debug-config) + { + ECHO "notice: [cmdline-cfg] toolset $(toolset) not" + "previously configured; attempting to auto-configure now" ; + } + local t,v = [ MATCH "([^-]+)-?(.+)?" : $(toolset) ] ; + project.push-current ; + toolset.using $(t,v[1]) : $(t,v[2]) ; + project.pop-current ; + } + + # Make sure we get an appropriate property into the build request in + # case toolset has been specified using the "--toolset=..." command-line + # option form. + if ! $(t) in $(.argv) $(feature-toolsets) + { + if $(.debug-config) + { + ECHO "notice:" "[cmdline-cfg]" adding toolset=$(t) to the build + request. ; + } + extra-properties += toolset=$(t) ; + } + } + + return $(extra-properties) ; +} + + +# Returns whether the given project (identifed by its project module) should be +# cleaned because it or any of its parent projects have already been marked as +# needing to be cleaned in this build. As an optimization, will explicitly mark +# all encountered project needing to be cleaned in case thay have not already +# been marked so. +# +local rule should-clean-project ( project ) +{ + if ! $(.should-clean-project.$(project))-is-defined + { + local r = "" ; + if ! [ project.is-jamroot-module $(project) ] + { + local parent = [ project.attribute $(project) parent-module ] ; + if $(parent) + { + r = [ should-clean-project $(parent) ] ; + } + } + .should-clean-project.$(project) = $(r) ; + } + + return $(.should-clean-project.$(project)) ; +} + + +################################################################################ +# +# main() +# ------ +# +################################################################################ + +{ + if --version in $(.argv) + { + version.print ; + EXIT ; + } + + version.verify-engine-version ; + + load-configuration-files ; + + # Load explicitly specified toolset modules. + local extra-properties = [ process-explicit-toolset-requests ] ; + + # Load the actual project build script modules. We always load the project + # in the current folder so 'use-project' directives have any chance of being + # seen. Otherwise, we would not be able to refer to subprojects using target + # ids. + local current-project ; + { + local current-module = [ project.find "." : "." ] ; + if $(current-module) + { + current-project = [ project.target $(current-module) ] ; + } + } + + # Load the default toolset module if no other has already been specified. + if ! 
[ feature.values ] + { + local default-toolset = $(.default-toolset) ; + local default-toolset-version = ; + if $(default-toolset) + { + default-toolset-version = $(.default-toolset-version) ; + } + else + { + default-toolset = gcc ; + if [ os.name ] = NT + { + default-toolset = msvc ; + } + else if [ os.name ] = VMS + { + default-toolset = vmsdecc ; + } + else if [ os.name ] = MACOSX + { + default-toolset = clang ; + } + } + + ECHO "warning: No toolsets are configured." ; + ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ; + ECHO "warning: If the default is wrong, your build may not work correctly." ; + ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ; + ECHO "warning: For more configuration options, please consult" ; + ECHO "warning: https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.overview.configuration" ; + + toolset.using $(default-toolset) : $(default-toolset-version) ; + } + + + # Parse command line for targets and properties. Note that this requires + # that all project files already be loaded. + # FIXME: This is not entirely true. Additional project files may be loaded + # only later via the project.find() rule when dereferencing encountered + # target ids containing explicit project references. See what to do about + # those as such 'lazy loading' may cause problems that are then extremely + # difficult to debug. + local build-request = [ build-request.from-command-line $(.argv) + $(extra-properties) ] ; + local target-ids = [ $(build-request).get-at 1 ] ; + local properties = [ $(build-request).get-at 2 ] ; + + + # Check that we actually found something to build. + if ! $(current-project) && ! $(target-ids) + { + import errors ; + errors.user-error no Jamfile "in" current directory found, and no target + references specified. ; + } + + + # Flags indicating that this build system run has been started in order to + # clean existing instead of create new targets. Note that these are not the + # final flag values as they may get changed later on due to some special + # targets being specified on the command line. + local clean ; if "--clean" in $(.argv) { clean = true ; } + local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; } + + + # List of explicitly requested files to build. Any target references read + # from the command line parameter not recognized as one of the targets + # defined in the loaded Jamfiles will be interpreted as an explicitly + # requested file to build. If any such files are explicitly requested then + # only those files and the targets they depend on will be built and they + # will be searched for among targets that would have been built had there + # been no explicitly requested files. + local explicitly-requested-files + + + # List of Boost Build meta-targets, virtual-targets and actual Jam targets + # constructed in this build system run. + local targets ; + local virtual-targets ; + local actual-targets ; + + + # Process each target specified on the command-line and convert it into + # internal Boost Build target objects. Detect special clean target. If no + # main Boost Build targets were explicitly requested use the current project + # as the target. + for local id in $(target-ids) + { + if $(id) = clean + { + clean = true ; + } + else + { + local t ; + if $(current-project) + { + t = [ $(current-project).find $(id) : no-error ] ; + } + else + { + t = [ find-target $(id) ] ; + } + + if ! 
$(t) + { + ECHO "notice: could not find main target" $(id) ; + ECHO "notice: assuming it is a name of file to create." ; + explicitly-requested-files += $(id) ; + } + else + { + targets += $(t) ; + } + } + } + if ! $(targets) + { + targets += [ project.target [ project.module-name "." ] ] ; + } + + if [ option.get dump-generators : : true ] + { + generators.dump ; + } + + # We wish to put config.log in the build directory corresponding to Jamroot, + # so that the location does not differ depending on the directory we run the + # build from. The amount of indirection necessary here is scary. + local first-project = [ $(targets[0]).project ] ; + local first-project-root-location = [ $(first-project).get project-root ] ; + local first-project-root-module = [ project.load + $(first-project-root-location) ] ; + local first-project-root = [ project.target $(first-project-root-module) ] ; + local first-build-build-dir = [ $(first-project-root).build-dir ] ; + configure.set-log-file $(first-build-build-dir)/config.log ; + config-cache.load $(first-build-build-dir)/project-cache.jam ; + + # Expand properties specified on the command line into multiple property + # sets consisting of all legal property combinations. Each expanded property + # set will be used for a single build run. E.g. if multiple toolsets are + # specified then requested targets will be built with each of them. + # The expansion is being performed as late as possible so that the feature + # validation is performed after all necessary modules (including project targets + # on the command line) have been loaded. + if $(properties) + { + local cli_properties = [ build-request.convert-command-line-elements $(properties) ] ; + if $(cli_properties) + { + expanded += $(cli_properties) ; + expanded = [ build-request.expand-no-defaults $(expanded) ] ; + local xexpanded ; + for local e in $(expanded) + { + xexpanded += [ property-set.create [ feature.split $(e) ] ] ; + } + expanded = $(xexpanded) ; + } + else + { + expanded = [ property-set.empty ] ; + } + } + else + { + expanded = [ property-set.empty ] ; + } + + # Now that we have a set of targets to build and a set of property sets to + # build the targets with, we can start the main build process by using each + # property set to generate virtual targets from all of our listed targets + # and any of their dependants. + for local p in $(expanded) + { + .command-line-free-features = [ property-set.create [ $(p).free ] ] ; + for local t in $(targets) + { + local g = [ $(t).generate $(p) ] ; + if ! [ class.is-a $(t) : project-target ] + { + .results-of-main-targets += $(g[2-]) ; + } + virtual-targets += $(g[2-]) ; + } + } + + + # Convert collected virtual targets into actual raw Jam targets. + for t in $(virtual-targets) + { + actual-targets += [ $(t).actualize ] ; + } + + config-cache.save ; + + + # If XML data output has been requested prepare additional rules and targets + # so we can hook into Jam to collect build data while its building and have + # it trigger the final XML report generation after all the planned targets + # have been built. + if $(.out-xml) + { + # Get a qualified virtual target name. + rule full-target-name ( target ) + { + local name = [ $(target).name ] ; + local project = [ $(target).project ] ; + local project-path = [ $(project).get location ] ; + return $(project-path)//$(name) ; + } + + # Generate an XML file containing build statistics for each constituent. 
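        # (The XML file name is taken from the --out-xml=<file> command-line
        # option captured into .out-xml above; for example, invoking
        # "b2 --out-xml=build-stats.xml" writes the report to build-stats.xml
        # once the requested targets have been updated.)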
+ # + rule out-xml ( xml-file : constituents * ) + { + # Prepare valid XML header and footer with some basic info. + local nl = " +" ; + local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ; + local timestamp = [ modules.peek : JAMDATE ] ; + local cwd = [ PWD ] ; + local command = $(.argv) ; + local bb-version = [ version.boost-build ] ; + .header on $(xml-file) = + "" + "$(nl)" + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + ; + .footer on $(xml-file) = + "$(nl)" ; + + # Generate the target dependency graph. + .contents on $(xml-file) += + "$(nl) " ; + for local t in [ virtual-target.all-targets ] + { + local action = [ $(t).action ] ; + if $(action) + # If a target has no action, it has no dependencies. + { + local name = [ full-target-name $(t) ] ; + local sources = [ $(action).sources ] ; + local dependencies ; + for local s in $(sources) + { + dependencies += [ full-target-name $(s) ] ; + } + + local path = [ $(t).path ] ; + local jam-target = [ $(t).actual-name ] ; + + .contents on $(xml-file) += + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + ; + } + } + .contents on $(xml-file) += + "$(nl) " ; + + # Build $(xml-file) after $(constituents). Do so even if a + # constituent action fails and regenerate the xml on every bjam run. + INCLUDES $(xml-file) : $(constituents) ; + ALWAYS $(xml-file) ; + __ACTION_RULE__ on $(xml-file) = + build-system.out-xml.generate-action ; + out-xml.generate $(xml-file) ; + } + + # The actual build actions are here; if we did this work in the actions + # clause we would have to form a valid command line containing the + # result of @(...) below (the name of the XML file). + # + rule out-xml.generate-action ( args * : xml-file + : command status start end user system : output ? ) + { + local contents = + [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ; + local f = @($(xml-file):E=$(contents)) ; + } + + # Nothing to do here; the *real* actions happen in + # out-xml.generate-action. + actions quietly out-xml.generate { } + + # Define the out-xml file target, which depends on all the targets so + # that it runs the collection after the targets have run. + out-xml $(.out-xml) : $(actual-targets) ; + + # Set up a global __ACTION_RULE__ that records all the available + # statistics about each actual target in a variable "on" the --out-xml + # target. + # + rule out-xml.collect ( xml-file : target : command status start end user + system : output ? ) + { + local nl = " +" ; + # Open the action with some basic info. + .contents on $(xml-file) += + "$(nl) " ; + + # If we have an action object we can print out more detailed info. + local action = [ on $(target) return $(.action) ] ; + if $(action) + { + local action-name = [ $(action).action-name ] ; + local action-sources = [ $(action).sources ] ; + local action-props = [ $(action).properties ] ; + + # The qualified name of the action which we created the target. + .contents on $(xml-file) += + "$(nl) " ; + + # The sources that made up the target. + .contents on $(xml-file) += + "$(nl) " ; + for local source in $(action-sources) + { + local source-actual = [ $(source).actual-name ] ; + .contents on $(xml-file) += + "$(nl) " ; + } + .contents on $(xml-file) += + "$(nl) " ; + + # The properties that define the conditions under which the + # target was built. 
+ .contents on $(xml-file) += + "$(nl) " ; + for local prop in [ $(action-props).raw ] + { + local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ; + .contents on $(xml-file) += + "$(nl) " ; + } + .contents on $(xml-file) += + "$(nl) " ; + } + + local locate = [ on $(target) return $(LOCATE) ] ; + locate ?= "" ; + .contents on $(xml-file) += + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " ; + .contents on $(xml-file) += + "$(nl) " ; + } + + # When no __ACTION_RULE__ is set "on" a target, the search falls back to + # the global module. + module + { + __ACTION_RULE__ = build-system.out-xml.collect + [ modules.peek build-system : .out-xml ] ; + } + + IMPORT + build-system : + out-xml.collect + out-xml.generate-action + : : + build-system.out-xml.collect + build-system.out-xml.generate-action + ; + } + + local j = [ option.get jobs ] ; + if $(j) + { + modules.poke : PARALLELISM : $(j) ; + } + + local k = [ option.get keep-going : true : true ] ; + if $(k) in "on" "yes" "true" + { + modules.poke : KEEP_GOING : 1 ; + } + else if $(k) in "off" "no" "false" + { + modules.poke : KEEP_GOING : 0 ; + } + else + { + EXIT "error: Invalid value for the --keep-going option" ; + } + + # The 'all' pseudo target is not strictly needed expect in the case when we + # use it below but people often assume they always have this target + # available and do not declare it themselves before use which may cause + # build failures with an error message about not being able to build the + # 'all' target. + NOTFILE all ; + + # And now that all the actual raw Jam targets and all the dependencies + # between them have been prepared all that is left is to tell Jam to update + # those targets. + if $(explicitly-requested-files) + { + # Note that this case can not be joined with the regular one when only + # exact Boost Build targets are requested as here we do not build those + # requested targets but only use them to construct the dependency tree + # needed to build the explicitly requested files. + UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ; + } + else if $(cleanall) + { + UPDATE clean-all ; + } + else if $(clean) + { + common.Clean clean : [ actual-clean-targets ] ; + UPDATE clean ; + } + else + { + configure.print-configure-checks-summary ; + + for local function in $(.pre-build-hook) + { + indirect.call $(function) ; + } + + DEPENDS all : $(actual-targets) ; + if UPDATE_NOW in [ RULENAMES ] + { + local ok = [ UPDATE_NOW all ] ; + # Force sequence updating of regular targets, then the xml + # log output target. To ensure the output records all built + # as otherwise if could execute out-of-sequence when + # doing parallel builds. + if $(.out-xml) + { + UPDATE_NOW $(.out-xml) : : ignore-minus-n ; + } + for local function in $(.post-build-hook) + { + indirect.call $(function) $(ok) ; + } + # Prevent automatic update of the 'all' target, now that we have + # explicitly updated what we wanted. + UPDATE ; + } + else + { + UPDATE all $(.out-xml) ; + } + } +} diff --git a/src/boost/tools/build/src/build/__init__.py b/src/boost/tools/build/src/build/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/src/build/ac.jam b/src/boost/tools/build/src/build/ac.jam new file mode 100644 index 000000000..02ae62bdb --- /dev/null +++ b/src/boost/tools/build/src/build/ac.jam @@ -0,0 +1,326 @@ +# Copyright (c) 2010 Vladimir Prus. 
+# Copyright (c) 2013 Steven Watanabe +# Copyright (c) 2021 Rene Ferdinand Rivera Morell +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import property-set ; +import path ; +import modules ; +import "class" ; +import errors ; +import configure ; +import feature ; +import project ; +import virtual-target ; +import generators ; +import property ; +import print ; +import regex ; + +project.initialize $(__name__) ; +.project = [ project.current ] ; +project ac ; + +feature.feature ac.print-text : : free ; + +rule generate-include ( target : sources * : properties * ) +{ + print.output $(target) ; + local text = [ property.select : $(properties) ] ; + if $(text) + { + print.text $(text:G=) : true ; + } + else + { + local header = [ property.select : $(properties) ] ; + print.text "#include <$(header:G=)>\n" : true ; + } +} + +rule generate-main ( target : sources * : properties * ) +{ + print.output $(target) ; + print.text "int main() {}" : true ; +} + +rule find-include-path ( properties : header : provided-path ? : test-source ? ) +{ + if $(provided-path) && [ path.exists [ path.root $(header) $(provided-path) ] ] + { + return $(provided-path) ; + } + else + { + local a = [ class.new action : ac.generate-include : [ property-set.create $(header) $(test-source) ] ] ; + # Create a new CPP target named after the header. + # Replace dots (".") in target basename for portability. + local basename = [ regex.replace $(header:D=) "[.]" "_" ] ; + local header-target = $(header:S=:B=$(basename)) ; + local cpp = [ class.new file-target $(header-target:S=.cpp) exact : CPP : $(.project) : $(a) ] ; + cpp = [ virtual-target.register $(cpp) ] ; + $(cpp).root true ; + local result = [ generators.construct $(.project) $(header-target) : OBJ : $(properties) : $(cpp) : true ] ; + configure.maybe-force-rebuild $(result[2-]) ; + local jam-targets ; + for local t in $(result[2-]) + { + jam-targets += [ $(t).actualize ] ; + } + if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ] + : ignore-minus-n ] + { + return %default ; + } + } +} + +rule construct-library ( name : property-set : provided-path ? ) +{ + local lib-props = [ $(property-set).add-raw $(name) $(provided-path) ] ; + return [ generators.construct $(.project) lib-$(name) + : SEARCHED_LIB : $(lib-props) : : true ] ; +} + + +rule find-library ( properties : names + : provided-path ? 
) +{ + local result ; + if [ $(properties).get ] = shared + { + link-opts = shared static ; + } + else + { + link-opts = static shared ; + } + while $(link-opts) + { + local names-iter = $(names) ; + properties = [ $(properties).refine [ property-set.create $(link-opts[1]) ] ] ; + while $(names-iter) + { + local name = $(names-iter[1]) ; + local lib = [ construct-library $(name) : $(properties) : $(provided-path) ] ; + local a = [ class.new action : ac.generate-main : + [ property-set.empty ] ] ; + local main.cpp = [ virtual-target.register + [ class.new file-target main-$(name).cpp exact : CPP : $(.project) : $(a) ] ] ; + $(main.cpp).root true ; + local test = [ generators.construct $(.project) $(name) : EXE + : [ $(properties).add $(lib[1]) ] : $(main.cpp) $(lib[2-]) + : true ] ; + configure.maybe-force-rebuild $(test[2-]) ; + local jam-targets ; + for t in $(test[2-]) + { + jam-targets += [ $(t).actualize ] ; + } + if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ] + : ignore-minus-n ] + { + result = $(name) $(link-opts[1]) ; + names-iter = ; link-opts = ; # break + } + names-iter = $(names-iter[2-]) ; + } + link-opts = $(link-opts[2-]) ; + } + return $(result) ; +} + +class ac-library : basic-target +{ + import errors ; + import indirect ; + import virtual-target ; + import ac ; + import configure ; + import config-cache ; + import os ; + + rule __init__ ( name : project : requirements * : include-path ? : library-path ? : library-name ? ) + { + basic-target.__init__ $(name) : $(project) : : $(requirements) ; + + reconfigure $(include-path) : $(library-path) : $(library-name) ; + } + + rule set-header ( header ) + { + self.header = $(header) ; + } + + rule set-default-names ( names + ) + { + self.default-names = $(names) ; + } + + rule set-header-test ( source ) + { + self.header-test = $(source) ; + } + + rule reconfigure ( include-path ? : library-path ? : library-name ? ) + { + if $(include-path) || $(library-path) || $(library-name) + { + check-not-configured ; + + self.include-path = $(include-path) ; + self.library-path = $(library-path) ; + self.library-name = $(library-name) ; + } + } + + rule set-target ( target ) + { + check-not-configured ; + self.target = $(target) ; + } + + rule check-not-configured ( ) + { + if $(self.include-path) || $(self.library-path) || $(self.library-name) || $(self.target) + { + errors.user-error [ name ] "is already configured" ; + } + } + + rule construct ( name : sources * : property-set ) + { + if $(self.target) + { + return [ $(self.target).generate $(property-set) ] ; + } + else + { + local use-environment ; + if ! $(self.library-name) && ! $(self.include-path) && ! $(self.library-path) + { + use-environment = true ; + } + local libnames = $(self.library-name) ; + if ! $(libnames) && $(use-environment) + { + libnames = [ os.environ $(name:U)_NAME ] ; + # Backward compatibility only. + libnames ?= [ os.environ $(name:U)_BINARY ] ; + } + libnames ?= $(self.default-names) ; + + local include-path = $(self.include-path) ; + if ! $(include-path) && $(use-environment) + { + include-path = [ os.environ $(name:U)_INCLUDE ] ; + } + + local library-path = $(self.library-path) ; + if ! 
$(library-path) && $(use-environment) + { + library-path = [ os.environ $(name:U)_LIBRARY_PATH ] ; + # Backwards compatibility only + library-path ?= [ os.environ $(name:U)_LIBPATH ] ; + } + + local relevant = [ property.select [ configure.get-relevant-features ] : + [ $(property-set).raw ] ] ; + local min = [ property.as-path [ SORT [ feature.minimize $(relevant) ] ] ] ; + + local key = ac-library-$(name)-$(relevant:J=-) ; + local lookup = [ config-cache.get $(key) ] ; + + if $(lookup) + { + if $(lookup) = missing + { + configure.log-library-search-result $(name) : "no (cached)" $(min) ; + return [ property-set.empty ] ; + } + else + { + local includes = $(lookup[1]) ; + if $(includes) = %default + { + includes = ; + } + local library = [ ac.construct-library $(lookup[2]) : + [ $(property-set).refine [ property-set.create $(lookup[3]) ] ] : $(library-path) ] ; + configure.log-library-search-result $(name) : "yes (cached)" $(min) ; + return [ $(library[1]).add-raw $(includes) ] $(library[2-]) ; + } + } + else + { + local includes = [ ac.find-include-path $(property-set) : $(self.header) : $(include-path) : $(self.header-test) ] ; + local library = [ ac.find-library $(property-set) : $(libnames) : $(library-path) ] ; + if $(includes) && $(library) + { + config-cache.set $(key) : $(includes) $(library) ; + if $(includes) = %default + { + includes = ; + } + library = [ ac.construct-library $(library[1]) : + [ $(property-set).refine [ property-set.create $(library[2]) ] ] : $(library-path) ] ; + configure.log-library-search-result $(name) : "yes" $(min) ; + return [ $(library[1]).add-raw $(includes) ] $(library[2-]) ; + } + else + { + config-cache.set $(key) : missing ; + configure.log-library-search-result $(name) : "no" $(min) ; + return [ property-set.empty ] ; + } + } + } + } +} + +class check-library-worker +{ + import property-set ; + import targets ; + import property ; + + rule __init__ ( target : true-properties * : false-properties * ) + { + self.target = $(target) ; + self.true-properties = $(true-properties) ; + self.false-properties = $(false-properties) ; + } + + rule check ( properties * ) + { + local choosen ; + local t = [ targets.current ] ; + local p = [ $(t).project ] ; + local ps = [ property-set.create $(properties) ] ; + ps = [ $(ps).propagated ] ; + local generated = + [ targets.generate-from-reference $(self.target) : $(p) : $(ps) ] ; + if $(generated[2]) + { + choosen = $(self.true-properties) ; + } + else + { + choosen = $(self.false-properties) ; + } + return [ property.evaluate-conditionals-in-context $(choosen) : + $(properties) ] ; + } +} + +rule check-library ( target : true-properties * : false-properties * ) +{ + local instance = [ class.new check-library-worker $(target) : + $(true-properties) : $(false-properties) ] ; + return @$(instance).check + [ property.evaluate-conditional-relevance + $(true-properties) $(false-properties) + : [ configure.get-relevant-features ] ] ; +} diff --git a/src/boost/tools/build/src/build/alias.jam b/src/boost/tools/build/src/build/alias.jam new file mode 100644 index 000000000..6562ccccf --- /dev/null +++ b/src/boost/tools/build/src/build/alias.jam @@ -0,0 +1,82 @@ +# Copyright 2003, 2004, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines the 'alias' rule and the associated target class. +# +# Alias is just a main target which returns its source targets without any +# processing. 
For example: +# +# alias bin : hello test_hello ; +# alias lib : helpers xml_parser ; +# +# Another important use of 'alias' is to conveniently group source files: +# +# alias platform-src : win.cpp : NT ; +# alias platform-src : linux.cpp : LINUX ; +# exe main : main.cpp platform-src ; +# +# Lastly, it is possible to create a local alias for some target, with different +# properties: +# +# alias big_lib : : @/external_project/big_lib/static ; +# + +import "class" : new ; +import param ; +import project ; +import property-set ; +import targets ; + + +class alias-target-class : basic-target +{ + rule __init__ ( name : project : sources * : requirements * + : default-build * : usage-requirements * ) + { + basic-target.__init__ $(name) : $(project) : $(sources) : + $(requirements) : $(default-build) : $(usage-requirements) ; + } + + rule construct ( name : source-targets * : property-set ) + { + return [ property-set.empty ] $(source-targets) ; + } + + rule compute-usage-requirements ( subvariant ) + { + local base = [ basic-target.compute-usage-requirements $(subvariant) ] ; + return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ; + } + + rule skip-from-usage-requirements ( ) + { + } +} + + +# Declares the 'alias' target. It will process its sources virtual-targets by +# returning them unaltered as its own constructed virtual-targets. +# +rule alias ( name : sources * : requirements * : default-build * : + usage-requirements * ) +{ + param.handle-named-params + sources requirements default-build usage-requirements ; + + local project = [ project.current ] ; + + targets.main-target-alternative + [ new alias-target-class $(name) : $(project) + : [ targets.main-target-sources $(sources) : $(name) : no-renaming ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) + ] + : [ targets.main-target-usage-requirements $(usage-requirements) : + $(project) ] + ] ; +} + + +IMPORT $(__name__) : alias : : alias ; diff --git a/src/boost/tools/build/src/build/alias.py b/src/boost/tools/build/src/build/alias.py new file mode 100755 index 000000000..ec9914f20 --- /dev/null +++ b/src/boost/tools/build/src/build/alias.py @@ -0,0 +1,75 @@ +# Copyright 2003, 2004, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Status: ported (danielw) +# Base revision: 56043 + +# This module defines the 'alias' rule and associated class. +# +# Alias is just a main target which returns its source targets without any +# processing. 
For example:: +# +# alias bin : hello test_hello ; +# alias lib : helpers xml_parser ; +# +# Another important use of 'alias' is to conveniently group source files:: +# +# alias platform-src : win.cpp : NT ; +# alias platform-src : linux.cpp : LINUX ; +# exe main : main.cpp platform-src ; +# +# Lastly, it's possible to create local alias for some target, with different +# properties:: +# +# alias big_lib : : @/external_project/big_lib/static ; +# + +import targets +import property_set +from b2.manager import get_manager + +from b2.util import metatarget, is_iterable_typed + +class AliasTarget(targets.BasicTarget): + + def __init__(self, *args): + targets.BasicTarget.__init__(self, *args) + + def construct(self, name, source_targets, properties): + if __debug__: + from .virtual_target import VirtualTarget + assert isinstance(name, basestring) + assert is_iterable_typed(source_targets, VirtualTarget) + assert isinstance(properties, property_set.PropertySet) + return [property_set.empty(), source_targets] + + def compute_usage_requirements(self, subvariant): + if __debug__: + from .virtual_target import Subvariant + assert isinstance(subvariant, Subvariant) + base = targets.BasicTarget.compute_usage_requirements(self, subvariant) + # Add source's usage requirement. If we don't do this, "alias" does not + # look like 100% alias. + return base.add(subvariant.sources_usage_requirements()) + +@metatarget +def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]): + assert isinstance(name, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) + project = get_manager().projects().current() + targets = get_manager().targets() + + targets.main_target_alternative(AliasTarget( + name, project, + targets.main_target_sources(sources, name, no_renaming=True), + targets.main_target_requirements(requirements or [], project), + targets.main_target_default_build(default_build, project), + targets.main_target_usage_requirements(usage_requirements or [], project))) + +# Declares the 'alias' target. It will build sources, and return them unaltered. +get_manager().projects().add_rule("alias", alias) + diff --git a/src/boost/tools/build/src/build/build-request.jam b/src/boost/tools/build/src/build/build-request.jam new file mode 100644 index 000000000..6535e118a --- /dev/null +++ b/src/boost/tools/build/src/build/build-request.jam @@ -0,0 +1,418 @@ +# Copyright 2002 Dave Abrahams +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import sequence ; +import set ; +import regex ; +import feature ; +import property ; +import container ; +import string ; + + +# Transform property-set by applying f to each component property. +# +local rule apply-to-property-set ( f property-set ) +{ + local properties = [ feature.split $(property-set) ] ; + return [ string.join [ $(f) $(properties) ] : / ] ; +} + + +# Expand the given build request by combining all property-sets which do not +# specify conflicting non-free features. Expects all the project files to +# already be loaded. +# +rule expand-no-defaults ( property-sets * ) +{ + # First make all features and subfeatures explicit. 
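+    # For example, with the features declared in the __test__ rule below, the
+    # complete expansion of "gcc-3.0.1/stlport debug release off" yields the
+    # two property sets gcc/3.0.1/stlport/debug/off and
+    # gcc/3.0.1/stlport/release/off, because debug and release are
+    # conflicting values of the single-valued variant feature.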
+ local expanded-property-sets = [ sequence.transform apply-to-property-set + feature.expand-subfeatures : $(property-sets) ] ; + + # Now combine all of the expanded property-sets + local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ; + + return $(product) ; +} + + +# Update the list of expected conflicts based on the new +# features. +# +local rule remove-conflicts ( conflicts * : features * ) +{ + local result ; + for local c in $(conflicts) + { + if ! [ set.intersection [ regex.split $(c) "/" ] : $(features) ] + { + result += $(c) ; + } + } + return $(result) ; +} + + +# Implementation of x-product, below. Expects all the project files to already +# be loaded. +# +local rule x-product-aux ( property-sets + ) +{ + local result ; + local p = [ feature.split $(property-sets[1]) ] ; + local f = [ set.difference $(p:G) : [ feature.free-features ] ] ; + local seen ; + local extra-conflicts ; + + # No conflict with things used at a higher level? + if ! [ set.intersection $(f) : $(x-product-used) ] + { + local x-product-seen ; + local x-product-conflicts = + [ remove-conflicts $(x-product-conflicts) : $(f) ] ; + { + # Do not mix in any conflicting features. + local x-product-used = $(x-product-used) $(f) ; + + if $(property-sets[2]) + { + local rest = [ x-product-aux $(property-sets[2-]) ] ; + result = $(property-sets[1])/$(rest) ; + } + if ! $(x-product-conflicts) + { + result ?= $(property-sets[1]) ; + } + } + + # If we did not encounter a conflicting feature lower down, do not + # recurse again. + if ! [ set.intersection $(f) : $(x-product-seen) ] + || [ remove-conflicts $(x-product-conflicts) : $(x-product-seen) ] + { + property-sets = ; + } + else + { + # A property is only allowed to be absent if it conflicts + # with either a higher or lower layer. We don't need to + # bother setting this if we already know that we don't need + # to recurse again. + extra-conflicts = $(f:J=/) ; + } + + seen = $(x-product-seen) ; + } + + if $(property-sets[2]) + { + # Lower layers expansion must conflict with this + local x-product-conflicts = $(x-product-conflicts) $(extra-conflicts) ; + + result += [ x-product-aux $(property-sets[2-]) ] ; + } + + # Note that we have seen these features so that higher levels will recurse + # again without them set. + x-product-seen += $(f) $(seen) ; + return $(result) ; +} + + +# Return the cross-product of all elements of property-sets, less any that would +# contain conflicting values for single-valued features. Expects all the project +# files to already be loaded. +# +# Formal definition: +# Returns all maximum non-conflicting subsets of property-sets. +# The result is a list of all property-sets p such that +# 1. p is composed by joining a subset of property-sets without removing +# duplicates +# 2. p contains at most one instance of every single-valued feature +# 3. Adding any additional element of property-sets to p be would +# violate (2) +local rule x-product ( property-sets * ) +{ + if $(property-sets).non-empty + { + # Prepare some "scoped globals" that can be used by the implementation + # function, x-product-aux. + local x-product-seen x-product-used x-product-conflicts ; + return [ x-product-aux $(property-sets) : $(feature-space) ] ; + } + # Otherwise return empty. +} + + +# Returns true if either 'v' or the part of 'v' before the first '-' symbol is +# an implicit value. Expects all the project files to already be loaded. 
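+# For example, with a toolset feature that declares gcc as an implicit value,
+# both "gcc" and "gcc-3.0.1" look like implicit values; this is what allows a
+# command-line token such as "gcc-3.0.1" to be classified as a build property
+# rather than as a target name in from-command-line below.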
+# +local rule looks-like-implicit-value ( v ) +{ + if [ feature.is-implicit-value $(v) ] + { + return true ; + } + else + { + local split = [ regex.split $(v) - ] ; + if [ feature.is-implicit-value $(split[1]) ] + { + return true ; + } + } +} + + +# Takes the command line tokens (such as taken from the ARGV rule) and +# constructs a build request from them. Returns a vector of two vectors (where +# "vector" means container.jam's "vector"). First is the set of targets +# specified in the command line, and second is the set of requested build +# properties. Expects all the project files to already be loaded. +# +rule from-command-line ( command-line * ) +{ + local targets ; + local properties ; + + command-line = $(command-line[2-]) ; + local skip-next = ; + for local e in $(command-line) + { + if $(skip-next) + { + skip-next = ; + } + else if ! [ MATCH ^(-) : $(e) ] + { + # Build request spec either has "=" in it or completely consists of + # implicit feature values. + local fs = feature-space ; + if [ MATCH "(.*=.*)" : $(e) ] + || [ looks-like-implicit-value $(e:D=) : $(feature-space) ] + { + properties += $(e) ; + } + else if $(e) + { + targets += $(e) ; + } + } + else if [ MATCH "^(-[-ldjfsto])$" : $(e) ] + { + skip-next = true ; + } + } + return [ new vector + [ new vector $(targets) ] + [ new vector $(properties) ] ] ; +} + + +# Converts a list of elements of command line build request specification into internal +# form. Expects all the project files to already be loaded. +# +rule convert-command-line-elements ( elements * ) +{ + local result ; + for local e in $(elements) + { + result += [ convert-command-line-element $(e) ] ; + } + return $(result) ; +} + + +# Converts one element of command line build request specification into internal +# form. +local rule convert-command-line-element ( e ) +{ + local result ; + local parts = [ regex.split $(e) "/" ] ; + while $(parts) + { + local p = $(parts[1]) ; + local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ; + local lresult ; + local feature ; + local values ; + if $(m) + { + feature = $(m[1]) ; + values = [ regex.split $(m[2]) "," ] ; + lresult = <$(feature)>$(values) ; + } + else + { + lresult = [ regex.split $(p) "," ] ; + } + + if $(feature) && free in [ feature.attributes <$(feature)> ] + { + # If we have free feature, then the value is everything + # until the end of the command line token. Slashes in + # the following string are not taked to mean separation + # of properties. Commas are also not interpreted specially. + values = $(values:J=,) ; + values = $(values) $(parts[2-]) ; + values = $(values:J=/) ; + lresult = ; + # Optional free features will ignore empty value arguments. + if optional in [ feature.attributes <$(feature)> ] + { + for local v in $(values) + { + if $(v) + { + lresult += <$(feature)>$(v) ; + } + } + } + else + { + lresult = <$(feature)>$(values) ; + } + parts = ; + } + + if ! [ MATCH (.*-.*) : $(p) ] + { + # property.validate cannot handle subfeatures, so we avoid the check + # here. + for local p in $(lresult) + { + property.validate $(p) : $(feature-space) ; + } + } + + if $(lresult) + { + if ! 
$(result) + { + result = $(lresult) ; + } + else + { + result = $(result)/$(lresult) ; + } + } + + parts = $(parts[2-]) ; + } + + return $(result) ; +} + + +rule __test__ ( ) +{ + import assert ; + import feature ; + + feature.prepare-test build-request-test-temp ; + + import build-request ; + import build-request : expand-no-defaults : build-request.expand-no-defaults ; + import errors : try catch ; + import feature : feature subfeature ; + + feature toolset : gcc msvc borland : implicit ; + subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 + 3.0 3.0.1 3.0.2 : optional ; + + feature variant : debug release : implicit composite ; + feature inlining : on off ; + feature "include" : : free ; + + feature stdlib : native stlport : implicit ; + + feature runtime-link : dynamic static : symmetric ; + + # Empty build requests should expand to empty. + assert.result + : build-request.expand-no-defaults ; + + assert.result + gcc/3.0.1/stlport/debug + msvc/stlport/debug + msvc/debug + : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ; + + assert.result + gcc/3.0.1/stlport/debug + msvc/debug + debug/msvc/stlport + : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ; + + assert.result + gcc/3.0.1/stlport/debug/off + gcc/3.0.1/stlport/release/off + : build-request.expand-no-defaults gcc-3.0.1/stlport debug release off ; + + assert.result + a/b/c/gcc/3.0.1/stlport/debug/x/y/z + a/b/c/msvc/stlport/debug/x/y/z + a/b/c/msvc/debug/x/y/z + : build-request.expand-no-defaults a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug x/y/z ; + + local r ; + + try ; + { + r = [ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ] ; + build-request.convert-command-line-elements [ $(r).get-at 2 ] ; + } + catch \"static\" is not an implicit feature value ; + + r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; + assert.equal [ $(r).get-at 1 ] : ; + assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ; + + assert.equal + [ build-request.convert-command-line-elements debug runtime-link=dynamic ] + : debug dynamic ; + + r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ; + assert.equal [ $(r).get-at 1 ] : target ; + assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ; + + assert.equal + [ build-request.convert-command-line-elements debug runtime-link=dynamic ] + : debug dynamic ; + + r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ; + assert.equal [ $(r).get-at 1 ] : ; + assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic,static ; + + assert.equal + [ build-request.convert-command-line-elements debug runtime-link=dynamic,static ] + : debug dynamic static ; + + r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ; + assert.equal [ $(r).get-at 1 ] : ; + assert.equal [ $(r).get-at 2 ] : debug gcc/runtime-link=dynamic,static ; + + assert.equal + [ build-request.convert-command-line-elements debug gcc/runtime-link=dynamic,static ] + : debug gcc/dynamic gcc/static ; + + r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ; + assert.equal [ $(r).get-at 1 ] : ; + assert.equal [ $(r).get-at 2 ] : msvc gcc,borland/runtime-link=static ; + + assert.equal + [ build-request.convert-command-line-elements msvc gcc,borland/runtime-link=static ] + : msvc gcc/static borland/static ; + + r = [ build-request.from-command-line bjam gcc-3.0 ] ; + assert.equal [ $(r).get-at 1 ] : ; + 
assert.equal [ $(r).get-at 2 ] : gcc-3.0 ; + + assert.equal + [ build-request.convert-command-line-elements gcc-3.0 ] + : gcc-3.0 ; + + feature.finish-test build-request-test-temp ; +} diff --git a/src/boost/tools/build/src/build/build_request.py b/src/boost/tools/build/src/build/build_request.py new file mode 100644 index 000000000..4fa54072f --- /dev/null +++ b/src/boost/tools/build/src/build/build_request.py @@ -0,0 +1,222 @@ +# Status: being ported by Vladimir Prus +# TODO: need to re-compare with mainline of .jam +# Base revision: 40480 +# +# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. + +import b2.build.feature +feature = b2.build.feature + +from b2.util.utility import * +from b2.util import is_iterable_typed +import b2.build.property_set as property_set + +def expand_no_defaults (property_sets): + """ Expand the given build request by combining all property_sets which don't + specify conflicting non-free features. + """ + assert is_iterable_typed(property_sets, property_set.PropertySet) + # First make all features and subfeatures explicit + expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets] + + # Now combine all of the expanded property_sets + product = __x_product (expanded_property_sets) + + return [property_set.create(p) for p in product] + + +def __x_product (property_sets): + """ Return the cross-product of all elements of property_sets, less any + that would contain conflicting values for single-valued features. + """ + assert is_iterable_typed(property_sets, property_set.PropertySet) + x_product_seen = set() + return __x_product_aux (property_sets, x_product_seen)[0] + +def __x_product_aux (property_sets, seen_features): + """Returns non-conflicting combinations of property sets. + + property_sets is a list of PropertySet instances. seen_features is a set of Property + instances. + + Returns a tuple of: + - list of lists of Property instances, such that within each list, no two Property instance + have the same feature, and no Property is for feature in seen_features. + - set of features we saw in property_sets + """ + assert is_iterable_typed(property_sets, property_set.PropertySet) + assert isinstance(seen_features, set) + if not property_sets: + return ([], set()) + + properties = property_sets[0].all() + + these_features = set() + for p in property_sets[0].non_free(): + these_features.add(p.feature) + + # Note: the algorithm as implemented here, as in original Jam code, appears to + # detect conflicts based on features, not properties. For example, if command + # line build request say: + # + # 1/1 c<1>/1 + # + # It will decide that those two property sets conflict, because they both specify + # a value for 'b' and will not try building "1 ", but rather two + # different property sets. This is a topic for future fixing, maybe. 
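+    # Illustration, mirroring the .jam test suite: expanding the request
+    # [gcc-3.0.1/stlport, msvc/stlport, msvc, debug] yields the three sets
+    # gcc/3.0.1/stlport/debug, msvc/stlport/debug and msvc/debug; no two of
+    # the first three inputs can be combined because each of them already
+    # carries a value of the single-valued toolset feature.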
+ if these_features & seen_features: + + (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features) + return (inner_result, inner_seen | these_features) + + else: + + result = [] + (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features) + if inner_result: + for inner in inner_result: + result.append(properties + inner) + else: + result.append(properties) + + if inner_seen & these_features: + # Some of elements in property_sets[1:] conflict with elements of property_sets[0], + # Try again, this time omitting elements of property_sets[0] + (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features) + result.extend(inner_result2) + + return (result, inner_seen | these_features) + + + +def looks_like_implicit_value(v): + """Returns true if 'v' is either implicit value, or + the part before the first '-' symbol is implicit value.""" + assert isinstance(v, basestring) + if feature.is_implicit_value(v): + return 1 + else: + split = v.split("-") + if feature.is_implicit_value(split[0]): + return 1 + + return 0 + +def from_command_line(command_line): + """Takes the command line tokens (such as taken from ARGV rule) + and constructs build request from it. Returns a list of two + lists. First is the set of targets specified in the command line, + and second is the set of requested build properties.""" + assert is_iterable_typed(command_line, basestring) + targets = [] + properties = [] + + for e in command_line: + if e[:1] != "-": + # Build request spec either has "=" in it, or completely + # consists of implicit feature values. + if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]): + properties.append(e) + elif e: + targets.append(e) + + return [targets, properties] + +# Converts one element of command line build request specification into +# internal form. +def convert_command_line_element(e): + assert isinstance(e, basestring) + result = None + parts = e.split("/") + for p in parts: + m = p.split("=") + if len(m) > 1: + feature = m[0] + values = m[1].split(",") + lresult = [("<%s>%s" % (feature, v)) for v in values] + else: + lresult = p.split(",") + + if p.find('-') == -1: + # FIXME: first port property.validate + # property.validate cannot handle subfeatures, + # so we avoid the check here. 
+ #for p in lresult: + # property.validate(p) + pass + + if not result: + result = lresult + else: + result = [e1 + "/" + e2 for e1 in result for e2 in lresult] + + return [property_set.create(b2.build.feature.split(r)) for r in result] + +### +### rule __test__ ( ) +### { +### import assert feature ; +### +### feature.prepare-test build-request-test-temp ; +### +### import build-request ; +### import build-request : expand_no_defaults : build-request.expand_no_defaults ; +### import errors : try catch ; +### import feature : feature subfeature ; +### +### feature toolset : gcc msvc borland : implicit ; +### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 +### 3.0 3.0.1 3.0.2 : optional ; +### +### feature variant : debug release : implicit composite ; +### feature inlining : on off ; +### feature "include" : : free ; +### +### feature stdlib : native stlport : implicit ; +### +### feature runtime-link : dynamic static : symmetric ; +### +### +### local r ; +### +### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; +### assert.equal [ $(r).get-at 1 ] : ; +### assert.equal [ $(r).get-at 2 ] : debug dynamic ; +### +### try ; +### { +### +### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ; +### } +### catch \"static\" is not a value of an implicit feature ; +### +### +### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ; +### assert.equal [ $(r).get-at 1 ] : target ; +### assert.equal [ $(r).get-at 2 ] : debug dynamic ; +### +### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ; +### assert.equal [ $(r).get-at 1 ] : ; +### assert.equal [ $(r).get-at 2 ] : debug dynamic static ; +### +### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ; +### assert.equal [ $(r).get-at 1 ] : ; +### assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic +### gcc/static ; +### +### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ; +### assert.equal [ $(r).get-at 1 ] : ; +### assert.equal [ $(r).get-at 2 ] : msvc gcc/static +### borland/static ; +### +### r = [ build-request.from-command-line bjam gcc-3.0 ] ; +### assert.equal [ $(r).get-at 1 ] : ; +### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ; +### +### feature.finish-test build-request-test-temp ; +### } +### +### diff --git a/src/boost/tools/build/src/build/config-cache.jam b/src/boost/tools/build/src/build/config-cache.jam new file mode 100644 index 000000000..05a6fcd3f --- /dev/null +++ b/src/boost/tools/build/src/build/config-cache.jam @@ -0,0 +1,78 @@ +# Copyright 2012 Steven Watanabe +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import modules ; +import errors ; +import regex ; +import path ; +import project ; +import os ; + +rule get ( name ) +{ + return $(.vars.$(name)) ; +} + +rule set ( name : value * ) +{ + .all-vars += $(name) ; + .vars.$(name) = $(value) ; +} + +rule save ( ) +{ + if $(.cache-file) + { + local cache-file-native = [ path.native $(.cache-file) ] ; + local target = $(cache-file-native) ; + local contents = "# Automatically generated by B2.\n# Do not edit.\n\nmodule config-cache {\n" ; + for local var in $(.all-vars) + { + local transformed ; + for local value in $(.vars.$(var)) + { + transformed += [ regex.escape $(value) : \"\\ : \\ ] ; + } + local quoted = \"$(transformed)\" ; + contents += " set \"$(var)\" : $(quoted:J= ) ;\n" ; + } + contents += "}\n" ; + FILE_CONTENTS on $(target) = $(contents) ; + ALWAYS $(target) ; + config-cache.write $(target) ; + UPDATE_NOW $(target) : [ modules.peek configure : .log-fd ] : ignore-minus-n ; + import common ; + common.Clean clean-all : $(target) ; + } +} + +actions write +{ + @($(STDOUT):E=$(FILE_CONTENTS:J=)) > "$(<)" +} + +if [ os.name ] = VMS +{ + actions write + { + @($(STDOUT):E=$(FILE_CONTENTS:J=)) | TYPE SYS$INPUT /OUT=$(<:W) + } +} + +rule load ( cache-file ) +{ + if $(.cache-file) + { + errors.error duplicate load of cache file ; + } + cache-file = [ path.native $(cache-file) ] ; + if [ path.exists $(cache-file) ] && ! ( --reconfigure in [ modules.peek : ARGV ] ) + { + FILE_CONTENTS on $(cache-file) = "" ; + config-cache.write $(cache-file) ; + UPDATE_NOW $(cache-file) : [ modules.peek configure : .log-fd ] ; + include $(cache-file) ; + } + .cache-file = $(cache-file) ; +} diff --git a/src/boost/tools/build/src/build/configure.jam b/src/boost/tools/build/src/build/configure.jam new file mode 100644 index 000000000..8c89b1e1f --- /dev/null +++ b/src/boost/tools/build/src/build/configure.jam @@ -0,0 +1,629 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright 2017-2021 Rene Ferdinand Rivera Morell +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines function to help with two main tasks: +# +# - Discovering build-time configuration for the purposes of adjusting the build +# process. +# - Reporting what is built, and how it is configured. + +import "class" : new ; +import common ; +import indirect ; +import path ; +import project ; +import property ; +import property-set ; +import targets ; +import config-cache ; +import feature ; +import modules ; +import sequence ; +import utility ; +import virtual-target ; + + +rule log-summary ( ) +{ +} + + +.width = 30 ; + +rule set-width ( width ) +{ + .width = $(width) ; +} + + +# Declare that the components specified by the parameter exist. +# +rule register-components ( components * ) +{ + .components += $(components) ; +} + + +# Declare that the components specified by the parameters will be built. +# +rule components-building ( components * ) +{ + .built-components += $(components) ; +} + + +# Report something about component configuration that the user should better +# know. +# +rule log-component-configuration ( component : message ) +{ + # FIXME: Implement per-property-set logs. + .component-logs.$(component) += $(message) ; +} + + +.variant_index = 0 ; +.nl = "\n" ; + +.check_notes = ; + +rule log-check-result ( result variant ? ) +{ + if ! 
$(.announced-checks) + { + ECHO "Performing configuration checks\n" ; + .announced-checks = 1 ; + } + + if $(variant) + { + if $(.variant_index.$(variant)) + { + result = "$(result) [$(.variant_index.$(variant))]" ; + } + else + { + .variant_index = [ CALC $(.variant_index) + 1 ] ; + .variant_index.$(variant) = $(.variant_index) ; + result = "$(result) [$(.variant_index.$(variant))]" ; + .check_notes += "[$(.variant_index.$(variant))] $(variant)" ; + } + } + # else + # { + # result = "$(result) [?]" ; + # } + + ECHO $(result) ; + # FIXME: Unfinished code. Nothing seems to set .check-results at the moment. + #.check-results += $(result) ; +} + + +rule log-library-search-result ( library : result variant ? ) +{ + local x = [ PAD " - $(library)" : $(.width) ] ; + log-check-result "$(x) : $(result)" $(variant) ; +} + + +rule print-component-configuration ( ) +{ + # FIXME: See what was intended with this initial assignment. + # local c = [ sequence.unique $(.components) ] ; + + ECHO "\nComponent configuration:\n" ; + local c ; + for c in $(.components) + { + local s ; + if $(c) in $(.built-components) + { + s = "building" ; + } + else + { + s = "not building" ; + } + ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ; + for local m in $(.component-logs.$(c)) + { + ECHO " -" $(m) ; + } + } + ECHO ; +} + + +rule print-configure-checks-summary ( ) +{ + if $(.check_notes) + { + ECHO ; + for local l in $(.check_notes) { ECHO $(l) ; } + } + + # FIXME: The problem with this approach is that the user sees the checks + # summary when all checks are done, and has no progress reporting while the + # checks are being executed. + if $(.check-results) + { + ECHO "Configuration checks summary\n" ; + for local r in $(.check-results) + { + ECHO $(r) ; + } + ECHO ; + } +} + +if --reconfigure in [ modules.peek : ARGV ] +{ + .reconfigure = true ; +} + +# Handle the --reconfigure option +rule maybe-force-rebuild ( targets * ) +{ + if $(.reconfigure) + { + local all-targets ; + for local t in $(targets) + { + all-targets += [ virtual-target.traverse $(t) ] ; + } + for local t in [ sequence.unique $(all-targets) ] + { + $(t).always ; + } + } +} + +# Attempts to build a set of virtual targets +rule try-build ( targets * : ps : what : retry ? ) +{ + local cache-props = [ $(ps).raw ] ; + local cache-name = $(what) $(cache-props) ; + cache-name = $(cache-name:J=-) ; + local value = [ config-cache.get $(cache-name) ] ; + local cache-min = [ property.as-path [ SORT [ feature.minimize $(cache-props) ] ] ] ; + + local result ; + local jam-targets ; + + maybe-force-rebuild $(targets) ; + + for local t in $(targets) + { + jam-targets += [ $(t).actualize ] ; + } + + local x ; + if $(value) + { + x = [ PAD " - $(what)" : $(.width) ] ; + if $(value) = true + { + .$(what)-supported.$(ps) = yes ; + result = true ; + x = "$(x) : yes (cached)" ; + } + else + { + x = "$(x) : no (cached)" ; + } + } + else if ! UPDATE_NOW in [ RULENAMES ] + { + # Cannot determine. Assume existence. + } + else + { + x = [ PAD " - $(what)" : $(.width) ] ; + if [ UPDATE_NOW $(jam-targets) : + $(.log-fd) : ignore-minus-n : ignore-minus-q ] + { + .$(what)-supported.$(ps) = yes ; + result = true ; + x = "$(x) : yes" ; + } + else + { + x = "$(x) : no" ; + } + } + if $(x) + { + log-check-result "$(x)" "$(cache-min:J= )" ; + } + if ! $(value) + { + if $(result) + { + config-cache.set $(cache-name) : true ; + } + else + { + config-cache.set $(cache-name) : false ; + } + } + return $(result) ; +} + +# Attempts to build several sets of virtual targets. 
Returns the +# the index of the first set that builds. +rule try-find-build ( ps : what : * ) +{ + local args = 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ; + # The outer layer only needs to check $(what), but we + # also need to check the individual elements, in case + # the set of targets has changed since the last build. + local cache-props = [ $(ps).raw ] ; + local cache-name = $(what) $($(args)[1]) $(cache-props) ; + cache-name = $(cache-name:J=-) ; + local value = [ config-cache.get $(cache-name) ] ; + local cache-min = [ property.as-path [ SORT [ feature.minimize $(cache-props) ] ] ] ; + + local result ; + local jam-targets ; + + maybe-force-rebuild $($(args)[2-]) ; + + # Make sure that the targets are always actualized, + # even if the result is cached. This is needed to + # allow clean-all to find them and also to avoid + # unintentional behavior changes. + for local t in $($(args)[2-]) + { + $(t).actualize ; + } + + if $(value) + { + local none = none ; # What to show when the argument + local name = $(value) ; + if $(name) != none + { + name = [ CALC $(name) + 2 ] ; + } + local x = [ PAD " - $(what)" : $(.width) ] ; + local y = [ PAD $($(name)[1]) : 3 ] ; + result = $(value) ; + log-check-result "$(x) : $(y) (cached)" "$(cache-min:J= )" ; + } + else + { + local x = [ PAD " - $(what)" : $(.width) ] ; + for local i in $(args) + { + if ! $($(i)[1]) + { + break ; + } + local jam-targets ; + for local t in $($(i)[2-]) + { + jam-targets += [ $(t).actualize ] ; + } + if [ UPDATE_NOW $(jam-targets) : + $(.log-fd) : ignore-minus-n : ignore-minus-q ] + { + result = [ CALC $(i) - 2 ] ; + log-check-result "$(x) : $($(i)[1])" "$(cache-min:J= )" ; + break ; + } + } + if ! $(result) + { + log-check-result "$(x) : none" "$(cache-min:J= )" ; + result = none ; + } + } + if ! $(value) + { + if $(result) + { + config-cache.set $(cache-name) : $(result) ; + } + else + { + config-cache.set $(cache-name) : $(result) ; + } + } + if $(result) != none + { + return $(result) ; + } +} + +# Attempt to build a metatarget named by 'metatarget-reference' +# in context of 'project' with properties 'ps'. +# Returns non-empty value if build is OK. +rule builds-raw ( metatarget-reference : project : ps : what : retry ? ) +{ + local result ; + + if ! $(retry) && ! $(.$(what)-tested.$(ps)) + { + .$(what)-tested.$(ps) = true ; + + local targets = [ targets.generate-from-reference + $(metatarget-reference) : $(project) : $(ps) ] ; + + result = [ try-build $(targets[2-]) : $(ps) : $(what) : $(retry) ] ; + .$(what)-supported.$(ps) = $(result) ; + + return $(result) ; + + } + else + { + return $(.$(what)-supported.$(ps)) ; + } +} + +# Attempt to build a metatarget named by 'metatarget-reference' +# in context of 'project' with properties 'ps'. +# Returns the 1-based index of the first target +# that builds. +rule find-builds-raw ( project : ps : what : * ) +{ + local result ; + local args = 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ; + + if ! $(.$(what)-tested.$(ps)) + { + .$(what)-tested.$(ps) = true ; + local targets.$(i) what.$(i) ; + for local i in $(args) + { + if ! 
$($(i)) + { + break ; + } + targets.$(i) = [ targets.generate-from-reference + $($(i)[1]) : $(project) : $(ps) ] ; + # ignore usage requirements + targets.$(i) = $(targets.$(i)[2-]) ; + if $($(i)[2]) + { + what.$(i) = $($(i)[2]) ; + } + else + { + local t = [ targets.resolve-reference + $($(i)[1]) : $(project) ] ; + what.$(i) = [ $(t[1]).name ] ; + } + } + + result = [ try-find-build $(ps) : $(what) + : $(what.4) $(targets.4) + : $(what.5) $(targets.5) + : $(what.6) $(targets.6) + : $(what.7) $(targets.7) + : $(what.8) $(targets.8) + : $(what.9) $(targets.9) + : $(what.10) $(targets.10) + : $(what.11) $(targets.11) + : $(what.12) $(targets.12) + : $(what.13) $(targets.13) + : $(what.14) $(targets.14) + : $(what.15) $(targets.15) + : $(what.16) $(targets.16) + : $(what.17) $(targets.17) + : $(what.18) $(targets.18) + : $(what.19) $(targets.19) ] ; + .$(what)-result.$(ps) = $(result) ; + + return $(result) ; + } + else + { + return $(.$(what)-result.$(ps)) ; + } +} + +rule get-relevant-features ( properties * ) +{ + local ps-full = [ property-set.create $(properties) ] ; + local ps-base = [ property-set.create [ $(ps-full).base ] ] ; + local ps-min = [ feature.expand-subfeatures [ feature.minimize + [ $(ps-base).raw ] ] ] ; + local ps-relevant = [ property-set.create $(ps-min) ] ; + + return [ $(ps-relevant).raw ] ; +} + +rule builds ( metatarget-reference : properties * : what ? : retry ? ) +{ + local relevant = [ get-relevant-features $(properties) ] ; + local ps = [ property-set.create $(relevant) ] ; + local t = [ targets.current ] ; + local p = [ $(t).project ] ; + + if ! $(what) + { + local resolved = [ targets.resolve-reference $(metatarget-reference) : $(p) ] ; + local name = [ $(resolved[1]).name ] ; + what = "$(name) builds" ; + } + + return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : + $(retry) ] ; +} + +rule find-builds ( what : properties * : * ) +{ + local relevant = [ get-relevant-features $(properties) ] ; + local ps = [ property-set.create $(relevant) ] ; + local t = [ targets.current ] ; + local p = [ $(t).project ] ; + + return [ find-builds-raw $(p) : $(ps) : $(what) : + $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : + $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : + $(16) : $(17) : $(18) ] ; +} + + +# Called by B2 startup code to specify the file to receive the +# configuration check results. Should never be called by user code. +# +rule set-log-file ( log-file ) +{ + path.makedirs [ path.parent $(log-file) ] ; + .log-fd = [ FILE_OPEN [ path.native $(log-file) ] : "w" ] ; + if ! $(.log-fd) + { + ECHO "warning:" failed to open log file $(log-file) for writing ; + } +} + + +# Frontend rules + +class check-target-builds-worker +{ + import configure ; + import property-set ; + import targets ; + import project ; + import property ; + + rule __init__ ( target message ? 
: true-properties * : false-properties * ) + { + local project = [ project.current ] ; + self.target = $(target) ; + self.message = $(message) ; + self.true-properties = + [ configure.translate-properties $(true-properties) : $(project) ] ; + self.false-properties = + [ configure.translate-properties $(false-properties) : $(project) ] ; + } + + rule check ( properties * ) + { + local choosen ; + if [ configure.builds $(self.target) : $(properties) : $(self.message) ] + { + choosen = $(self.true-properties) ; + } + else + { + choosen = $(self.false-properties) ; + } + return [ property.evaluate-conditionals-in-context $(choosen) : + $(properties) ] ; + } +} + +class configure-choose-worker +{ + import configure ; + import property ; + import project ; + rule __init__ ( message : * ) + { + local project = [ project.current ] ; + self.message = $(message) ; + for i in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 + { + local name = [ CALC $(i) - 1 ] ; + self.targets.$(name) = $($(i)[1]) ; + if ! $($(i)[2]:G) # Check whether the second argument is a property + { + self.what.$(name) = $($(i)[2]) ; + self.props.$(name) = $($(i)[3-]) ; + } + else + { + self.props.$(name) = $($(i)[2-]) ; + } + self.props.$(name) = [ configure.translate-properties + $(self.props.$(name)) : $(project) ] ; + } + } + rule all-properties ( ) + { + local i = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 ; + return $(self.props.$(i)) ; + } + rule check ( properties * ) + { + local i = [ configure.find-builds $(self.message) : $(properties) + : $(self.targets.1) $(self.what.1) + : $(self.targets.2) $(self.what.2) + : $(self.targets.3) $(self.what.3) + : $(self.targets.4) $(self.what.4) + : $(self.targets.5) $(self.what.5) + : $(self.targets.6) $(self.what.6) + : $(self.targets.7) $(self.what.7) + : $(self.targets.8) $(self.what.8) + : $(self.targets.9) $(self.what.9) + : $(self.targets.10) $(self.what.10) + : $(self.targets.11) $(self.what.11) + : $(self.targets.12) $(self.what.12) + : $(self.targets.13) $(self.what.13) + : $(self.targets.14) $(self.what.14) + : $(self.targets.15) $(self.what.15) + : $(self.targets.16) $(self.what.16) + : $(self.targets.17) $(self.what.17) ] ; + if $(self.props.$(i)) + { + return [ property.evaluate-conditionals-in-context $(self.props.$(i)) : $(properties) ] ; + } + } +} + +rule translate-properties ( properties * : project ? ) +{ + if $(project) && [ $(project).location ] + { + local location = [ $(project).location ] ; + local m = [ $(project).project-module ] ; + local project-id = [ project.attribute $(m) id ] ; + project-id ?= [ path.root $(location) [ path.pwd ] ] ; + return [ property.translate $(properties) + : $(project-id) : $(location) : $(m) ] ; + } + else + { + return $(properties) ; + } +} + +rule check-target-builds ( target message ? 
: true-properties * : + false-properties * ) +{ + local instance = [ new check-target-builds-worker $(target) $(message) : + $(true-properties) : $(false-properties) ] ; + local rulename = [ indirect.make check : $(instance) ] ; + return @$(rulename) + [ property.evaluate-conditional-relevance + $(true-properties) $(false-properties) ] ; +} + +# Usage: +# [ configure.choose "architecture" +# : /config//x86 x86 x86 +# : /config//mips mips mips +# ] +rule choose ( message : * ) +{ + local instance = [ new configure-choose-worker $(message) + : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) + : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) + : $(17) : $(18) : $(19) ] ; + local rulename = [ indirect.make check : $(instance) ] ; + return @$(rulename) + [ property.evaluate-conditional-relevance + [ $(instance).all-properties ] ] ; +} + + +IMPORT $(__name__) : check-target-builds : : check-target-builds ; diff --git a/src/boost/tools/build/src/build/configure.py b/src/boost/tools/build/src/build/configure.py new file mode 100644 index 000000000..cf2121a2a --- /dev/null +++ b/src/boost/tools/build/src/build/configure.py @@ -0,0 +1,176 @@ +# Status: ported. +# Base revision: 64488 +# +# Copyright (c) 2010 Vladimir Prus. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines function to help with two main tasks: +# +# - Discovering build-time configuration for the purposes of adjusting +# build process. +# - Reporting what is built, and how it is configured. + +import b2.build.property as property +import b2.build.property_set as property_set + +from b2.build import targets as targets_ + +from b2.manager import get_manager +from b2.util.sequence import unique +from b2.util import bjam_signature, value_to_jam, is_iterable + +import bjam +import os + +__width = 30 + +def set_width(width): + global __width + __width = 30 + +__components = [] +__built_components = [] +__component_logs = {} +__announced_checks = False + +__log_file = None +__log_fd = -1 + +def register_components(components): + """Declare that the components specified by the parameter exist.""" + assert is_iterable(components) + __components.extend(components) + +def components_building(components): + """Declare that the components specified by the parameters will be build.""" + assert is_iterable(components) + __built_components.extend(components) + +def log_component_configuration(component, message): + """Report something about component configuration that the user should better know.""" + assert isinstance(component, basestring) + assert isinstance(message, basestring) + __component_logs.setdefault(component, []).append(message) + +def log_check_result(result): + assert isinstance(result, basestring) + global __announced_checks + if not __announced_checks: + print "Performing configuration checks" + __announced_checks = True + + print result + +def log_library_search_result(library, result): + assert isinstance(library, basestring) + assert isinstance(result, basestring) + log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width)) + + +def print_component_configuration(): + + print "\nComponent configuration:" + for c in __components: + if c in __built_components: + s = "building" + else: + s = "not building" + message = " - %s)" % c + message = message.rjust(__width) + message += " : " + s + for m in __component_logs.get(c, []): + print " -" + m + print 
"" + +__builds_cache = {} + +def builds(metatarget_reference, project, ps, what): + # Attempt to build a metatarget named by 'metatarget-reference' + # in context of 'project' with properties 'ps'. + # Returns non-empty value if build is OK. + assert isinstance(metatarget_reference, basestring) + assert isinstance(project, targets_.ProjectTarget) + assert isinstance(ps, property_set.PropertySet) + assert isinstance(what, basestring) + + result = [] + + existing = __builds_cache.get((what, ps), None) + if existing is None: + + result = False + __builds_cache[(what, ps)] = False + + targets = targets_.generate_from_reference( + metatarget_reference, project, ps).targets() + jam_targets = [] + for t in targets: + jam_targets.append(t.actualize()) + + x = (" - %s" % what).rjust(__width) + if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"): + __builds_cache[(what, ps)] = True + result = True + log_check_result("%s: yes" % x) + else: + log_check_result("%s: no" % x) + + return result + else: + return existing + +def set_log_file(log_file_name): + assert isinstance(log_file_name, basestring) + # Called by Boost.Build startup code to specify name of a file + # that will receive results of configure checks. This + # should never be called by users. + global __log_file, __log_fd + dirname = os.path.dirname(log_file_name) + if not os.path.exists(dirname): + os.makedirs(dirname) + # Make sure to keep the file around, so that it's not + # garbage-collected and closed + __log_file = open(log_file_name, "w") + __log_fd = __log_file.fileno() + +# Frontend rules + +class CheckTargetBuildsWorker: + + def __init__(self, target, true_properties, false_properties): + self.target = target + self.true_properties = property.create_from_strings(true_properties, True) + self.false_properties = property.create_from_strings(false_properties, True) + + def check(self, ps): + assert isinstance(ps, property_set.PropertySet) + # FIXME: this should not be hardcoded. Other checks might + # want to consider different set of features as relevant. + toolset = ps.get('toolset')[0] + toolset_version_property = "" ; + relevant = ps.get_properties('target-os') + \ + ps.get_properties("toolset") + \ + ps.get_properties(toolset_version_property) + \ + ps.get_properties("address-model") + \ + ps.get_properties("architecture") + rps = property_set.create(relevant) + t = get_manager().targets().current() + p = t.project() + if builds(self.target, p, rps, "%s builds" % self.target): + choosen = self.true_properties + else: + choosen = self.false_properties + return property.evaluate_conditionals_in_context(choosen, ps) + +@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"])) +def check_target_builds(target, true_properties, false_properties): + worker = CheckTargetBuildsWorker(target, true_properties, false_properties) + value = value_to_jam(worker.check) + return "" + value + +get_manager().projects().add_rule("check-target-builds", check_target_builds) + + diff --git a/src/boost/tools/build/src/build/engine.py b/src/boost/tools/build/src/build/engine.py new file mode 100644 index 000000000..d88d5aa23 --- /dev/null +++ b/src/boost/tools/build/src/build/engine.py @@ -0,0 +1,246 @@ +# Copyright Pedro Ferreira 2005. +# Copyright Vladimir Prus 2007. +# Distributed under the Boost +# Software License, Version 1.0. 
(See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +bjam_interface = __import__('bjam') + +import operator +import re + +import b2.build.property_set as property_set + +from b2.util import set_jam_action, is_iterable + +class BjamAction(object): + """Class representing bjam action defined from Python.""" + + def __init__(self, action_name, function, has_command=False): + assert isinstance(action_name, basestring) + assert callable(function) or function is None + self.action_name = action_name + self.function = function + self.has_command = has_command + + def __call__(self, targets, sources, property_set_): + assert is_iterable(targets) + assert is_iterable(sources) + assert isinstance(property_set_, property_set.PropertySet) + if self.has_command: + # Bjam actions defined from Python have only the command + # to execute, and no associated jam procedural code. So + # passing 'property_set' to it is not necessary. + bjam_interface.call("set-update-action", self.action_name, + targets, sources, []) + if self.function: + self.function(targets, sources, property_set_) + +class BjamNativeAction(BjamAction): + """Class representing bjam action defined by Jam code. + + We still allow to associate a Python callable that will + be called when this action is installed on any target. + """ + + def __call__(self, targets, sources, property_set_): + assert is_iterable(targets) + assert is_iterable(sources) + assert isinstance(property_set_, property_set.PropertySet) + if self.function: + self.function(targets, sources, property_set_) + + p = [] + if property_set: + p = property_set_.raw() + + set_jam_action(self.action_name, targets, sources, p) + +action_modifiers = {"updated": 0x01, + "together": 0x02, + "ignore": 0x04, + "quietly": 0x08, + "piecemeal": 0x10, + "existing": 0x20} + +class Engine: + """ The abstract interface to a build engine. + + For now, the naming of targets, and special handling of some + target variables like SEARCH and LOCATE make this class coupled + to bjam engine. + """ + def __init__ (self): + self.actions = {} + + def add_dependency (self, targets, sources): + """Adds a dependency from 'targets' to 'sources' + + Both 'targets' and 'sources' can be either list + of target names, or a single target name. + """ + if isinstance (targets, str): + targets = [targets] + if isinstance (sources, str): + sources = [sources] + assert is_iterable(targets) + assert is_iterable(sources) + + for target in targets: + for source in sources: + self.do_add_dependency (target, source) + + def get_target_variable(self, targets, variable): + """Gets the value of `variable` on set on the first target in `targets`. + + Args: + targets (str or list): one or more targets to get the variable from. + variable (str): the name of the variable + + Returns: + the value of `variable` set on `targets` (list) + + Example: + + >>> ENGINE = get_manager().engine() + >>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World') + >>> ENGINE.get_target_variable(targets, 'MY-VAR') + ['Hello World'] + + Equivalent Jam code: + + MY-VAR on $(targets) = "Hello World" ; + echo [ on $(targets) return $(MY-VAR) ] ; + "Hello World" + """ + if isinstance(targets, str): + targets = [targets] + assert is_iterable(targets) + assert isinstance(variable, basestring) + + return bjam_interface.call('get-target-variable', targets, variable) + + def set_target_variable (self, targets, variable, value, append=0): + """ Sets a target variable. 
+ + The 'variable' will be available to bjam when it decides + where to generate targets, and will also be available to + updating rule for that 'taret'. + """ + if isinstance (targets, str): + targets = [targets] + if isinstance(value, str): + value = [value] + + assert is_iterable(targets) + assert isinstance(variable, basestring) + assert is_iterable(value) + + if targets: + if append: + bjam_interface.call("set-target-variable", targets, variable, value, "true") + else: + bjam_interface.call("set-target-variable", targets, variable, value) + + def set_update_action (self, action_name, targets, sources, properties=None): + """ Binds a target to the corresponding update action. + If target needs to be updated, the action registered + with action_name will be used. + The 'action_name' must be previously registered by + either 'register_action' or 'register_bjam_action' + method. + """ + if isinstance(targets, str): + targets = [targets] + if isinstance(sources, str): + sources = [sources] + if properties is None: + properties = property_set.empty() + assert isinstance(action_name, basestring) + assert is_iterable(targets) + assert is_iterable(sources) + assert(isinstance(properties, property_set.PropertySet)) + + self.do_set_update_action (action_name, targets, sources, properties) + + def register_action (self, action_name, command='', bound_list = [], flags = [], + function = None): + """Creates a new build engine action. + + Creates on bjam side an action named 'action_name', with + 'command' as the command to be executed, 'bound_variables' + naming the list of variables bound when the command is executed + and specified flag. + If 'function' is not None, it should be a callable taking three + parameters: + - targets + - sources + - instance of the property_set class + This function will be called by set_update_action, and can + set additional target variables. + """ + assert isinstance(action_name, basestring) + assert isinstance(command, basestring) + assert is_iterable(bound_list) + assert is_iterable(flags) + assert function is None or callable(function) + + bjam_flags = reduce(operator.or_, + (action_modifiers[flag] for flag in flags), 0) + + # We allow command to be empty so that we can define 'action' as pure + # python function that would do some conditional logic and then relay + # to other actions. + assert command or function + if command: + bjam_interface.define_action(action_name, command, bound_list, bjam_flags) + + self.actions[action_name] = BjamAction( + action_name, function, has_command=bool(command)) + + def register_bjam_action (self, action_name, function=None): + """Informs self that 'action_name' is declared in bjam. + + From this point, 'action_name' is a valid argument to the + set_update_action method. The action_name should be callable + in the global module of bjam. + """ + + # We allow duplicate calls to this rule for the same + # action name. This way, jamfile rules that take action names + # can just register them without specially checking if + # action is already registered. 
+ assert isinstance(action_name, basestring) + assert function is None or callable(function) + if action_name not in self.actions: + self.actions[action_name] = BjamNativeAction(action_name, function) + + # Overridables + + + def do_set_update_action (self, action_name, targets, sources, property_set_): + assert isinstance(action_name, basestring) + assert is_iterable(targets) + assert is_iterable(sources) + assert isinstance(property_set_, property_set.PropertySet) + action = self.actions.get(action_name) + if not action: + raise Exception("No action %s was registered" % action_name) + action(targets, sources, property_set_) + + def do_set_target_variable (self, target, variable, value, append): + assert isinstance(target, basestring) + assert isinstance(variable, basestring) + assert is_iterable(value) + assert isinstance(append, int) # matches bools + if append: + bjam_interface.call("set-target-variable", target, variable, value, "true") + else: + bjam_interface.call("set-target-variable", target, variable, value) + + def do_add_dependency (self, target, source): + assert isinstance(target, basestring) + assert isinstance(source, basestring) + bjam_interface.call("DEPENDS", target, source) + + diff --git a/src/boost/tools/build/src/build/errors.py b/src/boost/tools/build/src/build/errors.py new file mode 100644 index 000000000..c65fa4105 --- /dev/null +++ b/src/boost/tools/build/src/build/errors.py @@ -0,0 +1,135 @@ +# Status: being written afresh by Vladimir Prus + +# Copyright 2007 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file is supposed to implement error reporting for Boost.Build. +# Experience with jam version has shown that printing full backtrace +# on each error is buffling. Further, for errors printed after parsing -- +# during target building, the stacktrace does not even mention what +# target is being built. + +# This module implements explicit contexts -- where other code can +# communicate which projects/targets are being built, and error +# messages will show those contexts. For programming errors, +# Python assertions are to be used. 
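A minimal usage sketch of the explicit-context mechanism described above (assuming the module is importable as b2.build.errors, i.e. running inside b2 where the bjam extension module is available; the Jamfile path and error message below are placeholders, not values from this patch):

    from b2.build.errors import Errors, ExceptionWithUserContext

    errors = Errors()
    # Announce what is being worked on; this context is attached to any
    # error raised while it remains on the stack.
    errors.push_user_context("Loading Jamfile at /tmp/project")  # placeholder path
    try:
        # Raising through the Errors object snapshots the current context stack.
        errors("duplicate target name 'hello'")
    except ExceptionWithUserContext as e:
        saved = e.context_   # contexts captured when the error was raised
    finally:
        errors.pop_user_context()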
+ +import bjam +import traceback +import sys + +def format(message, prefix=""): + parts = str(message).split("\n") + return "\n".join(prefix+p for p in parts) + + +class Context: + + def __init__(self, message, nested=None): + self.message_ = message + self.nested_ = nested + + def report(self, indent=""): + print indent + " -", self.message_ + if self.nested_: + print indent + " declared at:" + for n in self.nested_: + n.report(indent + " ") + +class JamfileContext: + + def __init__(self): + raw = bjam.backtrace() + self.raw_ = raw + + def report(self, indent=""): + for r in self.raw_: + print indent + " - %s:%s" % (r[0], r[1]) + +class ExceptionWithUserContext(Exception): + + def __init__(self, message, context, + original_exception=None, original_tb=None, stack=None): + Exception.__init__(self, message) + self.context_ = context + self.original_exception_ = original_exception + self.original_tb_ = original_tb + self.stack_ = stack + + def report(self): + print "error:", self.args[0] + if self.original_exception_: + print format(str(self.original_exception_), " ") + print + print " error context (most recent first):" + for c in self.context_[::-1]: + c.report() + print + if "--stacktrace" in bjam.variable("ARGV"): + if self.original_tb_: + traceback.print_tb(self.original_tb_) + elif self.stack_: + for l in traceback.format_list(self.stack_): + print l, + else: + print " use the '--stacktrace' option to get Python stacktrace" + print + +def user_error_checkpoint(callable): + def wrapper(self, *args): + errors = self.manager().errors() + try: + return callable(self, *args) + except ExceptionWithUserContext, e: + raise + except Exception, e: + errors.handle_stray_exception(e) + finally: + errors.pop_user_context() + + return wrapper + +class Errors: + + def __init__(self): + self.contexts_ = [] + self._count = 0 + + def count(self): + return self._count + + def push_user_context(self, message, nested=None): + self.contexts_.append(Context(message, nested)) + + def pop_user_context(self): + del self.contexts_[-1] + + def push_jamfile_context(self): + self.contexts_.append(JamfileContext()) + + def pop_jamfile_context(self): + del self.contexts_[-1] + + def capture_user_context(self): + return self.contexts_[:] + + def handle_stray_exception(self, e): + raise ExceptionWithUserContext("unexpected exception", self.contexts_[:], + e, sys.exc_info()[2]) + def __call__(self, message): + self._count = self._count + 1 + raise ExceptionWithUserContext(message, self.contexts_[:], + stack=traceback.extract_stack()) + + +def nearest_user_location(): + """ + Returns: + tuple: the filename and line number of the nearest user location + """ + bt = bjam.backtrace() + if not bt: + return None + last = bt[-1] + return last[0], last[1] diff --git a/src/boost/tools/build/src/build/feature.jam b/src/boost/tools/build/src/build/feature.jam new file mode 100644 index 000000000..4c7bb9a10 --- /dev/null +++ b/src/boost/tools/build/src/build/feature.jam @@ -0,0 +1,1442 @@ +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2002, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import assert : * ; +import "class" : * ; +import indirect ; +import modules ; +import regex ; +import sequence ; +import set ; +import utility ; + + +local rule setup ( ) +{ + .all-attributes = + implicit + composite + optional + symmetric + free + incidental + path + dependency + propagated + link-incompatible + subfeature + order-sensitive + hidden + ; + + .all-features = ; + .all-subfeatures = ; + .all-top-features = ; # non-subfeatures + .all-implicit-values = ; +} +setup ; + + +# Prepare a fresh space to test in by moving all global variable settings into +# the given temporary module and erasing them here. +# +rule prepare-test ( temp-module ) +{ + DELETE_MODULE $(temp-module) ; + + # Transfer globals to temp-module. + for local v in [ VARNAMES feature ] + { + if [ MATCH (\\.) : $(v) ] + { + modules.poke $(temp-module) : $(v) : $($(v)) ; + $(v) = ; + } + } + setup ; +} + + +# Clear out all global variables and recover all variables from the given +# temporary module. +# +rule finish-test ( temp-module ) +{ + # Clear globals. + for local v in [ VARNAMES feature ] + { + if [ MATCH (\\.) : $(v) ] + { + $(v) = ; + } + } + + for local v in [ VARNAMES $(temp-module) ] + { + $(v) = [ modules.peek $(temp-module) : $(v) ] ; + } + DELETE_MODULE $(temp-module) ; +} + + +# Transform features by bracketing any elements which are not already bracketed +# by "<>". +# +local rule grist ( features * ) +{ + local empty = "" ; + return $(empty:G=$(features)) ; +} + + +# Declare a new feature with the given name, values, and attributes. +# +rule feature ( + name # Feature name. + : values * # Allowable values - may be extended later using feature.extend. + : attributes * # Feature attributes (e.g. implicit, free, propagated...). +) +{ + name = [ grist $(name) ] ; + + local error ; + + # Check for any unknown attributes. + if ! ( $(attributes) in $(.all-attributes) ) + { + error = unknown "attributes:" + [ set.difference $(attributes) : $(.all-attributes) ] ; + } + else if $(name) in $(.all-features) + { + error = feature already "defined:" ; + } + else if implicit in $(attributes) && free in $(attributes) + { + error = free features cannot also be implicit ; + } + else if free in $(attributes) && propagated in $(attributes) + { + error = free features cannot be propagated ; + } + else + { + local m = [ MATCH (.*=.*) : $(values) ] ; + if $(m[1]) + { + error = "feature value may not contain '='" ; + } + } + + if $(error) + { + import errors ; + errors.error $(error) + : "in" feature "declaration:" + : feature [ errors.lol->list $(1) : $(2) : $(3) ] ; + } + + $(name).values ?= ; + $(name).attributes = $(attributes) ; + $(name).subfeatures ?= ; + $(attributes).features += $(name) ; + + .all-features += $(name) ; + if subfeature in $(attributes) + { + .all-subfeatures += $(name) ; + } + else + { + .all-top-features += $(name) ; + } + extend $(name) : $(values) ; +} + + +# Sets the default value of the given feature, overriding any previous default. +# +rule set-default ( feature : value ) +{ + local f = [ grist $(feature) ] ; + local a = $($(f).attributes) ; + local bad-attribute = ; + if free in $(a) + { + bad-attribute = free ; + } + else if optional in $(a) + { + bad-attribute = optional ; + } + if $(bad-attribute) + { + import errors ; + errors.error $(bad-attribute) property $(f) cannot have a default. ; + } + if ! 
$(value) in $($(f).values) + { + import errors ; + errors.error The specified default value, '$(value)' is invalid : + allowed values "are:" $($(f).values) ; + } + $(f).default = $(value) ; +} + + +# Returns the default property values for the given features. +# +rule defaults ( features * ) +{ + local result ; + for local f in $(features) + { + local gf = $(:E=:G=$(f)) ; + local a = $($(gf).attributes) ; + if ( free in $(a) ) || ( optional in $(a) ) + { + } + else + { + result += $(gf)$($(gf).default) ; + } + } + return $(result) ; +} + + +# Returns true iff all 'names' elements are valid features. +# +rule valid ( names + ) +{ + if $(names) in $(.all-features) + { + return true ; + } +} + + +# Returns the attributes of the given feature. +# +rule attributes ( feature ) +{ + return $($(feature).attributes) ; +} + + +# Returns the values of the given feature. +# +rule values ( feature ) +{ + return $($(:E=:G=$(feature)).values) ; +} + + +# Returns true iff 'value-string' is a value-string of an implicit feature. +# +rule is-implicit-value ( value-string ) +{ + local v = [ regex.split $(value-string) - ] ; + local failed ; + if ! $(v[1]) in $(.all-implicit-values) + { + failed = true ; + } + else + { + local feature = $($(v[1]).implicit-feature) ; + for local subvalue in $(v[2-]) + { + if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ] + { + failed = true ; + } + } + } + + if ! $(failed) + { + return true ; + } +} + + +# Returns the implicit feature associated with the given implicit value. +# +rule implied-feature ( implicit-value ) +{ + local components = [ regex.split $(implicit-value) "-" ] ; + local feature = $($(components[1]).implicit-feature) ; + if ! $(feature) + { + import errors ; + errors.error \"$(implicit-value)\" is not an implicit feature value ; + feature = "" ; # Keep testing happy; it expects a result. + } + return $(feature) ; +} + + +local rule find-implied-subfeature ( feature subvalue : value-string ? ) +{ + # Feature should be of the form . + if $(feature) != $(feature:G) + { + import errors ; + errors.error invalid feature $(feature) ; + } + value-string += "" ; + return $($(feature)$(value-string)<>$(subvalue).subfeature) ; +} + + +# Given a feature and a value of one of its subfeatures, find the name of the +# subfeature. If value-string is supplied, looks for implied subfeatures that +# are specific to that value of feature +# +rule implied-subfeature ( + feature # The main feature name. + subvalue # The value of one of its subfeatures. + : value-string ? # The value of the main feature. +) +{ + local subfeature = [ find-implied-subfeature $(feature) $(subvalue) + : $(value-string) ] ; + if ! $(subfeature) + { + value-string ?= "" ; + import errors ; + errors.error \"$(subvalue)\" is not a known subfeature value of + $(feature)$(value-string) ; + } + return $(subfeature) ; +} + + +# Generate an error if the feature is unknown. +# +local rule validate-feature ( feature ) +{ + if ! $(feature) in $(.all-features) + { + import errors ; + errors.error unknown feature \"$(feature)\" ; + } +} + + +# Given a feature and its value or just a value corresponding to an implicit +# feature, returns a property set consisting of all component subfeatures and +# their values. For example all the following calls: +# +# expand-subfeatures-aux gcc-2.95.2-linux-x86 +# expand-subfeatures-aux gcc-2.95.2-linux-x86 +# +# return: +# +# gcc 2.95.2 linux x86 +# +local rule expand-subfeatures-aux ( + feature ? 
# Feature name or empty if value corresponds to an + # implicit property. + : value # Feature value. + : dont-validate ? # If set, no value string validation will be done. +) +{ + if $(feature) + { + feature = $(feature) ; + } + + if ! $(feature) + { + feature = [ implied-feature $(value) ] ; + } + else + { + validate-feature $(feature) ; + } + if ! $(dont-validate) + { + validate-value-string $(feature) $(value) ; + } + + local components = [ regex.split $(value) "-" ] ; + + # Get the top-level feature's value. + local value = $(components[1]:G=) ; + + local result = $(components[1]:G=$(feature)) ; + + for local subvalue in $(components[2-]) + { + local subfeature = [ find-implied-subfeature $(feature) $(subvalue) : + $(value) ] ; + + # If no subfeature was found reconstitute the value string and use that. + if ! $(subfeature) + { + result = $(components:J=-) ; + result = $(result:G=$(feature)) ; + break ; + } + else + { + local f = [ MATCH ^<(.*)>$ : $(feature) ] ; + result += $(subvalue:G=$(f)-$(subfeature)) ; + } + } + + return $(result) ; +} + + +# Make all elements of properties corresponding to implicit features explicit, +# and express all subfeature values as separate properties in their own right. +# For example, all of the following properties +# +# gcc-2.95.2-linux-x86 +# gcc-2.95.2-linux-x86 +# +# might expand to +# +# gcc 2.95.2 linux x86 +# +rule expand-subfeatures ( + properties * # Property set with elements of the form + # value-string or just value-string in the case + # of implicit features. + : dont-validate ? +) +{ + local result ; + for local p in $(properties) + { + # Don't expand subfeatures in subfeatures + if ! [ MATCH "(:)" : $(p:G) ] + { + result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ; + } + else + { + result += $(p) ; + } + } + return $(result) ; +} + + +# Helper for extend, below. Handles the feature case. +# +local rule extend-feature ( feature : values * ) +{ + feature = [ grist $(feature) ] ; + validate-feature $(feature) ; + if implicit in $($(feature).attributes) + { + for local v in $(values) + { + if $($(v).implicit-feature) + { + import errors ; + errors.error $(v) is already associated with the + \"$($(v).implicit-feature)\" feature ; + } + $(v).implicit-feature = $(feature) ; + } + + .all-implicit-values += $(values) ; + } + if ! $($(feature).values) + { + # This is the first value specified for this feature so make it be the + # default. + $(feature).default = $(values[1]) ; + } + $(feature).values += $(values) ; +} + + +# Checks that value-string is a valid value-string for the given feature. +# +rule validate-value-string ( feature value-string ) +{ + if ! ( + free in $($(feature).attributes) + || ( $(value-string) in $(feature).values ) + ) + { + local values = $(value-string) ; + + if $($(feature).subfeatures) + { + if ! $(value-string) in $($(feature).values) + $($(feature).subfeatures) + { + values = [ regex.split $(value-string) - ] ; + } + } + + if ! ( $(values[1]) in $($(feature).values) ) && + + # An empty value is allowed for optional features. + ( $(values[1]) || ! ( optional in $($(feature).attributes) ) ) + { + import errors ; + errors.error \"$(values[1])\" is not a known value of feature + $(feature) : legal "values:" \"$($(feature).values)\" ; + } + + for local v in $(values[2-]) + { + # This will validate any subfeature values in value-string. 
+ implied-subfeature $(feature) $(v) : $(values[1]) ; + } + } +} + + +# A helper that computes: +# * name(s) of module-local variable(s) used to record the correspondence +# between subvalue(s) and a subfeature +# * value of that variable when such a subfeature/subvalue has been defined and +# returns a list consisting of the latter followed by the former. +# +local rule subvalue-var ( + feature # Main feature name. + value-string ? # If supplied, specifies a specific value of the main + # feature for which the subfeature values are valid. + : subfeature # Subfeature name. + : subvalues * # Subfeature values. +) +{ + feature = [ grist $(feature) ] ; + validate-feature $(feature) ; + if $(value-string) + { + validate-value-string $(feature) $(value-string) ; + } + + local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ; + + return $(subfeature-name) + $(feature)$(value-string:E="")<>$(subvalues).subfeature ; +} + + +# Extends the given subfeature with the subvalues. If the optional value-string +# is provided, the subvalues are only valid for the given value of the feature. +# Thus, you could say that mingw is specific to +# gcc-2.95.2 as follows: +# +# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ; +# +rule extend-subfeature ( + feature # The feature whose subfeature is being extended. + + value-string ? # If supplied, specifies a specific value of the main + # feature for which the new subfeature values are valid. + + : subfeature # Subfeature name. + : subvalues * # Additional subfeature values. +) +{ + local subfeature-vars = [ subvalue-var $(feature) $(value-string) + : $(subfeature) : $(subvalues) ] ; + + local f = [ utility.ungrist [ grist $(feature) ] ] ; + extend $(f)-$(subfeature-vars[1]) : $(subvalues) ; + + # Provide a way to get from the given feature or property and subfeature + # value to the subfeature name. + $(subfeature-vars[2-]) = $(subfeature-vars[1]) ; +} + + +# Returns true iff the subvalues are valid for the feature. When the optional +# value-string is provided, returns true iff the subvalues are valid for the +# given value of the feature. +# +rule is-subvalue ( feature : value-string ? : subfeature : subvalue ) +{ + local subfeature-vars = [ subvalue-var $(feature) $(value-string) + : $(subfeature) : $(subvalue) ] ; + + if $($(subfeature-vars[2])) = $(subfeature-vars[1]) + { + return true ; + } +} + + +# Can be called three ways: +# +# 1. extend feature : values * +# 2. extend subfeature : values * +# 3. extend value-string subfeature : values * +# +# * Form 1 adds the given values to the given feature. +# * Forms 2 and 3 add subfeature values to the given feature. +# * Form 3 adds the subfeature values as specific to the given property +# value-string. +# +rule extend ( feature-or-property subfeature ? : values * ) +{ + local feature ; # If a property was specified this is its feature. + local value-string ; # E.g., the gcc-2.95-2 part of gcc-2.95.2. + + # If a property was specified. + if $(feature-or-property:G) && $(feature-or-property:G=) + { + # Extract the feature and value-string, if any. + feature = $(feature-or-property:G) ; + value-string = $(feature-or-property:G=) ; + } + else + { + feature = [ grist $(feature-or-property) ] ; + } + + # Dispatch to the appropriate handler. + if $(subfeature) + { + extend-subfeature $(feature) $(value-string) : $(subfeature) + : $(values) ; + } + else + { + # If no subfeature was specified, we do not expect to see a + # value-string. 
+ if $(value-string) + { + import errors ; + errors.error can only specify a property as the first argument when + extending a subfeature + : "usage:" + : " extend" feature ":" values... + : " | extend" value-string subfeature ":" values... ; + } + + extend-feature $(feature) : $(values) ; + } +} + + +local rule get-subfeature-name ( subfeature value-string ? ) +{ + local prefix = "$(value-string):" ; + return $(prefix:E="")$(subfeature) ; +} + + +# Declares a subfeature. +# +rule subfeature ( + feature # Root feature that is not a subfeature. + value-string ? # A value-string specifying which feature or subfeature + # values this subfeature is specific to, if any. + : subfeature # The name of the subfeature being declared. + : subvalues * # The allowed values of this subfeature. + : attributes * # The attributes of the subfeature. +) +{ + feature = [ grist $(feature) ] ; + validate-feature $(feature) ; + + # Add grist to the subfeature name if a value-string was supplied. + local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ; + + if $(subfeature-name) in $($(feature).subfeatures) + { + import errors ; + errors.error \"$(subfeature)\" already declared as a subfeature of + \"$(feature)\" "specific to "$(value-string) ; + } + $(feature).subfeatures += $(subfeature-name) ; + + # First declare the subfeature as a feature in its own right. + local f = [ utility.ungrist $(feature) ] ; + feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ; + + # Features and subfeatures are always relevant as a group + .feature-dependencies.$(f) += $(f)-$(subfeature-name) ; + .feature-dependencies.$(f)-$(subfeature-name) += $(f) ; + + # Now make sure the subfeature values are known. + extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ; +} + + +# Set components of the given composite property. +# +rule compose ( composite-property : component-properties * ) +{ + local feature = $(composite-property:G) ; + if ! ( composite in [ attributes $(feature) ] ) + { + import errors ; + errors.error "$(feature)" is not a composite feature ; + } + + $(composite-property).components ?= ; + if $($(composite-property).components) + { + import errors ; + errors.error components of "$(composite-property)" already "set:" + $($(composite-property).components) ; + } + + if $(composite-property) in $(component-properties) + { + import errors ; + errors.error composite property "$(composite-property)" cannot have itself as a component ; + } + $(composite-property).components = $(component-properties) ; + + # A composite feature is relevant if any composed feature is relevant + local component-features = [ sequence.transform utility.ungrist : $(component-properties:G) ] ; + .feature-dependencies.$(component-features) += [ utility.ungrist $(feature) ] ; +} + + +local rule expand-composite ( property ) +{ + return $(property) + [ sequence.transform expand-composite : $($(property).components) ] ; +} + + +# Return all values of the given feature specified by the given property set. +# +rule get-values ( feature : properties * ) +{ + local result ; + + feature = $(:E=:G=$(feature)) ; # Add <> if necessary. + for local p in $(properties) + { + if $(p:G) = $(feature) + { + # Use MATCH instead of :G= to get the value, in order to preserve + # the value intact instead of having bjam treat it as a decomposable + # path. 
+ result += [ MATCH ">(.*)" : $(p) ] ; + } + } + return $(result) ; +} + + +rule free-features ( ) +{ + return $(free.features) ; +} + + +# Expand all composite properties in the set so that all components are +# explicitly expressed. +# +rule expand-composites ( properties * ) +{ + local explicit-features = $(properties:G) ; + local result ; + + # Now expand composite features. + for local p in $(properties) + { + local expanded = [ expand-composite $(p) ] ; + + for local x in $(expanded) + { + if ! $(x) in $(result) + { + local f = $(x:G) ; + + if $(f) in $(free.features) + { + result += $(x) ; + } + else if ! $(x) in $(properties) # x is the result of expansion + { + if ! $(f) in $(explicit-features) # not explicitly-specified + { + if $(f) in $(result:G) + { + import errors ; + errors.error expansions of composite features result + in conflicting values for $(f) + : "values:" [ get-values $(f) : $(result) ] $(x:G=) + : one contributing composite property was $(p) ; + } + else + { + result += $(x) ; + } + } + } + else if $(f) in $(result:G) + { + import errors ; + errors.error explicitly-specified values of non-free feature + $(f) conflict : + "existing values:" [ get-values $(f) : $(properties) ] : + "value from expanding " $(p) ":" $(x:G=) ; + } + else + { + result += $(x) ; + } + } + } + } + return $(result) ; +} + + +# Return true iff f is an ordinary subfeature of the parent-property's feature, +# or if f is a subfeature of the parent-property's feature specific to the +# parent-property's value. +# +local rule is-subfeature-of ( parent-property f ) +{ + if subfeature in $($(f).attributes) + { + local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ; + if $(specific-subfeature) + { + # The feature has the form , e.g. + # . + local feature-value = [ split-top-feature $(specific-subfeature[1]) + ] ; + if <$(feature-value[1])>$(feature-value[2]) = $(parent-property) + { + return true ; + } + } + else + { + # The feature has the form , e.g. + # + local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ; + if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G) + { + return true ; + } + } + } +} + + +# As for is-subfeature-of but for subproperties. +# +local rule is-subproperty-of ( parent-property p ) +{ + return [ is-subfeature-of $(parent-property) $(p:G) ] ; +} + + +# Given a property, return the subset of features consisting of all ordinary +# subfeatures of the property's feature, and all specific subfeatures of the +# property's feature which are conditional on the property's value. +# +local rule select-subfeatures ( parent-property : features * ) +{ + return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ; +} + + +# As for select-subfeatures but for subproperties. +# +local rule select-subproperties ( parent-property : properties * ) +{ + return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ; +} + + +# Given a property set which may consist of composite and implicit properties +# and combined subfeature values, returns an expanded, normalized property set +# with all implicit features expressed explicitly, all subfeature values +# individually expressed, and all components of composite properties expanded. +# Non-free features directly expressed in the input properties cause any values +# of those features due to composite feature expansion to be dropped. If two +# values of a given non-free feature are directly expressed in the input, an +# error is issued. 
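As a concrete reference point for the rule that follows, the assertions in the __test__ rule at the end of this module pin the documented behaviour down; one of them, shown here in the same form it takes in those assertions, illustrates how an explicitly requested non-free value suppresses the conflicting value contributed by a composite:

    # With the test declarations below (variant is implicit and composite,
    # and debug composes _DEBUG off):
    #
    #   expand gcc-3.0.1 debug on
    #     -> gcc 3.0.1 debug _DEBUG on
    #
    # The composite's off value is dropped because on was given directly,
    # matching the note above about directly expressed non-free features.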
+# +rule expand ( properties * ) +{ + local expanded = [ expand-subfeatures $(properties) ] ; + return [ expand-composites $(expanded) ] ; +} + + +# Helper rule for minimize. Returns true iff property's feature is present in +# the contents of the variable named by feature-set-var. +# +local rule in-features ( feature-set-var property ) +{ + if $(property:G) in $($(feature-set-var)) + { + return true ; + } +} + + +# Helper rule for minimize. Returns the list with the same properties, but with +# all subfeatures moved to the end of the list. +# +local rule move-subfeatures-to-the-end ( properties * ) +{ + local x1 ; + local x2 ; + for local p in $(properties) + { + if subfeature in $($(p:G).attributes) + { + x2 += $(p) ; + } + else + { + x1 += $(p) ; + } + } + return $(x1) $(x2) ; +} + + +# Given an expanded property set, eliminate all redundancy: properties that are +# elements of other (composite) properties in the set will be eliminated. +# Non-symmetric properties equal to default values will be eliminated unless +# they override a value from some composite property. Implicit properties will +# be expressed without feature grist, and sub-property values will be expressed +# as elements joined to the corresponding main property. +# +rule minimize ( properties * ) +{ + # Precondition checking + local implicits = [ set.intersection $(p:G=) : $(p:G) ] ; + if $(implicits) + { + import errors ; + errors.error minimize requires an expanded property set, but + \"$(implicits[1])\" appears to be the value of an un-expanded + implicit feature ; + } + + # Remove properties implied by composite features. + local components = $($(properties).components) ; + local x = [ set.difference $(properties) : $(components) ] ; + + # Handle subfeatures and implicit features. + x = [ move-subfeatures-to-the-end $(x) ] ; + local result ; + while $(x) + { + local p fullp = $(x[1]) ; + local f = $(p:G) ; + local v = $(p:G=) ; + + # Eliminate features in implicit properties. + if implicit in [ attributes $(f) ] + { + p = $(v) ; + } + + # Locate all subproperties of $(x[1]) in the property set. + local subproperties = [ select-subproperties $(fullp) : $(x) ] ; + if $(subproperties) + { + # Reconstitute the joined property name. + local sorted = [ sequence.insertion-sort $(subproperties) ] ; + result += $(p)-$(sorted:G="":J=-) ; + + x = [ set.difference $(x[2-]) : $(subproperties) ] ; + } + else + { + # Eliminate properties whose value is equal to feature's default, + # which are not symmetric and which do not contradict values implied + # by composite properties. + + # Since all component properties of composites in the set have been + # eliminated, any remaining property whose feature is the same as a + # component of a composite in the set must have a non-redundant + # value. + if $(fullp) != [ defaults $(f) ] + || symmetric in [ attributes $(f) ] + || $(fullp:G) in $(components:G) + { + result += $(p) ; + } + + x = $(x[2-]) ; + } + } + return $(result) ; +} + + +# Combine all subproperties into their parent properties +# +# Requires: for every subproperty, there is a parent property. All features are +# explicitly expressed. +# +# This rule probably should not be needed, but build-request.expand-no-defaults +# is being abused for unintended purposes and it needs help. +# +rule compress-subproperties ( properties * ) +{ + local all-subs ; + local matched-subs ; + local result ; + + for local p in $(properties) + { + if ! $(p:G) + { + # Expecting fully-gristed properties. 
+ assert.variable-not-empty "p:G" ; + } + + if ! subfeature in $($(p:G).attributes) + { + local subs = [ sequence.insertion-sort + [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ; + + matched-subs += $(subs) ; + + local subvalues = -$(subs:G=:J=-) ; + subvalues ?= "" ; + result += $(p)$(subvalues) ; + } + else + { + all-subs += $(p) ; + } + } + assert.result true : set.equal $(all-subs) : $(matched-subs) ; + return $(result) ; +} + + +# Given an ungristed string, finds the longest prefix which is a top-level +# feature name followed by a dash, and return a pair consisting of the parts +# before and after that dash. More interesting than a simple split because +# feature names may contain dashes. +# +local rule split-top-feature ( feature-plus ) +{ + local e = [ regex.split $(feature-plus) - ] ; + local f = $(e[1]) ; + local v ; + while $(e) + { + if <$(f)> in $(.all-top-features) + { + v = $(f) $(e[2-]:J=-) ; + } + e = $(e[2-]) ; + f = $(f)-$(e[1]) ; + } + return $(v) ; +} + + +# Given a set of properties, add default values for features not represented in +# the set. +# +# properties must be fully expanded and must not contain conditionals. +# +# Note: if there's an ordinary feature F1 and a composite feature F2 which +# includes some value for F1 and both feature have default values then the +# default value of F1 will be added (as opposed to the value in F2). This might +# not be the right idea, e.g. consider: +# +# feature variant : debug ... ; +# debug : .... on +# feature : off on ; +# +# Here, when adding default for an empty property set, we'll get +# +# debug off +# +# and that's kind of strange. +# +rule add-defaults ( properties * ) +{ + for local v in $(properties:G=) + { + if $(v) in $(properties) + { + import errors ; + errors.error add-defaults requires explicitly specified features, + but \"$(v)\" appears to be the value of an un-expanded implicit + feature ; + } + } + local missing-top = [ set.difference $(.all-top-features) : $(properties:G) ] ; + local more = [ defaults $(missing-top) ] ; + + # This is similar to property.refine, except that it + # does not remove subfeatures, because we might be adding + # the default value of a subfeature. + local to-remove ; + for local f in $(properties:G) + { + if ! free in [ attributes $(f) ] + { + to-remove += $(f) ; + } + } + + local worklist = $(properties) $(more) ; + local expanded-from-composite ; + local to-expand = $(more) ; + while $(worklist) + { + # Add defaults for subfeatures of features which are present. + for local p in $(worklist) + { + local s = $($(p:G).subfeatures) ; + local f = [ utility.ungrist $(p:G) ] ; + local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ; + local sd = [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ; + to-expand += $(sd) ; + } + worklist = ; + + # Expand subfeatures of newly added properties + for local m in [ sequence.transform expand-composite : $(to-expand) ] + { + if ! $(m:G) in $(to-remove) + { + local att = [ attributes $(m:G) ] ; + if $(m:G) in $(expanded-from-composite) && + ! free in $(att) && + ! $(m) in $(more) + { + import errors ; + errors.error "default values for $(p:G) conflict" ; + } + if ! $(m) in $(to-expand) + { + expanded-from-composite += $(m:G) ; + } + more += $(m) ; + if ! subfeature in $(att) && ! free in $(att) + { + worklist += $(m) ; + } + } + } + to-expand = ; + } + + return [ sequence.unique $(properties) $(more) ] ; +} + + +# Given a property-set of the form +# v1/v2/...vN-1/vN/vN+1/...vM +# +# Returns +# v1 v2 ... 
vN-1 vN vN+1 ... vM +# +# Note that vN...vM may contain slashes. This needs to be resilient to the +# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps +# slash direction on NT. +# +rule split ( property-set ) +{ + local pieces = [ regex.split $(property-set) "[\\/]" ] ; + local result ; + + for local x in $(pieces) + { + if ( ! $(x:G) ) && $(result[-1]:G) + { + result = $(result[1--2]) $(result[-1])/$(x) ; + } + else + { + result += $(x) ; + } + } + + return $(result) ; +} + +# Returns all the features that also must be relevant when these features are relevant +rule expand-relevant ( features * ) +{ + local conditional ; + local result ; + for f in $(features) + { + # This looks like a conditional, even though it isn't really. + # (Free features can never be used in conditionals) + local split = [ MATCH "^(.*):(.*)$" : $(f) ] ; + if $(split) + { + local-dependencies.$(split[1]) += $(split[2]) ; + conditional += local-dependencies.$(split[1]) ; + } + else + { + result += $(f) ; + } + } + local queue = $(result) ; + while $(queue) + { + local added = [ set.difference + $(.feature-dependencies.$(queue)) + $(local-dependencies.$(queue)) + : $(result) ] ; + result += $(added) ; + queue = $(added) ; + } + # Clean up local map + $(conditional) = ; + return $(result) ; +} + + +# Tests of module feature. +# +rule __test__ ( ) +{ + # Use a fresh copy of the feature module. + prepare-test feature-test-temp ; + + import assert ; + import errors : try catch ; + + # These are local rules and so must be explicitly reimported into the + # testing module. + import feature : extend-feature validate-feature select-subfeatures ; + + feature toolset : gcc : implicit ; + feature define : : free ; + feature runtime-link : dynamic static : symmetric ; + feature optimization : on off ; + feature variant : debug release profile : implicit composite symmetric ; + feature stdlib : native stlport ; + feature magic : : free ; + + compose debug : _DEBUG off ; + compose release : NDEBUG on ; + + assert.result dynamic static : values ; + assert.result dynamic static : values runtime-link ; + + try ; + { + compose profile : profile ; + } + catch composite property profile cannot have itself as a component ; + + extend-feature toolset : msvc metrowerks ; + subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ; + + assert.true is-subvalue toolset : gcc : version : 2.95.3 ; + assert.false is-subvalue toolset : gcc : version : 1.1 ; + + assert.false is-subvalue toolset : msvc : version : 2.95.3 ; + assert.false is-subvalue toolset : : version : yabba ; + + feature yabba ; + subfeature yabba : version : dabba ; + assert.true is-subvalue yabba : : version : dabba ; + + subfeature toolset gcc : platform : linux cygwin : optional ; + + assert.result + : select-subfeatures gcc + : + + + ; + + subfeature stdlib : version : 3 4 : optional ; + + assert.result + : select-subfeatures native + : + + + ; + + assert.result gcc 3.0.1 + : expand-subfeatures gcc-3.0.1 ; + + assert.result gcc 3.0.1 linux + : expand-subfeatures gcc-3.0.1-linux ; + + assert.result gcc 3.0.1 + : expand gcc 3.0.1 ; + + assert.result foo=x-y + : expand-subfeatures foo=x-y ; + + assert.result minus=- + : expand-subfeatures minus=- ; + + assert.result gcc 3.0.1 + : expand-subfeatures gcc-3.0.1 ; + + assert.result a c e + : get-values : a b c d e ; + + assert.result gcc 3.0.1 + debug _DEBUG on + : expand gcc-3.0.1 debug on ; + + assert.result debug _DEBUG on + : expand debug on ; + + assert.result on debug _DEBUG + : 
expand on debug ; + + assert.result dynamic on + : defaults ; + + # Make sure defaults is resilient to missing grist. + assert.result dynamic on + : defaults runtime-link define optimization ; + + feature dummy : dummy1 dummy2 ; + subfeature dummy : subdummy : x y z : optional ; + + feature fu : fu1 fu2 : optional ; + subfeature fu : subfu : x y z : optional ; + subfeature fu : subfu2 : q r s ; + + assert.result optional : attributes ; + + assert.result [ SORT _DEBUG static + foobar on + gcc debug native + dummy1 2.95.2 ] + : add-defaults static foobar on ; + + assert.result [ SORT _DEBUG static + foobar on + fu1 gcc debug + native dummy1 q 2.95.2 ] + : add-defaults static foobar on + fu1 ; + + feature f0 : f0-0 f0-1 ; + feature f1 : f1-0 f1-1 ; + + assert.true valid ; + assert.true valid ; + assert.true valid ; + + set-default : static ; + assert.result static : defaults ; + + assert.result gcc-3.0.1 debug on + : minimize [ expand gcc-3.0.1 debug on native ] ; + + assert.result gcc-3.0.1 debug dynamic + : minimize + [ expand gcc-3.0.1 debug off dynamic ] ; + + assert.result gcc-3.0.1 debug + : minimize [ expand gcc-3.0.1 debug off ] ; + + assert.result debug on + : minimize [ expand debug on ] ; + + assert.result gcc-3.0 + : minimize gcc 3.0 ; + + assert.result gcc-3.0 + : minimize 3.0 gcc ; + + assert.result y/z b/c e/f + : split y/z/b/c/e/f ; + + assert.result y/z b/c e/f + : split y\\z\\b\\c\\e\\f ; + + assert.result a b c e/f/g i/j/k + : split a/b/c/e/f/g/i/j/k ; + + assert.result a b c e/f/g i/j/k + : split a\\b\\c\\e\\f\\g\\i\\j\\k ; + + # Test error checking. + + try ; + { + expand release off on ; + } + catch explicitly-specified values of non-free feature conflict ; + + try ; + { + validate-feature ; + } + catch unknown feature ; + + validate-value-string gcc ; + validate-value-string gcc-3.0.1 ; + + try ; + { + validate-value-string digital_mars ; + } + catch \"digital_mars\" is not a known value of ; + + try ; + { + feature foobar : : baz ; + } + catch unknown "attributes:" baz ; + + feature feature1 ; + try ; + { + feature feature1 ; + } + catch feature already "defined:" ; + + try ; + { + feature feature2 : : free implicit ; + } + catch free features cannot also be implicit ; + + try ; + { + feature feature3 : : free propagated ; + } + catch free features cannot be propagated ; + + try ; + { + implied-feature lackluster ; + } + catch \"lackluster\" is not an implicit feature value ; + + try ; + { + implied-subfeature 3.0.1 ; + } + catch \"3.0.1\" is not a known subfeature value of ; + + try ; + { + implied-subfeature not-a-version : gcc ; + } + catch \"not-a-version\" is not a known subfeature value of gcc ; + + # Leave a clean copy of the features module behind. + finish-test feature-test-temp ; +} diff --git a/src/boost/tools/build/src/build/feature.py b/src/boost/tools/build/src/build/feature.py new file mode 100644 index 000000000..db825f06f --- /dev/null +++ b/src/boost/tools/build/src/build/feature.py @@ -0,0 +1,914 @@ +# Status: ported, except for unit tests. +# Base revision: 64488 +# +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2002, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import re + +from b2.manager import get_manager +from b2.util import utility, bjam_signature, is_iterable_typed +import b2.util.set +from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq +from b2.exceptions import * + +__re_split_subfeatures = re.compile ('<(.*):(.*)>') +__re_no_hyphen = re.compile ('^([^:]+)$') +__re_slash_or_backslash = re.compile (r'[\\/]') + +VALID_ATTRIBUTES = { + 'implicit', + 'composite', + 'optional', + 'symmetric', + 'free', + 'incidental', + 'path', + 'dependency', + 'propagated', + 'link-incompatible', + 'subfeature', + 'order-sensitive' +} + + +class Feature(object): + def __init__(self, name, values, attributes): + assert isinstance(name, basestring) + assert is_iterable_typed(values, basestring) + assert is_iterable_typed(attributes, basestring) + self.name = name + self.values = values + self.default = None + self.subfeatures = [] + self.parent = None + self.attributes_string_list = [] + self._hash = hash(self.name) + + for attr in attributes: + self.attributes_string_list.append(attr) + attr = attr.replace("-", "_") + setattr(self, attr, True) + + def add_values(self, values): + assert is_iterable_typed(values, basestring) + self.values.extend(values) + + def set_default(self, value): + assert isinstance(value, basestring) + for attr in ('free', 'optional'): + if getattr(self, attr): + get_manager().errors()('"{}" feature "<{}>" cannot have a default value.' + .format(attr, self.name)) + + self.default = value + + def add_subfeature(self, name): + assert isinstance(name, Feature) + self.subfeatures.append(name) + + def set_parent(self, feature, value): + assert isinstance(feature, Feature) + assert isinstance(value, basestring) + self.parent = (feature, value) + + def __hash__(self): + return self._hash + + def __str__(self): + return self.name + + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + """ + global __all_attributes, __all_features, __implicit_features, __composite_properties + global __subfeature_from_value, __all_top_features, __free_features + global __all_subfeatures + + # sets the default value of False for each valid attribute + for attr in VALID_ATTRIBUTES: + setattr(Feature, attr.replace("-", "_"), False) + + # A map containing all features. The key is the feature name. + # The value is an instance of Feature class. + __all_features = {} + + # All non-subfeatures. + __all_top_features = [] + + # Maps valus to the corresponding implicit feature + __implicit_features = {} + + # A map containing all composite properties. The key is a Property instance, + # and the value is a list of Property instances + __composite_properties = {} + + # Maps a value to the corresponding subfeature name. + __subfeature_from_value = {} + + # All free features + __free_features = [] + + __all_subfeatures = [] + +reset () + +def enumerate (): + """ Returns an iterator to the features map. + """ + return __all_features.iteritems () + +def get(name): + """Return the Feature instance for the specified name. + + Throws if no feature by such name exists + """ + assert isinstance(name, basestring) + return __all_features[name] + +# FIXME: prepare-test/finish-test? + +@bjam_signature((["name"], ["values", "*"], ["attributes", "*"])) +def feature (name, values, attributes = []): + """ Declares a new feature with the given name, values, and attributes. 
+ name: the feature name + values: a sequence of the allowable values - may be extended later with feature.extend + attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...) + """ + __validate_feature_attributes (name, attributes) + + feature = Feature(name, [], attributes) + __all_features[name] = feature + # Temporary measure while we have not fully moved from 'gristed strings' + __all_features["<" + name + ">"] = feature + + name = add_grist(name) + + if 'subfeature' in attributes: + __all_subfeatures.append(name) + else: + __all_top_features.append(feature) + + extend (name, values) + + # FIXME: why his is needed. + if 'free' in attributes: + __free_features.append (name) + + return feature + +@bjam_signature((["feature"], ["value"])) +def set_default (feature, value): + """ Sets the default value of the given feature, overriding any previous default. + feature: the name of the feature + value: the default value to assign + """ + f = __all_features[feature] + bad_attribute = None + + if f.free: + bad_attribute = "free" + elif f.optional: + bad_attribute = "optional" + + if bad_attribute: + raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, f.name)) + + if value not in f.values: + raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % f.values) + + f.set_default(value) + +def defaults(features): + """ Returns the default property values for the given features. + """ + assert is_iterable_typed(features, Feature) + # FIXME: should merge feature and property modules. + from . import property + + result = [] + for f in features: + if not f.free and not f.optional and f.default: + result.append(property.Property(f, f.default)) + + return result + +def valid (names): + """ Returns true iff all elements of names are valid features. + """ + if isinstance(names, str): + names = [names] + assert is_iterable_typed(names, basestring) + + return all(name in __all_features for name in names) + +def attributes (feature): + """ Returns the attributes of the given feature. + """ + assert isinstance(feature, basestring) + return __all_features[feature].attributes_string_list + +def values (feature): + """ Return the values of the given feature. + """ + assert isinstance(feature, basestring) + validate_feature (feature) + return __all_features[feature].values + +def is_implicit_value (value_string): + """ Returns true iff 'value_string' is a value_string + of an implicit feature. + """ + assert isinstance(value_string, basestring) + if value_string in __implicit_features: + return __implicit_features[value_string] + + v = value_string.split('-') + + if v[0] not in __implicit_features: + return False + + feature = __implicit_features[v[0]] + + for subvalue in (v[1:]): + if not __find_implied_subfeature(feature, subvalue, v[0]): + return False + + return True + +def implied_feature (implicit_value): + """ Returns the implicit feature associated with the given implicit value. 
+ """ + assert isinstance(implicit_value, basestring) + components = implicit_value.split('-') + + if components[0] not in __implicit_features: + raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value) + + return __implicit_features[components[0]] + +def __find_implied_subfeature (feature, subvalue, value_string): + assert isinstance(feature, Feature) + assert isinstance(subvalue, basestring) + assert isinstance(value_string, basestring) + + try: + return __subfeature_from_value[feature][value_string][subvalue] + except KeyError: + return None + +# Given a feature and a value of one of its subfeatures, find the name +# of the subfeature. If value-string is supplied, looks for implied +# subfeatures that are specific to that value of feature +# feature # The main feature name +# subvalue # The value of one of its subfeatures +# value-string # The value of the main feature + +def implied_subfeature (feature, subvalue, value_string): + assert isinstance(feature, Feature) + assert isinstance(subvalue, basestring) + assert isinstance(value_string, basestring) + result = __find_implied_subfeature (feature, subvalue, value_string) + if not result: + raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string)) + + return result + +def validate_feature (name): + """ Checks if all name is a valid feature. Otherwise, raises an exception. + """ + assert isinstance(name, basestring) + if name not in __all_features: + raise InvalidFeature ("'%s' is not a valid feature name" % name) + else: + return __all_features[name] + + +# Uses Property +def __expand_subfeatures_aux (property_, dont_validate = False): + """ Helper for expand_subfeatures. + Given a feature and value, or just a value corresponding to an + implicit feature, returns a property set consisting of all component + subfeatures and their values. For example: + + expand_subfeatures gcc-2.95.2-linux-x86 + -> gcc 2.95.2 linux x86 + equivalent to: + expand_subfeatures gcc-2.95.2-linux-x86 + + feature: The name of the feature, or empty if value corresponds to an implicit property + value: The value of the feature. + dont_validate: If True, no validation of value string will be done. + """ + from . import property # no __debug__ since Property is used elsewhere + assert isinstance(property_, property.Property) + assert isinstance(dont_validate, int) # matches bools + + f = property_.feature + v = property_.value + if not dont_validate: + validate_value_string(f, v) + + components = v.split ("-") + + v = components[0] + + result = [property.Property(f, components[0])] + + subvalues = components[1:] + + while len(subvalues) > 0: + subvalue = subvalues [0] # pop the head off of subvalues + subvalues = subvalues [1:] + + subfeature = __find_implied_subfeature (f, subvalue, v) + + # If no subfeature was found, reconstitute the value string and use that + if not subfeature: + return [property.Property(f, '-'.join(components))] + + result.append(property.Property(subfeature, subvalue)) + + return result + +def expand_subfeatures(properties, dont_validate = False): + """ + Make all elements of properties corresponding to implicit features + explicit, and express all subfeature values as separate properties + in their own right. For example, the property + + gcc-2.95.2-linux-x86 + + might expand to + + gcc 2.95.2 linux x86 + + properties: A sequence with elements of the form + value-string or just value-string in the + case of implicit features. 
+ : dont_validate: If True, no validation of value string will be done. + """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + assert isinstance(dont_validate, int) # matches bools + result = [] + for p in properties: + # Don't expand subfeatures in subfeatures + if p.feature.subfeature: + result.append (p) + else: + result.extend(__expand_subfeatures_aux (p, dont_validate)) + + return result + + + +# rule extend was defined as below: + # Can be called three ways: + # + # 1. extend feature : values * + # 2. extend subfeature : values * + # 3. extend value-string subfeature : values * + # + # * Form 1 adds the given values to the given feature + # * Forms 2 and 3 add subfeature values to the given feature + # * Form 3 adds the subfeature values as specific to the given + # property value-string. + # + #rule extend ( feature-or-property subfeature ? : values * ) +# +# Now, the specific rule must be called, depending on the desired operation: +# extend_feature +# extend_subfeature +@bjam_signature([['name'], ['values', '*']]) +def extend (name, values): + """ Adds the given values to the given feature. + """ + assert isinstance(name, basestring) + assert is_iterable_typed(values, basestring) + name = add_grist (name) + __validate_feature (name) + feature = __all_features [name] + + if feature.implicit: + for v in values: + if v in __implicit_features: + raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v])) + + __implicit_features[v] = feature + + if values and not feature.values and not(feature.free or feature.optional): + # This is the first value specified for this feature, + # take it as default value + feature.set_default(values[0]) + + feature.add_values(values) + +def validate_value_string (f, value_string): + """ Checks that value-string is a valid value-string for the given feature. + """ + assert isinstance(f, Feature) + assert isinstance(value_string, basestring) + if f.free or value_string in f.values: + return + + values = [value_string] + + if f.subfeatures: + if not value_string in f.values and \ + not value_string in f.subfeatures: + values = value_string.split('-') + + # An empty value is allowed for optional features + if not values[0] in f.values and \ + (values[0] or not f.optional): + raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], f.name, f.values)) + + for v in values [1:]: + # this will validate any subfeature values in value-string + implied_subfeature(f, v, values[0]) + + +""" Extends the given subfeature with the subvalues. If the optional + value-string is provided, the subvalues are only valid for the given + value of the feature. Thus, you could say that + mingw is specific to gcc-2.95.2 as follows: + + extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ; + + feature: The feature whose subfeature is being extended. + + value-string: If supplied, specifies a specific value of the + main feature for which the new subfeature values + are valid. + + subfeature: The name of the subfeature. + + subvalues: The additional values of the subfeature being defined. 
+""" +def extend_subfeature (feature_name, value_string, subfeature_name, subvalues): + assert isinstance(feature_name, basestring) + assert isinstance(value_string, basestring) + assert isinstance(subfeature_name, basestring) + assert is_iterable_typed(subvalues, basestring) + feature = validate_feature(feature_name) + + if value_string: + validate_value_string(feature, value_string) + + subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string) + + extend(subfeature_name, subvalues) ; + subfeature = __all_features[subfeature_name] + + if value_string == None: value_string = '' + + if feature not in __subfeature_from_value: + __subfeature_from_value[feature] = {} + + if value_string not in __subfeature_from_value[feature]: + __subfeature_from_value[feature][value_string] = {} + + for subvalue in subvalues: + __subfeature_from_value [feature][value_string][subvalue] = subfeature + +@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"], + ["subvalues", "*"], ["attributes", "*"])) +def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []): + """ Declares a subfeature. + feature_name: Root feature that is not a subfeature. + value_string: An optional value-string specifying which feature or + subfeature values this subfeature is specific to, + if any. + subfeature: The name of the subfeature being declared. + subvalues: The allowed values of this subfeature. + attributes: The attributes of the subfeature. + """ + parent_feature = validate_feature (feature_name) + + # Add grist to the subfeature name if a value-string was supplied + subfeature_name = __get_subfeature_name (subfeature, value_string) + + if subfeature_name in __all_features[feature_name].subfeatures: + message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name) + message += " specific to '%s'" % value_string + raise BaseException (message) + + # First declare the subfeature as a feature in its own right + f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature']) + f.set_parent(parent_feature, value_string) + + parent_feature.add_subfeature(f) + + # Now make sure the subfeature values are known. + extend_subfeature (feature_name, value_string, subfeature, subvalues) + + +@bjam_signature((["composite_property_s"], ["component_properties_s", "*"])) +def compose (composite_property_s, component_properties_s): + """ Sets the components of the given composite property. + + All parameters are value strings + """ + from . 
import property + + component_properties_s = to_seq (component_properties_s) + composite_property = property.create_from_string(composite_property_s) + f = composite_property.feature + + if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property): + component_properties = component_properties_s + else: + component_properties = [property.create_from_string(p) for p in component_properties_s] + + if not f.composite: + raise BaseException ("'%s' is not a composite feature" % f) + + if property in __composite_properties: + raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property]))) + + if composite_property in component_properties: + raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property) + + __composite_properties[composite_property] = component_properties + + +def expand_composite(property_): + if __debug__: + from .property import Property + assert isinstance(property_, Property) + result = [ property_ ] + if property_ in __composite_properties: + for p in __composite_properties[property_]: + result.extend(expand_composite(p)) + return result + +@bjam_signature((['feature'], ['properties', '*'])) +def get_values (feature, properties): + """ Returns all values of the given feature specified by the given property set. + """ + if feature[0] != '<': + feature = '<' + feature + '>' + result = [] + for p in properties: + if get_grist (p) == feature: + result.append (replace_grist (p, '')) + + return result + +def free_features (): + """ Returns all free features. + """ + return __free_features + +def expand_composites (properties): + """ Expand all composite properties in the set so that all components + are explicitly expressed. + """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + explicit_features = set(p.feature for p in properties) + + result = [] + + # now expand composite features + for p in properties: + expanded = expand_composite(p) + + for x in expanded: + if not x in result: + f = x.feature + + if f.free: + result.append (x) + elif not x in properties: # x is the result of expansion + if not f in explicit_features: # not explicitly-specified + if any(r.feature == f for r in result): + raise FeatureConflict( + "expansions of composite features result in " + "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" % + (f.name, [r.value for r in result if r.feature == f] + [x.value], p)) + else: + result.append (x) + elif any(r.feature == f for r in result): + raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n" + "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f, + [r.value for r in result if r.feature == f], p, x.value)) + else: + result.append (x) + + return result + +# Uses Property +def is_subfeature_of (parent_property, f): + """ Return true iff f is an ordinary subfeature of the parent_property's + feature, or if f is a subfeature of the parent_property's feature + specific to the parent_property's value. 
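+ For example (illustrative): a 'version' subfeature declared as specific to
+ the 'gcc' value of the 'toolset' feature is a subfeature of a <toolset>gcc
+ parent property, but not of a <toolset> property carrying any other value;
+ a subfeature declared without a specific value matches any value of its
+ parent feature.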
+ """ + if __debug__: + from .property import Property + assert isinstance(parent_property, Property) + assert isinstance(f, Feature) + + if not f.subfeature: + return False + + p = f.parent + if not p: + return False + + parent_feature = p[0] + parent_value = p[1] + + if parent_feature != parent_property.feature: + return False + + if parent_value and parent_value != parent_property.value: + return False + + return True + +def __is_subproperty_of (parent_property, p): + """ As is_subfeature_of, for subproperties. + """ + if __debug__: + from .property import Property + assert isinstance(parent_property, Property) + assert isinstance(p, Property) + return is_subfeature_of (parent_property, p.feature) + + +# Returns true iff the subvalue is valid for the feature. When the +# optional value-string is provided, returns true iff the subvalues +# are valid for the given value of the feature. +def is_subvalue(feature, value_string, subfeature, subvalue): + assert isinstance(feature, basestring) + assert isinstance(value_string, basestring) + assert isinstance(subfeature, basestring) + assert isinstance(subvalue, basestring) + if not value_string: + value_string = '' + try: + return __subfeature_from_value[feature][value_string][subvalue] == subfeature + except KeyError: + return False + + +# Uses Property +def expand (properties): + """ Given a property set which may consist of composite and implicit + properties and combined subfeature values, returns an expanded, + normalized property set with all implicit features expressed + explicitly, all subfeature values individually expressed, and all + components of composite properties expanded. Non-free features + directly expressed in the input properties cause any values of + those features due to composite feature expansion to be dropped. If + two values of a given non-free feature are directly expressed in the + input, an error is issued. + """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + expanded = expand_subfeatures(properties) + return expand_composites (expanded) + +# Accepts list of Property objects +def add_defaults (properties): + """ Given a set of properties, add default values for features not + represented in the set. + Note: if there's there's ordinary feature F1 and composite feature + F2, which includes some value for F1, and both feature have default values, + then the default value of F1 will be added, not the value in F2. This might + not be right idea: consider + + feature variant : debug ... ; + debug : .... on + feature : off on ; + + Here, when adding default for an empty property set, we'll get + + debug off + + and that's kind of strange. + """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + # create a copy since properties will be modified + result = list(properties) + + # We don't add default for conditional properties. 
We don't want + # debug:DEBUG to be takes as specified value for + handled_features = set(p.feature for p in properties if not p.condition) + + missing_top = [f for f in __all_top_features if not f in handled_features] + more = defaults(missing_top) + result.extend(more) + handled_features.update(p.feature for p in more) + + # Add defaults for subfeatures of features which are present + for p in result[:]: + subfeatures = [s for s in p.feature.subfeatures if not s in handled_features] + more = defaults(__select_subfeatures(p, subfeatures)) + handled_features.update(h.feature for h in more) + result.extend(more) + + return result + +def minimize (properties): + """ Given an expanded property set, eliminate all redundancy: properties + which are elements of other (composite) properties in the set will + be eliminated. Non-symmetric properties equal to default values will be + eliminated, unless the override a value from some composite property. + Implicit properties will be expressed without feature + grist, and sub-property values will be expressed as elements joined + to the corresponding main property. + """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + # remove properties implied by composite features + components = [] + component_features = set() + for property in properties: + if property in __composite_properties: + cs = __composite_properties[property] + components.extend(cs) + component_features.update(c.feature for c in cs) + + properties = b2.util.set.difference (properties, components) + + # handle subfeatures and implicit features + + # move subfeatures to the end of the list + properties = [p for p in properties if not p.feature.subfeature] +\ + [p for p in properties if p.feature.subfeature] + + result = [] + while properties: + p = properties[0] + f = p.feature + + # locate all subproperties of $(x[1]) in the property set + subproperties = [x for x in properties if is_subfeature_of(p, x.feature)] + + if subproperties: + # reconstitute the joined property name + subproperties.sort () + joined = b2.build.property.Property(p.feature, p.value + '-' + '-'.join ([sp.value for sp in subproperties])) + result.append(joined) + + properties = b2.util.set.difference(properties[1:], subproperties) + + else: + # eliminate properties whose value is equal to feature's + # default and which are not symmetric and which do not + # contradict values implied by composite properties. + + # since all component properties of composites in the set + # have been eliminated, any remaining property whose + # feature is the same as a component of a composite in the + # set must have a non-redundant value. + if p.value != f.default or f.symmetric or f in component_features: + result.append (p) + + properties = properties[1:] + + return result + + +def split (properties): + """ Given a property-set of the form + v1/v2/...vN-1/vN/vN+1/...vM + + Returns + v1 v2 ... vN-1 vN vN+1 ... vM + + Note that vN...vM may contain slashes. This is resilient to the + substitution of backslashes for slashes, since Jam, unbidden, + sometimes swaps slash direction on NT. 
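+ For example (illustrative): '<toolset>gcc/<variant>debug' splits into
+ '<toolset>gcc' and '<variant>debug'; a piece without grist that follows a
+ gristed piece is re-joined to it with '/' rather than returned on its own.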
+ """ + assert isinstance(properties, basestring) + def split_one (properties): + pieces = re.split (__re_slash_or_backslash, properties) + result = [] + + for x in pieces: + if not get_grist (x) and len (result) > 0 and get_grist (result [-1]): + result = result [0:-1] + [ result [-1] + '/' + x ] + else: + result.append (x) + + return result + + if isinstance (properties, str): + return split_one (properties) + + result = [] + for p in properties: + result += split_one (p) + return result + + +def compress_subproperties (properties): + """ Combine all subproperties into their parent properties + + Requires: for every subproperty, there is a parent property. All + features are explicitly expressed. + + This rule probably shouldn't be needed, but + build-request.expand-no-defaults is being abused for unintended + purposes and it needs help + """ + from .property import Property + assert is_iterable_typed(properties, Property) + result = [] + matched_subs = set() + all_subs = set() + for p in properties: + f = p.feature + + if not f.subfeature: + subs = [x for x in properties if is_subfeature_of(p, x.feature)] + if subs: + + matched_subs.update(subs) + + subvalues = '-'.join (sub.value for sub in subs) + result.append(Property( + p.feature, p.value + '-' + subvalues, + p.condition)) + else: + result.append(p) + + else: + all_subs.add(p) + + # TODO: this variables are used just for debugging. What's the overhead? + assert all_subs == matched_subs + + return result + +###################################################################################### +# Private methods + +def __select_subproperties (parent_property, properties): + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + assert isinstance(parent_property, Property) + return [ x for x in properties if __is_subproperty_of (parent_property, x) ] + +def __get_subfeature_name (subfeature, value_string): + assert isinstance(subfeature, basestring) + assert isinstance(value_string, basestring) or value_string is None + if value_string == None: + prefix = '' + else: + prefix = value_string + ':' + + return prefix + subfeature + + +def __validate_feature_attributes (name, attributes): + assert isinstance(name, basestring) + assert is_iterable_typed(attributes, basestring) + for attribute in attributes: + if attribute not in VALID_ATTRIBUTES: + raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name)) + + if name in __all_features: + raise AlreadyDefined ("feature '%s' already defined" % name) + elif 'implicit' in attributes and 'free' in attributes: + raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name) + elif 'free' in attributes and 'propagated' in attributes: + raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name) + + +def __validate_feature (feature): + """ Generates an error if the feature is unknown. + """ + assert isinstance(feature, basestring) + if feature not in __all_features: + raise BaseException ('unknown feature "%s"' % feature) + + +def __select_subfeatures (parent_property, features): + """ Given a property, return the subset of features consisting of all + ordinary subfeatures of the property's feature, and all specific + subfeatures of the property's feature which are conditional on the + property's value. 
+ """ + if __debug__: + from .property import Property + assert isinstance(parent_property, Property) + assert is_iterable_typed(features, Feature) + return [f for f in features if is_subfeature_of (parent_property, f)] + +# FIXME: copy over tests. diff --git a/src/boost/tools/build/src/build/generators.jam b/src/boost/tools/build/src/build/generators.jam new file mode 100644 index 000000000..80cf55879 --- /dev/null +++ b/src/boost/tools/build/src/build/generators.jam @@ -0,0 +1,1453 @@ +# Copyright 2002. Vladimir Prus +# Copyright 2006. Rene Rivera +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Manages 'generators' --- objects which can do transformation between different +# target types and contain algorithm for finding transformation from sources to +# targets. +# +# The main entry point to this module is generators.construct rule. It is given +# a list of source targets, desired target type and a set of properties. It +# starts by selecting 'viable generators', which have any chances of producing +# the desired target type with the required properties. Generators are ranked +# and a set of the most specific ones is selected. +# +# The most specific generators have their 'run' methods called, with the +# properties and list of sources. Each one selects a target which can be +# directly consumed, and tries to convert the remaining ones to the types it can +# consume. This is done by recursively calling 'construct' with all consumable +# types. +# +# If the generator has collected all the targets it needs, it creates targets +# corresponding to result, and returns it. When all generators have been run, +# results of one of them are selected and returned as a result. +# +# It is quite possible for 'construct' to return more targets that it was asked +# for. For example, if it were asked to generate a target of type EXE, but the +# only found generator produces both EXE and TDS (file with debug) information. +# The extra target will be returned. +# +# Likewise, when generator tries to convert sources to consumable types, it can +# get more targets that it was asked for. The question is what to do with extra +# targets. B2 attempts to convert them to requested types, and attempts +# that as early as possible. Specifically, this is done after invoking each +# generator. TODO: An example is needed to document the rationale for trying +# extra target conversion at that point. +# +# In order for the system to be able to use a specific generator instance 'when +# needed', the instance needs to be registered with the system using +# generators.register() or one of its related rules. Unregistered generators may +# only be run explicitly and will not be considered by B2 when when +# converting between given target types. + +import "class" : new ; +import property-set ; +import sequence ; +import set ; +import type ; +import utility ; +import virtual-target ; + + +if "--debug-generators" in [ modules.peek : ARGV ] +{ + .debug = true ; +} + + +# Updated cached viable source target type information as needed after a new +# target type gets defined. This is needed because if a target type is a viable +# source target type for some generator then all of the target type's derived +# target types should automatically be considered as viable source target types +# for the same generator as well. Does nothing if a non-derived target type is +# passed to it. 
+# +rule update-cached-information-with-a-new-type ( type ) +{ + local base-type = [ type.base $(type) ] ; + if $(base-type) + { + for local g in $(.vstg-cached-generators) + { + if $(base-type) in $(.vstg.$(g)) + { + .vstg.$(g) += $(type) ; + } + } + + for local t in $(.vst-cached-types) + { + if $(base-type) in $(.vst.$(t)) + { + .vst.$(t) += $(type) ; + } + } + } +} + + +# Clears cached viable source target type information except for target types +# and generators with all source types listed as viable. Should be called when +# something invalidates those cached values by possibly causing some new source +# types to become viable. +# +local rule invalidate-extendable-viable-source-target-type-cache ( ) +{ + local generators-with-cached-source-types = $(.vstg-cached-generators) ; + .vstg-cached-generators = ; + for local g in $(generators-with-cached-source-types) + { + if $(.vstg.$(g)) = * + { + .vstg-cached-generators += $(g) ; + } + else + { + .vstg.$(g) = ; + } + } + + local types-with-cached-source-types = $(.vst-cached-types) ; + .vst-cached-types = ; + for local t in $(types-with-cached-source-types) + { + if $(.vst.$(t)) = * + { + .vst-cached-types += $(t) ; + } + else + { + .vst.$(t) = ; + } + } +} + + +# Outputs a debug message if generators debugging is on. Each element of +# 'message' is checked to see if it is a class instance. If so, instead of the +# value, the result of 'str' call is output. +# +local rule generators.dout ( message * ) +{ + if $(.debug) + { + ECHO [ sequence.transform utility.str : $(message) ] ; + } +} + + +local rule indent ( ) +{ + return $(.indent:J="") ; +} + + +local rule increase-indent ( ) +{ + .indent += " " ; +} + + +local rule decrease-indent ( ) +{ + .indent = $(.indent[2-]) ; +} + + +# Models a generator. +# +class generator +{ + import "class" : new ; + import feature ; + import generators : indent increase-indent decrease-indent generators.dout ; + import utility ; + import path ; + import property ; + import property-set ; + import sequence ; + import set ; + import toolset ; + import type ; + import virtual-target ; + + EXPORT class@generator : indent increase-indent decrease-indent + generators.dout ; + + rule __init__ ( + id # Identifies the generator - should be name + # of the rule which sets up the build + # actions. + + composing ? # Whether generator processes each source + # target in turn, converting it to required + # types. Ordinary generators pass all + # sources together to the recursive + # generators.construct-types call. + + : source-types * # Types that this generator can handle. If + # empty, the generator can consume anything. + + : target-types-and-names + # Types the generator will create and, + # optionally, names for created targets. + # Each element should have the form + # type["(" name-pattern ")"], for example, + # obj(%_x). Generated target name will be + # found by replacing % with the name of + # source, provided an explicit name was not + # specified. + + : requirements * + ) + { + self.id = $(id) ; + self.rule-name = $(id) ; + self.composing = $(composing) ; + self.source-types = $(source-types) ; + self.target-types-and-names = $(target-types-and-names) ; + self.requirements = $(requirements) ; + + for local e in $(target-types-and-names) + { + # Create three parallel lists: one with the list of target types, + # and two other with prefixes and postfixes to be added to target + # name. 
We use parallel lists for prefix and postfix (as opposed to + # mapping), because given target type might occur several times, for + # example "H H(%_symbols)". + local m = [ MATCH "([^\\(]*)(\\((.*)%(.*)\\))?" : $(e) ] ; + self.target-types += $(m[1]) ; + self.name-prefix += $(m[3]:E="") ; + self.name-postfix += $(m[4]:E="") ; + } + + for local r in [ requirements ] + { + if $(r:G=) + { + self.property-requirements += $(r) ; + } + else + { + self.feature-requirements += $(r) ; + } + } + + # Note that 'transform' here, is the same as 'for_each'. + sequence.transform type.validate : $(self.source-types) ; + sequence.transform type.validate : $(self.target-types) ; + + local relevant-for-generator = + [ sequence.transform utility.ungrist : $(requirements:G) ] ; + self.relevant-features = [ property-set.create $(relevant-for-generator) ] ; + } + + ################# End of constructor ################# + + rule id ( ) + { + return $(self.id) ; + } + + # Returns the list of target type the generator accepts. + # + rule source-types ( ) + { + return $(self.source-types) ; + } + + # Returns the list of target types that this generator produces. It is + # assumed to be always the same -- i.e. it can not change depending on some + # provided list of sources. + # + rule target-types ( ) + { + return $(self.target-types) ; + } + + # Returns the required properties for this generator. Properties in returned + # set must be present in build properties if this generator is to be used. + # If result has grist-only element, that build properties must include some + # value of that feature. + # + # XXX: remove this method? + # + rule requirements ( ) + { + return $(self.requirements) ; + } + + rule set-rule-name ( rule-name ) + { + self.rule-name = $(rule-name) ; + } + + rule rule-name ( ) + { + return $(self.rule-name) ; + } + + # Returns a true value if the generator can be run with the specified + # properties. + # + rule match-rank ( property-set-to-match ) + { + # See if generator requirements are satisfied by 'properties'. Treat a + # feature name in requirements (i.e. grist-only element), as matching + # any value of the feature. + + if [ $(property-set-to-match).contains-raw $(self.property-requirements) ] && + [ $(property-set-to-match).contains-features $(self.feature-requirements) ] + { + return true ; + } + else + { + return ; + } + } + + # Returns another generator which differs from $(self) in + # - id + # - value to feature in properties + # + rule clone ( new-id : new-toolset-properties + ) + { + local g = [ new $(__class__) $(new-id) $(self.composing) : + $(self.source-types) : $(self.target-types-and-names) : + # Note: this does not remove any subfeatures of which + # might cause problems. + [ property.change $(self.requirements) : ] + $(new-toolset-properties) ] ; + return $(g) ; + } + + # Creates another generator that is the same as $(self), except that if + # 'base' is in target types of $(self), 'type' will in target types of the + # new generator. + # + rule clone-and-change-target-type ( base : type ) + { + local target-types ; + for local t in $(self.target-types-and-names) + { + local m = [ MATCH "([^\\(]*)(\\(.*\\))?" 
: $(t) ] ; + if $(m) = $(base) + { + target-types += $(type)$(m[2]:E="") ; + } + else + { + target-types += $(t) ; + } + } + + local g = [ new $(__class__) $(self.id) $(self.composing) : + $(self.source-types) : $(target-types) : $(self.requirements) ] ; + if $(self.rule-name) + { + $(g).set-rule-name $(self.rule-name) ; + } + return $(g) ; + } + + # Tries to invoke this generator on the given sources. Returns a list of + # generated targets (instances of 'virtual-target') and optionally a set of + # properties to be added to the usage-requirements for all the generated + # targets. Returning nothing from run indicates that the generator was + # unable to create the target. + # + rule run + ( + project # Project for which the targets are generated. + name ? # Used when determining the 'name' attribute for all + # generated targets. See the 'generated-targets' method. + : property-set # Desired properties for generated targets. + : sources + # Source targets. + ) + { + generators.dout [ indent ] " ** generator" $(self.id) ; + generators.dout [ indent ] " composing:" $(self.composing) ; + + if ! $(self.composing) && $(sources[2]) && $(self.source-types[2]) + { + import errors : error : errors.error ; + errors.error "Unsupported source/source-type combination" ; + } + + # We do not run composing generators if no name is specified. The reason + # is that composing generator combines several targets, which can have + # different names, and it cannot decide which name to give for produced + # target. Therefore, the name must be passed. + # + # This in effect, means that composing generators are runnable only at + # the top-level of a transformation graph, or if their name is passed + # explicitly. Thus, we dissallow composing generators in the middle. For + # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE + # will not be allowed as the OBJ -> STATIC_LIB generator is composing. + if ! $(self.composing) || $(name) + { + run-really $(project) $(name) : $(property-set) : $(sources) ; + } + } + + rule run-really ( project name ? : property-set : sources + ) + { + # Targets that this generator will consume directly. + local consumed = ; + # Targets that can not be consumed and will be returned as-is. + local bypassed = ; + + if $(self.composing) + { + consumed = [ convert-multiple-sources-to-consumable-types $(project) + : $(property-set) : $(sources) ] ; + } + else + { + consumed = [ convert-to-consumable-types $(project) $(name) + : $(property-set) : $(sources) ] ; + } + + local result ; + if $(consumed[2]) + { + result = [ construct-result $(consumed[2-]) : $(project) $(name) : + [ $(property-set).add $(consumed[1]) ] ] ; + } + + if $(result) + { + generators.dout [ indent ] " SUCCESS: " $(result) ; + } + else + { + generators.dout [ indent ] " FAILURE" ; + } + generators.dout ; + if $(result) + { + # Make sure that we propagate usage-requirements up the stack. + return [ $(result[1]).add $(consumed[1]) ] $(result[2-]) ; + } + } + + # Constructs the dependency graph to be returned by this generator. + # + rule construct-result + ( + consumed + # Already prepared list of consumable targets. + # Composing generators may receive multiple sources + # all of which will have types matching those in + # $(self.source-types). Non-composing generators with + # multiple $(self.source-types) will receive exactly + # len $(self.source-types) sources with types matching + # those in $(self.source-types). 
And non-composing + # generators with only a single source type may + # receive multiple sources with all of them of the + # type listed in $(self.source-types). + : project name ? + : property-set # Properties to be used for all actions created here. + ) + { + local result ; + + local relevant = [ toolset.relevant $(self.rule-name) ] ; + relevant = [ $(relevant).add $(self.relevant-features) ] ; + property-set = [ $(property-set).add $(relevant) ] ; + + # If this is a 1->1 transformation, apply it to all consumed targets in + # order. + if ! $(self.source-types[2]) && ! $(self.composing) + { + for local r in $(consumed) + { + result += [ generated-targets $(r) : $(property-set) : + $(project) $(name) ] ; + } + } + else if $(consumed) + { + result += [ generated-targets $(consumed) : $(property-set) : + $(project) $(name) ] ; + } + if $(result) + { + if [ class.is-a $(result[1]) : property-set ] + { + return [ $(result[1]).add $(relevant) ] $(result[2-]) ; + } + else { + return $(relevant) $(result) ; + } + } + } + + # Determine target name from fullname (maybe including path components) + # Place optional prefix and postfix around basename + # + rule determine-target-name ( fullname : prefix ? : postfix ? ) + { + # See if we need to add directory to the target name. + local dir = $(fullname:D) ; + local name = $(fullname:B) ; + + name = $(prefix:E=)$(name) ; + name = $(name)$(postfix:E=) ; + + if $(dir) + # Never append '..' to target path. + && ! [ MATCH .*(\\.\\.).* : $(dir) ] + && ! [ path.is-rooted $(dir) ] + { + # Relative path is always relative to the source directory. Retain + # it, so that users can have files with the same name in two + # different subdirectories. + name = $(dir)/$(name) ; + } + return $(name) ; + } + + # Determine the name of the produced target from the names of the sources. + # + rule determine-output-name ( sources + ) + { + # The simple case if when a name of source has single dot. Then, we take + # the part before dot. Several dots can be caused by: + # - using source file like a.host.cpp, or + # - a type whose suffix has a dot. Say, we can type 'host_cpp' with + # extension 'host.cpp'. + # In the first case, we want to take the part up to the last dot. In the + # second case -- not sure, but for now take the part up to the last dot + # too. + name = [ utility.basename [ $(sources[1]).name ] ] ; + for local s in $(sources[2-]) + { + if [ utility.basename [ $(s).name ] ] != $(name) + { + import errors : error : errors.error ; + errors.error "$(self.id): source targets have different names: cannot determine target name" ; + } + } + return [ determine-target-name [ $(sources[1]).name ] ] ; + } + + # Constructs targets that are created after consuming 'sources'. The result + # will be the list of virtual-target, which has the same length as the + # 'target-types' attribute and with corresponding types. + # + # When 'name' is empty, all source targets must have the same 'name' + # attribute value, which will be used instead of the 'name' argument. + # + # The 'name' attribute value for each generated target will be equal to the + # 'name' parameter if there is no name pattern for this type. Otherwise, the + # '%' symbol in the name pattern will be replaced with the 'name' parameter + # to obtain the 'name' attribute. + # + # For example, if targets types are T1 and T2 (with name pattern "%_x"), + # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created + # files would be "foo.t1" and "foo_x.t2". 
The 'name' attribute actually + # determines the basename of a file. + # + # Note that this pattern mechanism has nothing to do with implicit patterns + # in make. It is a way to produce a target whose name is different than the + # name of its source. + # + rule generated-targets ( sources + : property-set : project name ? ) + { + if ! $(name) + { + name = [ determine-output-name $(sources) ] ; + } + + # Assign an action for each target. + local action = [ action-class ] ; + local a = [ class.new $(action) $(sources) : $(self.rule-name) : + $(property-set) ] ; + + # Create generated target for each target type. + local targets ; + local pre = $(self.name-prefix) ; + local post = $(self.name-postfix) ; + for local t in $(self.target-types) + { + local generated-name = $(pre[1])$(name:BS)$(post[1]) ; + generated-name = $(generated-name:R=$(name:D)) ; + pre = $(pre[2-]) ; + post = $(post[2-]) ; + + targets += [ class.new file-target $(generated-name) : $(t) : + $(project) : $(a) ] ; + } + + return [ sequence.transform virtual-target.register : $(targets) ] ; + } + + # Attempts to convert 'sources' to targets of types that this generator can + # handle. The intention is to produce the set of targets that can be used + # when the generator is run. + # + rule convert-to-consumable-types + ( + project name ? + : property-set + : sources + + : only-one ? # Convert 'source' to only one of the source types. If + # there is more that one possibility, report an error. + ) + { + local _consumed ; + local missing-types ; + local usage-requirements ; + + if $(sources[2]) + { + # Do not know how to handle several sources yet. Just try to pass + # the request to other generator. + missing-types = $(self.source-types) ; + } + else + { + local temp = [ consume-directly $(sources) ] ; + if $(temp[1]) + { + usage-requirements = [ property-set.empty ] ; + _consumed = $(temp[1]) ; + } + missing-types = $(temp[2-]) ; + } + + # No need to search for transformation if some source type has consumed + # source and no more source types are needed. + if $(only-one) && $(_consumed) + { + missing-types = ; + } + + # TODO: we should check that only one source type is created if + # 'only-one' is true. + + if $(missing-types) + { + local transformed = [ generators.construct-types $(project) $(name) + : $(missing-types) : $(property-set) : $(sources) ] ; + + # Add targets of right type to 'consumed'. Add others to 'bypassed'. + # The 'generators.construct' rule has done its best to convert + # everything to the required type. There is no need to rerun it on + # targets of different types. + + usage-requirements = $(transformed[1]) ; + for local t in $(transformed[2-]) + { + if [ $(t).type ] in $(missing-types) + { + _consumed += $(t) ; + } + } + } + + return $(usage-requirements) [ sequence.unique $(_consumed) ] ; + } + + # Converts several files to consumable types. Called for composing + # generators only. + # + rule convert-multiple-sources-to-consumable-types ( project : property-set : + sources * ) + { + local result ; + # We process each source one-by-one, trying to convert it to a usable + # type. + if ! $(self.source-types) + { + # Anything is acceptable + return [ property-set.empty ] $(sources) ; + } + else + { + local usage-requirements = [ property-set.empty ] ; + local acceptible-types = [ sequence.unique + [ sequence.transform type.all-derived : $(self.source-types) ] ] ; + for local source in $(sources) + { + if ! 
[ $(source).type ] in $(acceptible-types) + { + local transformed = [ generators.construct-types $(project) + : $(self.source-types) : $(property-set) : $(source) ] ; + for local t in $(transformed[2-]) + { + if [ $(t).type ] in $(self.source-types) + { + result += $(t) ; + } + } + if ! $(transformed) + { + generators.dout [ indent ] " failed to convert " $(source) ; + } + else + { + usage-requirements = [ $(usage-requirements).add $(transformed[1]) ] ; + } + } + else + { + result += $(source) ; + } + } + return $(usage-requirements) [ sequence.unique $(result) : stable ] ; + } + } + + rule consume-directly ( source ) + { + local real-source-type = [ $(source).type ] ; + + # If there are no source types, we can consume anything. + local source-types = $(self.source-types) ; + source-types ?= $(real-source-type) ; + + local result = "" ; + local missing-types ; + + for local st in $(source-types) + { + # The 'source' if of the right type already. + if $(real-source-type) = $(st) || [ type.is-derived + $(real-source-type) $(st) ] + { + result = $(source) ; + } + else + { + missing-types += $(st) ; + } + } + return $(result) $(missing-types) ; + } + + # Returns the class to be used to actions. Default implementation returns + # "action". + # + rule action-class ( ) + { + return "action" ; + } +} + + +# Registers a new generator instance 'g'. +# +rule register ( g ) +{ + .all-generators += $(g) ; + + # A generator can produce several targets of the same type. We want unique + # occurrence of that generator in .generators.$(t) in that case, otherwise, + # it will be tried twice and we will get a false ambiguity. + for local t in [ sequence.unique [ $(g).target-types ] ] + { + .generators.$(t) += $(g) ; + } + + # Update the set of generators for toolset. + + # TODO: should we check that generator with this id is not already + # registered. For example, the fop.jam module intentionally declared two + # generators with the same id, so such check will break it. + local id = [ $(g).id ] ; + + # Some generators have multiple periods in their name, so a simple $(id:S=) + # will not generate the right toolset name. E.g. if id = gcc.compile.c++, + # then .generators-for-toolset.$(id:S=) will append to + # .generators-for-toolset.gcc.compile, which is a separate value from + # .generators-for-toolset.gcc. Correcting this makes generator inheritance + # work properly. See also inherit-generators in the toolset module. + local base = $(id) ; + while $(base:S) + { + base = $(base:B) ; + } + .generators-for-toolset.$(base) += $(g) ; + + + # After adding a new generator that can construct new target types, we need + # to clear the related cached viable source target type information for + # constructing a specific target type or using a specific generator. Cached + # viable source target type lists affected by this are those containing any + # of the target types constructed by the new generator or any of their base + # target types. + # + # A more advanced alternative to clearing that cached viable source target + # type information would be to expand it with additional source types or + # even better - mark it as needing to be expanded on next use. + # + # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077 + # mailing list thread for an even more advanced idea of how we could convert + # Boost Build's Jamfile processing, target selection and generator selection + # into separate steps which would prevent these caches from ever being + # invalidated. 
+ # + # For now we just clear all the cached viable source target type information + # that does not simply state 'all types' and may implement a more detailed + # algorithm later on if it becomes needed. + + invalidate-extendable-viable-source-target-type-cache ; +} + + +# Creates a new non-composing 'generator' class instance and registers it. +# Returns the created instance. Rationale: the instance is returned so that it +# is possible to first register a generator and then call its 'run' method, +# bypassing the whole generator selection process. +# +rule register-standard ( id : source-types * : target-types + : requirements * ) +{ + local g = [ new generator $(id) : $(source-types) : $(target-types) : + $(requirements) ] ; + register $(g) ; + return $(g) ; +} + + +# Creates a new composing 'generator' class instance and registers it. +# +rule register-composing ( id : source-types * : target-types + : requirements * + ) +{ + local g = [ new generator $(id) true : $(source-types) : $(target-types) : + $(requirements) ] ; + register $(g) ; + return $(g) ; +} + + +# Returns all generators belonging to the given 'toolset', i.e. whose ids are +# '$(toolset).'. +# +rule generators-for-toolset ( toolset ) +{ + return $(.generators-for-toolset.$(toolset)) ; +} + + +# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when +# searching for generators that could produce a target of a certain type, both +# those generators are among viable generators, the overridden generator is +# immediately discarded. +# +# The overridden generators are discarded immediately after computing the list +# of viable generators but before running any of them. +# +rule override ( overrider-id : overridee-id ) +{ + .override.$(overrider-id) += $(overridee-id) ; +} + + +# Returns a list of source type which can possibly be converted to 'target-type' +# by some chain of generator invocation. +# +# More formally, takes all generators for 'target-type' and returns a union of +# source types for those generators and result of calling itself recursively on +# source types. +# +# Returns '*' in case any type should be considered a viable source type for the +# given type. +# +local rule viable-source-types-real ( target-type ) +{ + local result ; + + # 't0' is the initial list of target types we need to process to get a list + # of their viable source target types. New target types will not be added to + # this list. + local t0 = [ type.all-bases $(target-type) ] ; + + # 't' is the list of target types which have not yet been processed to get a + # list of their viable source target types. This list will get expanded as + # we locate more target types to process. + local t = $(t0) ; + + while $(t) + { + # Find all generators for the current type. Unlike + # 'find-viable-generators' we do not care about the property-set. + local generators = $(.generators.$(t[1])) ; + t = $(t[2-]) ; + + while $(generators) + { + local g = $(generators[1]) ; + generators = $(generators[2-]) ; + + if ! [ $(g).source-types ] + { + # Empty source types -- everything can be accepted. + result = * ; + # This will terminate this loop. + generators = ; + # This will terminate the outer loop. + t = ; + } + + for local source-type in [ $(g).source-types ] + { + if ! $(source-type) in $(result) + { + # If a generator accepts a 'source-type' it will also + # happily accept any type derived from it. + for local n in [ type.all-derived $(source-type) ] + { + if ! 
$(n) in $(result) + { + # Here there is no point in adding target types to + # the list of types to process in case they are or + # have already been on that list. We optimize this + # check by realizing that we only need to avoid the + # original target type's base types. Other target + # types that are or have been on the list of target + # types to process have been added to the 'result' + # list as well and have thus already been eliminated + # by the previous if. + if ! $(n) in $(t0) + { + t += $(n) ; + } + result += $(n) ; + } + } + } + } + } + } + + return $(result) ; +} + + +# Helper rule, caches the result of 'viable-source-types-real'. +# +rule viable-source-types ( target-type ) +{ + local key = .vst.$(target-type) ; + if ! $($(key)) + { + .vst-cached-types += $(target-type) ; + local v = [ viable-source-types-real $(target-type) ] ; + if ! $(v) + { + v = none ; + } + $(key) = $(v) ; + } + + if $($(key)) != none + { + return $($(key)) ; + } +} + + +# Returns the list of source types, which, when passed to 'run' method of +# 'generator', has some change of being eventually used (probably after +# conversion by other generators). +# +# Returns '*' in case any type should be considered a viable source type for the +# given generator. +# +rule viable-source-types-for-generator-real ( generator ) +{ + local source-types = [ $(generator).source-types ] ; + if ! $(source-types) + { + # If generator does not specify any source types, it might be a special + # generator like builtin.lib-generator which just relays to other + # generators. Return '*' to indicate that any source type is possibly + # OK, since we do not know for sure. + return * ; + } + else + { + local result ; + while $(source-types) + { + local s = $(source-types[1]) ; + source-types = $(source-types[2-]) ; + local viable-sources = [ generators.viable-source-types $(s) ] ; + if $(viable-sources) = * + { + result = * ; + source-types = ; # Terminate the loop. + } + else + { + result += [ type.all-derived $(s) ] $(viable-sources) ; + } + } + return [ sequence.unique $(result) ] ; + } +} + + +# Helper rule, caches the result of 'viable-source-types-for-generator'. +# +local rule viable-source-types-for-generator ( generator ) +{ + local key = .vstg.$(generator) ; + if ! $($(key)) + { + .vstg-cached-generators += $(generator) ; + local v = [ viable-source-types-for-generator-real $(generator) ] ; + if ! $(v) + { + v = none ; + } + $(key) = $(v) ; + } + + if $($(key)) != none + { + return $($(key)) ; + } +} + + +# Returns usage requirements + list of created targets. +# +local rule try-one-generator-really ( project name ? : generator : target-type + : property-set : sources * ) +{ + local targets = + [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ; + + local usage-requirements ; + local success ; + + generators.dout [ indent ] returned $(targets) ; + + if $(targets) + { + success = true ; + + if [ class.is-a $(targets[1]) : property-set ] + { + usage-requirements = $(targets[1]) ; + targets = $(targets[2-]) ; + } + else + { + usage-requirements = [ property-set.empty ] ; + } + } + + generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ; + generators.dout [ indent ] " " $(targets) ; + if $(usage-requirements) + { + generators.dout [ indent ] " with usage requirements:" $(usage-requirements) ; + } + + if $(success) + { + return $(usage-requirements) $(targets) ; + } +} + + +# Checks if generator invocation can be pruned, because it is guaranteed to +# fail. 
If so, quickly returns an empty list. Otherwise, calls +# try-one-generator-really. +# +local rule try-one-generator ( project name ? : generator : target-type + : property-set : sources * ) +{ + local source-types ; + for local s in $(sources) + { + source-types += [ $(s).type ] ; + } + local viable-source-types = [ viable-source-types-for-generator $(generator) + ] ; + + if $(source-types) && $(viable-source-types) != * && + ! [ set.intersection $(source-types) : $(viable-source-types) ] + { + local id = [ $(generator).id ] ; + generators.dout [ indent ] " ** generator '$(id)' pruned" ; + #generators.dout [ indent ] "source-types" '$(source-types)' ; + #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ; + } + else + { + return [ try-one-generator-really $(project) $(name) : $(generator) : + $(target-type) : $(property-set) : $(sources) ] ; + } +} + + +rule construct-types ( project name ? : target-types + : property-set + : sources + ) +{ + local result ; + local usage-requirements = [ property-set.empty ] ; + for local t in $(target-types) + { + local r = [ construct $(project) $(name) : $(t) : $(property-set) : + $(sources) ] ; + if $(r) + { + usage-requirements = [ $(usage-requirements).add $(r[1]) ] ; + result += $(r[2-]) ; + } + } + # TODO: have to introduce parameter controlling if several types can be + # matched and add appropriate checks. + + # TODO: need to review the documentation for 'construct' to see if it should + # return $(source) even if nothing can be done with it. Currents docs seem + # to imply that, contrary to the behaviour. + if $(result) + { + return $(usage-requirements) $(result) ; + } + else + { + return $(usage-requirements) $(sources) ; + } +} + + +# Ensures all 'targets' have their type. If this is not so, exists with error. +# +local rule ensure-type ( targets * ) +{ + for local t in $(targets) + { + if ! [ $(t).type ] + { + import errors ; + errors.error "target" [ $(t).str ] "has no type" ; + } + } +} + + +# Returns generators which can be used to construct target of specified type +# with specified properties. Uses the following algorithm: +# - iterates over requested target-type and all its bases (in the order returned +# by type.all-bases). +# - for each type find all generators that generate that type and whose +# requirements are satisfied by properties. +# - if the set of generators is not empty, returns that set. +# +# Note: this algorithm explicitly ignores generators for base classes if there +# is at least one generator for the requested target-type. +# +local rule find-viable-generators-aux ( target-type : property-set ) +{ + # Select generators that can create the required target type. + local viable-generators = ; + + import type ; + local t = $(target-type) ; + + if $(.debug) + { + generators.dout [ indent ] find-viable-generators target-type= $(target-type) + property-set= [ $(property-set).as-path ] ; + generators.dout [ indent ] "trying type" $(target-type) ; + } + + local generators = $(.generators.$(target-type)) ; + if $(generators) + { + if $(.debug) + { + generators.dout [ indent ] "there are generators for this type" ; + } + } + else + { + local t = [ type.base $(target-type) ] ; + + # Get the list of generators for the requested type. If no generator is + # registered, try base type, and so on. 
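+ # (Illustrative sketch: if a target of a hypothetical type X derived from
+ # OBJ is requested and no generator creates X directly, the loop below
+ # finds the OBJ generators and clones them with their target type changed
+ # to X.)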
+ while $(t) + { + if $(.debug) + { + generators.dout [ indent ] "trying type" $(t) ; + } + if $(.generators.$(t)) + { + generators.dout [ indent ] "there are generators for this type" ; + generators = $(.generators.$(t)) ; + + # We are here because there were no generators found for + # target-type but there are some generators for its base type. + # We will try to use them, but they will produce targets of + # base type, not of 'target-type'. So, we clone the generators + # and modify the list of target types. + local generators2 ; + for local g in $(generators) + { + # generators.register adds a generator to the list of + # generators for toolsets, which is a bit strange, but + # should work. That list is only used when inheriting a + # toolset, which should have been done before running + # generators. + generators2 += [ $(g).clone-and-change-target-type $(t) : + $(target-type) ] ; + generators.register $(generators2[-1]) ; + } + generators = $(generators2) ; + t = ; + } + else + { + t = [ type.base $(t) ] ; + } + } + } + + for local g in $(generators) + { + if $(.debug) + { + generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ; + } + + if [ $(g).match-rank $(property-set) ] + { + if $(.debug) + { + generators.dout [ indent ] " is viable" ; + } + viable-generators += $(g) ; + } + } + + return $(viable-generators) ; +} + + +rule find-viable-generators ( target-type : property-set ) +{ + local key = $(target-type).$(property-set) ; + local l = $(.fv.$(key)) ; + if ! $(l) + { + l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ; + if ! $(l) + { + l = none ; + } + .fv.$(key) = $(l) ; + } + + if $(l) = none + { + l = ; + } + + local viable-generators ; + for local g in $(l) + { + # Avoid trying the same generator twice on different levels. + if ! $(g) in $(.active-generators) + { + viable-generators += $(g) ; + } + else + { + generators.dout [ indent ] " generator " [ $(g).id ] "is active, discaring" ; + } + } + + # Generators which override 'all'. + local all-overrides ; + # Generators which are overridden. + local overriden-ids ; + for local g in $(viable-generators) + { + local id = [ $(g).id ] ; + local this-overrides = $(.override.$(id)) ; + overriden-ids += $(this-overrides) ; + if all in $(this-overrides) + { + all-overrides += $(g) ; + } + } + if $(all-overrides) + { + viable-generators = $(all-overrides) ; + } + local result ; + for local g in $(viable-generators) + { + if ! [ $(g).id ] in $(overriden-ids) + { + result += $(g) ; + } + } + + return $(result) ; +} + + +.construct-stack = ; + + +# Attempts to construct a target by finding viable generators, running them and +# selecting the dependency graph. +# +local rule construct-really ( project name ? : target-type : property-set : + sources * ) +{ + viable-generators = [ find-viable-generators $(target-type) : + $(property-set) ] ; + + generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ] + " viable generators" ; + + local result ; + local generators-that-succeeded ; + for local g in $(viable-generators) + { + # This variable will be restored on exit from this scope. 
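+ # (Jam's 'local' gives the module-level .active-generators a temporary
+ # value for the dynamic extent of this iteration, so recursive construct
+ # calls made via try-one-generator see $(g) as active and will not try it
+ # again.)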
+ local .active-generators = $(g) $(.active-generators) ; + + local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type) + : $(property-set) : $(sources) ] ; + + if $(r) + { + generators-that-succeeded += $(g) ; + if $(result) + { + ECHO "Error: ambiguity found when searching for best transformation" ; + ECHO "Trying to produce type '$(target-type)' from: " ; + for local s in $(sources) + { + ECHO " - " [ $(s).str ] ; + } + ECHO "Generators that succeeded:" ; + for local g in $(generators-that-succeeded) + { + ECHO " - " [ $(g).id ] ; + } + ECHO "First generator produced: " ; + for local t in $(result[2-]) + { + ECHO " - " [ $(t).str ] ; + } + ECHO "Second generator produced: " ; + for local t in $(r[2-]) + { + ECHO " - " [ $(t).str ] ; + } + EXIT ; + } + else + { + result = $(r) ; + } + } + } + + return $(result) ; +} + + +# Attempts to create a target of 'target-type' with 'properties' from 'sources'. +# The 'sources' are treated as a collection of *possible* ingridients, i.e. +# there is no obligation to consume them all. +# +# Returns a list of targets. When this invocation is first instance of +# 'construct' in stack, returns only targets of requested 'target-type', +# otherwise, returns also unused sources and additionally generated targets. +# +# If 'top-level' is set, does not suppress generators that are already +# used in the stack. This may be useful in cases where a generator +# has to build a metatargets -- for example a target corresponding to +# built tool. +# +rule construct ( project name ? : target-type : property-set * : sources * : top-level ? ) +{ + local saved-active ; + if $(top-level) + { + saved-active = $(.active-generators) ; + .active-generators = ; + } + + # FIXME This is probably not intended be be run unconditionally, + # but changing it causes no_type to fail. + if "(.construct-stack)" + { + ensure-type $(sources) ; + } + + .construct-stack += 1 ; + + increase-indent ; + + if $(.debug) + { + generators.dout [ indent ] "*** construct" $(target-type) ; + + for local s in $(sources) + { + generators.dout [ indent ] " from" $(s) ; + } + generators.dout [ indent ] " properties:" [ $(property-set).raw ] ; + } + + local result = [ construct-really $(project) $(name) : $(target-type) : + $(property-set) : $(sources) ] ; + + decrease-indent ; + + .construct-stack = $(.construct-stack[2-]) ; + + if $(top-level) + { + .active-generators = $(saved-active) ; + } + + return $(result) ; +} + +# Given 'result', obtained from some generator or generators.construct, adds +# 'raw-properties' as usage requirements to it. If result already contains usage +# requirements -- that is the first element of result of an instance of the +# property-set class, the existing usage requirements and 'raw-properties' are +# combined. 
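+# Illustrative call: add-usage-requirements $(result) : $(raw-properties) ;
+# If $(result) is empty, nothing is returned; if $(result[1]) is already a
+# property-set, the raw properties are merged into it with add-raw; otherwise a
+# new property-set is created from them and prepended to $(result).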
+#
+rule add-usage-requirements ( result * : raw-properties * )
+{
+    if $(result)
+    {
+        if [ class.is-a $(result[1]) : property-set ]
+        {
+            return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+        }
+        else
+        {
+            return [ property-set.create $(raw-properties) ] $(result) ;
+        }
+    }
+}
+
+rule dump ( )
+{
+    for local g in $(.all-generators)
+    {
+        ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
+    }
+}
+
diff --git a/src/boost/tools/build/src/build/generators.py b/src/boost/tools/build/src/build/generators.py
new file mode 100644
index 000000000..23d1ea944
--- /dev/null
+++ b/src/boost/tools/build/src/build/generators.py
@@ -0,0 +1,1209 @@
+# Status: being ported by Vladimir Prus
+# Base revision: 48649
+# TODO: replace the logging with dout
+
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE.txt or copy at
+# https://www.bfgroup.xyz/b2/LICENSE.txt)
+
+# Manages 'generators' --- objects which can do transformations between different
+# target types and contain the algorithm for finding a transformation from sources
+# to targets.
+#
+# The main entry point to this module is the generators.construct rule. It is given
+# a list of source targets, a desired target type and a set of properties.
+# It starts by selecting 'viable generators', which have any chance of producing
+# the desired target type with the required properties. Generators are ranked and
+# a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the properties
+# and list of sources. Each one selects the targets which can be directly consumed,
+# and tries to convert the remaining ones to the types it can consume. This is done
+# by recursively calling 'construct' with all consumable types.
+#
+# If the generator has collected all the targets it needs, it creates targets
+# corresponding to the result, and returns them. When all generators have been run,
+# the results of one of them are selected and returned as the result.
+#
+# It's quite possible that 'construct' returns more targets than it was asked for.
+# For example, it may have been asked for target type EXE, but the only generator
+# found produces both EXE and TDS (a file with debug information). The extra target
+# will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it can
+# get more targets than it was asked for. The question is what to do with extra
+# targets. Boost.Build attempts to convert them to the requested types, and attempts
+# to do so as early as possible. Specifically, this is done after invoking each
+# generator. (Later I'll document the rationale for trying extra target conversion
+# at that point).
+#
+# That early conversion is not always desirable. Suppose a generator got a source of
+# type Y and must consume one target of type X_1 and one target of type X_2.
+# When converting Y to X_1 an extra target of type Y_2 is created. We should not try
+# to convert it to type X_1, because if we do so, the generator will get two targets
+# of type X_1, and will be at a loss as to which one to use. Because of that, the
+# 'construct' rule has a parameter telling whether multiple targets can be returned.
+# If the parameter is false, conversion of extra targets is not performed.
+
+
+import re
+import cStringIO
+import os.path
+
+from virtual_target import Subvariant
+from .
import virtual_target, type, property_set, property +from b2.exceptions import BaseBoostBuildException +from b2.util.logger import * +from b2.util.utility import * +from b2.util import set as set_, is_iterable_typed, is_iterable, bjam_signature +from b2.util.sequence import unique +import b2.util.sequence as sequence +from b2.manager import get_manager +import b2.build.type + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + """ + global __generators, __type_to_generators, __generators_for_toolset, __construct_stack + global __overrides, __active_generators + global __viable_generators_cache, __viable_source_types_cache + global __vstg_cached_generators, __vst_cached_types + + __generators = {} + __type_to_generators = {} + __generators_for_toolset = {} + __overrides = {} + + # TODO: can these be global? + __construct_stack = [] + __viable_generators_cache = {} + __viable_source_types_cache = {} + __active_generators = [] + + __vstg_cached_generators = [] + __vst_cached_types = [] + +reset () + +_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?') +_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?') + + +__debug = None +__indent = "" + +def debug(): + global __debug + if __debug is None: + __debug = "--debug-generators" in bjam.variable("ARGV") + return __debug + +def increase_indent(): + global __indent + __indent += " " + +def decrease_indent(): + global __indent + __indent = __indent[0:-4] + + +# Updated cached viable source target type information as needed after a new +# derived target type gets added. This is needed because if a target type is a +# viable source target type for some generator then all of the target type's +# derived target types are automatically viable as source target types for the +# same generator. Does nothing if a non-derived target type is passed to it. +# +def update_cached_information_with_a_new_type(type): + assert isinstance(type, basestring) + base_type = b2.build.type.base(type) + + if base_type: + for g in __vstg_cached_generators: + if base_type in __viable_source_types_cache.get(g, []): + __viable_source_types_cache[g].append(type) + + for t in __vst_cached_types: + if base_type in __viable_source_types_cache.get(t, []): + __viable_source_types_cache[t].append(type) + +# Clears cached viable source target type information except for target types +# and generators with all source types listed as viable. Should be called when +# something invalidates those cached values by possibly causing some new source +# types to become viable. +# +def invalidate_extendable_viable_source_target_type_cache(): + + global __vstg_cached_generators + generators_with_cached_source_types = __vstg_cached_generators + __vstg_cached_generators = [] + + for g in generators_with_cached_source_types: + if g in __viable_source_types_cache: + if __viable_source_types_cache[g] == ["*"]: + __vstg_cached_generators.append(g) + else: + del __viable_source_types_cache[g] + + global __vst_cached_types + types_with_cached_sources_types = __vst_cached_types + __vst_cached_types = [] + for t in types_with_cached_sources_types: + if t in __viable_source_types_cache: + if __viable_source_types_cache[t] == ["*"]: + __vst_cached_types.append(t) + else: + del __viable_source_types_cache[t] + +def dout(message): + if debug(): + print __indent + message + + +class InvalidTargetSource(BaseBoostBuildException): + """ + Should be raised when a target contains a source that is invalid. 
+ """ + + +class Generator: + """ Creates a generator. + manager: the build manager. + id: identifies the generator + + rule: the rule which sets up build actions. + + composing: whether generator processes each source target in + turn, converting it to required types. + Ordinary generators pass all sources together to + recusrive generators.construct_types call. + + source_types (optional): types that this generator can handle + + target_types_and_names: types the generator will create and, optionally, names for + created targets. Each element should have the form + type["(" name-pattern ")"] + for example, obj(%_x). Name of generated target will be found + by replacing % with the name of source, provided explicit name + was not specified. + + requirements (optional) + + NOTE: all subclasses must have a similar signature for clone to work! + """ + def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []): + assert isinstance(id, basestring) + assert isinstance(composing, bool) + assert is_iterable_typed(source_types, basestring) + assert is_iterable_typed(target_types_and_names, basestring) + assert is_iterable_typed(requirements, basestring) + self.id_ = id + self.composing_ = composing + self.source_types_ = source_types + self.target_types_and_names_ = target_types_and_names + self.requirements_ = requirements + + self.target_types_ = [] + self.name_prefix_ = [] + self.name_postfix_ = [] + + for e in target_types_and_names: + # Create three parallel lists: one with the list of target types, + # and two other with prefixes and postfixes to be added to target + # name. We use parallel lists for prefix and postfix (as opposed + # to mapping), because given target type might occur several times, + # for example "H H(%_symbols)". 
+ m = _re_separate_types_prefix_and_postfix.match (e) + + if not m: + raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id)) + + target_type = m.group (1) + if not target_type: target_type = '' + prefix = m.group (3) + if not prefix: prefix = '' + postfix = m.group (4) + if not postfix: postfix = '' + + self.target_types_.append (target_type) + self.name_prefix_.append (prefix) + self.name_postfix_.append (postfix) + + for x in self.source_types_: + type.validate (x) + + for x in self.target_types_: + type.validate (x) + + def clone (self, new_id, new_toolset_properties): + """ Returns another generator which differers from $(self) in + - id + - value to feature in properties + """ + assert isinstance(new_id, basestring) + assert is_iterable_typed(new_toolset_properties, basestring) + return self.__class__ (new_id, + self.composing_, + self.source_types_, + self.target_types_and_names_, + # Note: this does not remove any subfeatures of + # which might cause problems + property.change (self.requirements_, '') + new_toolset_properties) + + def clone_and_change_target_type(self, base, type): + """Creates another generator that is the same as $(self), except that + if 'base' is in target types of $(self), 'type' will in target types + of the new generator.""" + assert isinstance(base, basestring) + assert isinstance(type, basestring) + target_types = [] + for t in self.target_types_and_names_: + m = _re_match_type.match(t) + assert m + + if m.group(1) == base: + if m.group(2): + target_types.append(type + m.group(2)) + else: + target_types.append(type) + else: + target_types.append(t) + + return self.__class__(self.id_, self.composing_, + self.source_types_, + target_types, + self.requirements_) + + + def id(self): + return self.id_ + + def source_types (self): + """ Returns the list of target type the generator accepts. + """ + return self.source_types_ + + def target_types (self): + """ Returns the list of target types that this generator produces. + It is assumed to be always the same -- i.e. it cannot change depending + list of sources. + """ + return self.target_types_ + + def requirements (self): + """ Returns the required properties for this generator. Properties + in returned set must be present in build properties if this + generator is to be used. If result has grist-only element, + that build properties must include some value of that feature. + """ + return self.requirements_ + + def match_rank (self, ps): + """ Returns true if the generator can be run with the specified + properties. + """ + # See if generator's requirements are satisfied by + # 'properties'. Treat a feature name in requirements + # (i.e. grist-only element), as matching any value of the + # feature. + assert isinstance(ps, property_set.PropertySet) + all_requirements = self.requirements () + + property_requirements = [] + feature_requirements = [] + # This uses strings because genenator requirements allow + # the '' syntax without value and regular validation + # is not happy about that. + for r in all_requirements: + if get_value (r): + property_requirements.append (r) + + else: + feature_requirements.append (r) + + return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \ + and all(ps.get(get_grist(s)) for s in feature_requirements) + + def run (self, project, name, prop_set, sources): + """ Tries to invoke this generator on the given sources. Returns a + list of generated targets (instances of 'virtual-target'). 
+ + project: Project for which the targets are generated. + + name: Determines the name of 'name' attribute for + all generated targets. See 'generated_targets' method. + + prop_set: Desired properties for generated targets. + + sources: Source targets. + """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + # intermediary targets don't have names, so None is possible + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + if project.manager ().logger ().on (): + project.manager ().logger ().log (__name__, " generator '%s'" % self.id_) + project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_) + + if not sources: + s = 'An empty source list was passed in to the "{}" generator'.format(self.id_) + if name: + s += ' for target "{}"'.format(name) + raise InvalidTargetSource(s) + + if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1: + raise BaseException ("Unsupported source/source_type combination") + + # We don't run composing generators if no name is specified. The reason + # is that composing generator combines several targets, which can have + # different names, and it cannot decide which name to give for produced + # target. Therefore, the name must be passed. + # + # This in effect, means that composing generators are runnable only + # at top-level of transofrmation graph, or if name is passed explicitly. + # Thus, we dissallow composing generators in the middle. For example, the + # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed + # (the OBJ -> STATIC_LIB generator is composing) + if not self.composing_ or name: + return self.run_really (project, name, prop_set, sources) + else: + return [] + + def run_really (self, project, name, prop_set, sources): + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + # intermediary targets don't have names, so None is possible + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + # consumed: Targets that this generator will consume directly. + + if self.composing_: + consumed = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources) + else: + consumed = self.convert_to_consumable_types (project, name, prop_set, sources) + + result = [] + if consumed: + result = self.construct_result (consumed, project, name, prop_set) + + if result: + if project.manager ().logger ().on (): + project.manager ().logger ().log (__name__, " SUCCESS: ", result) + + else: + project.manager ().logger ().log (__name__, " FAILURE") + + return result + + def construct_result (self, consumed, project, name, prop_set): + """ Constructs the dependency graph that will be returned by this + generator. 
+ consumed: Already prepared list of consumable targets + If generator requires several source files will contain + exactly len $(self.source_types_) targets with matching types + Otherwise, might contain several targets with the type of + self.source_types_ [0] + project: + name: + prop_set: Properties to be used for all actions create here + """ + if __debug__: + from .targets import ProjectTarget + assert is_iterable_typed(consumed, virtual_target.VirtualTarget) + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + result = [] + # If this is 1->1 transformation, apply it to all consumed targets in order. + if len (self.source_types_) < 2 and not self.composing_: + + for r in consumed: + result.extend(self.generated_targets([r], prop_set, project, name)) + elif consumed: + result.extend(self.generated_targets(consumed, prop_set, project, name)) + + return result + + def determine_target_name(self, fullname): + assert isinstance(fullname, basestring) + # Determine target name from fullname (maybe including path components) + # Place optional prefix and postfix around basename + + dir = os.path.dirname(fullname) + name = os.path.basename(fullname) + idx = name.find(".") + if idx != -1: + name = name[:idx] + + if dir and not ".." in dir and not os.path.isabs(dir): + # Relative path is always relative to the source + # directory. Retain it, so that users can have files + # with the same in two different subdirectories. + name = dir + "/" + name + + return name + + def determine_output_name(self, sources): + """Determine the name of the produced target from the + names of the sources.""" + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + + # The simple case if when a name + # of source has single dot. Then, we take the part before + # dot. Several dots can be caused by: + # - Using source file like a.host.cpp + # - A type which suffix has a dot. Say, we can + # type 'host_cpp' with extension 'host.cpp'. + # In the first case, we want to take the part till the last + # dot. In the second case -- no sure, but for now take + # the part till the last dot too. + name = os.path.splitext(sources[0].name())[0] + + for s in sources[1:]: + n2 = os.path.splitext(s.name()) + if n2 != name: + get_manager().errors()( + "%s: source targets have different names: cannot determine target name" + % (self.id_)) + + # Names of sources might include directory. We should strip it. + return self.determine_target_name(sources[0].name()) + + + def generated_targets (self, sources, prop_set, project, name): + """ Constructs targets that are created after consuming 'sources'. + The result will be the list of virtual-target, which the same length + as 'target_types' attribute and with corresponding types. + + When 'name' is empty, all source targets must have the same value of + the 'name' attribute, which will be used instead of the 'name' argument. + + The value of 'name' attribute for each generated target will be equal to + the 'name' parameter if there's no name pattern for this type. Otherwise, + the '%' symbol in the name pattern will be replaced with the 'name' parameter + to obtain the 'name' attribute. + + For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes + for T1 and T2 are .t1 and t2, and source if foo.z, then created files would + be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the + basename of a file. 
+ + Note that this pattern mechanism has nothing to do with implicit patterns + in make. It's a way to produce target which name is different for name of + source. + """ + if __debug__: + from .targets import ProjectTarget + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + if not name: + name = self.determine_output_name(sources) + + # Assign an action for each target + action = self.action_class() + a = action(project.manager(), sources, self.id_, prop_set) + + # Create generated target for each target type. + targets = [] + pre = self.name_prefix_ + post = self.name_postfix_ + for t in self.target_types_: + basename = os.path.basename(name) + generated_name = pre[0] + basename + post[0] + generated_name = os.path.join(os.path.dirname(name), generated_name) + pre = pre[1:] + post = post[1:] + + targets.append(virtual_target.FileTarget(generated_name, t, project, a)) + + return [ project.manager().virtual_targets().register(t) for t in targets ] + + def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False): + """ Attempts to convert 'source' to the types that this generator can + handle. The intention is to produce the set of targets can should be + used when generator is run. + only_one: convert 'source' to only one of source types + if there's more that one possibility, report an + error. + + Returns a pair: + consumed: all targets that can be consumed. + """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(name, basestring) or name is None + assert isinstance(project, ProjectTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(only_one, bool) + consumed = [] + missing_types = [] + + if len (sources) > 1: + # Don't know how to handle several sources yet. Just try + # to pass the request to other generator + missing_types = self.source_types_ + + else: + (c, m) = self.consume_directly (sources [0]) + consumed += c + missing_types += m + + # No need to search for transformation if + # some source type has consumed source and + # no more source types are needed. + if only_one and consumed: + missing_types = [] + + #TODO: we should check that only one source type + #if create of 'only_one' is true. + # TODO: consider if consuned/bypassed separation should + # be done by 'construct_types'. + + if missing_types: + transformed = construct_types (project, name, missing_types, prop_set, sources) + + # Add targets of right type to 'consumed'. Add others to + # 'bypassed'. The 'generators.construct' rule has done + # its best to convert everything to the required type. + # There's no need to rerun it on targets of different types. + + # NOTE: ignoring usage requirements + for t in transformed[1]: + if t.type() in missing_types: + consumed.append(t) + + consumed = unique(consumed) + + return consumed + + + def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources): + """ Converts several files to consumable types. 
+ """ + if __debug__: + from .targets import ProjectTarget + + assert isinstance(project, ProjectTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + if not self.source_types_: + return list(sources) + + acceptable_types = set() + for t in self.source_types_: + acceptable_types.update(type.all_derived(t)) + + result = [] + for source in sources: + if source.type() not in acceptable_types: + transformed = construct_types( + project, None,self.source_types_, prop_set, [source]) + # construct_types returns [prop_set, [targets]] + for t in transformed[1]: + if t.type() in self.source_types_: + result.append(t) + if not transformed: + project.manager().logger().log(__name__, " failed to convert ", source) + else: + result.append(source) + + result = sequence.unique(result, stable=True) + return result + + + + def consume_directly (self, source): + assert isinstance(source, virtual_target.VirtualTarget) + real_source_type = source.type () + + # If there are no source types, we can consume anything + source_types = self.source_types() + if not source_types: + source_types = [real_source_type] + + consumed = [] + missing_types = [] + for st in source_types: + # The 'source' if of right type already) + if real_source_type == st or type.is_derived (real_source_type, st): + consumed = [source] + + else: + missing_types.append (st) + + return (consumed, missing_types) + + def action_class (self): + """ Returns the class to be used to actions. Default implementation + returns "action". + """ + return virtual_target.Action + + +def find (id): + """ Finds the generator with id. Returns None if not found. + """ + assert isinstance(id, basestring) + return __generators.get (id, None) + +def register (g): + """ Registers new generator instance 'g'. + """ + assert isinstance(g, Generator) + id = g.id() + + __generators [id] = g + + # A generator can produce several targets of the + # same type. We want unique occurrence of that generator + # in .generators.$(t) in that case, otherwise, it will + # be tried twice and we'll get false ambiguity. + for t in sequence.unique(g.target_types()): + __type_to_generators.setdefault(t, []).append(g) + + # Update the set of generators for toolset + + # TODO: should we check that generator with this id + # is not already registered. For example, the fop.jam + # module intentionally declared two generators with the + # same id, so such check will break it. + + # Some generators have multiple periods in their name, so the + # normal $(id:S=) won't generate the right toolset name. + # e.g. if id = gcc.compile.c++, then + # .generators-for-toolset.$(id:S=) will append to + # .generators-for-toolset.gcc.compile, which is a separate + # value from .generators-for-toolset.gcc. Correcting this + # makes generator inheritance work properly. + # See also inherit-generators in module toolset + base = id.split ('.', 100) [0] + + __generators_for_toolset.setdefault(base, []).append(g) + + # After adding a new generator that can construct new target types, we need + # to clear the related cached viable source target type information for + # constructing a specific target type or using a specific generator. Cached + # viable source target type lists affected by this are those containing any + # of the target types constructed by the new generator or any of their base + # target types. 
+ # + # A more advanced alternative to clearing that cached viable source target + # type information would be to expand it with additional source types or + # even better - mark it as needing to be expanded on next use. + # + # For now we just clear all the cached viable source target type information + # that does not simply state 'all types' and may implement a more detailed + # algorithm later on if it becomes needed. + + invalidate_extendable_viable_source_target_type_cache() + + +def check_register_types(fn): + def wrapper(id, source_types, target_types, requirements=[]): + assert isinstance(id, basestring) + assert is_iterable_typed(source_types, basestring) + assert is_iterable_typed(target_types, basestring) + assert is_iterable_typed(requirements, basestring) + return fn(id, source_types, target_types, requirements=requirements) + wrapper.__name__ = fn.__name__ + wrapper.__doc__ = fn.__doc__ + return wrapper + + +@bjam_signature([['id'], ['source_types', '*'], ['target_types', '*'], ['requirements', '*']]) +@check_register_types +def register_standard (id, source_types, target_types, requirements = []): + """ Creates new instance of the 'generator' class and registers it. + Returns the creates instance. + Rationale: the instance is returned so that it's possible to first register + a generator and then call 'run' method on that generator, bypassing all + generator selection. + """ + g = Generator (id, False, source_types, target_types, requirements) + register (g) + return g + + +@check_register_types +def register_composing (id, source_types, target_types, requirements = []): + g = Generator (id, True, source_types, target_types, requirements) + register (g) + return g + +def generators_for_toolset (toolset): + """ Returns all generators which belong to 'toolset'. + """ + assert isinstance(toolset, basestring) + return __generators_for_toolset.get(toolset, []) + +def override (overrider_id, overridee_id): + """Make generator 'overrider-id' be preferred to + 'overridee-id'. If, when searching for generators + that could produce a target of certain type, + both those generators are among viable generators, + the overridden generator is immediately discarded. + + The overridden generators are discarded immediately + after computing the list of viable generators, before + running any of them.""" + assert isinstance(overrider_id, basestring) + assert isinstance(overridee_id, basestring) + + __overrides.setdefault(overrider_id, []).append(overridee_id) + +def __viable_source_types_real (target_type): + """ Returns a list of source type which can possibly be converted + to 'target_type' by some chain of generator invocation. + + More formally, takes all generators for 'target_type' and + returns union of source types for those generators and result + of calling itself recusrively on source types. + """ + assert isinstance(target_type, basestring) + generators = [] + + # 't0' is the initial list of target types we need to process to get a list + # of their viable source target types. New target types will not be added to + # this list. + t0 = type.all_bases (target_type) + + + # 't' is the list of target types which have not yet been processed to get a + # list of their viable source target types. This list will get expanded as + # we locate more target types to process. + t = t0 + + result = [] + while t: + # Find all generators for current type. + # Unlike 'find_viable_generators' we don't care about prop_set. 
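The function being defined here computes, transitively, every source type that could eventually be converted into the requested target type by some chain of generators. A simplified Python sketch of that closure; the generator table is hypothetical, and the real code additionally expands derived types, walks base types, and caches its results.

    # target type -> source types of the generators that can produce it (made up)
    GENERATOR_SOURCES = {
        "EXE": ["OBJ"],
        "OBJ": ["CPP", "ASM"],
        "CPP": ["YACC"],
    }

    def viable_source_types(target_type):
        result, queue = set(), [target_type]
        while queue:
            t = queue.pop()
            for s in GENERATOR_SOURCES.get(t, []):
                if s not in result:
                    result.add(s)
                    queue.append(s)      # sources may themselves be generated
        return sorted(result)

    print(viable_source_types("EXE"))    # ['ASM', 'CPP', 'OBJ', 'YACC']
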
+ generators = __type_to_generators.get (t [0], []) + t = t[1:] + + for g in generators: + if not g.source_types(): + # Empty source types -- everything can be accepted + result = "*" + # This will terminate outer loop. + t = None + break + + for source_type in g.source_types (): + if not source_type in result: + # If generator accepts 'source_type' it + # will happily accept any type derived from it + all = type.all_derived (source_type) + for n in all: + if not n in result: + + # Here there is no point in adding target types to + # the list of types to process in case they are or + # have already been on that list. We optimize this + # check by realizing that we only need to avoid the + # original target type's base types. Other target + # types that are or have been on the list of target + # types to process have been added to the 'result' + # list as well and have thus already been eliminated + # by the previous if. + if not n in t0: + t.append (n) + result.append (n) + + return result + + +def viable_source_types (target_type): + """ Helper rule, caches the result of '__viable_source_types_real'. + """ + assert isinstance(target_type, basestring) + if target_type not in __viable_source_types_cache: + __vst_cached_types.append(target_type) + __viable_source_types_cache [target_type] = __viable_source_types_real (target_type) + return __viable_source_types_cache [target_type] + +def viable_source_types_for_generator_real (generator): + """ Returns the list of source types, which, when passed to 'run' + method of 'generator', has some change of being eventually used + (probably after conversion by other generators) + """ + assert isinstance(generator, Generator) + source_types = generator.source_types () + + if not source_types: + # If generator does not specify any source types, + # it might be special generator like builtin.lib-generator + # which just relays to other generators. Return '*' to + # indicate that any source type is possibly OK, since we don't + # know for sure. + return ['*'] + + else: + result = [] + for s in source_types: + viable_sources = viable_source_types(s) + if viable_sources == "*": + result = ["*"] + break + else: + result.extend(type.all_derived(s) + viable_sources) + return unique(result) + +def viable_source_types_for_generator (generator): + """ Caches the result of 'viable_source_types_for_generator'. + """ + assert isinstance(generator, Generator) + if generator not in __viable_source_types_cache: + __vstg_cached_generators.append(generator) + __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator) + + return __viable_source_types_cache[generator] + +def try_one_generator_really (project, name, generator, target_type, properties, sources): + """ Returns usage requirements + list of created targets. 
+ """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(generator, Generator) + assert isinstance(target_type, basestring) + assert isinstance(properties, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + targets = generator.run (project, name, properties, sources) + + usage_requirements = [] + success = False + + dout("returned " + str(targets)) + + if targets: + success = True; + + if isinstance (targets[0], property_set.PropertySet): + usage_requirements = targets [0] + targets = targets [1] + + else: + usage_requirements = property_set.empty () + + dout( " generator" + generator.id() + " spawned ") + # generators.dout [ indent ] " " $(targets) ; +# if $(usage-requirements) +# { +# generators.dout [ indent ] " with usage requirements:" $(x) ; +# } + + if success: + return (usage_requirements, targets) + else: + return None + +def try_one_generator (project, name, generator, target_type, properties, sources): + """ Checks if generator invocation can be pruned, because it's guaranteed + to fail. If so, quickly returns empty list. Otherwise, calls + try_one_generator_really. + """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(generator, Generator) + assert isinstance(target_type, basestring) + assert isinstance(properties, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + source_types = [] + + for s in sources: + source_types.append (s.type ()) + + viable_source_types = viable_source_types_for_generator (generator) + + if source_types and viable_source_types != ['*'] and\ + not set_.intersection (source_types, viable_source_types): + if project.manager ().logger ().on (): + id = generator.id () + project.manager ().logger ().log (__name__, "generator '%s' pruned" % id) + project.manager ().logger ().log (__name__, "source_types" '%s' % source_types) + project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types) + + return [] + + else: + return try_one_generator_really (project, name, generator, target_type, properties, sources) + + +def construct_types (project, name, target_types, prop_set, sources): + + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert is_iterable_typed(target_types, basestring) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + + result = [] + usage_requirements = property_set.empty() + + for t in target_types: + r = construct (project, name, t, prop_set, sources) + + if r: + (ur, targets) = r + usage_requirements = usage_requirements.add(ur) + result.extend(targets) + + # TODO: have to introduce parameter controlling if + # several types can be matched and add appropriate + # checks + + # TODO: need to review the documentation for + # 'construct' to see if it should return $(source) even + # if nothing can be done with it. Currents docs seem to + # imply that, contrary to the behaviour. + if result: + return (usage_requirements, result) + + else: + return (usage_requirements, sources) + +def __ensure_type (targets): + """ Ensures all 'targets' have types. If this is not so, exists with + error. 
+ """ + assert is_iterable_typed(targets, virtual_target.VirtualTarget) + for t in targets: + if not t.type (): + get_manager().errors()("target '%s' has no type" % str (t)) + +def find_viable_generators_aux (target_type, prop_set): + """ Returns generators which can be used to construct target of specified type + with specified properties. Uses the following algorithm: + - iterates over requested target_type and all it's bases (in the order returned bt + type.all-bases. + - for each type find all generators that generate that type and which requirements + are satisfied by properties. + - if the set of generators is not empty, returns that set. + + Note: this algorithm explicitly ignores generators for base classes if there's + at least one generator for requested target_type. + """ + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + # Select generators that can create the required target type. + viable_generators = [] + initial_generators = [] + + from . import type + + # Try all-type generators first. Assume they have + # quite specific requirements. + all_bases = type.all_bases(target_type) + + for t in all_bases: + + initial_generators = __type_to_generators.get(t, []) + + if initial_generators: + dout("there are generators for this type") + if t != target_type: + # We're here, when no generators for target-type are found, + # but there are some generators for a base type. + # We'll try to use them, but they will produce targets of + # base type, not of 'target-type'. So, we clone the generators + # and modify the list of target types. + generators2 = [] + for g in initial_generators[:]: + # generators.register adds generator to the list of generators + # for toolsets, which is a bit strange, but should work. + # That list is only used when inheriting toolset, which + # should have being done before generators are run. + ng = g.clone_and_change_target_type(t, target_type) + generators2.append(ng) + register(ng) + + initial_generators = generators2 + break + + for g in initial_generators: + dout("trying generator " + g.id() + + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")") + + m = g.match_rank(prop_set) + if m: + dout(" is viable") + viable_generators.append(g) + + return viable_generators + +def find_viable_generators (target_type, prop_set): + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + key = target_type + '.' + str (prop_set) + + l = __viable_generators_cache.get (key, None) + if not l: + l = [] + + if not l: + l = find_viable_generators_aux (target_type, prop_set) + + __viable_generators_cache [key] = l + + viable_generators = [] + for g in l: + # Avoid trying the same generator twice on different levels. + # TODO: is this really used? + if not g in __active_generators: + viable_generators.append (g) + else: + dout(" generator %s is active, discarding" % g.id()) + + # Generators which override 'all'. 
+ all_overrides = [] + + # Generators which are overridden + overriden_ids = [] + + for g in viable_generators: + id = g.id () + + this_overrides = __overrides.get (id, []) + + if this_overrides: + overriden_ids.extend (this_overrides) + if 'all' in this_overrides: + all_overrides.append (g) + + if all_overrides: + viable_generators = all_overrides + + return [g for g in viable_generators if not g.id() in overriden_ids] + +def __construct_really (project, name, target_type, prop_set, sources): + """ Attempts to construct target by finding viable generators, running them + and selecting the dependency graph. + """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + viable_generators = find_viable_generators (target_type, prop_set) + + result = [] + + dout(" *** %d viable generators" % len (viable_generators)) + + generators_that_succeeded = [] + + for g in viable_generators: + __active_generators.append(g) + r = try_one_generator (project, name, g, target_type, prop_set, sources) + del __active_generators[-1] + + if r: + generators_that_succeeded.append(g) + if result: + output = cStringIO.StringIO() + print >>output, "ambiguity found when searching for best transformation" + print >>output, "Trying to produce type '%s' from: " % (target_type) + for s in sources: + print >>output, " - " + s.str() + print >>output, "Generators that succeeded:" + for g in generators_that_succeeded: + print >>output, " - " + g.id() + print >>output, "First generator produced: " + for t in result[1:]: + print >>output, " - " + str(t) + print >>output, "Second generator produced:" + for t in r[1:]: + print >>output, " - " + str(t) + get_manager().errors()(output.getvalue()) + else: + result = r; + + return result; + + +def construct (project, name, target_type, prop_set, sources, top_level=False): + """ Attempts to create target of 'target-type' with 'properties' + from 'sources'. The 'sources' are treated as a collection of + *possible* ingridients -- i.e. it is not required to consume + them all. If 'multiple' is true, the rule is allowed to return + several targets of 'target-type'. + + Returns a list of target. When this invocation is first instance of + 'construct' in stack, returns only targets of requested 'target-type', + otherwise, returns also unused sources and additionally generated + targets. + + If 'top-level' is set, does not suppress generators that are already + used in the stack. This may be useful in cases where a generator + has to build a metatarget -- for example a target corresponding to + built tool. 
+ """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(top_level, bool) + global __active_generators + if top_level: + saved_active = __active_generators + __active_generators = [] + + global __construct_stack + if not __construct_stack: + __ensure_type (sources) + + __construct_stack.append (1) + + increase_indent () + + if project.manager().logger().on(): + dout( "*** construct " + target_type) + + for s in sources: + dout(" from " + str(s)) + + project.manager().logger().log (__name__, " properties: ", prop_set.raw ()) + + result = __construct_really(project, name, target_type, prop_set, sources) + + decrease_indent() + + __construct_stack = __construct_stack [1:] + + if top_level: + __active_generators = saved_active + + return result + +def add_usage_requirements (result, raw_properties): + if result: + if isinstance (result[0], property_set.PropertySet): + return (result[0].add_raw(raw_properties), result[1]) + else: + return (property_set.create(raw_properties), result) + #if [ class.is-a $(result[1]) : property-set ] + #{ + # return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ; + #} + #else + #{ + # return [ property-set.create $(raw-properties) ] $(result) ; + #} diff --git a/src/boost/tools/build/src/build/project.jam b/src/boost/tools/build/src/build/project.jam new file mode 100644 index 000000000..a4cef30c8 --- /dev/null +++ b/src/boost/tools/build/src/build/project.jam @@ -0,0 +1,1357 @@ +# Copyright 2002, 2003 Dave Abrahams +# Copyright 2002, 2005, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Implements project representation and loading. Each project is represented by: +# - a module where all the Jamfile content lives. +# - an instance of 'project-attributes' class. +# (given a module name, can be obtained using the 'attributes' rule) +# - an instance of 'project-target' class (from targets.jam) +# (given a module name, can be obtained using the 'target' rule) +# +# Typically, projects are created as result of loading a Jamfile, which is done +# by rules 'load' and 'initialize', below. First, a module is prepared and a new +# project-attributes instance is created. Some rules necessary for all projects +# are added to the module (see the 'project-rules' module). Default project +# attributes are set (inheriting parent project attributes, if it exists). After +# that the Jamfile is read. It can declare its own attributes using the +# 'project' rule which will be combined with any already set. +# +# The 'project' rule can also declare a project id which will be associated with +# the project module. +# +# Besides Jamfile projects, we also support 'standalone' projects created by +# calling 'initialize' in an arbitrary module and not specifying the project's +# location. After the call, the module can call the 'project' rule, declare main +# targets and behave as a regular project except that, since it is not +# associated with any location, it should only declare prebuilt targets. +# +# The list of all loaded Jamfiles is stored in the .project-locations variable. 
+# It is possible to obtain a module name for a location using the 'module-name' +# rule. Standalone projects are not recorded and can only be referenced using +# their project id. + +import "class" : new ; +import modules ; +import path ; +import print ; +import property-set ; +import sequence ; + + +.debug-loading = [ MATCH ^(--debug-loading)$ : [ modules.peek : ARGV ] ] ; + + +# Loads the Jamfile at the given location. After loading, project global file +# and Jamfiles needed by the requested one will be loaded recursively. If the +# Jamfile at that location is loaded already, does nothing. Returns the project +# module for the Jamfile. +# +rule load ( jamfile-location : synthesize ? ) +{ + local module-name = [ module-name $(jamfile-location) ] ; + # If Jamfile is already loaded, do not try again. + if ! $(module-name) in $(.jamfile-modules) + { + if $(.debug-loading) + { + ECHO Loading Jamfile at '$(jamfile-location)' ; + } + + load-jamfile $(jamfile-location) : $(module-name) : $(synthesize) ; + + # We want to make sure that child project are loaded only after parent + # projects. In particular, because parent projects define attributes + # which are then inherited by children, and we do not want children to + # be loaded before parent has defined everything. + # + # While "build-project" and "use-project" can potentially refer to child + # projects from parent projects, we do not immediately load child + # projects when seeing those attributes. Instead, we record the minimal + # information to be used only later. + load-used-projects $(module-name) ; + } + return $(module-name) ; +} + + +rule load-used-projects ( module-name ) +{ + local used = [ modules.peek $(module-name) : .used-projects ] ; + local location = [ attribute $(module-name) location ] ; + while $(used) + { + local id = $(used[1]) ; + local where = [ path.make $(used[2]) ] ; + register-id $(id) : [ load [ path.root $(where) $(location) ] ] ; + used = $(used[3-]) ; + } +} + + +# Note the use of character groups, as opposed to listing 'Jamroot' and +# 'jamroot'. With the latter, we would get duplicate matches on Windows and +# would have to eliminate duplicates. +JAMROOT ?= [ modules.peek : JAMROOT ] ; +JAMROOT ?= project-root.jam "[Jj]amroot" "[Jj]amroot." "[Jj]amroot.jam" ; + + +# Loads parent of Jamfile at 'location'. Issues an error if nothing is found. +# +rule load-parent ( location ) +{ + local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ; + if $(found) + { + return [ load $(found[1]:D) ] ; + } +} + + +# Returns the project module corresponding to the given project-id or plain +# directory name. Returns nothing if such a project can not be found. +# +rule find ( name : current-location ) +{ + local project-module ; + + # Try interpreting name as project id. + if [ path.is-rooted $(name) ] + { + project-module = $($(name).jamfile-module) ; + } + + if ! $(project-module) + { + local location = [ path.root [ path.make $(name) ] $(current-location) ] + ; + + # If no project is registered for the given location, try to load it. + # First see if we have a Jamfile. If not, then see if we might have a + # project root willing to act as a Jamfile. In that case, project root + # must be placed in the directory referred to by id. + + project-module = [ module-name $(location) ] ; + if ! 
$(project-module) in $(.jamfile-modules) + { + if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ] + { + project-module = [ load $(location) ] ; + } + else + { + project-module = ; + } + } + } + + return $(project-module) ; +} + + +# Returns the name of the module corresponding to 'jamfile-location'. If no +# module corresponds to that location yet, associates the default module name +# with that location. +# +rule module-name ( jamfile-location ) +{ + if ! $(.module.$(jamfile-location)) + { + # Root the path, so that locations are always unambiguous. Without this, + # we can not decide if '../../exe/program1' and '.' are the same paths. + local normalized = [ path.root $(jamfile-location) [ path.pwd ] ] ; + + # Quick & dirty fix to get the same module name when we supply two + # equivalent location paths, e.g. 'd:\Foo' & 'D:\fOo\bar\..' on Windows. + # Note that our current implementation will not work correctly if the + # given location references an empty folder, but in that case any later + # attempt to load a Jamfile from this location will fail anyway. + # FIXME: Implement this cleanly. Support for this type of path + # normalization already exists internally in Boost Jam and the current + # fix relies on the GLOB builtin rule using that support. Most likely we + # just need to add a new builtin rule to do this explicitly. + normalized = [ NORMALIZE_PATH $(normalized) ] ; + local glob-result = [ GLOB [ path.native $(normalized) ] : * ] ; + if $(glob-result) + { + normalized = $(glob-result[1]:D) ; + } + .module.$(jamfile-location) = Jamfile<$(normalized)> ; + } + return $(.module.$(jamfile-location)) ; +} + + +# Default patterns to search for the Jamfiles to use for build declarations. +# +JAMFILE = [ modules.peek : JAMFILE ] ; +JAMFILE ?= "[Bb]uild.jam" "[Jj]amfile.v2" "[Jj]amfile" "[Jj]amfile." "[Jj]amfile.jam" ; + + +# Find the Jamfile at the given location. This returns the exact names of all +# the Jamfiles in the given directory. The optional parent-root argument causes +# this to search not the given directory but the ones above it up to the +# parent-root directory. +# +rule find-jamfile ( + dir # The directory(s) to look for a Jamfile. + parent-root ? # Optional flag indicating to search for the parent Jamfile. + : no-errors ? + ) +{ + # Glob for all the possible Jamfiles according to the match pattern. + # + local jamfile-glob = ; + if $(parent-root) + { + if ! $(.parent-jamfile.$(dir)) + { + .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE) + ] ; + } + jamfile-glob = $(.parent-jamfile.$(dir)) ; + } + else + { + if ! $(.jamfile.$(dir)) + { + .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ; + } + jamfile-glob = $(.jamfile.$(dir)) ; + + } + + local jamfile-to-load = $(jamfile-glob) ; + # Multiple Jamfiles found in the same place. Warn about this and ensure we + # use only one of them. As a temporary convenience measure, if there is + # Jamfile.v2 among found files, suppress the warning and use it. + # + if $(jamfile-to-load[2-]) + { + local v2-jamfiles = [ MATCH "^(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)$" : + $(jamfile-to-load) ] ; + + if $(v2-jamfiles) && ! $(v2-jamfiles[2]) + { + jamfile-to-load = $(v2-jamfiles) ; + } + else + { + local jamfile = [ path.basename $(jamfile-to-load[1]) ] ; + ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!" + "Loading the first one: '$(jamfile)'." ; + } + + jamfile-to-load = $(jamfile-to-load[1]) ; + } + + # Could not find it, error. + # + if ! $(no-errors) && ! 
$(jamfile-to-load) + { + import errors ; + errors.error Unable to load Jamfile. + : Could not find a Jamfile in directory '$(dir)'. + : Attempted to find it with pattern '$(JAMFILE:J=" ")'. + : Please consult the documentation at "'https://www.bfgroup.xyz/b2/'." ; + } + + return $(jamfile-to-load) ; +} + + +# Default patterns to search for auto-include of package manager build declarations. +# +PACKAGE_MANAGER_BUILD_INFO(CONAN) = "conanbuildinfo.jam" ; + +# Default to using the package manager build info in this priority order: +# 1. Configuration, user, project, etc. +# 2. Command line argument "--use-package-manager=". +# 3. Environment variable "PACKAGE_MANAGER_BUILD_INFO". +# 4. Conan, others. +# +local .use-package-manager = [ MATCH "^--use-package-manager=(.*)$" : [ modules.peek : ARGV ] ] ; +PACKAGE_MANAGER_BUILD_INFO ?= $(PACKAGE_MANAGER_BUILD_INFO($(.use-package-manager:U))) ; +PACKAGE_MANAGER_BUILD_INFO ?= [ modules.peek : PACKAGE_MANAGER_BUILD_INFO ] ; +PACKAGE_MANAGER_BUILD_INFO ?= $(PACKAGE_MANAGER_BUILD_INFO(CONAN)) ; + + +# Load the configured package manager build information file. +# +rule load-package-manager-build-info ( ) +{ + # This first variable is the one from the configuration (user, project, etc). + local package-manager-build-info = [ modules.peek [ CALLER_MODULE ] : PACKAGE_MANAGER_BUILD_INFO ] ; + # And this is the rest as it takes it from the settings in the "project" module. + # I.e. the variable assignments above. + package-manager-build-info ?= $(PACKAGE_MANAGER_BUILD_INFO) ; + if $(package-manager-build-info) + { + local pm = [ path.glob $(dir) : $(package-manager-build-info) ] ; + pm = $(pm[1]) ; + local cm = [ CALLER_MODULE ] ; + local pm-tag = "$(cm)<$(pm:B)>" ; + if $(pm) && ! ( $(pm-tag) in $(.package-manager-build-info) ) + { + .package-manager-build-info += $(pm-tag) ; + # We found a matching builf info to load, but we have to be careful + # as the loading can affect the current project since it can define + # sub-projects. Hence we save and restore the current project. + local saved-project = $(.current-project) ; + modules.load $(cm) : $(pm) ; + .current-project = $(saved-project) ; + } + } +} + + +# Load a Jamfile at the given directory. Returns nothing. Will attempt to load +# the file as indicated by the JAMFILE patterns. Effect of calling this rule +# twice with the same 'dir' is undefined. +# +local rule load-jamfile ( dir : jamfile-module : synthesize ? ) +{ + # See if the Jamfile is where it should be. + # + local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ; + if ! $(jamfile-to-load) + { + jamfile-to-load = [ find-jamfile $(dir) : $(synthesize) ] ; + } + + if $(jamfile-to-load[2]) + { + import errors ; + errors.error "Multiple Jamfiles found at '$(dir)'" : + "Filenames are: " $(jamfile-to-load:D=) ; + } + + if ! $(jamfile-to-load) && $(synthesize) + { + jamfile-to-load = $(dir)/@ ; + } + + # Now load the Jamfile in its own context. + # The call to 'initialize' may load the parent Jamfile, which might contain + # a 'use-project' or a 'project.load' call, causing a second attempt to load + # the same project we are loading now. Checking inside .jamfile-modules + # prevents that second attempt from messing things up. + if ! $(jamfile-module) in $(.jamfile-modules) + { + local previous-project = $(.current-project) ; + + # Initialize the Jamfile module before loading. + initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ] : + $(jamfile-to-load:BS) ; + + # Auto-load package manager(s) build information. 
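The variable assignments above choose the package manager build-info file by precedence: an explicitly configured value, then the --use-package-manager= command line option, then the PACKAGE_MANAGER_BUILD_INFO environment variable, and finally the Conan default. A rough Python sketch of that "first hit wins" chain; the function name and table are illustrative, not B2 code.

    import os
    import sys

    DEFAULTS = {"CONAN": "conanbuildinfo.jam"}   # per-package-manager defaults

    def pick_build_info(configured=None, argv=None, env=None):
        argv = sys.argv[1:] if argv is None else argv
        env = os.environ if env is None else env
        from_cli = next((a.split("=", 1)[1] for a in argv
                         if a.startswith("--use-package-manager=")), None)
        return (configured
                or (DEFAULTS.get(from_cli.upper()) if from_cli else None)
                or env.get("PACKAGE_MANAGER_BUILD_INFO")
                or DEFAULTS["CONAN"])

    print(pick_build_info(argv=[], env={}))                               # conanbuildinfo.jam
    print(pick_build_info(argv=["--use-package-manager=conan"], env={}))  # conanbuildinfo.jam
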
+ IMPORT project : load-package-manager-build-info + : $(jamfile-module) : project.load-package-manager-build-info ; + modules.call-in $(jamfile-module) : project.load-package-manager-build-info ; + + if ! $(jamfile-module) in $(.jamfile-modules) + { + .jamfile-modules += $(jamfile-module) ; + + local saved-project = $(.current-project) ; + + mark-as-user $(jamfile-module) ; + if $(jamfile-to-load:B) = "@" + { + # Not a real jamfile to load. Synthsize the load. + modules.poke $(jamfile-module) : __name__ : $(jamfile-module) ; + modules.poke $(jamfile-module) : __file__ : [ path.native $(jamfile-to-load) ] ; + modules.poke $(jamfile-module) : __binding__ : [ path.native $(jamfile-to-load) ] ; + } + else + { + modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] + : . ; + if [ MATCH ^($(JAMROOT))$ : $(jamfile-to-load:BS) ] + { + jamfile = [ find-jamfile $(dir) : no-errors ] ; + if $(jamfile) + { + load-aux $(jamfile-module) : [ path.native $(jamfile) ] ; + } + } + } + + # Now do some checks. + if $(.current-project) != $(saved-project) + { + import errors ; + errors.error + The value of the .current-project variable has magically + : changed after loading a Jamfile. This means some of the + : targets might be defined in the wrong project. + : after loading $(jamfile-module) + : expected value $(saved-project) + : actual value $(.current-project) ; + } + + end-load $(previous-project) ; + + if $(.global-build-dir) + { + if [ attribute $(jamfile-module) location ] && ! [ attribute + $(jamfile-module) id ] + { + local project-root = [ attribute $(jamfile-module) + project-root ] ; + if $(project-root) = $(dir) + { + ECHO "warning: the --build-dir option was specified" ; + ECHO "warning: but Jamroot at '$(dir)'" ; + ECHO "warning: specified no project id" ; + ECHO "warning: the --build-dir option will be ignored" ; + } + } + } + } + } +} + + +# Called when done loading a project module. Restores the current project to its +# previous value and does some additional checking to make sure our 'currently +# loaded project' identifier does not get left with an invalid value. +# +rule end-load ( previous-project ? ) +{ + if ! $(.current-project) + { + import errors ; + errors.error Ending project loading requested when there was no project + currently being loaded. ; + } + + if ! $(previous-project) && $(.saved-current-project) + { + import errors ; + errors.error Ending project loading requested with no 'previous project' + when there were other projects still marked as being loaded + recursively. ; + } + + .current-project = $(previous-project) ; +} + + +rule mark-as-user ( module-name ) +{ + if USER_MODULE in [ RULENAMES ] + { + USER_MODULE $(module-name) ; + } +} + + +rule load-aux ( module-name : file ) +{ + mark-as-user $(module-name) ; + + module $(module-name) + { + include $(2) ; + local rules = [ RULENAMES $(1) ] ; + IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ; + } +} + + +.global-build-dir = [ MATCH ^--build-dir=(.*)$ : [ modules.peek : ARGV ] ] ; +if $(.global-build-dir) +{ + # If the option is specified several times, take the last value. + .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ; +} + + +# Initialize the module for a project. +# +rule initialize ( + module-name # The name of the project module. + : location ? # The location (directory) of the project to initialize. If + # not specified, a standalone project will be initialized. + : basename ? 
+ ) +{ + if $(.debug-loading) + { + ECHO "Initializing project '$(module-name)'" ; + } + + local jamroot ; + + local parent-module ; + if $(module-name) in test-config all-config + { + # No parent. + } + else if $(module-name) = site-config + { + parent-module = test-config ; + } + else if $(module-name) = user-config + { + parent-module = site-config ; + } + else if $(module-name) = project-config + { + parent-module = user-config ; + } + else if $(location) + { + if ! [ MATCH ^($(JAMROOT))$ : $(basename) ] + { + # We search for parent/jamroot only if this is a jamfile project, i.e. + # if is not a standalone or a jamroot project. + parent-module = [ load-parent $(location) ] ; + } + if ! $(parent-module) + { + # We have a jamroot project, or a jamfile project + # without a parent that becomes a jamroot. Inherit from + # user-config (or project-config + # if it exists). + if $(project-config.attributes) + { + parent-module = project-config ; + } + else + { + parent-module = user-config ; + } + jamroot = true ; + } + } + + # TODO: need to consider if standalone projects can do anything but define + # prebuilt targets. If so, we need to give them a more sensible "location", + # so that source paths are correct. + location ?= "" ; + # Create the module for the Jamfile first. + module $(module-name) + { + } + + # load-parent can end up loading this module again. Make sure this is not + # duplicated. + if ! $($(module-name).attributes) + { + $(module-name).attributes = [ new project-attributes $(location) + $(module-name) ] ; + local attributes = $($(module-name).attributes) ; + + if $(location) + { + $(attributes).set source-location : [ path.make $(location) ] : + exact ; + } + else + { + local cfgs = project site test user all ; + if ! $(module-name) in $(cfgs)-config + { + # This is a standalone project with known location. Set its + # source location so it can declare targets. This is needed so + # you can put a .jam file with your sources and use it via + # 'using'. Standard modules (in the 'tools' subdir) may not + # assume source dir is set. + local s = [ modules.binding $(module-name) ] ; + if ! $(s) + { + import errors ; + errors.error Could not determine project location + $(module-name) ; + } + $(attributes).set source-location : $(s:D) : exact ; + } + } + + $(attributes).set requirements : [ property-set.empty ] : exact ; + $(attributes).set usage-requirements : [ property-set.empty ] : exact ; + + # Import rules common to all project modules from project-rules module, + # defined at the end of this file. + local rules = [ RULENAMES project-rules ] ; + IMPORT project-rules : $(rules) : $(module-name) : $(rules) ; + + if $(parent-module) + { + inherit-attributes $(module-name) : $(parent-module) ; + $(attributes).set parent-module : $(parent-module) : exact ; + } + + if $(jamroot) + { + $(attributes).set project-root : $(location) : exact ; + if ! $(.first-project-root) + { + .first-project-root = $(module-name) ; + } + } + + local parent ; + if $(parent-module) + { + parent = [ target $(parent-module) ] ; + } + + if ! $(.target.$(module-name)) + { + local requirements = [ attribute $(module-name) requirements ] ; + .target.$(module-name) = [ new project-target $(module-name) : + $(module-name) $(parent) : $(requirements) ] ; + + if $(.debug-loading) + { + ECHO Assigned project target $(.target.$(module-name)) to + '$(module-name)' ; + } + } + } + + .current-project = [ target $(module-name) ] ; +} + + +# Make 'project-module' inherit attributes of project root and parent module. 
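+# For example (an illustrative sketch): if the parent's build-dir is "bin" and
+# this project lives in the "util" subdirectory of the parent, the inherited
+# build dir becomes "bin/util" via the relative-path computation below, while
+# requirements and usage-requirements are copied from the parent unchanged.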
+# +rule inherit-attributes ( project-module : parent-module ) +{ + local attributes = $($(project-module).attributes) ; + local pattributes = [ attributes $(parent-module) ] ; + # Parent module might be locationless configuration module. + if [ modules.binding $(parent-module) ] + { + $(attributes).set parent : + [ path.parent [ path.make [ modules.binding $(parent-module) ] ] ] ; + } + $(attributes).set project-root : + [ $(pattributes).get project-root ] : exact ; + $(attributes).set default-build : + [ $(pattributes).get default-build ] ; + $(attributes).set requirements : + [ $(pattributes).get requirements ] : exact ; + $(attributes).set usage-requirements : + [ $(pattributes).get usage-requirements ] : exact ; + + local parent-build-dir = [ $(pattributes).get build-dir ] ; + if $(parent-build-dir) + { + # Have to compute relative path from parent dir to our dir. Convert both + # paths to absolute, since we cannot find relative path from ".." to + # ".". + + local location = [ attribute $(project-module) location ] ; + local parent-location = [ attribute $(parent-module) location ] ; + + local pwd = [ path.pwd ] ; + local parent-dir = [ path.root $(parent-location) $(pwd) ] ; + local our-dir = [ path.root $(location) $(pwd) ] ; + $(attributes).set build-dir : [ path.join $(parent-build-dir) + [ path.relative $(our-dir) $(parent-dir) ] ] : exact ; + } +} + + +# Returns whether the given string is a valid registered project id. +# +rule is-registered-id ( id ) +{ + return $($(id).jamfile-module) ; +} + + +# Associate the given id with the given project module. Returns the possibly +# corrected project id. +# +rule register-id ( id : module ) +{ + id = [ path.root $(id) / ] ; + + if [ MATCH (//) : $(id) ] + { + import errors ; + errors.user-error Project id may not contain two consecutive slash + characters (project "id:" '$(id)'). ; + } + + local orig-module = $($(id).jamfile-module) ; + if $(orig-module) && $(orig-module) != $(module) + { + local new-file = [ modules.peek $(module) : __file__ ] ; + local new-location = [ project.attribute $(module) location ] ; + + local orig-file = [ modules.peek $(orig-module) : __file__ ] ; + local orig-main-id = [ project.attribute $(orig-module) id ] ; + local orig-location = [ project.attribute $(orig-module) location ] ; + local orig-project = [ target $(orig-module) ] ; + local orig-name = [ $(orig-project).name ] ; + + import errors ; + errors.user-error Attempt to redeclare already registered project id + '$(id)'. + : Original "project:" + : " " "Name:" $(orig-name:E=---) + : " " "Module:" $(orig-module) + : " " "Main id: "$(orig-main-id:E=---) + : " " "File:" $(orig-file:E=---) + : " " "Location:" $(orig-location:E=---) + : New "project:" + : " " "Module:" $(module) + : " " "File:" $(new-file:E=---) + : " " "Location:" $(new-location:E=---) ; + } + + $(id).jamfile-module = $(module) ; + return $(id) ; +} + + +# Class keeping all the attributes of a project. +# +# The standard attributes are "id", "location", "project-root", "parent" +# "requirements", "default-build", "source-location" and "projects-to-build". +# +class project-attributes +{ + import path ; + import print ; + import project ; + import property ; + import property-set ; + import sequence ; + + rule __init__ ( location project-module ) + { + self.location = $(location) ; + self.project-module = $(project-module) ; + } + + # Set the named attribute from the specification given by the user. The + # value actually set may be different. 
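+    # For instance (an illustrative sketch), a Jamfile declaration such as
+    #
+    #     project foo : requirements <threading>multi : default-build release ;
+    #
+    # ends up calling this rule once per attribute given to 'project'.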
+ # + rule set ( attribute : specification * + : exact ? # Sets value from 'specification' without any processing. + ) + { + if $(exact) + { + self.$(attribute) = $(specification) ; + } + else if $(attribute) = "requirements" + { + local result = [ property-set.refine-from-user-input + $(self.requirements) : $(specification) + : $(self.project-module) : $(self.location) ] ; + + if $(result[1]) = "@error" + { + import errors : error : errors.error ; + errors.error Requirements for project at '$(self.location)' + conflict with parent's. : "Explanation:" $(result[2-]) ; + } + + self.requirements = $(result) ; + } + else if $(attribute) = "usage-requirements" + { + local unconditional ; + for local p in $(specification) + { + local split = [ property.split-conditional $(p) ] ; + split ?= nothing $(p) ; + unconditional += $(split[2]) ; + } + + local non-free = [ property.remove free : $(unconditional) ] ; + if $(non-free) + { + import errors : error : errors.error ; + errors.error usage-requirements $(specification) have non-free + properties $(non-free) ; + } + local t = [ property.translate-paths $(specification) : + $(self.location) ] ; + if $(self.usage-requirements) + { + self.usage-requirements = [ property-set.create + [ $(self.usage-requirements).raw ] $(t) ] ; + } + else + { + self.usage-requirements = [ property-set.create $(t) ] ; + } + } + else if $(attribute) = "default-build" + { + self.default-build = [ property.make $(specification) ] ; + } + else if $(attribute) = "source-location" + { + self.source-location = ; + for local src-path in $(specification) + { + self.source-location += [ path.root [ path.make $(src-path) ] + $(self.location) ] ; + } + } + else if $(attribute) = "build-dir" + { + self.build-dir = [ path.root [ path.make $(specification) ] + $(self.location) ] ; + } + else if $(attribute) = "id" + { + self.id = [ project.register-id $(specification) : + $(self.project-module) ] ; + } + else if ! $(attribute) in "default-build" "location" "parent" + "projects-to-build" "project-root" "source-location" + { + import errors : error : errors.error ; + errors.error Invalid project attribute '$(attribute)' specified for + project at '$(self.location)' ; + } + else + { + self.$(attribute) = $(specification) ; + } + } + + # Returns the value of the given attribute. + # + rule get ( attribute ) + { + return $(self.$(attribute)) ; + } + + # Returns whether these attributes belong to a Jamroot project module. + # + rule is-jamroot ( ) + { + if $(self.location) && $(self.project-root) = $(self.location) + { + return true ; + } + } + + # Prints the project attributes. + # + rule print ( ) + { + local id = '$(self.id)' ; + print.section $(id:E=(none)) ; + print.list-start ; + print.list-item "Parent project:" $(self.parent:E=(none)) ; + print.list-item "Requirements:" [ $(self.requirements).raw ] ; + print.list-item "Default build:" $(self.default-build) ; + print.list-item "Source location:" $(self.source-location) ; + print.list-item "Projects to build:" [ sequence.insertion-sort + $(self.projects-to-build) ] ; + print.list-end ; + } +} + + +# Returns the build directory for standalone projects +# +rule standalone-build-dir ( ) +{ + project = [ target $(.first-project-root) ] ; + return [ path.join [ $(project).build-dir ] standalone ] ; +} + +# Returns the project which is currently being loaded. +# +rule current ( ) +{ + if ! 
$(.current-project) + { + import errors ; + errors.error Reference to the project currently being loaded requested + when there was no project module being loaded. ; + } + return $(.current-project) ; +} + + +# Temporarily changes the current project to 'project'. Should be followed by +# 'pop-current'. +# +rule push-current ( project ? ) +{ + .saved-current-project += $(.current-project) ; + .current-project = $(project) ; +} + + +rule pop-current ( ) +{ + .current-project = $(.saved-current-project[-1]) ; + .saved-current-project = $(.saved-current-project[1--2]) ; +} + + +# Returns the project-attribute instance for the specified Jamfile module. +# +rule attributes ( project ) +{ + return $($(project).attributes) ; +} + + +# Returns the value of the specified attribute in the specified Jamfile module. +# +rule attribute ( project attribute ) +{ + return [ $($(project).attributes).get $(attribute) ] ; +} + + +# Returns whether a project module is one of Boost Build's configuration +# modules. +# +rule is-config-module ( project ) +{ + local cfgs = project site test user ; + if $(project) in $(cfgs)-config + { + return true ; + } +} + + +# Returns whether a project module is a Jamroot project module. +# +rule is-jamroot-module ( project ) +{ + return [ $($(project).attributes).is-jamroot ] ; +} + + +# Returns a project's parent jamroot module. Returns nothing if there is no such +# module, i.e. if this is a standalone project or one of the internal Boost +# Build configuration projects. +# +rule get-jamroot-module ( project ) +{ + local jamroot-location = [ attribute $(project) project-root ] ; + if $(jamroot-location) + { + return [ module-name $(jamroot-location) ] ; + } +} + + +# Returns the project target corresponding to the 'project-module'. +# +rule target ( project-module : allow-missing ? ) +{ + if ! $(.target.$(project-module)) && ! $(allow-missing) + { + import errors ; + errors.user-error Project target requested but not yet assigned for + module '$(project-module)'. ; + } + return $(.target.$(project-module)) ; +} + + +# Defines a B2 extension project. Such extensions usually contain +# library targets and features that can be used by many people. Even though +# extensions are really projects, they can be initialized as a module would be +# with the "using" (project.project-rules.using) mechanism. +# +rule extension ( id space ? : options * : * ) +{ + # The caller is a standalone module for the extension. + local mod = [ CALLER_MODULE ] ; + + # We need to do the rest within the extension module. + module $(mod) + { + import path ; + + # Find the root project. + local root-project = [ project.current ] ; + root-project = [ $(root-project).project-module ] ; + while + [ project.attribute $(root-project) parent-module ] && + [ project.attribute $(root-project) parent-module ] != user-config + { + root-project = [ project.attribute $(root-project) parent-module ] ; + } + + # Default to creating extensions in /ext/.. project space. + local id = $(1[1]) ; + local space = $(1[2]) ; + space ?= ext ; + + # Create the project data, and bring in the project rules into the + # module. + project.initialize $(__name__) : [ path.join [ project.attribute + $(root-project) location ] $(space:L) $(id:L) ] ; + + # Create the project itself, i.e. the attributes. 
+ project /$(space:L)/$(id:L) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) + : $(18) : $(19) ; + local attributes = [ project.attributes $(__name__) ] ; + + # Inherit from the root project of whomever is defining us. + project.inherit-attributes $(__name__) : $(root-project) ; + $(attributes).set parent-module : $(root-project) : exact ; + } +} + + +rule glob-internal ( project : wildcards + : excludes * : rule-name ) +{ + local location = [ $(project).get source-location ] ; + + local result ; + local paths = [ path.$(rule-name) $(location) : + [ sequence.transform path.make : $(wildcards) ] : + [ sequence.transform path.make : $(excludes) ] ] ; + if $(wildcards:D) || $(rule-name) != glob + { + # The paths we have found are relative to the current directory, but the + # names specified in the sources list are assumed to be relative to the + # source directory of the corresponding project. So, just make the names + # absolute. + for local p in $(paths) + { + # If the path is below source location, use relative path. + # Otherwise, use full path just to avoid any ambiguities. + local rel = [ path.relative $(p) $(location) : no-error ] ; + if $(rel) = not-a-child + { + result += [ path.root $(p) [ path.pwd ] ] ; + } + else + { + result += $(rel) ; + } + } + } + else + { + # There were no wildcards in the directory path, so the files are all in + # the source directory of the project. Just drop the directory, instead + # of making paths absolute. + result = $(paths:D="") ; + } + + return $(result) ; +} + + +rule glob-path-root ( root path ) +{ + return [ path.root $(path) $(root) ] ; +} + +rule glob-internal-ex ( project : paths + : wildcards + : excludes * : rule-name ) +{ + # Make the paths we search in absolute, if they aren't already absolute. + # If the given paths are relative, they will be relative to the source + # directory. So that's what we root against. + local source-location + = [ path.root [ $(project).get source-location ] [ path.pwd ] ] ; + local search-paths + = [ sequence.transform project.glob-path-root $(source-location) : $(paths) ] ; + paths + = [ path.$(rule-name) $(search-paths) : $(wildcards) : $(excludes) ] ; + # The paths we have found are absolute, but the names specified in the + # sources list are assumed to be relative to the source directory of the + # corresponding project. Make the results relative to the source again. + local result + = [ sequence.transform path.relative-to $(source-location) : $(paths) ] ; + + return $(result) ; +} + + +# This module defines rules common to all projects. +# +module project-rules +{ + import modules ; + + rule using ( toolset-module : * ) + { + import toolset ; + + local saved-project = [ modules.peek project : .current-project ] ; + + # Temporarily change the search path so the module referred to by + # 'using' can be placed in the same directory as Jamfile. User will + # expect the module to be found even though the directory is not in + # BOOST_BUILD_PATH. 
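+        # For example (an illustrative sketch; "my-toolset" is a made-up
+        # name): a Jamroot that ships a my-toolset.jam file next to it can
+        # simply write
+        #
+        #     using my-toolset ;
+        #
+        # and the module is found here without the user having to touch
+        # BOOST_BUILD_PATH.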
+ local x = [ modules.peek : BOOST_BUILD_PATH ] ; + local caller = [ CALLER_MODULE ] ; + local caller-location = [ modules.binding $(caller) ] ; + modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ; + toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) + : $(18) : $(19) ; + modules.poke : BOOST_BUILD_PATH : $(x) ; + + # The above might have clobbered .current-project in case it caused a + # new project instance to be created (which would then automatically + # get set as the 'current' project). Restore the correct value so any + # main targets declared after this do not get mapped to the loaded + # module's project. + modules.poke project : .current-project : $(saved-project) ; + } + + rule import ( * : * : * ) + { + local caller = [ CALLER_MODULE ] ; + local saved-project = [ modules.peek project : .current-project ] ; + module $(caller) + { + modules.import $(1) : $(2) : $(3) ; + } + + # The above might have clobbered .current-project in case it caused a + # new project instance to be created (which would then automatically + # get set as the 'current' project). Restore the correct value so any + # main targets declared after this do not get mapped to the loaded + # module's project. + modules.poke project : .current-project : $(saved-project) ; + } + + rule project ( id ? : options * : * ) + { + import path ; + import project ; + + local caller = [ CALLER_MODULE ] ; + local attributes = [ project.attributes $(caller) ] ; + if $(id) + { + $(attributes).set id : $(id) ; + } + + local explicit-build-dir ; + + for n in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 + { + local option = $($(n)) ; + if $(option) + { + $(attributes).set $(option[1]) : $(option[2-]) ; + } + if $(option[1]) = "build-dir" + { + explicit-build-dir = [ path.make $(option[2-]) ] ; + } + } + + # If '--build-dir' is specified, change the build dir for the project. + local global-build-dir = [ modules.peek project : .global-build-dir ] ; + + if $(global-build-dir) + { + local location = [ $(attributes).get location ] ; + # Project with an empty location is a 'standalone' project such as + # user-config or qt. It has no build dir. If we try to set build dir + # for user-config, we shall then try to inherit it, with either + # weird or wrong consequences. + if $(location) && $(location) = [ $(attributes).get project-root ] + { + # Re-read the project id, since it might have been modified a + # bit when setting the project's id attribute, e.g. might have + # been prefixed by a slash if it was not already. + id = [ $(attributes).get id ] ; + # This is Jamroot. + if $(id) + { + if $(explicit-build-dir) && + [ path.is-rooted $(explicit-build-dir) ] + { + import errors ; + errors.user-error Absolute directory specified via + 'build-dir' project attribute : Do not know how to + combine that with the --build-dir option. ; + } + # Strip the leading slash from id. + local rid = [ MATCH ^/(.*) : $(id) ] ; + local p = [ path.join $(global-build-dir) $(rid) + $(explicit-build-dir) ] ; + + $(attributes).set build-dir : $(p) : exact ; + } + } + else + { + # Not Jamroot. + if $(explicit-build-dir) + { + import errors ; + errors.user-error When --build-dir is specified, the + 'build-dir' project : attribute is allowed only for + top-level 'project' invocations ; + } + } + } + } + + # Declare and set a project global constant. Project global constants are + # normal variables but should not be changed. 
They are applied to every + # child Jamfile. + # + rule constant ( name : value + ) + { + import project ; + local caller = [ CALLER_MODULE ] ; + local p = [ project.target $(caller) ] ; + $(p).add-constant $(name) : $(value) ; + } + + # Declare and set a project global constant, whose value is a path. The path + # is adjusted to be relative to the invocation directory. The given value + # path is taken to be either absolute, or relative to this project root. + # + rule path-constant ( name : value + ) + { + import project ; + local caller = [ CALLER_MODULE ] ; + local p = [ project.target $(caller) ] ; + $(p).add-constant $(name) : $(value) : path ; + } + + rule use-project ( id : where ) + { + # See comment in 'load' for explanation. + local caller = [ CALLER_MODULE ] ; + modules.poke $(caller) : .used-projects : [ modules.peek $(caller) : + .used-projects ] $(id) $(where) ; + } + + rule build-project ( dir ) + { + import project ; + local caller = [ CALLER_MODULE ] ; + local attributes = [ project.attributes $(caller) ] ; + local now = [ $(attributes).get projects-to-build ] ; + $(attributes).set projects-to-build : $(now) $(dir) ; + } + + rule explicit ( target-names * ) + { + import project ; + # If 'explicit' is used in a helper rule defined in Jamroot and + # inherited by children, then most of the time we want 'explicit' to + # operate on the Jamfile where the helper rule is invoked. + local t = [ project.current ] ; + for local n in $(target-names) + { + $(t).mark-target-as-explicit $(n) ; + } + } + + rule always ( target-names * ) + { + import project ; + local t = [ project.current ] ; + for local n in $(target-names) + { + $(t).mark-target-as-always $(n) ; + } + } + + rule glob ( wildcards + : excludes * ) + { + import project ; + return [ project.glob-internal [ project.current ] : $(wildcards) : + $(excludes) : glob ] ; + } + + rule glob-tree ( wildcards + : excludes * ) + { + import project ; + if $(wildcards:D) || $(excludes:D) + { + import errors ; + errors.user-error The patterns to 'glob-tree' may not include + directory ; + } + return [ project.glob-internal [ project.current ] : $(wildcards) : + $(excludes) : glob-tree ] ; + } + + rule glob-ex ( paths + : wildcards + : excludes * ) + { + import project ; + return [ project.glob-internal-ex [ project.current ] + : $(paths) : $(wildcards) : $(excludes) : glob ] ; + } + + rule glob-tree-ex ( paths + : wildcards + : excludes * ) + { + import project ; + return [ project.glob-internal-ex [ project.current ] + : $(paths) : $(wildcards) : $(excludes) : glob-tree ] ; + } + + # Calculates conditional requirements for multiple requirements at once. + # This is a shorthand to reduce duplication and to keep an inline + # declarative syntax. For example: + # + # lib x : x.cpp : [ conditional gcc debug : + # DEBUG_EXCEPTION DEBUG_TRACE ] ; + # + rule conditional ( condition + : requirements * ) + { + local condition = $(condition:J=,) ; + if [ MATCH "(:)" : $(condition) ] + { + return $(condition)$(requirements) ; + } + else + { + return "$(condition):$(requirements)" ; + } + } + + rule option ( name : value ) + { + local m = [ CALLER_MODULE ] ; + local cfgs = project site test user ; + if ! $(m) in $(cfgs)-config + { + import errors ; + errors.error The 'option' rule may only be used "in" Boost Build + configuration files. ; + } + import option ; + option.set $(name) : $(value) ; + } + + # This allows one to manually import a package manager build information file. 
+    # The argument can be either a symbolic name of a supported package manager
+    # or a glob pattern to load a b2 jam file.
+    #
+    rule use-packages ( name-or-glob-pattern ? )
+    {
+        local m = [ CALLER_MODULE ] ;
+        local glob-pattern = $(name-or-glob-pattern) ;
+        local glob-for-name = [ modules.peek project : PACKAGE_MANAGER_BUILD_INFO($(name-or-glob-pattern:U)) ] ;
+        if $(glob-for-name)
+        {
+            glob-pattern = $(glob-for-name) ;
+        }
+        modules.call-in $(m) : constant PACKAGE_MANAGER_BUILD_INFO : $(glob-pattern) ;
+        IMPORT project : load-package-manager-build-info : $(m) : project.load-package-manager-build-info ;
+        modules.call-in $(m) : project.load-package-manager-build-info ;
+    }
+}
diff --git a/src/boost/tools/build/src/build/project.py b/src/boost/tools/build/src/build/project.py
new file mode 100644
index 000000000..0a80c93b9
--- /dev/null
+++ b/src/boost/tools/build/src/build/project.py
@@ -0,0 +1,1285 @@
+# Status: ported.
+# Base revision: 64488
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE.txt or copy at
+# https://www.bfgroup.xyz/b2/LICENSE.txt)
+
+# Implements project representation and loading. Each project is represented
+# by:
+#  - a module where all the Jamfile content lives.
+#  - an instance of the 'project-attributes' class.
+#    (given a module name, it can be obtained using the 'attributes' rule)
+#  - an instance of the 'project-target' class (from targets.jam)
+#    (given a module name, it can be obtained using the 'target' rule)
+#
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by the rules 'load' and 'initialize', below. First, a module for the
+# Jamfile is loaded and a new project-attributes instance is created. Some
+# rules necessary for the project are added to the module (see the
+# 'project-rules' module) at the bottom of this file. Default project
+# attributes are set (inheriting attributes of the parent project, if it
+# exists). After that the Jamfile is read. It can declare its own attributes
+# using the 'project' rule, which will be combined with any already set
+# attributes.
+#
+# The 'project' rule can also declare a project id which will be associated
+# with the project module.
+#
+# There can also be 'standalone' projects. They are created by calling
+# 'initialize' on an arbitrary module and not specifying their location. After
+# the call, the module can call the 'project' rule, declare main targets and
+# behave as a regular project except that, since it is not associated with any
+# location, it should only declare prebuilt targets.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
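+#
+# For example (an illustrative sketch), a Jamroot may register another project
+# under an id and then refer to it by that id rather than by a relative path:
+#
+#     use-project /util : ../util ;
+#
+# After this, '/util' resolves to the project module loaded from '../util'.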
+ +import b2.util.path +import b2.build.targets +from b2.build import property_set, property +from b2.build.errors import ExceptionWithUserContext +from b2.manager import get_manager + +import bjam +import b2 + +import re +import sys +import pkgutil +import os +import string +import imp +import traceback +import b2.util.option as option + +from b2.util import ( + record_jam_to_value_mapping, qualify_jam_action, is_iterable_typed, bjam_signature, + is_iterable) + + +class ProjectRegistry: + + def __init__(self, manager, global_build_dir): + self.manager = manager + self.global_build_dir = global_build_dir + self.project_rules_ = ProjectRules(self) + + # The target corresponding to the project being loaded now + self.current_project = None + + # The set of names of loaded project modules + self.jamfile_modules = {} + + # Mapping from location to module name + self.location2module = {} + + # Mapping from project id to project module + self.id2module = {} + + # Map from Jamfile directory to parent Jamfile/Jamroot + # location. + self.dir2parent_jamfile = {} + + # Map from directory to the name of Jamfile in + # that directory (or None). + self.dir2jamfile = {} + + # Map from project module to attributes object. + self.module2attributes = {} + + # Map from project module to target for the project + self.module2target = {} + + # Map from names to Python modules, for modules loaded + # via 'using' and 'import' rules in Jamfiles. + self.loaded_tool_modules_ = {} + + self.loaded_tool_module_path_ = {} + + # Map from project target to the list of + # (id,location) pairs corresponding to all 'use-project' + # invocations. + # TODO: should not have a global map, keep this + # in ProjectTarget. + self.used_projects = {} + + self.saved_current_project = [] + + self.JAMROOT = self.manager.getenv("JAMROOT"); + + # Note the use of character groups, as opposed to listing + # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate + # matches on windows and would have to eliminate duplicates. + if not self.JAMROOT: + self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"] + + # Default patterns to search for the Jamfiles to use for build + # declarations. + self.JAMFILE = self.manager.getenv("JAMFILE") + + if not self.JAMFILE: + self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile", + "[Jj]amfile.jam"] + + self.__python_module_cache = {} + + + def load (self, jamfile_location): + """Loads jamfile at the given location. After loading, project global + file and jamfile needed by the loaded one will be loaded recursively. + If the jamfile at that location is loaded already, does nothing. + Returns the project module for the Jamfile.""" + assert isinstance(jamfile_location, basestring) + + absolute = os.path.join(os.getcwd(), jamfile_location) + absolute = os.path.normpath(absolute) + jamfile_location = b2.util.path.relpath(os.getcwd(), absolute) + + mname = self.module_name(jamfile_location) + # If Jamfile is already loaded, do not try again. + if not mname in self.jamfile_modules: + + if "--debug-loading" in self.manager.argv(): + print "Loading Jamfile at '%s'" % jamfile_location + + self.load_jamfile(jamfile_location, mname) + + # We want to make sure that child project are loaded only + # after parent projects. In particular, because parent projects + # define attributes which are inherited by children, and we do not + # want children to be loaded before parents has defined everything. 
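+            # For example (an illustrative sketch), a Jamroot line such as
+            #
+            #     use-project /util : util ;
+            #
+            # does not load 'util' at the point it is read; the (id, location)
+            # pair is only recorded and later resolved by
+            # load_used_projects() below.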
+            #
+            # While "build-project" and "use-project" can potentially refer
+            # to child projects from parent projects, we do not immediately
+            # load child projects when seeing those attributes. Instead,
+            # we record the minimal information that will be used only later.
+
+        self.load_used_projects(mname)
+
+        return mname
+
+    def load_used_projects(self, module_name):
+        assert isinstance(module_name, basestring)
+        # local used = [ modules.peek $(module-name) : .used-projects ] ;
+        used = self.used_projects[module_name]
+
+        location = self.attribute(module_name, "location")
+        for u in used:
+            id = u[0]
+            where = u[1]
+
+            self.use(id, os.path.join(location, where))
+
+    def load_parent(self, location):
+        """Loads the parent of the Jamfile at 'location'.
+        Issues an error if nothing is found."""
+        assert isinstance(location, basestring)
+        found = b2.util.path.glob_in_parents(
+            location, self.JAMROOT + self.JAMFILE)
+
+        if not found:
+            print "error: Could not find parent for project at '%s'" % location
+            print "error: Did not find Jamfile.jam or Jamroot.jam in any parent directory."
+            sys.exit(1)
+
+        return self.load(os.path.dirname(found[0]))
+
+    def find(self, name, current_location):
+        """Given 'name', which can be a project id or a plain directory name,
+        return the project module corresponding to that id or directory.
+        Returns nothing if the project is not found."""
+        assert isinstance(name, basestring)
+        assert isinstance(current_location, basestring)
+
+        project_module = None
+
+        # Try interpreting name as project id.
+        if name[0] == '/':
+            project_module = self.id2module.get(name)
+
+        if not project_module:
+            location = os.path.join(current_location, name)
+            # If no project is registered for the given location, try to
+            # load it. First see if we have a Jamfile. If not, we might have
+            # a project root willing to act as a Jamfile. In that case, the
+            # project root must be placed in the directory referred to by id.
+
+            project_module = self.module_name(location)
+            if not project_module in self.jamfile_modules:
+                if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
+                    project_module = self.load(location)
+                else:
+                    project_module = None
+
+        return project_module
+
+    def module_name(self, jamfile_location):
+        """Returns the name of the module corresponding to 'jamfile-location'.
+        If no module corresponds to the location yet, associates a default
+        module name with that location."""
+        assert isinstance(jamfile_location, basestring)
+        module = self.location2module.get(jamfile_location)
+        if not module:
+            # Root the path, so that locations are always unambiguous.
+            # Without this, we can't decide if '../../exe/program1' and '.'
+            # are the same paths, or not.
+            jamfile_location = os.path.realpath(
+                os.path.join(os.getcwd(), jamfile_location))
+            module = "Jamfile<%s>" % jamfile_location
+            self.location2module[jamfile_location] = module
+        return module
+
+    def find_jamfile (self, dir, parent_root=0, no_errors=0):
+        """Find the Jamfile at the given location. This returns the
+        exact names of all the Jamfiles in the given directory. The optional
+        parent-root argument causes this to search not the given directory
+        but the ones above it up to the directory given in it."""
+        assert isinstance(dir, basestring)
+        assert isinstance(parent_root, (int, bool))
+        assert isinstance(no_errors, (int, bool))
+
+        # Glob for all the possible Jamfiles according to the match pattern.
+        #
+        jamfile_glob = None
+        if parent_root:
+            parent = self.dir2parent_jamfile.get(dir)
+            if not parent:
+                parent = b2.util.path.glob_in_parents(dir,
+                                                      self.JAMFILE)
+                self.dir2parent_jamfile[dir] = parent
+            jamfile_glob = parent
+        else:
+            jamfile = self.dir2jamfile.get(dir)
+            if not jamfile:
+                jamfile = b2.util.path.glob([dir], self.JAMFILE)
+                self.dir2jamfile[dir] = jamfile
+            jamfile_glob = jamfile
+
+        if len(jamfile_glob) > 1:
+            # Multiple Jamfiles found in the same place. Warn about this.
+            # And ensure we use only one of them.
+            # As a temporary convenience measure, if there is a Jamfile.v2
+            # among the found files, suppress the warning and use it.
+            #
+            pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
+            v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
+            if len(v2_jamfiles) == 1:
+                jamfile_glob = v2_jamfiles
+            else:
+                print """warning: Found multiple Jamfiles at '%s'!""" % (dir)
+                for j in jamfile_glob:
+                    print "    -", j
+                print "Loading the first one"
+
+        # Could not find it, error.
+        if not no_errors and not jamfile_glob:
+            self.manager.errors()(
+                """Unable to load Jamfile.
+Could not find a Jamfile in directory '%s'
+Attempted to find it with pattern '%s'.
+Please consult the documentation at 'https://www.bfgroup.xyz/b2/'."""
+                % (dir, string.join(self.JAMFILE)))
+
+        if jamfile_glob:
+            return jamfile_glob[0]
+
+    def load_jamfile(self, dir, jamfile_module):
+        """Load a Jamfile at the given directory. Returns nothing.
+        Will attempt to load the file as indicated by the JAMFILE patterns.
+        Effect of calling this rule twice with the same 'dir' is undefined."""
+        assert isinstance(dir, basestring)
+        assert isinstance(jamfile_module, basestring)
+
+        # See if the Jamfile is where it should be.
+        is_jamroot = False
+        jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
+        if jamfile_to_load:
+            if len(jamfile_to_load) > 1:
+                get_manager().errors()(
+                    "Multiple Jamfiles found at '{}'\n"
+                    "Filenames are: {}"
+                    .format(dir, ' '.join(os.path.basename(j) for j in jamfile_to_load))
+                )
+            is_jamroot = True
+            jamfile_to_load = jamfile_to_load[0]
+        else:
+            jamfile_to_load = self.find_jamfile(dir)
+
+        dir = os.path.dirname(jamfile_to_load)
+        if not dir:
+            dir = "."
+
+        self.used_projects[jamfile_module] = []
+
+        # Now load the Jamfile in its own context.
+        # The call to 'initialize' may load the parent Jamfile, which might
+        # have a 'use-project' statement that causes a second attempt to load
+        # the same project we're loading now. Checking inside .jamfile-modules
+        # prevents that second attempt from messing things up.
+        if not jamfile_module in self.jamfile_modules:
+            previous_project = self.current_project
+            # Initialize the jamfile module before loading.
+            self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
+
+            if not jamfile_module in self.jamfile_modules:
+                saved_project = self.current_project
+                self.jamfile_modules[jamfile_module] = True
+
+                bjam.call("load", jamfile_module, jamfile_to_load)
+
+                if is_jamroot:
+                    jamfile = self.find_jamfile(dir, no_errors=True)
+                    if jamfile:
+                        bjam.call("load", jamfile_module, jamfile)
+
+                # Now do some checks
+                if self.current_project != saved_project:
+                    from textwrap import dedent
+                    self.manager.errors()(dedent(
+                        """
+                        The value of the .current-project variable has magically changed
+                        after loading a Jamfile. This means some of the targets might be
+                        defined in the wrong project.
+ after loading %s + expected value %s + actual value %s + """ + % (jamfile_module, saved_project, self.current_project) + )) + + self.end_load(previous_project) + + if self.global_build_dir: + id = self.attributeDefault(jamfile_module, "id", None) + project_root = self.attribute(jamfile_module, "project-root") + location = self.attribute(jamfile_module, "location") + + if location and project_root == dir: + # This is Jamroot + if not id: + # FIXME: go via errors module, so that contexts are + # shown? + print "warning: the --build-dir option was specified" + print "warning: but Jamroot at '%s'" % dir + print "warning: specified no project id" + print "warning: the --build-dir option will be ignored" + + def end_load(self, previous_project=None): + if not self.current_project: + self.manager.errors()( + 'Ending project loading requested when there was no project currently ' + 'being loaded.' + ) + + if not previous_project and self.saved_current_project: + self.manager.errors()( + 'Ending project loading requested with no "previous project" when there ' + 'other projects still being loaded recursively.' + ) + + self.current_project = previous_project + + def load_standalone(self, jamfile_module, file): + """Loads 'file' as standalone project that has no location + associated with it. This is mostly useful for user-config.jam, + which should be able to define targets, but although it has + some location in filesystem, we do not want any build to + happen in user's HOME, for example. + + The caller is required to never call this method twice on + the same file. + """ + assert isinstance(jamfile_module, basestring) + assert isinstance(file, basestring) + + self.used_projects[jamfile_module] = [] + bjam.call("load", jamfile_module, file) + self.load_used_projects(jamfile_module) + + def is_jamroot(self, basename): + assert isinstance(basename, basestring) + match = [ pat for pat in self.JAMROOT if re.match(pat, basename)] + if match: + return 1 + else: + return 0 + + def initialize(self, module_name, location=None, basename=None, standalone_path=''): + """Initialize the module for a project. + + module-name is the name of the project module. + location is the location (directory) of the project to initialize. + If not specified, standalone project will be initialized + standalone_path is the path to the source-location. + this should only be called from the python side. + """ + assert isinstance(module_name, basestring) + assert isinstance(location, basestring) or location is None + assert isinstance(basename, basestring) or basename is None + jamroot = False + parent_module = None + if module_name == "test-config": + # No parent + pass + elif module_name == "site-config": + parent_module = "test-config" + elif module_name == "user-config": + parent_module = "site-config" + elif module_name == "project-config": + parent_module = "user-config" + elif location and not self.is_jamroot(basename): + # We search for parent/project-root only if jamfile was specified + # --- i.e + # if the project is not standalone. + parent_module = self.load_parent(location) + elif location: + # It's either jamroot, or standalone project. + # If it's jamroot, inherit from user-config. + # If project-config module exist, inherit from it. + parent_module = 'user-config' + if 'project-config' in self.module2attributes: + parent_module = 'project-config' + jamroot = True + + # TODO: need to consider if standalone projects can do anything but defining + # prebuilt targets. 
If so, we need to give more sensible "location", so that + # source paths are correct. + if not location: + location = "" + + # the call to load_parent() above can end up loading this module again + # make sure we don't reinitialize the module's attributes + if module_name not in self.module2attributes: + if "--debug-loading" in self.manager.argv(): + print "Initializing project '%s'" % module_name + attributes = ProjectAttributes(self.manager, location, module_name) + self.module2attributes[module_name] = attributes + + python_standalone = False + if location: + attributes.set("source-location", [location], exact=1) + elif not module_name in ["test-config", "site-config", "user-config", "project-config"]: + # This is a standalone project with known location. Set source location + # so that it can declare targets. This is intended so that you can put + # a .jam file in your sources and use it via 'using'. Standard modules + # (in 'tools' subdir) may not assume source dir is set. + source_location = standalone_path + if not source_location: + source_location = self.loaded_tool_module_path_.get(module_name) + if not source_location: + self.manager.errors()('Standalone module path not found for "{}"' + .format(module_name)) + attributes.set("source-location", [source_location], exact=1) + python_standalone = True + + attributes.set("requirements", property_set.empty(), exact=True) + attributes.set("usage-requirements", property_set.empty(), exact=True) + attributes.set("default-build", property_set.empty(), exact=True) + attributes.set("projects-to-build", [], exact=True) + attributes.set("project-root", None, exact=True) + attributes.set("build-dir", None, exact=True) + + self.project_rules_.init_project(module_name, python_standalone) + + if parent_module: + self.inherit_attributes(module_name, parent_module) + attributes.set("parent-module", parent_module, exact=1) + + if jamroot: + attributes.set("project-root", location, exact=1) + + parent = None + if parent_module: + parent = self.target(parent_module) + + if module_name not in self.module2target: + target = b2.build.targets.ProjectTarget(self.manager, + module_name, module_name, parent, + self.attribute(module_name, "requirements"), + # FIXME: why we need to pass this? It's not + # passed in jam code. + self.attribute(module_name, "default-build")) + self.module2target[module_name] = target + + self.current_project = self.target(module_name) + + def inherit_attributes(self, project_module, parent_module): + """Make 'project-module' inherit attributes of project + root and parent module.""" + assert isinstance(project_module, basestring) + assert isinstance(parent_module, basestring) + + attributes = self.module2attributes[project_module] + pattributes = self.module2attributes[parent_module] + + # Parent module might be locationless user-config. 
+ # FIXME: + #if [ modules.binding $(parent-module) ] + #{ + # $(attributes).set parent : [ path.parent + # [ path.make [ modules.binding $(parent-module) ] ] ] ; + # } + + attributes.set("project-root", pattributes.get("project-root"), exact=True) + attributes.set("default-build", pattributes.get("default-build"), exact=True) + attributes.set("requirements", pattributes.get("requirements"), exact=True) + attributes.set("usage-requirements", + pattributes.get("usage-requirements"), exact=1) + + parent_build_dir = pattributes.get("build-dir") + + if parent_build_dir: + # Have to compute relative path from parent dir to our dir + # Convert both paths to absolute, since we cannot + # find relative path from ".." to "." + + location = attributes.get("location") + parent_location = pattributes.get("location") + + our_dir = os.path.join(os.getcwd(), location) + parent_dir = os.path.join(os.getcwd(), parent_location) + + build_dir = os.path.join(parent_build_dir, + os.path.relpath(our_dir, parent_dir)) + attributes.set("build-dir", build_dir, exact=True) + + def register_id(self, id, module): + """Associate the given id with the given project module.""" + assert isinstance(id, basestring) + assert isinstance(module, basestring) + self.id2module[id] = module + + def current(self): + """Returns the project which is currently being loaded.""" + if not self.current_project: + get_manager().errors()( + 'Reference to the project currently being loaded requested ' + 'when there was no project module being loaded.' + ) + return self.current_project + + def set_current(self, c): + if __debug__: + from .targets import ProjectTarget + assert isinstance(c, ProjectTarget) + self.current_project = c + + def push_current(self, project): + """Temporary changes the current project to 'project'. 
Should + be followed by 'pop-current'.""" + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + self.saved_current_project.append(self.current_project) + self.current_project = project + + def pop_current(self): + if self.saved_current_project: + self.current_project = self.saved_current_project.pop() + else: + self.current_project = None + + def attributes(self, project): + """Returns the project-attribute instance for the + specified jamfile module.""" + assert isinstance(project, basestring) + return self.module2attributes[project] + + def attribute(self, project, attribute): + """Returns the value of the specified attribute in the + specified jamfile module.""" + assert isinstance(project, basestring) + assert isinstance(attribute, basestring) + try: + return self.module2attributes[project].get(attribute) + except: + raise BaseException("No attribute '%s' for project %s" % (attribute, project)) + + def attributeDefault(self, project, attribute, default): + """Returns the value of the specified attribute in the + specified jamfile module.""" + assert isinstance(project, basestring) + assert isinstance(attribute, basestring) + assert isinstance(default, basestring) or default is None + return self.module2attributes[project].getDefault(attribute, default) + + def target(self, project_module): + """Returns the project target corresponding to the 'project-module'.""" + assert isinstance(project_module, basestring) + if project_module not in self.module2target: + self.module2target[project_module] = \ + b2.build.targets.ProjectTarget(project_module, project_module, + self.attribute(project_module, "requirements")) + + return self.module2target[project_module] + + def use(self, id, location): + # Use/load a project. + assert isinstance(id, basestring) + assert isinstance(location, basestring) + saved_project = self.current_project + project_module = self.load(location) + declared_id = self.attributeDefault(project_module, "id", "") + + if not declared_id or declared_id != id: + # The project at 'location' either have no id or + # that id is not equal to the 'id' parameter. + if id in self.id2module and self.id2module[id] != project_module: + self.manager.errors()( +"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location)) + self.id2module[id] = project_module + + self.current_project = saved_project + + def add_rule(self, name, callable_): + """Makes rule 'name' available to all subsequently loaded Jamfiles. 
+ + Calling that rule will relay to 'callable'.""" + assert isinstance(name, basestring) + assert callable(callable_) + self.project_rules_.add_rule(name, callable_) + + def project_rules(self): + return self.project_rules_ + + def glob_internal(self, project, wildcards, excludes, rule_name): + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert is_iterable_typed(wildcards, basestring) + assert is_iterable_typed(excludes, basestring) or excludes is None + assert isinstance(rule_name, basestring) + location = project.get("source-location")[0] + + result = [] + callable = b2.util.path.__dict__[rule_name] + + paths = callable([location], wildcards, excludes) + has_dir = 0 + for w in wildcards: + if os.path.dirname(w): + has_dir = 1 + break + + if has_dir or rule_name != "glob": + result = [] + # The paths we've found are relative to current directory, + # but the names specified in sources list are assumed to + # be relative to source directory of the corresponding + # prject. Either translate them or make absolute. + + for p in paths: + rel = os.path.relpath(p, location) + # If the path is below source location, use relative path. + if not ".." in rel: + result.append(rel) + else: + # Otherwise, use full path just to avoid any ambiguities. + result.append(os.path.abspath(p)) + + else: + # There were not directory in wildcard, so the files are all + # in the source directory of the project. Just drop the + # directory, instead of making paths absolute. + result = [os.path.basename(p) for p in paths] + + return result + + def __build_python_module_cache(self): + """Recursively walks through the b2/src subdirectories and + creates an index of base module name to package name. The + index is stored within self.__python_module_cache and allows + for an O(1) module lookup. + + For example, given the base module name `toolset`, + self.__python_module_cache['toolset'] will return + 'b2.build.toolset' + + pkgutil.walk_packages() will find any python package + provided a directory contains an __init__.py. This has the + added benefit of allowing libraries to be installed and + automatically available within the contrib directory. + + *Note*: pkgutil.walk_packages() will import any subpackage + in order to access its __path__variable. Meaning: + any initialization code will be run if the package hasn't + already been imported. + """ + cache = {} + for importer, mname, ispkg in pkgutil.walk_packages(b2.__path__, prefix='b2.'): + basename = mname.split('.')[-1] + # since the jam code is only going to have "import toolset ;" + # it doesn't matter if there are separately named "b2.build.toolset" and + # "b2.contrib.toolset" as it is impossible to know which the user is + # referring to. + if basename in cache: + self.manager.errors()('duplicate module name "{0}" ' + 'found in boost-build path'.format(basename)) + cache[basename] = mname + self.__python_module_cache = cache + + def load_module(self, name, extra_path=None): + """Load a Python module that should be usable from Jamfiles. + + There are generally two types of modules Jamfiles might want to + use: + - Core Boost.Build. Those are imported using plain names, e.g. + 'toolset', so this function checks if we have module named + b2.package.module already. + - Python modules in the same directory as Jamfile. We don't + want to even temporary add Jamfile's directory to sys.path, + since then we might get naming conflicts between standard + Python modules and those. 
+ """ + assert isinstance(name, basestring) + assert is_iterable_typed(extra_path, basestring) or extra_path is None + # See if we loaded module of this name already + existing = self.loaded_tool_modules_.get(name) + if existing: + return existing + + # check the extra path as well as any paths outside + # of the b2 package and import the module if it exists + b2_path = os.path.normpath(b2.__path__[0]) + # normalize the pathing in the BOOST_BUILD_PATH. + # this allows for using startswith() to determine + # if a path is a subdirectory of the b2 root_path + paths = [os.path.normpath(p) for p in self.manager.boost_build_path()] + # remove all paths that start with b2's root_path + paths = [p for p in paths if not p.startswith(b2_path)] + # add any extra paths + paths.extend(extra_path) + + try: + # find_module is used so that the pyc's can be used. + # an ImportError is raised if not found + f, location, description = imp.find_module(name, paths) + except ImportError: + # if the module is not found in the b2 package, + # this error will be handled later + pass + else: + # we've found the module, now let's try loading it. + # it's possible that the module itself contains an ImportError + # which is why we're loading it in this else clause so that the + # proper error message is shown to the end user. + # TODO: does this module name really need to be mangled like this? + mname = name + "__for_jamfile" + self.loaded_tool_module_path_[mname] = location + module = imp.load_module(mname, f, location, description) + self.loaded_tool_modules_[name] = module + return module + + # the cache is created here due to possibly importing packages + # that end up calling get_manager() which might fail + if not self.__python_module_cache: + self.__build_python_module_cache() + + underscore_name = name.replace('-', '_') + # check to see if the module is within the b2 package + # and already loaded + mname = self.__python_module_cache.get(underscore_name) + if mname in sys.modules: + return sys.modules[mname] + # otherwise, if the module name is within the cache, + # the module exists within the BOOST_BUILD_PATH, + # load it. + elif mname: + # in some cases, self.loaded_tool_module_path_ needs to + # have the path to the file during the import + # (project.initialize() for example), + # so the path needs to be set *before* importing the module. + path = os.path.join(b2.__path__[0], *mname.split('.')[1:]) + self.loaded_tool_module_path_[mname] = path + # mname is guaranteed to be importable since it was + # found within the cache + __import__(mname) + module = sys.modules[mname] + self.loaded_tool_modules_[name] = module + return module + + self.manager.errors()("Cannot find module '%s'" % name) + + + +# FIXME: +# Defines a Boost.Build extension project. Such extensions usually +# contain library targets and features that can be used by many people. +# Even though extensions are really projects, they can be initialize as +# a module would be with the "using" (project.project-rules.using) +# mechanism. +#rule extension ( id : options * : * ) +#{ +# # The caller is a standalone module for the extension. +# local mod = [ CALLER_MODULE ] ; +# +# # We need to do the rest within the extension module. +# module $(mod) +# { +# import path ; +# +# # Find the root project. 
+# local root-project = [ project.current ] ; +# root-project = [ $(root-project).project-module ] ; +# while +# [ project.attribute $(root-project) parent-module ] && +# [ project.attribute $(root-project) parent-module ] != user-config +# { +# root-project = [ project.attribute $(root-project) parent-module ] ; +# } +# +# # Create the project data, and bring in the project rules +# # into the module. +# project.initialize $(__name__) : +# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ; +# +# # Create the project itself, i.e. the attributes. +# # All extensions are created in the "/ext" project space. +# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; +# local attributes = [ project.attributes $(__name__) ] ; +# +# # Inherit from the root project of whomever is defining us. +# project.inherit-attributes $(__name__) : $(root-project) ; +# $(attributes).set parent-module : $(root-project) : exact ; +# } +#} + + +class ProjectAttributes: + """Class keeping all the attributes of a project. + + The standard attributes are 'id', "location", "project-root", "parent" + "requirements", "default-build", "source-location" and "projects-to-build". + """ + + def __init__(self, manager, location, project_module): + self.manager = manager + self.location = location + self.project_module = project_module + self.attributes = {} + self.usage_requirements = None + + def set(self, attribute, specification, exact=False): + """Set the named attribute from the specification given by the user. + The value actually set may be different.""" + assert isinstance(attribute, basestring) + assert isinstance(exact, (int, bool)) + if __debug__ and not exact: + if attribute == 'requirements': + assert (isinstance(specification, property_set.PropertySet) + or all(isinstance(s, basestring) for s in specification)) + elif attribute in ( + 'usage-requirements', 'default-build', 'source-location', 'build-dir', 'id'): + assert is_iterable_typed(specification, basestring) + elif __debug__: + assert ( + isinstance(specification, (property_set.PropertySet, type(None), basestring)) + or all(isinstance(s, basestring) for s in specification) + ) + if exact: + self.__dict__[attribute] = specification + + elif attribute == "requirements": + self.requirements = property_set.refine_from_user_input( + self.requirements, specification, + self.project_module, self.location) + + elif attribute == "usage-requirements": + unconditional = [] + for p in specification: + split = property.split_conditional(p) + if split: + unconditional.append(split[1]) + else: + unconditional.append(p) + + non_free = property.remove("free", unconditional) + if non_free: + get_manager().errors()("usage-requirements %s have non-free properties %s" \ + % (specification, non_free)) + + t = property.translate_paths( + property.create_from_strings(specification, allow_condition=True), + self.location) + + existing = self.__dict__.get("usage-requirements") + if existing: + new = property_set.create(existing.all() + t) + else: + new = property_set.create(t) + self.__dict__["usage-requirements"] = new + + + elif attribute == "default-build": + self.__dict__["default-build"] = property_set.create(specification) + + elif attribute == "source-location": + source_location = [] + for path in specification: + source_location.append(os.path.join(self.location, path)) + self.__dict__["source-location"] = source_location + + elif attribute == "build-dir": + self.__dict__["build-dir"] = os.path.join(self.location, 
specification[0])
+
+        elif attribute == "id":
+            id = specification[0]
+            if id[0] != '/':
+                id = "/" + id
+            self.manager.projects().register_id(id, self.project_module)
+            self.__dict__["id"] = id
+
+        elif not attribute in ["default-build", "location",
+                               "source-location", "parent",
+                               "projects-to-build", "project-root"]:
+            self.manager.errors()(
+"""Invalid project attribute '%s' specified
+for project at '%s'""" % (attribute, self.location))
+        else:
+            self.__dict__[attribute] = specification
+
+    def get(self, attribute):
+        assert isinstance(attribute, basestring)
+        return self.__dict__[attribute]
+
+    def getDefault(self, attribute, default):
+        assert isinstance(attribute, basestring)
+        return self.__dict__.get(attribute, default)
+
+    def dump(self):
+        """Prints the project attributes."""
+        id = self.get("id")
+        if not id:
+            id = "(none)"
+        else:
+            id = id[0]
+
+        parent = self.get("parent")
+        if not parent:
+            parent = "(none)"
+        else:
+            parent = parent[0]
+
+        print "'%s'" % id
+        print "Parent project: %s" % parent
+        print "Requirements: %s" % self.get("requirements")
+        print "Default build: %s" % string.join(self.get("default-build"))
+        print "Source location: %s" % string.join(self.get("source-location"))
+        print "Projects to build: %s" % string.join(sorted(self.get("projects-to-build")))
+
+class ProjectRules:
+    """Class keeping all rules that are made available to Jamfile."""
+
+    def __init__(self, registry):
+        self.registry = registry
+        self.manager_ = registry.manager
+        self.rules = {}
+        self.local_names = [x for x in self.__class__.__dict__
+                            if x not in ["__init__", "init_project", "add_rule",
+                                         "error_reporting_wrapper", "add_rule_for_type", "reverse"]]
+        self.all_names_ = [x for x in self.local_names]
+
+    def _import_rule(self, bjam_module, name, callable_):
+        assert isinstance(bjam_module, basestring)
+        assert isinstance(name, basestring)
+        assert callable(callable_)
+        if hasattr(callable_, "bjam_signature"):
+            bjam.import_rule(bjam_module, name, self.make_wrapper(callable_), callable_.bjam_signature)
+        else:
+            bjam.import_rule(bjam_module, name, self.make_wrapper(callable_))
+
+    def add_rule_for_type(self, type):
+        assert isinstance(type, basestring)
+        rule_name = type.lower().replace("_", "-")
+
+        @bjam_signature([['name'], ['sources', '*'], ['requirements', '*'],
+                         ['default_build', '*'], ['usage_requirements', '*']])
+        def xpto(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+            return self.manager_.targets().create_typed_target(
+                type, self.registry.current(), name, sources,
+                requirements, default_build, usage_requirements)
+
+        self.add_rule(rule_name, xpto)
+
+    def add_rule(self, name, callable_):
+        assert isinstance(name, basestring)
+        assert callable(callable_)
+        self.rules[name] = callable_
+        self.all_names_.append(name)
+
+        # Add new rule at global bjam scope. This might not be ideal,
+        # added because if a jamroot does 'import foo' where foo calls
+        # add_rule, we need to import new rule to jamroot scope, and
+        # I'm lazy to do this now.
+ self._import_rule("", name, callable_) + + def all_names(self): + return self.all_names_ + + def call_and_report_errors(self, callable_, *args, **kw): + assert callable(callable_) + result = None + try: + self.manager_.errors().push_jamfile_context() + result = callable_(*args, **kw) + except ExceptionWithUserContext, e: + e.report() + except Exception, e: + try: + self.manager_.errors().handle_stray_exception (e) + except ExceptionWithUserContext, e: + e.report() + finally: + self.manager_.errors().pop_jamfile_context() + + return result + + def make_wrapper(self, callable_): + """Given a free-standing function 'callable', return a new + callable that will call 'callable' and report all exceptins, + using 'call_and_report_errors'.""" + assert callable(callable_) + def wrapper(*args, **kw): + return self.call_and_report_errors(callable_, *args, **kw) + return wrapper + + def init_project(self, project_module, python_standalone=False): + assert isinstance(project_module, basestring) + assert isinstance(python_standalone, bool) + if python_standalone: + m = sys.modules[project_module] + + for n in self.local_names: + if n != "import_": + setattr(m, n, getattr(self, n)) + + for n in self.rules: + setattr(m, n, self.rules[n]) + + return + + for n in self.local_names: + # Using 'getattr' here gives us a bound method, + # while using self.__dict__[r] would give unbound one. + v = getattr(self, n) + if callable(v): + if n == "import_": + n = "import" + else: + n = string.replace(n, "_", "-") + + self._import_rule(project_module, n, v) + + for n in self.rules: + self._import_rule(project_module, n, self.rules[n]) + + def project(self, *args): + assert is_iterable(args) and all(is_iterable(arg) for arg in args) + jamfile_module = self.registry.current().project_module() + attributes = self.registry.attributes(jamfile_module) + + id = None + if args and args[0]: + id = args[0][0] + args = args[1:] + + if id: + attributes.set('id', [id]) + + explicit_build_dir = None + for a in args: + if a: + attributes.set(a[0], a[1:], exact=0) + if a[0] == "build-dir": + explicit_build_dir = a[1] + + # If '--build-dir' is specified, change the build dir for the project. + if self.registry.global_build_dir: + + location = attributes.get("location") + # Project with empty location is 'standalone' project, like + # user-config, or qt. It has no build dir. + # If we try to set build dir for user-config, we'll then + # try to inherit it, with either weird, or wrong consequences. + if location and location == attributes.get("project-root"): + # Re-read the project id, since it might have been changed in + # the project's attributes. + id = attributes.get('id') + + # This is Jamroot. + if id: + if explicit_build_dir and os.path.isabs(explicit_build_dir): + self.registry.manager.errors()( +"""Absolute directory specified via 'build-dir' project attribute +Don't know how to combine that with the --build-dir option.""") + + rid = id + if rid[0] == '/': + rid = rid[1:] + + p = os.path.join(self.registry.global_build_dir, rid) + if explicit_build_dir: + p = os.path.join(p, explicit_build_dir) + attributes.set("build-dir", p, exact=1) + elif explicit_build_dir: + self.registry.manager.errors()( +"""When --build-dir is specified, the 'build-dir' +attribute is allowed only for top-level 'project' invocations""") + + def constant(self, name, value): + """Declare and set a project global constant. + Project global constants are normal variables but should + not be changed. 
They are applied to every child Jamfile.""" + assert is_iterable_typed(name, basestring) + assert is_iterable_typed(value, basestring) + self.registry.current().add_constant(name[0], value) + + def path_constant(self, name, value): + """Declare and set a project global constant, whose value is a path. The + path is adjusted to be relative to the invocation directory. The given + value path is taken to be either absolute, or relative to this project + root.""" + assert is_iterable_typed(name, basestring) + assert is_iterable_typed(value, basestring) + if len(value) > 1: + self.registry.manager.errors()("path constant should have one element") + self.registry.current().add_constant(name[0], value, path=1) + + def use_project(self, id, where): + # See comment in 'load' for explanation why we record the + # parameters as opposed to loading the project now. + assert is_iterable_typed(id, basestring) + assert is_iterable_typed(where, basestring) + m = self.registry.current().project_module() + self.registry.used_projects[m].append((id[0], where[0])) + + def build_project(self, dir): + assert is_iterable_typed(dir, basestring) + jamfile_module = self.registry.current().project_module() + attributes = self.registry.attributes(jamfile_module) + now = attributes.get("projects-to-build") + attributes.set("projects-to-build", now + dir, exact=True) + + def explicit(self, target_names): + assert is_iterable_typed(target_names, basestring) + self.registry.current().mark_targets_as_explicit(target_names) + + def always(self, target_names): + assert is_iterable_typed(target_names, basestring) + self.registry.current().mark_targets_as_always(target_names) + + def glob(self, wildcards, excludes=None): + assert is_iterable_typed(wildcards, basestring) + assert is_iterable_typed(excludes, basestring)or excludes is None + return self.registry.glob_internal(self.registry.current(), + wildcards, excludes, "glob") + + def glob_tree(self, wildcards, excludes=None): + assert is_iterable_typed(wildcards, basestring) + assert is_iterable_typed(excludes, basestring) or excludes is None + bad = 0 + for p in wildcards: + if os.path.dirname(p): + bad = 1 + + if excludes: + for p in excludes: + if os.path.dirname(p): + bad = 1 + + if bad: + self.registry.manager.errors()( +"The patterns to 'glob-tree' may not include directory") + return self.registry.glob_internal(self.registry.current(), + wildcards, excludes, "glob_tree") + + + def using(self, toolset, *args): + # The module referred by 'using' can be placed in + # the same directory as Jamfile, and the user + # will expect the module to be found even though + # the directory is not in BOOST_BUILD_PATH. + # So temporary change the search path. + assert is_iterable_typed(toolset, basestring) + current = self.registry.current() + location = current.get('location') + + m = self.registry.load_module(toolset[0], [location]) + if "init" not in m.__dict__: + self.registry.manager.errors()( + "Tool module '%s' does not define the 'init' method" % toolset[0]) + m.init(*args) + + # The above might have clobbered .current-project. Restore the correct + # value. 
+ self.registry.set_current(current) + + def import_(self, name, names_to_import=None, local_names=None): + assert is_iterable_typed(name, basestring) + assert is_iterable_typed(names_to_import, basestring) or names_to_import is None + assert is_iterable_typed(local_names, basestring)or local_names is None + name = name[0] + py_name = name + if py_name == "os": + py_name = "os_j" + jamfile_module = self.registry.current().project_module() + attributes = self.registry.attributes(jamfile_module) + location = attributes.get("location") + + saved = self.registry.current() + + m = self.registry.load_module(py_name, [location]) + + for f in m.__dict__: + v = m.__dict__[f] + f = f.replace("_", "-") + if callable(v): + qn = name + "." + f + self._import_rule(jamfile_module, qn, v) + record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v) + + + if names_to_import: + if not local_names: + local_names = names_to_import + + if len(names_to_import) != len(local_names): + self.registry.manager.errors()( +"""The number of names to import and local names do not match.""") + + for n, l in zip(names_to_import, local_names): + self._import_rule(jamfile_module, l, m.__dict__[n]) + + self.registry.set_current(saved) + + def conditional(self, condition, requirements): + """Calculates conditional requirements for multiple requirements + at once. This is a shorthand to be reduce duplication and to + keep an inline declarative syntax. For example: + + lib x : x.cpp : [ conditional gcc debug : + DEBUG_EXCEPTION DEBUG_TRACE ] ; + """ + assert is_iterable_typed(condition, basestring) + assert is_iterable_typed(requirements, basestring) + c = string.join(condition, ",") + if c.find(":") != -1: + return [c + r for r in requirements] + else: + return [c + ":" + r for r in requirements] + + def option(self, name, value): + assert is_iterable(name) and isinstance(name[0], basestring) + assert is_iterable(value) and isinstance(value[0], basestring) + name = name[0] + if not name in ["site-config", "user-config", "project-config"]: + get_manager().errors()("The 'option' rule may be used only in site-config or user-config") + + option.set(name, value[0]) diff --git a/src/boost/tools/build/src/build/property-set.jam b/src/boost/tools/build/src/build/property-set.jam new file mode 100644 index 000000000..5045611a2 --- /dev/null +++ b/src/boost/tools/build/src/build/property-set.jam @@ -0,0 +1,604 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import feature ; +import indirect ; +import path ; +import project ; +import property ; +import sequence ; +import set ; +import option ; + +# Class for storing a set of properties. +# +# There is 1<->1 correspondence between identity and value. No two instances +# of the class are equal. To maintain this property, the 'property-set.create' +# rule should be used to create new instances. Instances are immutable. +# +# Each property is classified with regard to its effect on build results. +# Incidental properties have no effect on build results, from B2's +# point of view. Others are either free, or non-free and we refer to non-free +# ones as 'base'. Each property belongs to exactly one of those categories. +# +# It is possible to get a list of properties belonging to each category as +# well as a list of properties with a specific attribute. 
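+#
+# As a quick illustration (the property names below are only examples, not
+# something this module defines), callers obtain and query instances through
+# 'property-set.create' rather than 'new':
+#
+#   import property-set ;
+#   local ps = [ property-set.create <optimization>speed <define>NDEBUG ] ;
+#   ECHO [ $(ps).raw ] ;                          # the stored property list
+#   local ps2 = [ $(ps).add-raw <define>TRACE ] ; # a new, distinct instance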
+# +# Several operations, like and refine and as-path are provided. They all use +# caching whenever possible. +# +class property-set +{ + import errors ; + import feature ; + import modules ; + import path ; + import property ; + import property-set ; + import set ; + + rule __init__ ( raw-properties * ) + { + self.raw = $(raw-properties) ; + + for local p in $(raw-properties) + { + if ! $(p:G) + { + errors.error "Invalid property: '$(p)'" ; + } + } + } + + # Returns Jam list of stored properties. + # + rule raw ( ) + { + return $(self.raw) ; + } + + rule str ( ) + { + return "[" $(self.raw) "]" ; + } + + # Returns properties that are neither incidental nor free. + # + rule base ( ) + { + if ! $(self.base-initialized) + { + init-base ; + } + return $(self.base) ; + } + + # Returns free properties which are not incidental. + # + rule free ( ) + { + if ! $(self.base-initialized) + { + init-base ; + } + return $(self.free) ; + } + + # Returns relevant base properties. This is used for computing + # target paths, so it must return the expanded set of relevant + # properties. + # + rule base-relevant ( ) + { + if ! $(self.relevant-initialized) + { + init-relevant ; + } + return $(self.base-relevant) ; + } + + # Returns all properties marked as relevant by features-ps + # Does not attempt to expand features-ps in any way, as + # this matches what virtual-target.register needs. + # + rule relevant ( features-ps ) + { + if ! $(self.relevant.$(features-ps)) + { + local result ; + local features = [ $(features-ps).get ] ; + features = <$(features)> ; + local ignore-relevance = [ modules.peek + property-set : .ignore-relevance ] ; + for local p in $(self.raw) + { + if $(ignore-relevance) || $(p:G) in $(features) + { + local att = [ feature.attributes $(p:G) ] ; + if ! ( incidental in $(att) ) + { + result += $(p) ; + } + } + } + self.relevant.$(features-ps) = [ property-set.create $(result) ] ; + } + return $(self.relevant.$(features-ps)) ; + } + + # Returns dependency properties. + # + rule dependency ( ) + { + if ! $(self.dependency-initialized) + { + init-dependency ; + } + return $(self.dependency) ; + } + + rule non-dependency ( ) + { + if ! $(self.dependency-initialized) + { + init-dependency ; + } + return $(self.non-dependency) ; + } + + rule conditional ( ) + { + if ! $(self.conditional-initialized) + { + init-conditional ; + } + return $(self.conditional) ; + } + + rule non-conditional ( ) + { + if ! $(self.conditional-initialized) + { + init-conditional ; + } + return $(self.non-conditional) ; + } + + # Returns incidental properties. + # + rule incidental ( ) + { + if ! $(self.base-initialized) + { + init-base ; + } + return $(self.incidental) ; + } + + rule refine ( ps ) + { + if ! $(self.refined.$(ps)) + { + local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ; + if $(r[1]) != "@error" + { + self.refined.$(ps) = [ property-set.create $(r) ] ; + } + else + { + self.refined.$(ps) = $(r) ; + } + } + return $(self.refined.$(ps)) ; + } + + rule expand ( ) + { + if ! $(self.expanded) + { + self.expanded = [ property-set.create [ feature.expand $(self.raw) ] + ] ; + } + return $(self.expanded) ; + } + + rule expand-composites ( ) + { + if ! $(self.composites) + { + self.composites = [ property-set.create + [ feature.expand-composites $(self.raw) ] ] ; + } + return $(self.composites) ; + } + + rule evaluate-conditionals ( context ? ) + { + context ?= $(__name__) ; + if ! 
$(self.evaluated.$(context)) + { + self.evaluated.$(context) = [ property-set.create + [ property.evaluate-conditionals-in-context $(self.raw) : [ + $(context).raw ] ] ] ; + } + return $(self.evaluated.$(context)) ; + } + + rule propagated ( ) + { + if ! $(self.propagated-ps) + { + local result ; + for local p in $(self.raw) + { + if propagated in [ feature.attributes $(p:G) ] + { + result += $(p) ; + } + } + self.propagated-ps = [ property-set.create $(result) ] ; + } + return $(self.propagated-ps) ; + } + + rule add-defaults ( ) + { + if ! $(self.defaults) + { + self.defaults = [ property-set.create + [ feature.add-defaults $(self.raw) ] ] ; + } + return $(self.defaults) ; + } + + rule as-path ( ) + { + if ! $(self.as-path) + { + self.as-path = [ property.as-path [ base-relevant ] ] ; + } + return $(self.as-path) ; + } + + # Computes the path to be used for a target with the given properties. + # Returns a list of + # - the computed path + # - if the path is relative to the build directory, a value of 'true'. + # + rule target-path ( ) + { + if ! $(self.target-path) + { + # The feature can be used to explicitly change the + # location of generated targets. + local l = [ get ] ; + if $(l) + { + self.target-path = $(l) ; + } + else + { + local p = [ property-set.hash-maybe [ as-path ] ] ; + + # A real ugly hack. Boost regression test system requires + # specific target paths, and it seems that changing it to handle + # other directory layout is really hard. For that reason, we + # teach V2 to do the things regression system requires. The + # value of '' is prepended to the path. + local prefix = [ get ] ; + if $(prefix) + { + self.target-path = [ path.join $(prefix) $(p) ] ; + } + else + { + self.target-path = $(p) ; + } + if ! $(self.target-path) + { + self.target-path = . ; + } + # The path is relative to build dir. + self.target-path += true ; + } + } + return $(self.target-path) ; + } + + rule add ( ps ) + { + if ! $(self.added.$(ps)) + { + self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] + ; + } + return $(self.added.$(ps)) ; + } + + rule add-raw ( properties * ) + { + return [ add [ property-set.create $(properties) ] ] ; + } + + # Returns all values of 'feature'. + # + rule get ( feature ) + { + if ! $(self.map-built) + { + # For each feature, create a member var and assign all values to it. + # Since all regular member vars start with 'self', there will be no + # conflicts between names. + self.map-built = true ; + for local v in $(self.raw) + { + $(v:G) += $(v:G=) ; + } + } + return $($(feature)) ; + } + + # Returns true if the property-set contains all the + # specified properties. + # + rule contains-raw ( properties * ) + { + if $(properties) in $(self.raw) + { + return true ; + } + } + + # Returns true if the property-set has values for + # all the specified features + # + rule contains-features ( features * ) + { + if $(features) in $(self.raw:G) + { + return true ; + } + } + + # private + + rule init-base ( ) + { + for local p in $(self.raw) + { + local att = [ feature.attributes $(p:G) ] ; + # A feature can be both incidental and free, in which case we add it + # to incidental. 
+ if incidental in $(att) + { + self.incidental += $(p) ; + } + else if free in $(att) + { + self.free += $(p) ; + } + else + { + self.base += $(p) ; + } + } + self.base-initialized = true ; + } + + rule init-relevant ( ) + { + local relevant-features = [ get ] ; + relevant-features = [ feature.expand-relevant $(relevant-features) ] ; + relevant-features = <$(relevant-features)> ; + ignore-relevance = [ modules.peek property-set : .ignore-relevance ] ; + for local p in $(self.raw) + { + if $(ignore-relevance) || $(p:G) in $(relevant-features) + { + local att = [ feature.attributes $(p:G) ] ; + if ! ( incidental in $(att) ) + { + self.relevant += $(p) ; + if ! ( free in $(att) ) + { + self.base-relevant += $(p) ; + } + } + } + } + self.relevant-initialized = true ; + } + + rule init-dependency ( ) + { + for local p in $(self.raw) + { + if dependency in [ feature.attributes $(p:G) ] + { + self.dependency += $(p) ; + } + else + { + self.non-dependency += $(p) ; + } + } + self.dependency-initialized = true ; + } + + rule init-conditional ( ) + { + for local p in $(self.raw) + { + # TODO: Note that non-conditional properties may contain colon (':') + # characters as well, e.g. free or indirect properties. Indirect + # properties for example contain a full Jamfile path in their value + # which on Windows file systems contains ':' as the drive separator. + if ( [ MATCH "(:)" : $(p:G=) ] && ! ( free in [ feature.attributes $(p:G) ] ) ) || $(p:G) = + { + self.conditional += $(p) ; + } + else + { + self.non-conditional += $(p) ; + } + } + self.conditional-initialized = true ; + } +} + +# This is a temporary measure to help users work around +# any problems. Remove it once we've verified that +# everything works. +if --ignore-relevance in [ modules.peek : ARGV ] +{ + .ignore-relevance = true ; +} + +# Creates a new 'property-set' instance for the given raw properties or returns +# an already existing ones. +# +rule create ( raw-properties * ) +{ + raw-properties = [ sequence.unique + [ sequence.insertion-sort $(raw-properties) ] ] ; + + local key = $(raw-properties:J=-:E=) ; + + if ! $(.ps.$(key)) + { + .ps.$(key) = [ new property-set $(raw-properties) ] ; + } + return $(.ps.$(key)) ; +} +NATIVE_RULE property-set : create ; + +if [ HAS_NATIVE_RULE class@property-set : get : 1 ] +{ + NATIVE_RULE class@property-set : get ; +} + +if [ HAS_NATIVE_RULE class@property-set : contains-features : 1 ] +{ + NATIVE_RULE class@property-set : contains-features ; +} + +# Creates a new 'property-set' instance after checking that all properties are +# valid and converting implicit properties into gristed form. +# +rule create-with-validation ( raw-properties * ) +{ + property.validate $(raw-properties) ; + return [ create [ property.make $(raw-properties) ] ] ; +} + + +# Creates a property-set from the input given by the user, in the context of +# 'jamfile-module' at 'location'. +# +rule create-from-user-input ( raw-properties * : jamfile-module location ) +{ + local project-id = [ project.attribute $(jamfile-module) id ] ; + project-id ?= [ path.root $(location) [ path.pwd ] ] ; + return [ property-set.create [ property.translate $(raw-properties) + : $(project-id) : $(location) : $(jamfile-module) ] ] ; +} + + +# Refines requirements with requirements provided by the user. Specially handles +# "-value" syntax in specification to remove given requirements. +# - parent-requirements -- property-set object with requirements to refine. +# - specification -- string list of requirements provided by the user. 
+# - project-module -- module to which context indirect features will be +# bound. +# - location -- path to which path features are relative. +# +rule refine-from-user-input ( parent-requirements : specification * : + project-module : location ) +{ + if ! $(specification) + { + return $(parent-requirements) ; + } + else + { + local add-requirements ; + local remove-requirements ; + + for local r in $(specification) + { + local m = [ MATCH "^-(.*)" : $(r) ] ; + if $(m) + { + remove-requirements += $(m) ; + } + else + { + add-requirements += $(r) ; + } + } + + if $(remove-requirements) + { + # Need to create a property set, so that path features and indirect + # features are translated just like they are in project + # requirements. + local ps = [ property-set.create-from-user-input + $(remove-requirements) : $(project-module) $(location) ] ; + + parent-requirements = [ property-set.create + [ set.difference + [ indirect.difference + [ $(parent-requirements).raw ] : [ $(ps).raw ] ] + : [ $(ps).raw ] + ] ] ; + specification = $(add-requirements) ; + } + + local requirements = [ property-set.create-from-user-input + $(specification) : $(project-module) $(location) ] ; + + return [ $(parent-requirements).refine $(requirements) ] ; + } +} + + +# Returns a property-set with an empty set of properties. +# +rule empty ( ) +{ + if ! $(.empty) + { + .empty = [ create ] ; + } + return $(.empty) ; +} + + +if [ option.get hash : : yes ] = yes +{ + rule hash-maybe ( path ? ) + { + path ?= "" ; + return [ MD5 $(path) ] ; + } +} +else +{ + rule hash-maybe ( path ? ) + { + return $(path) ; + } +} + +rule __test__ ( ) +{ + import errors : try catch ; + + try ; + create invalid-property ; + catch "Invalid property: 'invalid-property'" ; +} diff --git a/src/boost/tools/build/src/build/property.jam b/src/boost/tools/build/src/build/property.jam new file mode 100644 index 000000000..a7a2e71dd --- /dev/null +++ b/src/boost/tools/build/src/build/property.jam @@ -0,0 +1,1005 @@ +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Copyright 2020 Nikita Kniazev +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; +import indirect ; +import path ; +import regex ; +import string ; +import sequence ; +import set ; +import utility ; + + +# Refines 'properties' by overriding any non-free and non-conditional properties +# for which a different value is specified in 'requirements'. Returns the +# resulting list of properties. +# +rule refine ( properties * : requirements * ) +{ + local result ; + local unset ; + + # Collect all non-free features in requirements + for local r in $(requirements) + { + # Do not consider conditional requirements. + if ! [ MATCH "(:<)" : $(r:G=) ] && ! free in [ feature.attributes $(r:G) ] + { + if ! $(r) in $(properties) + { + # Kill subfeatures of properties that we're changing + local sub = [ modules.peek feature : $(r:G).subfeatures ] ; + if $(sub) + { + # non-specific subfeatures are still valid + sub = [ MATCH "(.*:.*)" : $(sub) ] ; + local name = [ utility.ungrist $(r:G) ] ; + unset += <$(name)-$(sub)> ; + } + } + unset += $(r:G) ; + } + } + + # Remove properties that are overridden by requirements + for local p in $(properties) + { + if [ MATCH "(:<)" : $(p:G=) ] || ! 
$(p:G) in $(unset) + { + result += $(p) ; + } + } + + return [ sequence.unique $(result) $(requirements) ] ; +} + + +# Removes all conditional properties whose conditions are not met. For those +# with met conditions, removes the condition. Properties in conditions are +# looked up in 'context'. +# +rule evaluate-conditionals-in-context ( properties * : context * ) +{ + # Import here to avoid cyclic dependency + import project ; + + local result ; + while $(properties) + { + local conditionals ; + local indirect ; + for local p in $(properties) + { + if [ MATCH "(:<)" : $(p) ] && ! free in [ feature.attributes $(p:G) ] + { + conditionals += $(p) ; + } + else if $(p:G) = + { + indirect += $(p) ; + } + else + { + result += $(p) ; + } + } + + properties = ; + for local p in $(conditionals) + { + # Separate condition and property. + local s = [ MATCH "^(.*):(<.*)" : $(p) ] ; + # Split condition into individual properties. + local condition = [ regex.split $(s[1]) "," ] ; + # Evaluate condition. + if ! [ MATCH ^(!).* : $(condition:G=) ] + { + # Only positive checks + if $(condition) in $(context) + { + properties += $(s[2]) ; + } + } + else + { + # Have negative checks + local fail ; + for local c in $(condition) + { + local c = [ MATCH ^(!)?(.*) : $(c) ] ; + # It is XOR: $(c[1]) = "!" ^ $(c[2]) in $(context) + if $(c[1]) = "!" && $(c[2]) in $(context) || $(c[1]) != "!" && ! ( $(c[2]) in $(context) ) + { + fail = true ; + break ; + } + } + if ! $(fail) + { + properties += $(s[2]) ; + } + } + } + for local i in [ MATCH "^@(.*)" : $(indirect:G=) ] + { + # If the rule was set in a project module, translate paths + # relative to that project's location. + local m = [ indirect.get-module $(i) ] ; + local p = [ project.target $(m) : allow-missing ] ; + local new = [ indirect.call $(i) $(context) ] ; + if $(p) && [ $(p).location ] + { + local location = [ $(p).location ] ; + local project-id = [ project.attribute $(m) id ] ; + project-id ?= [ path.root $(location) [ path.pwd ] ] ; + properties += + [ translate $(new) : $(project-id) : $(location) : $(m) ] ; + } + else + { + properties += $(new) ; + } + } + } + return $(result) ; +} + + +# Returns properties indicating how the conditionals in +# properties affect feature relevance. If the optional argument cond +# is passed, it is treated as extra conditions for all properties. +# +rule evaluate-conditional-relevance ( properties * : cond * ) +{ + cond = [ sequence.transform utility.ungrist : $(cond:G) ] ; + local result ; + for local p in $(properties) + { + # Separate condition and property. + local s = [ MATCH "^(.*):(<.*)" : $(p) ] ; + if ! $(s) || free in [ feature.attributes $(p:G) ] + { + local value = [ utility.ungrist $(p:G) ] ; + result += $(value):$(cond) ; + } + else + { + local condition = [ regex.split $(s[1]) "," ] ; + condition = [ MATCH "^!?(.*)" : $(condition) ] ; + condition = [ sequence.transform utility.ungrist : $(condition:G) ] $(cond) ; + local value = [ utility.ungrist $(s[2]:G) ] ; + result += $(value):$(condition) ; + } + } + return [ sequence.unique $(result) ] ; +} + + +rule expand-subfeatures-in-conditions ( properties * ) +{ + local result ; + for local p in $(properties) + { + local s = [ MATCH "^(.*):(<.*)" : $(p) ] ; + if ! $(s) + { + result += $(p) ; + } + else + { + local condition = $(s[1]) ; + local value = $(s[2]) ; + # Condition might include several elements. 
+ condition = [ regex.split $(condition) "," ] ; + local e ; + for local c in $(condition) + { + # It is common for a condition to include a toolset or + # subfeatures that have not been defined. In that case we want + # the condition to simply 'never be satisfied' and validation + # would only produce a spurious error so we prevent it by + # passing 'true' as the second parameter. + e += [ feature.expand-subfeatures $(c) : true ] ; + } + if $(e) = $(condition) + { + # (todo) + # This is just an optimization and possibly a premature one at + # that. + # (todo) (12.07.2008.) (Jurko) + result += $(p) ; + } + else + { + result += "$(e:J=,):$(value)" ; + } + } + } + return $(result) ; +} + + +# Helper for as-path, below. Orders properties with the implicit ones first, and +# within the two sections in alphabetical order of feature name. +# +local rule path-order ( x y ) +{ + if $(y:G) && ! $(x:G) + { + return true ; + } + else if $(x:G) && ! $(y:G) + { + return ; + } + else + { + if ! $(x:G) + { + x = [ feature.expand-subfeatures $(x) ] ; + y = [ feature.expand-subfeatures $(y) ] ; + } + + if $(x[1]) < $(y[1]) + { + return true ; + } + } +} + + +local rule abbreviate-dashed ( string ) +{ + local r ; + for local part in [ regex.split $(string) - ] + { + r += [ string.abbreviate $(part) ] ; + } + return $(r:J=-) ; +} + + +local rule identity ( string ) +{ + return $(string) ; +} + + +if --abbreviate-paths in [ modules.peek : ARGV ] +{ + .abbrev = abbreviate-dashed ; +} +else +{ + .abbrev = identity ; +} + + +# Returns a path representing the given expanded property set. +# +rule as-path ( properties * ) +{ + local entry = .result.$(properties:J=-) ; + + if ! $($(entry)) + { + # Trim redundancy. + properties = [ feature.minimize $(properties) ] ; + + # Sort according to path-order. + properties = [ sequence.insertion-sort $(properties) : path-order ] ; + + local components ; + for local p in $(properties) + { + if ! hidden in [ feature.attributes $(p:G) ] + { + if $(p:G) + { + local f = [ utility.ungrist $(p:G) ] ; + p = $(f)-$(p:G=) ; + } + components += [ $(.abbrev) $(p) ] ; + } + } + + $(entry) = $(components:J=/) ; + } + + return $($(entry)) ; +} + + +# Exit with error if property is not valid. +# +local rule validate1 ( property ) +{ + local msg ; + if $(property:G) + { + local feature = $(property:G) ; + local value = $(property:G=) ; + + if ! [ feature.valid $(feature) ] + { + # Ungrist for better error messages. + feature = [ utility.ungrist $(property:G) ] ; + msg = "unknown feature '$(feature)'" ; + } + else if $(value) && ! free in [ feature.attributes $(feature) ] + { + feature.validate-value-string $(feature) $(value) ; + } + else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) ) + { + # Ungrist for better error messages. + feature = [ utility.ungrist $(property:G) ] ; + msg = "No value specified for feature '$(feature)'" ; + } + } + else + { + local feature = [ feature.implied-feature $(property) ] ; + feature.validate-value-string $(feature) $(property) ; + } + if $(msg) + { + import errors ; + errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ; + } +} + + +rule validate ( properties * ) +{ + for local p in $(properties) + { + validate1 $(p) ; + } +} + + +rule validate-property-sets ( property-sets * ) +{ + for local s in $(property-sets) + { + validate [ feature.split $(s) ] ; + } +} + + +# Expands any implicit property values in the given property 'specification' so +# they explicitly state their feature. 
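+#
+# For illustration only (assuming the stock feature definitions, where 'gcc'
+# is an implicit value of the toolset feature):
+#
+#   [ make gcc <optimization>speed ]
+#
+# returns <toolset>gcc <optimization>speed, while a token that is neither
+# gristed nor a known implicit value is reported as an error.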
+# +rule make ( specification * ) +{ + local result ; + for local e in $(specification) + { + if $(e:G) + { + result += $(e) ; + } + else if [ feature.is-implicit-value $(e) ] + { + local feature = [ feature.implied-feature $(e) ] ; + result += $(feature)$(e) ; + } + else + { + import errors ; + errors.error "'$(e)' is not a valid property specification" ; + } + } + return $(result) ; +} + + +# Returns a property set containing all the elements in 'properties' that do not +# have their attributes listed in 'attributes'. +# +rule remove ( attributes + : properties * ) +{ + local result ; + for local e in $(properties) + { + if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ] + { + result += $(e) ; + } + } + return $(result) ; +} + + +# Returns a property set containing all the elements in 'properties' that have +# their attributes listed in 'attributes'. +# +rule take ( attributes + : properties * ) +{ + local result ; + for local e in $(properties) + { + if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ] + { + result += $(e) ; + } + } + return $(result) ; +} + + +# Selects properties corresponding to any of the given features. +# +rule select ( features * : properties * ) +{ + local result ; + + # Add any missing angle brackets. + local empty = "" ; + features = $(empty:G=$(features)) ; + + for local p in $(properties) + { + if $(p:G) in $(features) + { + result += $(p) ; + } + } + return $(result) ; +} + + +# Returns a modified version of properties with all values of the given feature +# replaced by the given value. If 'value' is empty the feature will be removed. +# +rule change ( properties * : feature value ? ) +{ + local result ; + for local p in $(properties) + { + if $(p:G) = $(feature) + { + result += $(value:G=$(feature)) ; + } + else + { + result += $(p) ; + } + } + return $(result) ; +} + + +# If 'property' is a conditional property, returns the condition and the +# property. E.g. debug,gcc:full will become +# debug,gcc full. Otherwise, returns an empty +# string. +# +rule split-conditional ( property ) +{ + return [ MATCH "^(.+):(<.+)" : $(property) ] ; +} + + +rule translate-path-value ( value : path ) +{ + local t ; + for local v in [ regex.split $(value) "&&" ] + { + t += [ path.root [ path.make $(v) ] $(path) ] ; + } + return $(t:TJ="&&") ; +} + +rule translate-dependency-value ( value : project-id : project-location ) +{ + local split-target = [ regex.match ^(.*)//(.*) : $(value) ] ; + if $(split-target) + { + local rooted = [ path.root [ path.make $(split-target[1]) ] + [ path.root $(project-location) [ path.pwd ] ] ] ; + return $(rooted)//$(split-target[2]) ; + } + else if [ path.is-rooted $(value) ] + { + return $(value) ; + } + else + { + return $(project-id)//$(value) ; + } +} + +rule translate-indirect-value ( rulename : context-module ) +{ + if [ MATCH "^([^%]*)%([^%]+)$" : $(rulename) ] + { + # Rule is already in the 'indirect-rule' format. + return @$(rulename) ; + } + else + { + local v ; + if ! [ MATCH "([.])" : $(rulename) ] + { + # This is an unqualified rule name. The user might want to + # set flags on this rule name and toolset.flag + # auto-qualifies it. Need to do the same here so flag + # setting works. We can arrange for toolset.flag to *not* + # auto-qualify the argument but then two rules defined in + # two Jamfiles would conflict. 
+ rulename = $(context-module).$(rulename) ; + } + v = [ indirect.make $(rulename) : $(context-module) ] ; + return @$(v) ; + } + +} + +# Equivalent to a calling all of: +# translate-path +# translate-indirect +# translate-dependency +# expand-subfeatures-in-conditions +# make +# +rule translate ( properties * : project-id : project-location : context-module ) +{ + local translate-path-rule = [ MATCH "^[@](.*)$" : "$(properties)" ] ; + local result ; + for local p in $(properties) + { + local split = [ split-conditional $(p) ] ; + local condition property ; + + if $(split) + { + condition = $(split[1]) ; + property = $(split[2]) ; + + local e ; + for local c in [ regex.split $(condition) "," ] + { + # strip negation for expansion and readd after + c = [ MATCH "^(!)?(.*)" : $(c) ] ; + local expanded = [ feature.expand-subfeatures $(c[2]) : true ] ; + e += $(c[1])$(expanded) ; + } + + condition = "$(e:J=,):" ; + } + else + { + property = $(p) ; + } + + local feature = $(property:G) ; + if ! $(feature) + { + if [ feature.is-implicit-value $(property) ] + { + feature = [ feature.implied-feature $(property) ] ; + result += $(condition:E=)$(feature)$(property) ; + } + else + { + import errors ; + errors.error "'$(property)' is not a valid property specification" ; + } + } else { + local attributes = [ feature.attributes $(feature) ] ; + local value ; + # Only free features should be translated + if free in $(attributes) + { + if path in $(attributes) + { + if $(translate-path-rule) + { + value = [ $(translate-path-rule) $(feature) $(property:G=) : $(properties) : $(project-id) : $(project-location) ] ; + } + if ! $(value) + { + value = [ translate-path-value $(property:G=) : $(project-location) ] ; + } + result += $(condition:E=)$(feature)$(value) ; + } + else if dependency in $(attributes) + { + value = [ translate-dependency-value $(property:G=) : $(project-id) : $(project-location) ] ; + result += $(condition:E=)$(feature)$(value) ; + } + else + { + local m = [ MATCH ^@(.+) : $(property:G=) ] ; + if $(m) + { + value = [ translate-indirect-value $(m) : $(context-module) ] ; + result += $(condition:E=)$(feature)$(value) ; + } + else + { + result += $(condition:E=)$(property) ; + } + } + } + else + { + result += $(condition:E=)$(property) ; + } + } + } + return $(result) ; +} + +# Interpret all path properties in 'properties' as relative to 'path'. The +# property values are assumed to be in system-specific form, and will be +# translated into normalized form. +# +rule translate-paths ( properties * : path ) +{ + local result ; + for local p in $(properties) + { + local split = [ split-conditional $(p) ] ; + local condition = "" ; + if $(split) + { + condition = "$(split[1]):" ; + p = $(split[2]) ; + } + + if path in [ feature.attributes $(p:G) ] + { + local values = [ regex.split $(p:TG=) "&&" ] ; + local t ; + for local v in $(values) + { + t += [ path.root [ path.make $(v) ] $(path) ] ; + } + t = $(t:J="&&") ; + result += $(condition)$(t:TG=$(p:G)) ; + } + else + { + result += $(condition)$(p) ; + } + } + return $(result) ; +} + + +# Assumes that all feature values that start with '@' are names of rules, used +# in 'context-module'. Such rules can be either local to the module or global. +# Converts such values into 'indirect-rule' format (see indirect.jam), so they +# can be called from other modules. Does nothing for such values that are +# already in the 'indirect-rule' format. 
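+#
+# For example (with purely illustrative names):
+#
+#   translate-indirect <linkflags>@add-rpath : my-module ;
+#
+# qualifies the unqualified rule name to 'my-module.add-rpath', converts it
+# with indirect.make and re-attaches the leading '@'; values already in the
+# 'indirect-rule' format are passed through unchanged.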
+# +rule translate-indirect ( specification * : context-module ) +{ + local result ; + for local p in $(specification) + { + local m = [ MATCH ^@(.+) : $(p:G=) ] ; + if $(m) + { + local v ; + if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ] + { + # Rule is already in the 'indirect-rule' format. + v = $(m) ; + } + else + { + if ! [ MATCH "([.])" : $(m) ] + { + # This is an unqualified rule name. The user might want to + # set flags on this rule name and toolset.flag + # auto-qualifies it. Need to do the same here so flag + # setting works. We can arrange for toolset.flag to *not* + # auto-qualify the argument but then two rules defined in + # two Jamfiles would conflict. + m = $(context-module).$(m) ; + } + v = [ indirect.make $(m) : $(context-module) ] ; + } + + v = @$(v) ; + result += $(v:G=$(p:G)) ; + } + else + { + result += $(p) ; + } + } + return $(result) ; +} + + +# Binds all dependency properties in a list relative to the given project. +# Targets with absolute paths will be left unchanged and targets which have a +# project specified will have the path to the project interpreted relative to +# the specified location. +# +rule translate-dependencies ( specification * : project-id : location ) +{ + local result ; + for local p in $(specification) + { + local split = [ split-conditional $(p) ] ; + local condition = "" ; + if $(split) + { + condition = "$(split[1]):" ; + p = $(split[2]) ; + } + if dependency in [ feature.attributes $(p:G) ] + { + local split-target = [ regex.match ^(.*)//(.*) : $(p:G=) ] ; + if $(split-target) + { + local rooted = [ path.root [ path.make $(split-target[1]) ] + [ path.root $(location) [ path.pwd ] ] ] ; + result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ; + } + else if [ path.is-rooted $(p:G=) ] + { + result += $(condition)$(p) ; + } + else + { + result += $(condition)$(p:G)$(project-id)//$(p:G=) ; + } + } + else + { + result += $(condition)$(p) ; + } + } + return $(result) ; +} + + +# Class maintaining a property set -> string mapping. +# +class property-map +{ + import numbers ; + import sequence ; + + rule __init__ ( ) + { + self.next-flag = 1 ; + } + + # Associate 'value' with 'properties'. + # + rule insert ( properties * : value ) + { + self.all-flags += self.$(self.next-flag) ; + self.$(self.next-flag) = $(value) $(properties) ; + + self.next-flag = [ numbers.increment $(self.next-flag) ] ; + } + + # Returns the value associated with 'properties' or any subset of it. If + # more than one subset has a value assigned to it, returns the value for the + # longest subset, if it is unique. + # + rule find ( property-set ) + { + # First find all matches. + local matches ; + local match-ranks ; + for local i in $(self.all-flags) + { + local list = $($(i)) ; + if [ $(property-set).contains-raw $(list[2-]) ] + { + matches += $(list[1]) ; + match-ranks += [ sequence.length $(list) ] ; + } + } + local best = [ sequence.select-highest-ranked $(matches) + : $(match-ranks) ] ; + if $(best[2]) + { + import errors : error : errors.error ; + properties = [ $(property-set).raw ] ; + errors.error "Ambiguous key $(properties:J= :E=)" ; + } + return $(best) ; + } + + # Returns the value associated with 'properties'. If 'value' parameter is + # given, replaces the found value. + # + rule find-replace ( properties * : value ? ) + { + # First find all matches. 
+ local matches ; + local match-ranks ; + for local i in $(self.all-flags) + { + if $($(i)[2-]) in $(properties) + { + matches += $(i) ; + match-ranks += [ sequence.length $($(i)) ] ; + } + } + local best = [ sequence.select-highest-ranked $(matches) + : $(match-ranks) ] ; + if $(best[2]) + { + import errors : error : errors.error ; + errors.error "Ambiguous key $(properties:J= :E=)" ; + } + local original = $($(best)[1]) ; + if $(value)-is-set + { + $(best) = $(value) $($(best)[2-]) ; + } + return $(original) ; + } +} + + +rule __test__ ( ) +{ + import assert ; + import "class" : new ; + import errors : try catch ; + import feature ; + + # Local rules must be explicitly re-imported. + import property : path-order abbreviate-dashed ; + + feature.prepare-test property-test-temp ; + + feature.feature toolset : gcc : implicit symmetric ; + feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 + 3.0.2 : optional ; + feature.feature define : : free ; + feature.feature runtime-link : dynamic static : symmetric link-incompatible ; + feature.feature optimization : on off ; + feature.feature variant : debug release : implicit composite symmetric ; + feature.feature rtti : on off : link-incompatible ; + + feature.compose debug : _DEBUG off ; + feature.compose release : NDEBUG on ; + + validate gcc gcc-3.0.1 : $(test-space) ; + + assert.true path-order $(test-space) debug foo ; + assert.false path-order $(test-space) foo debug ; + assert.true path-order $(test-space) gcc debug ; + assert.false path-order $(test-space) debug gcc ; + assert.true path-order $(test-space) on on ; + assert.false path-order $(test-space) on on ; + + assert.result-set-equal gcc off FOO + : refine gcc off + : FOO + : $(test-space) ; + + assert.result-set-equal gcc on + : refine gcc off + : on + : $(test-space) ; + + assert.result-set-equal gcc off + : refine gcc : off : $(test-space) ; + + assert.result-set-equal gcc off off:FOO + : refine gcc : off off:FOO + : $(test-space) ; + + assert.result-set-equal gcc:foo gcc:bar + : refine gcc:foo : gcc:bar + : $(test-space) ; + + assert.result + : evaluate-conditionals-in-context + release,off:MY_RELEASE + : gcc release on ; + + assert.result MY_RELEASE + : evaluate-conditionals-in-context + release,off:MY_RELEASE + : gcc release off ; + + assert.result MY_RELEASE + : evaluate-conditionals-in-context + release,!off:MY_RELEASE + : gcc release on ; + + assert.result + : evaluate-conditionals-in-context + release,!off:MY_RELEASE + : gcc release off ; + + assert.result debug + : as-path off debug + : $(test-space) ; + + assert.result gcc/debug/rtti-off + : as-path gcc off off debug + : $(test-space) ; + + assert.result optmz-off : abbreviate-dashed optimization-off ; + assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ; + + try ; + validate value : $(test-space) ; + catch "Invalid property 'value': unknown feature 'feature'." ; + + try ; + validate default : $(test-space) ; + catch \"default\" is not a known value of feature ; + + validate WHATEVER : $(test-space) ; + + try ; + validate : $(test-space) ; + catch "Invalid property '': No value specified for feature 'rtti'." 
; + + try ; + validate value : $(test-space) ; + catch \"value\" is not an implicit feature value ; + + assert.result-set-equal on + : remove free implicit : gcc foo on : $(test-space) ; + + assert.result-set-equal a + : select include : a gcc ; + + assert.result-set-equal a + : select include bar : a gcc ; + + assert.result-set-equal a gcc + : select include : a gcc ; + + assert.result-set-equal kylix a + : change gcc a : kylix ; + + pm = [ new property-map ] ; + $(pm).insert gcc : o ; + $(pm).insert gcc NT : obj ; + $(pm).insert gcc CYGWIN : obj ; + + try ; + $(pm).find [ new property-set gcc NT CYGWIN ] ; + catch "Ambiguous key gcc NT CYGWIN" ; + + assert.equal o : [ $(pm).find-replace gcc ] ; + + assert.equal obj : [ $(pm).find-replace gcc NT ] ; + + try ; + $(pm).find-replace gcc NT CYGWIN ; + catch "Ambiguous key gcc NT CYGWIN" ; + + # Test ordinary properties. + assert.result : split-conditional gcc ; + + # Test properties with ":". + assert.result : split-conditional "FOO=A::B" ; + + # Test conditional feature. + assert.result-set-equal gcc,3.0 FOO + : split-conditional gcc,3.0:FOO ; + + # Test translate does not choke on negations in conditional + assert.result gcc,!off:HELLO + : translate gcc,!off:HELLO + : project-id : project-location : context-module ; + + feature.finish-test property-test-temp ; +} diff --git a/src/boost/tools/build/src/build/property.py b/src/boost/tools/build/src/build/property.py new file mode 100644 index 000000000..f3e8dfe4e --- /dev/null +++ b/src/boost/tools/build/src/build/property.py @@ -0,0 +1,750 @@ +# Status: ported, except for tests. +# Base revision: 64070 +# +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import re +import sys +from functools import total_ordering + +from b2.util.utility import * +from b2.build import feature +from b2.util import sequence, qualify_jam_action, is_iterable_typed +import b2.util.set +from b2.manager import get_manager + + +__re_two_ampersands = re.compile ('&&') +__re_comma = re.compile (',') +__re_split_condition = re.compile ('(.*):(<.*)') +__re_split_conditional = re.compile (r'(.+):<(.+)') +__re_colon = re.compile (':') +__re_has_condition = re.compile (r':<') +__re_separate_condition_and_property = re.compile (r'(.*):(<.*)') + +_not_applicable_feature='not-applicable-in-this-context' +feature.feature(_not_applicable_feature, [], ['free']) + +__abbreviated_paths = False + + +class PropertyMeta(type): + """ + This class exists to implement the isinstance() and issubclass() + hooks for the Property class. Since we've introduce the concept of + a LazyProperty, isinstance(p, Property) will fail when p is a LazyProperty. + Implementing both __instancecheck__ and __subclasscheck__ will allow + LazyProperty instances to pass the isinstance() and issubclass check for + the Property class. + + Additionally, the __call__ method intercepts the call to the Property + constructor to ensure that calling Property with the same arguments + will always return the same Property instance. + """ + _registry = {} + current_id = 1 + + def __call__(mcs, f, value, condition=None): + """ + This intercepts the call to the Property() constructor. + + This exists so that the same arguments will always return the same Property + instance. This allows us to give each instance a unique ID. 
+ """ + from b2.build.feature import Feature + if not isinstance(f, Feature): + f = feature.get(f) + if condition is None: + condition = [] + key = (f, value) + tuple(sorted(condition)) + if key not in mcs._registry: + instance = super(PropertyMeta, mcs).__call__(f, value, condition) + mcs._registry[key] = instance + return mcs._registry[key] + + @staticmethod + def check(obj): + return (hasattr(obj, 'feature') and + hasattr(obj, 'value') and + hasattr(obj, 'condition')) + + def __instancecheck__(self, instance): + return self.check(instance) + + def __subclasscheck__(self, subclass): + return self.check(subclass) + + +@total_ordering +class Property(object): + + __slots__ = ('feature', 'value', 'condition', '_to_raw', '_hash', 'id') + __metaclass__ = PropertyMeta + + def __init__(self, f, value, condition=None): + assert(f.free or ':' not in value) + if condition is None: + condition = [] + + self.feature = f + self.value = value + self.condition = condition + self._hash = hash((self.feature, self.value) + tuple(sorted(self.condition))) + self.id = PropertyMeta.current_id + # increment the id counter. + # this allows us to take a list of Property + # instances and use their unique integer ID + # to create a key for PropertySet caching. This is + # much faster than string comparison. + PropertyMeta.current_id += 1 + + condition_str = '' + if condition: + condition_str = ",".join(str(p) for p in self.condition) + ':' + + self._to_raw = '{}<{}>{}'.format(condition_str, f.name, value) + + def to_raw(self): + return self._to_raw + + def __str__(self): + + return self._to_raw + + def __hash__(self): + return self._hash + + def __eq__(self, other): + return self._hash == other._hash + + def __lt__(self, other): + return (self.feature.name, self.value) < (other.feature.name, other.value) + + +@total_ordering +class LazyProperty(object): + def __init__(self, feature_name, value, condition=None): + if condition is None: + condition = [] + + self.__property = Property( + feature.get(_not_applicable_feature), feature_name + value, condition=condition) + self.__name = feature_name + self.__value = value + self.__condition = condition + self.__feature = None + + def __getattr__(self, item): + if self.__feature is None: + try: + self.__feature = feature.get(self.__name) + self.__property = Property(self.__feature, self.__value, self.__condition) + except KeyError: + pass + return getattr(self.__property, item) + + def __hash__(self): + return hash(self.__property) + + def __str__(self): + return self.__property._to_raw + + def __eq__(self, other): + return self.__property == other + + def __lt__(self, other): + return (self.feature.name, self.value) < (other.feature.name, other.value) + + +def create_from_string(s, allow_condition=False,allow_missing_value=False): + assert isinstance(s, basestring) + assert isinstance(allow_condition, bool) + assert isinstance(allow_missing_value, bool) + condition = [] + import types + if not isinstance(s, types.StringType): + print type(s) + if __re_has_condition.search(s): + + if not allow_condition: + raise BaseException("Conditional property is not allowed in this context") + + m = __re_separate_condition_and_property.match(s) + condition = m.group(1) + s = m.group(2) + + # FIXME: break dependency cycle + from b2.manager import get_manager + + if condition: + condition = [create_from_string(x) for x in condition.split(',')] + + feature_name = get_grist(s) + if not feature_name: + if feature.is_implicit_value(s): + f = feature.implied_feature(s) + value = s + p = 
Property(f, value, condition=condition) + else: + raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s) + else: + value = get_value(s) + if not value and not allow_missing_value: + get_manager().errors()("Invalid property '%s' -- no value specified" % s) + + if feature.valid(feature_name): + p = Property(feature.get(feature_name), value, condition=condition) + else: + # In case feature name is not known, it is wrong to do a hard error. + # Feature sets change depending on the toolset. So e.g. + # is an unknown feature when using toolset Y. + # + # Ideally we would like to ignore this value, but most of + # Boost.Build code expects that we return a valid Property. For this + # reason we use a sentinel feature. + # + # The underlying cause for this problem is that python port Property + # is more strict than its Jam counterpart and must always reference + # a valid feature. + p = LazyProperty(feature_name, value, condition=condition) + + return p + +def create_from_strings(string_list, allow_condition=False): + assert is_iterable_typed(string_list, basestring) + return [create_from_string(s, allow_condition) for s in string_list] + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + """ + global __results + + # A cache of results from as_path + __results = {} + +reset () + + +def set_abbreviated_paths(on=True): + global __abbreviated_paths + if on == 'off': + on = False + on = bool(on) + __abbreviated_paths = on + + +def get_abbreviated_paths(): + return __abbreviated_paths or '--abbreviated-paths' in sys.argv + + +def path_order (x, y): + """ Helper for as_path, below. Orders properties with the implicit ones + first, and within the two sections in alphabetical order of feature + name. + """ + if x == y: + return 0 + + xg = get_grist (x) + yg = get_grist (y) + + if yg and not xg: + return -1 + + elif xg and not yg: + return 1 + + else: + if not xg: + x = feature.expand_subfeatures([x]) + y = feature.expand_subfeatures([y]) + + if x < y: + return -1 + elif x > y: + return 1 + else: + return 0 + +def identify(string): + return string + +# Uses Property +def refine (properties, requirements): + """ Refines 'properties' by overriding any non-free properties + for which a different value is specified in 'requirements'. + Conditional requirements are just added without modification. + Returns the resulting list of properties. + """ + assert is_iterable_typed(properties, Property) + assert is_iterable_typed(requirements, Property) + # The result has no duplicates, so we store it in a set + result = set() + + # Records all requirements. + required = {} + + # All the elements of requirements should be present in the result + # Record them so that we can handle 'properties'. + for r in requirements: + # Don't consider conditional requirements. + if not r.condition: + required[r.feature] = r + + for p in properties: + # Skip conditional properties + if p.condition: + result.add(p) + # No processing for free properties + elif p.feature.free: + result.add(p) + else: + if p.feature in required: + result.add(required[p.feature]) + else: + result.add(p) + + return sequence.unique(list(result) + requirements) + +def translate_paths (properties, path): + """ Interpret all path properties in 'properties' as relative to 'path' + The property values are assumed to be in system-specific form, and + will be translated into normalized form. 
+ """ + assert is_iterable_typed(properties, Property) + result = [] + + for p in properties: + + if p.feature.path: + values = __re_two_ampersands.split(p.value) + + new_value = "&&".join(os.path.normpath(os.path.join(path, v)) for v in values) + + if new_value != p.value: + result.append(Property(p.feature, new_value, p.condition)) + else: + result.append(p) + + else: + result.append (p) + + return result + +def translate_indirect(properties, context_module): + """Assumes that all feature values that start with '@' are + names of rules, used in 'context-module'. Such rules can be + either local to the module or global. Qualified local rules + with the name of the module.""" + assert is_iterable_typed(properties, Property) + assert isinstance(context_module, basestring) + result = [] + for p in properties: + if p.value[0] == '@': + q = qualify_jam_action(p.value[1:], context_module) + get_manager().engine().register_bjam_action(q) + result.append(Property(p.feature, '@' + q, p.condition)) + else: + result.append(p) + + return result + +def validate (properties): + """ Exit with error if any of the properties is not valid. + properties may be a single property or a sequence of properties. + """ + if isinstance(properties, Property): + properties = [properties] + assert is_iterable_typed(properties, Property) + for p in properties: + __validate1(p) + +def expand_subfeatures_in_conditions (properties): + assert is_iterable_typed(properties, Property) + result = [] + for p in properties: + + if not p.condition: + result.append(p) + else: + expanded = [] + for c in p.condition: + # It common that condition includes a toolset which + # was never defined, or mentiones subfeatures which + # were never defined. In that case, validation will + # only produce an spirious error, so don't validate. + expanded.extend(feature.expand_subfeatures ([c], True)) + + # we need to keep LazyProperties lazy + if isinstance(p, LazyProperty): + value = p.value + feature_name = get_grist(value) + value = value.replace(feature_name, '') + result.append(LazyProperty(feature_name, value, condition=expanded)) + else: + result.append(Property(p.feature, p.value, expanded)) + + return result + +# FIXME: this should go +def split_conditional (property): + """ If 'property' is conditional property, returns + condition and the property, e.g + debug,gcc:full will become + debug,gcc full. + Otherwise, returns empty string. + """ + assert isinstance(property, basestring) + m = __re_split_conditional.match (property) + + if m: + return (m.group (1), '<' + m.group (2)) + + return None + + +def select (features, properties): + """ Selects properties which correspond to any of the given features. + """ + assert is_iterable_typed(properties, basestring) + result = [] + + # add any missing angle brackets + features = add_grist (features) + + return [p for p in properties if get_grist(p) in features] + +def validate_property_sets (sets): + if __debug__: + from .property_set import PropertySet + assert is_iterable_typed(sets, PropertySet) + for s in sets: + validate(s.all()) + +def evaluate_conditionals_in_context (properties, context): + """ Removes all conditional properties which conditions are not met + For those with met conditions, removes the condition. 
Properties + in conditions are looked up in 'context' + """ + if __debug__: + from .property_set import PropertySet + assert is_iterable_typed(properties, Property) + assert isinstance(context, PropertySet) + base = [] + conditional = [] + + for p in properties: + if p.condition: + conditional.append (p) + else: + base.append (p) + + result = base[:] + for p in conditional: + + # Evaluate condition + # FIXME: probably inefficient + if all(x in context for x in p.condition): + result.append(Property(p.feature, p.value)) + + return result + + +def change (properties, feature, value = None): + """ Returns a modified version of properties with all values of the + given feature replaced by the given value. + If 'value' is None the feature will be removed. + """ + assert is_iterable_typed(properties, basestring) + assert isinstance(feature, basestring) + assert isinstance(value, (basestring, type(None))) + result = [] + + feature = add_grist (feature) + + for p in properties: + if get_grist (p) == feature: + if value: + result.append (replace_grist (value, feature)) + + else: + result.append (p) + + return result + + +################################################################ +# Private functions + +def __validate1 (property): + """ Exit with error if property is not valid. + """ + assert isinstance(property, Property) + msg = None + + if not property.feature.free: + feature.validate_value_string (property.feature, property.value) + + +################################################################### +# Still to port. +# Original lines are prefixed with "# " +# +# +# import utility : ungrist ; +# import sequence : unique ; +# import errors : error ; +# import feature ; +# import regex ; +# import sequence ; +# import set ; +# import path ; +# import assert ; +# +# + + +# rule validate-property-sets ( property-sets * ) +# { +# for local s in $(property-sets) +# { +# validate [ feature.split $(s) ] ; +# } +# } +# + +def remove(attributes, properties): + """Returns a property sets which include all the elements + in 'properties' that do not have attributes listed in 'attributes'.""" + if isinstance(attributes, basestring): + attributes = [attributes] + assert is_iterable_typed(attributes, basestring) + assert is_iterable_typed(properties, basestring) + result = [] + for e in properties: + attributes_new = feature.attributes(get_grist(e)) + has_common_features = 0 + for a in attributes_new: + if a in attributes: + has_common_features = 1 + break + + if not has_common_features: + result += e + + return result + + +def take(attributes, properties): + """Returns a property set which include all + properties in 'properties' that have any of 'attributes'.""" + assert is_iterable_typed(attributes, basestring) + assert is_iterable_typed(properties, basestring) + result = [] + for e in properties: + if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))): + result.append(e) + return result + +def translate_dependencies(properties, project_id, location): + assert is_iterable_typed(properties, Property) + assert isinstance(project_id, basestring) + assert isinstance(location, basestring) + result = [] + for p in properties: + + if not p.feature.dependency: + result.append(p) + else: + v = p.value + m = re.match("(.*)//(.*)", v) + if m: + rooted = m.group(1) + if rooted[0] == '/': + # Either project id or absolute Linux path, do nothing. 
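+                    # For example (illustrative values): a reference like '/boost//thread'
+                    # is already rooted with a project id and is left untouched, while a
+                    # relative reference like 'util//foo' takes the branch below and is
+                    # rooted at the current project's location.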
+ pass + else: + rooted = os.path.join(os.getcwd(), location, rooted) + + result.append(Property(p.feature, rooted + "//" + m.group(2), p.condition)) + + elif os.path.isabs(v): + result.append(p) + else: + result.append(Property(p.feature, project_id + "//" + v, p.condition)) + + return result + + +class PropertyMap: + """ Class which maintains a property set -> string mapping. + """ + def __init__ (self): + self.__properties = [] + self.__values = [] + + def insert (self, properties, value): + """ Associate value with properties. + """ + assert is_iterable_typed(properties, basestring) + assert isinstance(value, basestring) + self.__properties.append(properties) + self.__values.append(value) + + def find (self, properties): + """ Return the value associated with properties + or any subset of it. If more than one + subset has value assigned to it, return the + value for the longest subset, if it's unique. + """ + assert is_iterable_typed(properties, basestring) + return self.find_replace (properties) + + def find_replace(self, properties, value=None): + assert is_iterable_typed(properties, basestring) + assert isinstance(value, (basestring, type(None))) + matches = [] + match_ranks = [] + + for i in range(0, len(self.__properties)): + p = self.__properties[i] + + if b2.util.set.contains (p, properties): + matches.append (i) + match_ranks.append(len(p)) + + best = sequence.select_highest_ranked (matches, match_ranks) + + if not best: + return None + + if len (best) > 1: + raise NoBestMatchingAlternative () + + best = best [0] + + original = self.__values[best] + + if value: + self.__values[best] = value + + return original + +# local rule __test__ ( ) +# { +# import errors : try catch ; +# import feature ; +# import feature : feature subfeature compose ; +# +# # local rules must be explicitly re-imported +# import property : path-order ; +# +# feature.prepare-test property-test-temp ; +# +# feature toolset : gcc : implicit symmetric ; +# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 +# 3.0 3.0.1 3.0.2 : optional ; +# feature define : : free ; +# feature runtime-link : dynamic static : symmetric link-incompatible ; +# feature optimization : on off ; +# feature variant : debug release : implicit composite symmetric ; +# feature rtti : on off : link-incompatible ; +# +# compose debug : _DEBUG off ; +# compose release : NDEBUG on ; +# +# import assert ; +# import "class" : new ; +# +# validate gcc gcc-3.0.1 : $(test-space) ; +# +# assert.result gcc off FOO +# : refine gcc off +# : FOO +# : $(test-space) +# ; +# +# assert.result gcc on +# : refine gcc off +# : on +# : $(test-space) +# ; +# +# assert.result gcc off +# : refine gcc : off : $(test-space) +# ; +# +# assert.result gcc off off:FOO +# : refine gcc : off off:FOO +# : $(test-space) +# ; +# +# assert.result gcc:foo gcc:bar +# : refine gcc:foo : gcc:bar +# : $(test-space) +# ; +# +# assert.result MY_RELEASE +# : evaluate-conditionals-in-context +# release,off:MY_RELEASE +# : gcc release off +# +# ; +# +# try ; +# validate value : $(test-space) ; +# catch "Invalid property 'value': unknown feature 'feature'." ; +# +# try ; +# validate default : $(test-space) ; +# catch \"default\" is not a known value of feature ; +# +# validate WHATEVER : $(test-space) ; +# +# try ; +# validate : $(test-space) ; +# catch "Invalid property '': No value specified for feature 'rtti'." 
; +# +# try ; +# validate value : $(test-space) ; +# catch "value" is not a value of an implicit feature ; +# +# +# assert.result on +# : remove free implicit : gcc foo on : $(test-space) ; +# +# assert.result a +# : select include : a gcc ; +# +# assert.result a +# : select include bar : a gcc ; +# +# assert.result a gcc +# : select include : a gcc ; +# +# assert.result kylix a +# : change gcc a : kylix ; +# +# # Test ordinary properties +# assert.result +# : split-conditional gcc +# ; +# +# # Test properties with ":" +# assert.result +# : split-conditional FOO=A::B +# ; +# +# # Test conditional feature +# assert.result gcc,3.0 FOO +# : split-conditional gcc,3.0:FOO +# ; +# +# feature.finish-test property-test-temp ; +# } +# + diff --git a/src/boost/tools/build/src/build/property_set.py b/src/boost/tools/build/src/build/property_set.py new file mode 100644 index 000000000..3fc86de27 --- /dev/null +++ b/src/boost/tools/build/src/build/property_set.py @@ -0,0 +1,498 @@ +# Status: ported. +# Base revision: 40480 + +# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. + +import hashlib + +import bjam +from b2.util.utility import * +import property, feature +import b2.build.feature +from b2.exceptions import * +from b2.build.property import get_abbreviated_paths +from b2.util.sequence import unique +from b2.util.set import difference +from b2.util import cached, abbreviate_dashed, is_iterable_typed + +from b2.manager import get_manager + + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + """ + global __cache + + # A cache of property sets + # TODO: use a map of weak refs? + __cache = {} + +reset () + + +def create (raw_properties = []): + """ Creates a new 'PropertySet' instance for the given raw properties, + or returns an already existing one. + """ + assert (is_iterable_typed(raw_properties, property.Property) + or is_iterable_typed(raw_properties, basestring)) + # FIXME: propagate to callers. + if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property): + x = raw_properties + else: + x = [property.create_from_string(ps) for ps in raw_properties] + + # These two lines of code are optimized to the current state + # of the Property class. Since this function acts as the caching + # frontend to the PropertySet class modifying these two lines + # could have a severe performance penalty. Be careful. + # It would be faster to sort by p.id, but some projects may rely + # on the fact that the properties are ordered alphabetically. So, + # we maintain alphabetical sorting so as to maintain backward compatibility. + x = sorted(set(x), key=lambda p: (p.feature.name, p.value, p.condition)) + key = tuple(p.id for p in x) + + if key not in __cache: + __cache [key] = PropertySet(x) + + return __cache [key] + +def create_with_validation (raw_properties): + """ Creates new 'PropertySet' instances after checking + that all properties are valid and converting implicit + properties into gristed form. + """ + assert is_iterable_typed(raw_properties, basestring) + properties = [property.create_from_string(s) for s in raw_properties] + property.validate(properties) + + return create(properties) + +def empty (): + """ Returns PropertySet with empty set of properties. 
+ """ + return create () + +def create_from_user_input(raw_properties, jamfile_module, location): + """Creates a property-set from the input given by the user, in the + context of 'jamfile-module' at 'location'""" + assert is_iterable_typed(raw_properties, basestring) + assert isinstance(jamfile_module, basestring) + assert isinstance(location, basestring) + properties = property.create_from_strings(raw_properties, True) + properties = property.translate_paths(properties, location) + properties = property.translate_indirect(properties, jamfile_module) + + project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None) + if not project_id: + project_id = os.path.abspath(location) + properties = property.translate_dependencies(properties, project_id, location) + properties = property.expand_subfeatures_in_conditions(properties) + return create(properties) + + +def refine_from_user_input(parent_requirements, specification, jamfile_module, + location): + """Refines requirements with requirements provided by the user. + Specially handles "-value" syntax in specification + to remove given requirements. + - parent-requirements -- property-set object with requirements + to refine + - specification -- string list of requirements provided by the use + - project-module -- the module to which context indirect features + will be bound. + - location -- the path to which path features are relative.""" + assert isinstance(parent_requirements, PropertySet) + assert is_iterable_typed(specification, basestring) + assert isinstance(jamfile_module, basestring) + assert isinstance(location, basestring) + + if not specification: + return parent_requirements + + + add_requirements = [] + remove_requirements = [] + + for r in specification: + if r[0] == '-': + remove_requirements.append(r[1:]) + else: + add_requirements.append(r) + + if remove_requirements: + # Need to create property set, so that path features + # and indirect features are translated just like they + # are in project requirements. + ps = create_from_user_input(remove_requirements, + jamfile_module, location) + + parent_requirements = create(difference(parent_requirements.all(), + ps.all())) + specification = add_requirements + + requirements = create_from_user_input(specification, + jamfile_module, location) + + return parent_requirements.refine(requirements) + +class PropertySet: + """ Class for storing a set of properties. + - there's 1<->1 correspondence between identity and value. No + two instances of the class are equal. To maintain this property, + the 'PropertySet.create' rule should be used to create new instances. + Instances are immutable. + + - each property is classified with regard to it's effect on build + results. Incidental properties have no effect on build results, from + Boost.Build point of view. Others are either free, or non-free, which we + call 'base'. Each property belong to exactly one of those categories and + it's possible to get list of properties in each category. + + In addition, it's possible to get list of properties with specific + attribute. + + - several operations, like and refine and as_path are provided. They all use + caching whenever possible. 
+ """ + def __init__ (self, properties=None): + if properties is None: + properties = [] + assert is_iterable_typed(properties, property.Property) + + self.all_ = properties + self._all_set = {p.id for p in properties} + + self.incidental_ = [] + self.free_ = [] + self.base_ = [] + self.dependency_ = [] + self.non_dependency_ = [] + self.conditional_ = [] + self.non_conditional_ = [] + self.propagated_ = [] + self.link_incompatible = [] + + # A cache of refined properties. + self.refined_ = {} + + # A cache of property sets created by adding properties to this one. + self.added_ = {} + + # Cache for the default properties. + self.defaults_ = None + + # Cache for the expanded properties. + self.expanded_ = None + + # Cache for the expanded composite properties + self.composites_ = None + + # Cache for property set with expanded subfeatures + self.subfeatures_ = None + + # Cache for the property set containing propagated properties. + self.propagated_ps_ = None + + # A map of features to its values. + self.feature_map_ = None + + # A tuple (target path, is relative to build directory) + self.target_path_ = None + + self.as_path_ = None + + # A cache for already evaluated sets. + self.evaluated_ = {} + + # stores the list of LazyProperty instances. + # these are being kept separate from the normal + # Property instances so that when this PropertySet + # tries to return one of its attributes, it + # will then try to evaluate the LazyProperty instances + # first before returning. + self.lazy_properties = [] + + for p in properties: + f = p.feature + if isinstance(p, property.LazyProperty): + self.lazy_properties.append(p) + # A feature can be both incidental and free, + # in which case we add it to incidental. + elif f.incidental: + self.incidental_.append(p) + elif f.free: + self.free_.append(p) + else: + self.base_.append(p) + + if p.condition: + self.conditional_.append(p) + else: + self.non_conditional_.append(p) + + if f.dependency: + self.dependency_.append (p) + elif not isinstance(p, property.LazyProperty): + self.non_dependency_.append (p) + + if f.propagated: + self.propagated_.append(p) + if f.link_incompatible: + self.link_incompatible.append(p) + + + def all(self): + return self.all_ + + def raw (self): + """ Returns the list of stored properties. + """ + # create a new list due to the LazyProperties. + # this gives them a chance to evaluate to their + # true Property(). This approach is being + # taken since calculations should not be using + # PropertySet.raw() + return [p._to_raw for p in self.all_] + + def __str__(self): + return ' '.join(p._to_raw for p in self.all_) + + def base (self): + """ Returns properties that are neither incidental nor free. + """ + result = [p for p in self.lazy_properties + if not(p.feature.incidental or p.feature.free)] + result.extend(self.base_) + return result + + def free (self): + """ Returns free properties which are not dependency properties. + """ + result = [p for p in self.lazy_properties + if not p.feature.incidental and p.feature.free] + result.extend(self.free_) + return result + + def non_free(self): + return self.base() + self.incidental() + + def dependency (self): + """ Returns dependency properties. + """ + result = [p for p in self.lazy_properties if p.feature.dependency] + result.extend(self.dependency_) + return self.dependency_ + + def non_dependency (self): + """ Returns properties that are not dependencies. 
+ """ + result = [p for p in self.lazy_properties if not p.feature.dependency] + result.extend(self.non_dependency_) + return result + + def conditional (self): + """ Returns conditional properties. + """ + return self.conditional_ + + def non_conditional (self): + """ Returns properties that are not conditional. + """ + return self.non_conditional_ + + def incidental (self): + """ Returns incidental properties. + """ + result = [p for p in self.lazy_properties if p.feature.incidental] + result.extend(self.incidental_) + return result + + def refine (self, requirements): + """ Refines this set's properties using the requirements passed as an argument. + """ + assert isinstance(requirements, PropertySet) + if requirements not in self.refined_: + r = property.refine(self.all_, requirements.all_) + + self.refined_[requirements] = create(r) + + return self.refined_[requirements] + + def expand (self): + if not self.expanded_: + expanded = feature.expand(self.all_) + self.expanded_ = create(expanded) + return self.expanded_ + + def expand_subfeatures(self): + if not self.subfeatures_: + self.subfeatures_ = create(feature.expand_subfeatures(self.all_)) + return self.subfeatures_ + + def evaluate_conditionals(self, context=None): + assert isinstance(context, (PropertySet, type(None))) + if not context: + context = self + + if context not in self.evaluated_: + # FIXME: figure why the call messes up first parameter + self.evaluated_[context] = create( + property.evaluate_conditionals_in_context(self.all(), context)) + + return self.evaluated_[context] + + def propagated (self): + if not self.propagated_ps_: + self.propagated_ps_ = create (self.propagated_) + return self.propagated_ps_ + + def add_defaults (self): + # FIXME: this caching is invalidated when new features + # are declare inside non-root Jamfiles. + if not self.defaults_: + expanded = feature.add_defaults(self.all_) + self.defaults_ = create(expanded) + return self.defaults_ + + def as_path (self): + if not self.as_path_: + + def path_order (p1, p2): + + i1 = p1.feature.implicit + i2 = p2.feature.implicit + + if i1 != i2: + return i2 - i1 + else: + return cmp(p1.feature.name, p2.feature.name) + + # trim redundancy + properties = feature.minimize(self.base_) + + # sort according to path_order + properties.sort (path_order) + + components = [] + for p in properties: + f = p.feature + if f.implicit: + components.append(p.value) + else: + value = f.name.replace(':', '-') + "-" + p.value + if property.get_abbreviated_paths(): + value = abbreviate_dashed(value) + components.append(value) + + self.as_path_ = '/'.join(components) + + return self.as_path_ + + def target_path (self): + """ Computes the target path that should be used for + target with these properties. + Returns a tuple of + - the computed path + - if the path is relative to build directory, a value of + 'true'. + """ + if not self.target_path_: + # The feature can be used to explicitly + # change the location of generated targets + l = self.get ('') + if l: + computed = l[0] + is_relative = False + + else: + p = self.as_path() + if hash_maybe: + p = hash_maybe(p) + + # Really, an ugly hack. Boost regression test system requires + # specific target paths, and it seems that changing it to handle + # other directory layout is really hard. For that reason, + # we teach V2 to do the things regression system requires. + # The value o '' is predended to the path. 
+ prefix = self.get ('') + + if prefix: + if len (prefix) > 1: + raise AlreadyDefined ("Two properties specified: '%s'" % prefix) + + computed = os.path.join(prefix[0], p) + + else: + computed = p + + if not computed: + computed = "." + + is_relative = True + + self.target_path_ = (computed, is_relative) + + return self.target_path_ + + def add (self, ps): + """ Creates a new property set containing the properties in this one, + plus the ones of the property set passed as argument. + """ + assert isinstance(ps, PropertySet) + if ps not in self.added_: + self.added_[ps] = create(self.all_ + ps.all()) + return self.added_[ps] + + def add_raw (self, properties): + """ Creates a new property set containing the properties in this one, + plus the ones passed as argument. + """ + return self.add (create (properties)) + + + def get (self, feature): + """ Returns all values of 'feature'. + """ + if type(feature) == type([]): + feature = feature[0] + if not isinstance(feature, b2.build.feature.Feature): + feature = b2.build.feature.get(feature) + assert isinstance(feature, b2.build.feature.Feature) + + if self.feature_map_ is None: + self.feature_map_ = {} + + for v in self.all_: + if v.feature not in self.feature_map_: + self.feature_map_[v.feature] = [] + self.feature_map_[v.feature].append(v.value) + + return self.feature_map_.get(feature, []) + + @cached + def get_properties(self, feature): + """Returns all contained properties associated with 'feature'""" + if not isinstance(feature, b2.build.feature.Feature): + feature = b2.build.feature.get(feature) + assert isinstance(feature, b2.build.feature.Feature) + + result = [] + for p in self.all_: + if p.feature == feature: + result.append(p) + return result + + def __contains__(self, item): + return item.id in self._all_set + +def hash(p): + m = hashlib.md5() + m.update(p) + return m.hexdigest() + +hash_maybe = hash if "--hash" in bjam.variable("ARGV") else None + diff --git a/src/boost/tools/build/src/build/readme.txt b/src/boost/tools/build/src/build/readme.txt new file mode 100644 index 000000000..b15055b8e --- /dev/null +++ b/src/boost/tools/build/src/build/readme.txt @@ -0,0 +1,11 @@ +Copyright 2001, 2002 Dave Abrahams +Copyright 2002 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE_1_0.txt or copy at +http://www.boost.org/LICENSE_1_0.txt) + +Development code for new build system. To run unit tests for jam code, execute: + + bjam --debug --build-system=test + +Comprehensive tests require Python. See ../test/readme.txt diff --git a/src/boost/tools/build/src/build/scanner.jam b/src/boost/tools/build/src/build/scanner.jam new file mode 100644 index 000000000..a7f9254df --- /dev/null +++ b/src/boost/tools/build/src/build/scanner.jam @@ -0,0 +1,163 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Implements scanners: objects computing implicit dependencies for files, such +# as includes in C++. +# +# A scanner has a regular expression used to find the dependencies, some data +# needed to interpret those dependencies (e.g., include paths), and code which +# establishing needed relationships between actual jam targets. +# +# Scanner objects are created by actions when they try to actualize virtual +# targets, passed to the virtual-target.actualize() method and are then +# associated with actual targets. 
It is possible to use several scanners for a +# single virtual-target. For example, a single source file might be compiled +# twice - each time using a different include path. In this case, two separate +# actual targets will be created, each having a scanner of its own. +# +# Typically, scanners are created from target type and the action's properties, +# using the rule 'get' in this module. Directly creating scanners is not +# recommended, as it might create multiple equvivalent but different instances, +# and lead to unnecessary actual target duplication. However, actions can also +# create scanners in a special way, instead of relying on just the target type. + +import "class" : new ; +import property ; +import property-set ; +import virtual-target ; + +# Base scanner class. +# +class scanner +{ + rule __init__ ( ) + { + } + + # Returns a pattern to use for scanning. + # + rule pattern ( ) + { + import errors : error : errors.error ; + errors.error "method must be overridden" ; + } + + # Establish necessary relationship between targets, given an actual target + # being scanned and a list of pattern matches in that file. + # + rule process ( target : matches * ) + { + import errors : error : errors.error ; + errors.error "method must be overridden" ; + } +} + + +# Registers a new generator class, specifying a set of properties relevant to +# this scanner. Constructor for that class should have one parameter: a list of +# properties. +# +rule register ( scanner-class : relevant-properties * ) +{ + .registered += $(scanner-class) ; + .relevant-properties.$(scanner-class) = $(relevant-properties) ; +} + + +# Common scanner class, usable when there is only one kind of includes (unlike +# C, where "" and <> includes have different search paths). +# +class common-scanner : scanner +{ + import scanner ; + + rule __init__ ( includes * ) + { + scanner.__init__ ; + self.includes = $(includes) ; + } + + rule process ( target : matches * : binding ) + { + local target_path = [ NORMALIZE_PATH $(binding:D) ] ; + + NOCARE $(matches) ; + INCLUDES $(target) : $(matches) ; + SEARCH on $(matches) = $(target_path) $(self.includes:G=) ; + ISFILE $(matches) ; + + scanner.propagate $(__name__) : $(matches) : $(target) ; + } +} + + +# Returns an instance of a previously registered scanner, with the specified +# properties. +# +rule get ( scanner-class : property-set ) +{ + if ! $(scanner-class) in $(.registered) + { + import errors ; + errors.error "attempt to get an unregistered scanner" ; + } + + local r = $(.rv-cache.$(property-set)) ; + if ! $(r) + { + r = [ property-set.create + [ property.select $(.relevant-properties.$(scanner-class)) : + [ $(property-set).raw ] ] ] ; + .rv-cache.$(property-set) = $(r) ; + } + + if ! $(scanner.$(scanner-class).$(r:J=-)) + { + local s = [ new $(scanner-class) [ $(r).raw ] ] ; + scanner.$(scanner-class).$(r:J=-) = $(s) ; + } + return $(scanner.$(scanner-class).$(r:J=-)) ; +} + + +# Installs the specified scanner on the actual target 'target'. +# +rule install ( scanner : target ) +{ + HDRSCAN on $(target) = [ $(scanner).pattern ] ; + SCANNER on $(target) = $(scanner) ; + HDRRULE on $(target) = scanner.hdrrule ; + + # Scanner reflects differences in properties affecting binding of 'target', + # which will be known when processing includes for it, and give information + # on how to interpret different include types (e.g. quoted vs. those in + # angle brackets in C files). 
+ HDRGRIST on $(target) = $(scanner) ; +} + + +# Propagate scanner settings from 'including-target' to 'targets'. +# +rule propagate ( scanner : targets * : including-target ) +{ + HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ; + SCANNER on $(targets) = $(scanner) ; + HDRRULE on $(targets) = scanner.hdrrule ; + HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ; +} + + +rule hdrrule ( target : matches * : binding ) +{ + local scanner = [ on $(target) return $(SCANNER) ] ; + $(scanner).process $(target) : $(matches) : $(binding) ; +} + + +# hdrrule must be available at global scope so it can be invoked by header +# scanning. +# +IMPORT scanner : hdrrule : : scanner.hdrrule ; diff --git a/src/boost/tools/build/src/build/scanner.py b/src/boost/tools/build/src/build/scanner.py new file mode 100644 index 000000000..2d6e4abca --- /dev/null +++ b/src/boost/tools/build/src/build/scanner.py @@ -0,0 +1,167 @@ +# Status: ported. +# Base revision: 45462 +# +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Implements scanners: objects that compute implicit dependencies for +# files, such as includes in C++. +# +# A scanner has a regular expression used to find dependencies, some +# data needed to interpret those dependencies (for example, include +# paths), and code which actually establishes the needed relationships +# between actual jam targets. +# +# Scanner objects are created by actions, when they try to actualize +# virtual targets, passed to the 'virtual-target.actualize' method and are +# then associated with actual targets. It is possible to use +# several scanners for a virtual-target. For example, a single source +# might be used by two compile actions, with different include paths. +# In this case, two different actual targets will be created, each +# having a scanner of its own. +# +# Typically, scanners are created from the target type and the action's +# properties, using the rule 'get' in this module. Directly creating +# scanners is not recommended, because it might create many equivalent +# but different instances, and lead to unneeded duplication of +# actual targets. However, actions can also create scanners in a special +# way, instead of relying on just the target type. +import property +import bjam +import os +from b2.manager import get_manager +from b2.util import is_iterable_typed + + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + """ + global __scanners, __rv_cache, __scanner_cache + + # Maps registered scanner classes to relevant properties + __scanners = {} + + # A cache of scanners. + # The key is: class_name.properties_tag, where properties_tag is the concatenation + # of all relevant properties, separated by '-' + __scanner_cache = {} + +reset () + + +def register(scanner_class, relevant_properties): + """ Registers a new scanner class, specifying a set of + properties relevant to this scanner. The constructor for that class + should have one parameter: a list of properties. 
+ """ + assert issubclass(scanner_class, Scanner) + assert isinstance(relevant_properties, basestring) + __scanners[str(scanner_class)] = relevant_properties + +def registered(scanner_class): + """ Returns true iff a scanner of that class is registered + """ + return str(scanner_class) in __scanners + +def get(scanner_class, properties): + """ Returns an instance of previously registered scanner + with the specified properties. + """ + assert issubclass(scanner_class, Scanner) + assert is_iterable_typed(properties, basestring) + scanner_name = str(scanner_class) + + if not registered(scanner_name): + raise BaseException ("attempt to get unregistered scanner: %s" % scanner_name) + + relevant_properties = __scanners[scanner_name] + r = property.select(relevant_properties, properties) + + scanner_id = scanner_name + '.' + '-'.join(r) + + if scanner_id not in __scanner_cache: + __scanner_cache[scanner_id] = scanner_class(r) + + return __scanner_cache[scanner_id] + +class Scanner: + """ Base scanner class. + """ + def __init__ (self): + pass + + def pattern (self): + """ Returns a pattern to use for scanning. + """ + raise BaseException ("method must be overridden") + + def process (self, target, matches, binding): + """ Establish necessary relationship between targets, + given actual target being scanned, and a list of + pattern matches in that file. + """ + raise BaseException ("method must be overridden") + + +# Common scanner class, which can be used when there's only one +# kind of includes (unlike C, where "" and <> includes have different +# search paths). +class CommonScanner(Scanner): + + def __init__ (self, includes): + Scanner.__init__(self) + self.includes = includes + + def process(self, target, matches, binding): + + target_path = os.path.normpath(os.path.dirname(binding[0])) + bjam.call("mark-included", target, matches) + + get_manager().engine().set_target_variable(matches, "SEARCH", + [target_path] + self.includes) + get_manager().scanners().propagate(self, matches) + +class ScannerRegistry: + + def __init__ (self, manager): + self.manager_ = manager + self.count_ = 0 + self.exported_scanners_ = {} + + def install (self, scanner, target, vtarget): + """ Installs the specified scanner on actual target 'target'. + vtarget: virtual target from which 'target' was actualized. + """ + assert isinstance(scanner, Scanner) + assert isinstance(target, basestring) + assert isinstance(vtarget, basestring) + engine = self.manager_.engine() + engine.set_target_variable(target, "HDRSCAN", scanner.pattern()) + if scanner not in self.exported_scanners_: + exported_name = "scanner_" + str(self.count_) + self.count_ = self.count_ + 1 + self.exported_scanners_[scanner] = exported_name + bjam.import_rule("", exported_name, scanner.process) + else: + exported_name = self.exported_scanners_[scanner] + + engine.set_target_variable(target, "HDRRULE", exported_name) + + # scanner reflects difference in properties affecting + # binding of 'target', which will be known when processing + # includes for it, will give information on how to + # interpret quoted includes. 
+ engine.set_target_variable(target, "HDRGRIST", str(id(scanner))) + pass + + def propagate(self, scanner, targets): + assert isinstance(scanner, Scanner) + assert is_iterable_typed(targets, basestring) or isinstance(targets, basestring) + engine = self.manager_.engine() + engine.set_target_variable(targets, "HDRSCAN", scanner.pattern()) + engine.set_target_variable(targets, "HDRRULE", + self.exported_scanners_[scanner]) + engine.set_target_variable(targets, "HDRGRIST", str(id(scanner))) + diff --git a/src/boost/tools/build/src/build/targets.jam b/src/boost/tools/build/src/build/targets.jam new file mode 100644 index 000000000..dfbd7a1d4 --- /dev/null +++ b/src/boost/tools/build/src/build/targets.jam @@ -0,0 +1,1820 @@ +# Copyright Vladimir Prus 2002. +# Copyright Rene Rivera 2006. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports 'abstract' targets, which are targets explicitly defined in a +# Jamfile. +# +# Abstract targets are represented by classes derived from 'abstract-target' +# class. The first abstract target is 'project-target', which is created for +# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module +# (see project.jam). +# +# Project targets keep a list of 'main-target' instances. A main target is what +# the user explicitly defines in a Jamfile. It is possible to have several +# definitions for a main target, for example to have different lists of sources +# for different platforms. So, main targets keep a list of alternatives. +# +# Each alternative is an instance of 'abstract-target'. When a main target +# subvariant is defined by some rule, that rule will decide what class to use, +# create an instance of that class and add it to the list of alternatives for +# the main target. +# +# Rules supplied by the build system will use only targets derived from the +# 'basic-target' class, which will provide some default behaviour. There will be +# different classes derived from it such as 'make-target', created by the 'make' +# rule, and 'typed-target', created by rules such as 'exe' and 'lib'. +# +# +--------------------------+ +# | abstract-target | +# +==========================+ +# | name | +# | project | +# | | +# | generate(properties) = 0 | +# +-------------+------------+ +# | +# ^ +# / \ +# +-+-+ +# | +# | +# +------------------+-----+-------------------------------+ +# | | | +# | | | +# +-----------+----------+ +------+------+ +-------+------+ +# | project-target | | main-target | | basic-target | +# +======================+ 1 * +=============+ alternatives +==============+ +# | generate(properties) |o-----+ generate |<>------------->| generate | +# | main-target | +-------------+ | construct = 0| +# +----------------------+ +-------+------+ +# | +# ^ +# / \ +# +-+-+ +# | +# | +# ...--+-----------------+-----------------+------------------+ +# | | | | +# | | | | +# ... ---+-----+ +-------+------+ +------+------+ +-------+------+ +# | | typed-target | | make-target | | stage-target | +# . +==============+ +=============+ +==============+ +# . | construct | | construct | | construct | +# +--------------+ +-------------+ +--------------+ + +import assert ; +import build-request ; +import "class" : new ; +import feature ; +import indirect ; +import path ; +import property ; +import property-set ; +import sequence ; +import set ; +import toolset ; + + +# Base class for all abstract targets. 
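Before the Jam class definitions that follow, a rough Python analogue of the hierarchy
pictured in the diagram above may help map it to code (an illustrative sketch only,
with a drastically simplified interface; the project's actual Python port in
targets.py differs in detail):

    class AbstractTarget(object):
        """Common base: every target knows its name and owning project."""
        def __init__(self, name, project):
            self.name = name
            self.project = project

        def generate(self, property_set):
            # 'generate(properties) = 0' in the diagram: subclasses must override.
            raise NotImplementedError

    class MainTarget(AbstractTarget):
        """What a Jamfile declares; keeps a list of alternatives."""
        def __init__(self, name, project):
            AbstractTarget.__init__(self, name, project)
            self.alternatives = []

        def add_alternative(self, target):
            self.alternatives.append(target)

    class BasicTarget(AbstractTarget):
        """Base for typed/make/stage targets; subclasses supply construct()."""
        def construct(self, name, sources, property_set):
            # 'construct = 0' in the diagram.
            raise NotImplementedError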
+# +class abstract-target +{ + import assert ; + import "class" ; + import errors ; + import project ; + + rule __init__ ( name # Name of the target in Jamfile. + : project-target # The project target to which this one belongs. + ) + { + # Note: it might seem that we don't need either name or project at all. + # However, there are places where we really need it. One example is + # error messages which should name problematic targets. Another is + # setting correct paths for sources and generated files. + + self.name = $(name) ; + self.project = $(project-target) ; + self.location = [ errors.nearest-user-location ] ; + } + + # Returns the name of this target. + rule name ( ) + { + return $(self.name) ; + } + + # Returns the project for this target. + rule project ( ) + { + return $(self.project) ; + } + + # Return the location where the target was declared. + rule location ( ) + { + return $(self.location) ; + } + + # Returns a user-readable name for this target. + rule full-name ( ) + { + local location = [ $(self.project).get location ] ; + return $(location)/$(self.name) ; + } + + # Generates virtual targets for this abstract target using the specified + # properties, unless a different value of some feature is required by the + # target. + # On success, returns: + # - a property-set with the usage requirements to be applied to dependants + # - a list of produced virtual targets, which may be empty. + # If 'property-set' is empty, performs the default build of this target, in + # a way specific to the derived class. + # + rule generate ( property-set ) + { + errors.error "method should be defined in derived classes" ; + } + + rule rename ( new-name ) + { + self.name = $(new-name) ; + } +} + + +if --debug-building in [ modules.peek : ARGV ] +{ + modules.poke : .debug-building : true ; +} + + +rule indent ( ) +{ + return $(.indent:J="") ; +} + + +rule increase-indent ( ) +{ + .indent += " " ; +} + + +rule decrease-indent ( ) +{ + .indent = $(.indent[2-]) ; +} + + +# Project target class (derived from 'abstract-target'). +# +# This class has the following responsibilities: +# - Maintaining a list of main targets in this project and building them. +# +# Main targets are constructed in two stages: +# - When Jamfile is read, a number of calls to 'add-alternative' is made. At +# that time, alternatives can also be renamed to account for inline targets. +# - The first time 'main-target' or 'has-main-target' rule is called, all +# alternatives are enumerated and main targets are created. +# +class project-target : abstract-target +{ + import project ; + import targets ; + import path ; + import print ; + import property-set ; + import set ; + import sequence ; + import toolset ; + import "class" : new ; + + rule __init__ ( name : project-module parent-project ? + : requirements * : default-build * ) + { + abstract-target.__init__ $(name) : $(__name__) ; + + self.project-module = $(project-module) ; + self.location = [ project.attribute $(project-module) location ] ; + self.requirements = $(requirements) ; + self.default-build = $(default-build) ; + + if $(parent-project) + { + inherit $(parent-project) ; + } + } + + # This is needed only by the 'make' rule. Need to find a way to make 'make' + # work without this method. + # + rule project-module ( ) + { + return $(self.project-module) ; + } + + rule get ( attribute ) + { + return [ project.attribute $(self.project-module) $(attribute) ] ; + } + + rule build-dir ( ) + { + if ! $(self.build-dir) + { + self.build-dir = [ get build-dir ] ; + if ! 
$(self.build-dir) + { + local location = [ $(self.project).get location ] ; + if $(location) + { + self.build-dir = [ path.join $(location) bin ] ; + } + else + { + local id = [ get id ] ; + if $(id) + { + local rid = [ MATCH ^/(.*) : $(id) ] ; + self.build-dir = [ path.join [ project.standalone-build-dir ] $(rid) ] ; + } + else + { + errors.error "Could not create build-dir for standalone project $(self.project-module:E=)." + : "Missing project id" ; + } + } + } + } + return $(self.build-dir) ; + } + + # Generates all possible targets contained in this project. + # + rule generate ( property-set * ) + { + if [ modules.peek : .debug-building ] + { + ECHO [ targets.indent ] "building project" [ name ] + " ('$(__name__)') with" [ $(property-set).raw ] ; + targets.increase-indent ; + } + + local usage-requirements = [ property-set.empty ] ; + local targets ; + + for local t in [ targets-to-build ] + { + local g = [ $(t).generate $(property-set) ] ; + usage-requirements = [ $(usage-requirements).add $(g[1]) ] ; + targets += $(g[2-]) ; + } + targets.decrease-indent ; + return $(usage-requirements) [ sequence.unique $(targets) ] ; + } + + # Computes and returns a list of abstract-target instances which must be + # built when this project is built. + # + rule targets-to-build ( ) + { + local result ; + + if ! $(self.built-main-targets) + { + build-main-targets ; + } + + # Collect all main targets here, except for "explicit" ones. + for local t in $(self.main-targets) + { + if ! [ $(t).name ] in $(self.explicit-targets) + { + result += $(t) ; + } + } + + # Collect all projects referenced via "projects-to-build" attribute. + local self-location = [ get location ] ; + for local pn in [ get projects-to-build ] + { + result += [ find $(pn)/ ] ; + } + + return $(result) ; + } + + # Add 'target' to the list of targets in this project that should be build + # only by explicit request + # + rule mark-target-as-explicit ( target-name * ) + { + # Record the name of the target, not instance, since this rule is called + # before main target instances are created. + self.explicit-targets += $(target-name) ; + } + + rule mark-target-as-always ( target-name * ) + { + # Record the name of the target, not instance, since this rule is called + # before main target instances are created. + self.always-targets += $(target-name) ; + } + + # Add new target alternative + # + rule add-alternative ( target-instance ) + { + if $(self.built-main-targets) + { + import errors : error : errors.error ; + errors.error add-alternative called when main targets are already + created. : in project [ full-name ] ; + } + self.alternatives += $(target-instance) ; + if ! ( [ $(target-instance).name ] in $(self.alternative-names) ) + { + self.alternative-names += [ $(target-instance).name ] ; + } + } + + # Checks if an alternative was declared for the target. + # Unlike checking for a main target this does not require + # building the main targets. And hence can be used in/directly + # while loading a project. + # + rule has-alternative-for-target ( target-name ) + { + if $(target-name) in $(self.alternative-names) + { + return 1 ; + } + } + + # Returns a 'main-target' class instance corresponding to 'name'. + # + rule main-target ( name ) + { + if ! $(self.built-main-targets) + { + build-main-targets ; + } + return $(self.main-target.$(name)) ; + } + + # Returns whether a main target with the specified name exists. + # + rule has-main-target ( name ) + { + if ! 
$(self.built-main-targets) + { + build-main-targets ; + } + + if $(self.main-target.$(name)) + { + return true ; + } + } + + # Worker function for the find rule not implementing any caching and simply + # returning nothing in case the target can not be found. + # + rule find-really ( id ) + { + local result ; + local current-location = [ get location ] ; + + local split = [ MATCH ^(.*)//(.*)$ : $(id) ] ; + local project-part = $(split[1]) ; + local target-part = $(split[2]) ; + + local extra-error-message ; + if $(project-part) + { + # There is an explicitly specified project part in id. Looks up the + # project and passes the request to it. + local pm = [ project.find $(project-part) : $(current-location) ] ; + if $(pm) + { + project-target = [ project.target $(pm) ] ; + result = [ $(project-target).find $(target-part) : no-error ] ; + } + else + { + extra-error-message = could not resolve project reference + '$(project-part)' ; + if ! [ path.is-rooted $(project-part) ] + { + local rooted = [ path.root $(project-part) / ] ; + if $(rooted) && [ project.is-registered-id $(rooted) ] + { + extra-error-message += - possibly missing a leading + slash ('/') character. ; + } + } + } + } + else + { + # Interpret target-name as name of main target. Need to do this + # before checking for file. Consider the following scenario with a + # toolset not modifying its executable's names, e.g. gcc on + # Unix-like platforms: + # + # exe test : test.cpp ; + # install s : test : . ; + # + # After the first build we would have a target named 'test' in the + # Jamfile and a file named 'test' on the disk. We need the target to + # override the file. + result = [ main-target $(id) ] ; + + # Interpret id as an existing file reference. + if ! $(result) + { + result = [ new file-reference [ path.make $(id) ] : + $(self.project) ] ; + if ! [ $(result).exists ] + { + result = ; + } + } + + # Interpret id as project-id. + if ! $(result) + { + local project-module = [ project.find $(id) : + $(current-location) ] ; + if $(project-module) + { + result = [ project.target $(project-module) ] ; + } + } + } + + return $(result:E="") $(extra-error-message) ; + } + + # Find and return the target with the specified id, treated relative to + # self. Id may specify either a target or a file name with the target taking + # priority. May report an error or return nothing if the target is not found + # depending on the 'no-error' parameter. + # + rule find ( id : no-error ? ) + { + local v = $(.id.$(id)) ; + local extra-error-message ; + if ! $(v) + { + local r = [ find-really $(id) ] ; + v = $(r[1]) ; + extra-error-message = $(r[2-]) ; + if ! $(v) + { + v = none ; + } + .id.$(id) = $(v) ; + } + + if $(v) != none + { + return $(v) ; + } + else if ! $(no-error) + { + local current-location = [ get location ] ; + import errors : user-error : errors.user-error ; + errors.user-error Unable to find file or target named + : " " '$(id)' + : referred to from project at + : " " '$(current-location)' + : $(extra-error-message) ; + } + } + + rule build-main-targets ( ) + { + self.built-main-targets = true ; + for local a in $(self.alternatives) + { + local name = [ $(a).name ] ; + local target = $(self.main-target.$(name)) ; + if ! 
$(target) + { + local t = [ new main-target $(name) : $(self.project) ] ; + self.main-target.$(name) = $(t) ; + self.main-targets += $(t) ; + target = $(self.main-target.$(name)) ; + } + + if $(name) in $(self.always-targets) + { + $(a).always ; + } + + $(target).add-alternative $(a) ; + } + } + + # Accessor, add a constant. + # + rule add-constant ( + name # Variable name of the constant. + : value + # Value of the constant. + : type ? # Optional type of value. + ) + { + switch $(type) + { + case path : + local r ; + for local v in $(value) + { + local l = $(self.location) ; + if ! $(l) + { + # Project corresponding to config files do not have + # 'location' attribute, but do have source location. It + # might be more reasonable to make every project have a + # location and use some other approach to prevent buildable + # targets in config files, but that has been left for later. + l = [ get source-location ] ; + } + v = [ path.root [ path.make $(v) ] $(l) ] ; + # Now make the value absolute path. + v = [ path.root $(v) [ path.pwd ] ] ; + # Constants should be in platform-native form. + v = [ path.native $(v) ] ; + r += $(v) ; + } + value = $(r) ; + } + if ! $(name) in $(self.constants) + { + self.constants += $(name) ; + } + self.constant.$(name) = $(value) ; + # Inject the constant in the scope of the Jamroot module. + modules.poke $(self.project-module) : $(name) : $(value) ; + } + + rule inherit ( parent ) + { + for local c in [ modules.peek $(parent) : self.constants ] + { + # No need to pass the type. Path constants were converted to + # absolute paths already by parent. + add-constant $(c) : [ modules.peek $(parent) : self.constant.$(c) ] + ; + } + + # Import rules from parent. + local this-module = [ project-module ] ; + local parent-module = [ $(parent).project-module ] ; + # Do not import rules coming from 'project-rules' as they must be + # imported localized. + local user-rules = [ set.difference + [ RULENAMES $(parent-module) ] : + [ RULENAMES project-rules ] ] ; + IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) + ; + EXPORT $(this-module) : $(user-rules) ; + + toolset.inherit-flags $(this-module) : $(parent-module) ; + } +} + + +# Helper rules to detect cycles in main target references. +# +local rule start-building ( main-target-instance ) +{ + if $(main-target-instance) in $(.targets-being-built) + { + local names ; + for local t in $(.targets-being-built) $(main-target-instance) + { + local name = [ $(t).full-name ] ; + if $(t) = $(main-target-instance) + { + names += ***$(name)*** ; + } + else + { + names += $(name) ; + } + } + + import errors ; + errors.error "Recursion in main target references" + : "the following target are being built currently:" + : $(names:J=" -> ") ; + } + .targets-being-built += $(main-target-instance) ; +} + + +local rule end-building ( main-target-instance ) +{ + .targets-being-built = $(.targets-being-built[1--2]) ; +} + + +# A named top-level target in Jamfile. 
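+# A main target records the alternatives declared for it via add-alternative and,
+# when generated, selects the best viable alternative for the requested properties
+# (see select-alternatives below).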
+# +class main-target : abstract-target +{ + import assert ; + import feature ; + import property-set ; + import sequence ; + import set ; + import targets : start-building end-building ; + import utility ; + + rule __init__ ( name : project ) + { + abstract-target.__init__ $(name) : $(project) ; + } + + # Add a new alternative for this target + rule add-alternative ( target ) + { + local d = [ $(target).default-build ] ; + if $(self.alternatives) && ( $(self.default-build) != $(d) ) + { + import errors : error : errors.error ; + errors.error "default build must be identical in all alternatives" + : "main target is" [ full-name ] + : "with" [ $(d).raw ] + : "differing from previous default build" + [ $(self.default-build).raw ] ; + } + else + { + self.default-build = $(d) ; + } + self.alternatives += $(target) ; + } + + # Returns the best viable alternative for this property-set. See the + # documentation for selection rules. + # + rule select-alternatives ( property-set debug ? ) + { + # When selecting alternatives we have to consider defaults, for example: + # lib l : l.cpp : debug ; + # lib l : l_opt.cpp : release ; + # will not work unless we add default value debug. + property-set = [ $(property-set).add-defaults ] ; + + # The algorithm: we keep the current best viable alternative. When we + # encounter a new best viable alternative, we compare it with the + # current one. + + local best ; + local best-properties ; + + if $(self.alternatives[2-]) + { + local bad ; + local worklist = $(self.alternatives) ; + while $(worklist) && ! $(bad) + { + local v = $(worklist[1]) ; + local properties = [ $(v).match $(property-set) $(debug) ] ; + + if $(properties) != no-match + { + if ! $(best) + { + best = $(v) ; + best-properties = $(properties) ; + } + else + { + if $(properties) = $(best-properties) + { + bad = true ; + } + else if $(properties) in $(best-properties) + { + # Do nothing, this alternative is worse + } + else if $(best-properties) in $(properties) + { + best = $(v) ; + best-properties = $(properties) ; + } + else + { + bad = true ; + } + } + } + worklist = $(worklist[2-]) ; + } + if ! $(bad) + { + return $(best) ; + } + } + else + { + return $(self.alternatives) ; + } + } + + # Features are relevant here if they could affect alternative + # selection. That is, base, non-conditional properties that + # are not identical in all target alternatives. 
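As an aside, the algorithm implemented by select-alternatives above can be summarised
with a small Python model (an illustrative sketch only; 'match' is assumed to be a
hypothetical helper returning the set, e.g. a frozenset, of requirement properties
satisfied by the request, or None when the requirements conflict with it):

    def select_alternative(alternatives, request):
        """Pick the alternative whose matched requirements subsume all the others."""
        best, best_match = None, None
        for alt in alternatives:
            match = alt.match(request)           # hypothetical helper, see above
            if match is None:
                continue                         # not viable for this request
            if best is None or best_match < match:
                best, best_match = alt, match    # strictly larger match wins
            elif not (match < best_match):
                return None                      # equal or incomparable: ambiguous
        return best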
+ rule relevant-features ( ) + { + if $(self.alternatives[2-]) + { + if $(self.relevant-features) + { + return $(self.relevant-features) ; + } + local all-properties ; + for t in $(self.alternatives) + { + local ps = [ $(t).requirements ] ; + ps = [ property-set.create [ $(ps).non-conditional ] ] ; + all-properties += [ $(ps).base ] ; + } + all-properties = [ sequence.unique $(all-properties) ] ; + local result ; + for t in $(self.alternatives) + { + local ps = [ $(t).requirements ] ; + ps = [ property-set.create [ $(ps).non-conditional ] ] ; + local properties = [ set.difference $(all-properties) : [ $(ps).base ] ] ; + result += $(properties:G) ; + } + result = [ sequence.transform utility.ungrist : [ sequence.unique $(result) ] ] ; + self.relevant-features = [ property-set.create $(result) ] ; + return $(self.relevant-features) ; + } + else + { + return [ property-set.empty ] ; + } + } + + rule apply-default-build ( property-set ) + { + return [ targets.apply-default-build $(property-set) : + $(self.default-build) ] ; + } + + # Select an alternative for this main target, by finding all alternatives + # whose requirements are satisfied by 'properties' and picking the one with + # the longest requirements set. Returns the result of calling 'generate' on + # that alternative. + # + rule generate ( property-set ) + { + start-building $(__name__) ; + + local all-property-sets = [ apply-default-build $(property-set) ] ; + local relevant = [ relevant-features ] ; + local usage-requirements = [ property-set.empty ] ; + local result ; + for local p in $(all-property-sets) + { + local r = [ generate-really [ $(p).add $(relevant) ] ] ; + if $(r) + { + usage-requirements = [ $(usage-requirements).add $(r[1]) ] ; + result += $(r[2-]) ; + } + } + end-building $(__name__) ; + return $(usage-requirements) [ sequence.unique $(result) ] ; + } + + # Generates the main target with the given property set and returns a list + # which first element is property-set object containing usage-requirements + # of generated target and with generated virtual target in other elements. + # It is possible that no targets are generated. + # + local rule generate-really ( property-set ) + { + local best-alternatives = [ select-alternatives $(property-set) ] ; + if ! $(best-alternatives) + { + ECHO "error: No best alternative for" [ full-name ] ; + select-alternatives $(property-set) debug ; + return [ property-set.empty ] ; + } + else + { + # Now return virtual targets for the only alternative. + return [ $(best-alternatives).generate $(property-set) ] ; + } + } + + rule rename ( new-name ) + { + abstract-target.rename $(new-name) ; + for local a in $(self.alternatives) + { + $(a).rename $(new-name) ; + } + } +} + + +# Abstract target referring to a source file. This is an artificial entity +# allowing sources to a target to be represented using a list of abstract target +# instances. +# +class file-reference : abstract-target +{ + import virtual-target ; + import property-set ; + import path ; + + rule __init__ ( file : project ) + { + abstract-target.__init__ $(file) : $(project) ; + } + + rule generate ( properties ) + { + return [ property-set.empty ] [ virtual-target.from-file $(self.name) : + [ location ] : $(self.project) ] ; + } + + # Returns true if the referred file really exists. + rule exists ( ) + { + location ; + return $(self.file-path) ; + } + + # Returns the location of target. Needed by 'testing.jam'. + rule location ( ) + { + if ! 
$(self.file-location) + { + local source-location = [ $(self.project).get source-location ] ; + for local src-dir in $(source-location) + { + if ! $(self.file-location) + { + local location = [ path.root $(self.name) $(src-dir) ] ; + if [ CHECK_IF_FILE [ path.native $(location) ] ] + { + self.file-location = $(src-dir) ; + self.file-path = $(location) ; + } + } + } + } + return $(self.file-location) ; + } +} + + +# Given a target-reference, made in context of 'project', returns the +# abstract-target instance that is referred to, as well as properties explicitly +# specified for this reference. +# +rule resolve-reference ( target-reference : project ) +{ + # Separate target name from properties override. + local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ; + local id = $(split[1]) ; + if ! $(split) || ! $(id) + { + import errors ; + errors.error "Malformed target reference $(target-reference)" ; + } + local sproperties = ; + if $(split[3]) + { + sproperties = [ property.make [ feature.split $(split[3]) ] ] ; + sproperties = [ feature.expand $(sproperties) ] ; + } + + # Find the target. + local target = [ $(project).find $(id) ] ; + + return $(target) [ property-set.create $(sproperties) ] ; +} + + +# Attempts to generate the target given by target reference, which can refer +# both to a main target or to a file. Returns a list consisting of +# - usage requirements +# - generated virtual targets, if any +# +rule generate-from-reference ( + target-reference # Target reference. + : project # Project where the reference is made. + : property-set # Properties of the main target that makes the reference. +) +{ + local r = [ resolve-reference $(target-reference) : $(project) ] ; + local target = $(r[1]) ; + local sproperties = $(r[2]) ; + + # Take properties which should be propagated and refine them with + # source-specific requirements. + local propagated = [ $(property-set).propagated ] ; + local rproperties = [ $(propagated).refine $(sproperties) ] ; + if $(rproperties[1]) = "@error" + { + import errors ; + errors.error + "When building" [ full-name ] " with properties " $(properties) : + "Invalid properties specified for " $(source) ":" + $(rproperties[2-]) ; + } + return [ $(target).generate $(rproperties) ] ; +} + + +rule apply-default-build ( property-set : default-build ) +{ + # 1. First, see what properties from default-build are already present in + # property-set. + + local expanded = [ $(property-set).expand ] ; + local raw = [ $(property-set).raw ] ; + local specified-features = [ $(expanded).raw ] ; + specified-features = $(specified-features:G) ; + + local defaults-to-apply ; + for local d in [ $(default-build).raw ] + { + if ! $(d:G) in $(specified-features) + { + defaults-to-apply += $(d) ; + } + } + + # 2. If there are any defaults to be applied, form a new build request. Pass + # it through to 'expand-no-defaults' since default-build might contain + # "release debug" resulting in two property-sets. + local result ; + if $(defaults-to-apply) + { + # We have to compress subproperties here to prevent property lists like: + # msvc 7.1 multi + # + # from being expanded into: + # 7.1/multi + # msvc/7.1/multi + # + # due to a cross-product property combination. That may be an indication + # that build-request.expand-no-defaults is the wrong rule to use here. 
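+        # Hypothetical example of the fan-out performed below: with a raw
+        # request of just <toolset>gcc and defaults-to-apply containing
+        # <variant>debug <variant>release, expand-no-defaults yields the two
+        # property sets <toolset>gcc/<variant>debug and
+        # <toolset>gcc/<variant>release, i.e. one build per default alternative.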
+ properties = [ build-request.expand-no-defaults + [ feature.compress-subproperties $(raw) ] $(defaults-to-apply) ] ; + + if $(properties) + { + for local p in $(properties) + { + result += [ property-set.create + [ feature.split $(p) ] ] ; + } + } + else + { + result = [ property-set.empty ] ; + } + } + else + { + result = $(property-set) ; + } + return $(result) ; +} + + +# Given a build request and requirements, return properties common to dependency +# build request and target requirements. +# +# TODO: Document exactly what 'common properties' are, whether they should +# include default property values, whether they should contain any conditional +# properties or should those be already processed, etc. See whether there are +# any differences between use cases with empty and non-empty build-request as +# well as with requirements containing and those not containing any non-free +# features. +# +rule common-properties ( build-request requirements ) +{ + # For optimization, we add free requirements directly, without using a + # complex algorithm. This gives the complex algorithm a better chance of + # caching results. + local free = [ $(requirements).free ] ; + local non-free = [ property-set.create [ $(requirements).base ] + [ $(requirements).incidental ] ] ; + + local key = .rp.$(build-request)-$(non-free) ; + if ! $($(key)) + { + $(key) = [ common-properties2 $(build-request) $(non-free) ] ; + } + return [ $($(key)).add-raw $(free) ] ; +} + + +# Given a 'context' -- a set of already present properties, and 'requirements', +# decide which extra properties should be applied to 'context'. For conditional +# requirements, this means evaluating the condition. For indirect conditional +# requirements, this means calling a rule. Ordinary requirements are always +# applied. +# +# Handles the situation where evaluating one conditional requirement affects +# conditions of another conditional requirements, such as: +# gcc:release release:RELEASE +# +# If 'what' is 'refined' returns context refined with new requirements. If +# 'what' is 'added' returns just the requirements to be applied. +# +rule evaluate-requirements ( requirements : context ) +{ + # Apply non-conditional requirements. It is possible that further + # conditional requirement change a value set by non-conditional + # requirements. For example: + # + # exe a : a.cpp : single foo:multi ; + # + # I am not sure if this should be an error, or not, especially given that + # + # single + # + # might come from project's requirements. + + local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] + ; + + local raw = [ $(context).raw ] ; + raw = [ property.refine $(raw) : $(unconditional) ] ; + + # We have collected properties that surely must be present in common + # properties. We now try to figure out what other properties should be added + # in order to satisfy rules (4)-(6) from the docs. + + local defaults = [ toolset.defaults ] ; + defaults = [ $(defaults).raw ] ; + + local conditionals = [ $(requirements).conditional ] ; + # The 'count' variable has one element for each conditional feature and for + # each occurrence of '' feature. It is used as a loop + # counter: for each iteration of the loop before we remove one element and + # the property set should stabilize before we are done. It is assumed that + # #conditionals iterations should be enough for properties to propagate + # along conditions in any direction. 
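+    # Hypothetical worked example of the iteration below: with the conditionals
+    #     <toolset>gcc:<variant>release  <variant>release:<define>RELEASE
+    # and a context containing <toolset>gcc, the first pass adds
+    # <variant>release, and only the second pass sees that condition satisfied
+    # and adds <define>RELEASE; a further pass produces no change, so the
+    # properties have stabilized and the loop stops.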
+ local count = $(conditionals) $(defaults) + and-once-more ; + + local added-requirements ; + local added-defaults ; + + local current = $(raw) ; + + local ok ; + while $(count) + { + # We need to expand composites here so that the requirements can + # safely override composite features. + current = [ feature.expand-composites $(current) ] ; + current = [ feature.add-defaults $(current) ] ; + # Evaluate conditionals in context of current properties. + local e = [ property.evaluate-conditionals-in-context $(conditionals) : + $(current) ] ; + local d = [ property.evaluate-conditionals-in-context $(defaults) : + $(current) ] ; + + if $(e) = $(added-requirements) && $(d) = $(added-defaults) + { + # If we got the same result, we have found the final properties. + count = ; + ok = true ; + } + else + { + # Oops, conditional evaluation results have changed. Also 'current' + # contains leftovers from a previous evaluation. Recompute 'current' + # using initial properties and conditional requirements. + added-requirements = $(e) ; + current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ; + added-defaults = $(d) ; + current = [ property.refine $(d) : $(current) ] ; + } + count = $(count[2-]) ; + } + if ! $(ok) + { + import errors ; + errors.error Can not evaluate conditional properties $(conditionals) ; + } + + return [ property-set.create $(current) ] ; +} + + +rule common-properties2 ( build-request requirements ) +{ + return [ evaluate-requirements $(requirements) : $(build-request) ] ; +} + + +rule push-target ( target ) +{ + .targets = $(target) $(.targets) ; +} + +rule pop-target ( ) +{ + .targets = $(.targets[2-]) ; +} + +# Return the metatarget that is currently being generated. +rule current ( ) +{ + return $(.targets[1]) ; +} + + +# Implements the most standard way of constructing main target alternative from +# sources. Allows sources to be either file or other main target and handles +# generation of those dependency targets. +# +class basic-target : abstract-target +{ + import build-request ; + import build-system ; + import "class" : new ; + import feature ; + import property ; + import property-set ; + import sequence ; + import set ; + import targets ; + import virtual-target ; + + rule __init__ ( name : project : sources * : requirements * : + default-build * : usage-requirements * ) + { + abstract-target.__init__ $(name) : $(project) ; + + self.sources = $(sources) ; + if ! $(requirements) + { + requirements = [ property-set.empty ] ; + } + self.requirements = $(requirements) ; + if ! $(default-build) + { + default-build = [ property-set.empty ] ; + } + self.default-build = $(default-build) ; + if ! $(usage-requirements) + { + usage-requirements = [ property-set.empty ] ; + } + self.usage-requirements = $(usage-requirements) ; + + if $(sources:G) + { + import errors : user-error : errors.user-error ; + errors.user-error properties found "in" the 'sources' parameter + "for" [ full-name ] ; + } + } + + rule always ( ) + { + self.always = 1 ; + } + + # Returns the list of abstract-targets which are used as sources. The extra + # properties specified for sources are not represented. The only user for + # this rule at the moment is the "--dump-tests" feature of the test system. + # + rule sources ( ) + { + if ! 
$(self.source-targets) + { + for local s in $(self.sources) + { + self.source-targets += [ targets.resolve-reference $(s) : + $(self.project) ] ; + } + } + return $(self.source-targets) ; + } + + rule requirements ( ) + { + return $(self.requirements) ; + } + + rule default-build ( ) + { + return $(self.default-build) ; + } + + # Returns the alternative condition for this alternative, if the condition + # is satisfied by 'property-set'. + # + rule match ( property-set debug ? ) + { + # The condition is composed of all base non-conditional properties. We + # only expand subfeatures in the condition. We do not expand + # composites. We want to be able to put + # msvc-6.0 + # in requirements. On the other hand, if we have release as a + # condition it does not make sense to require full to be + # in the build request just to select this variant. + local bcondition = [ $(self.requirements).base ] ; + local ccondition = [ $(self.requirements).conditional ] ; + local condition = [ set.difference $(bcondition) : $(ccondition) ] ; + condition = [ feature.expand-subfeatures $(condition) : unchecked ] ; + if $(debug) + { + ECHO " next alternative: required properties:" + $(condition:E=(empty)) ; + } + + if $(condition) in [ $(property-set).raw ] + { + if $(debug) + { + ECHO " matched" ; + } + return $(condition) ; + } + else + { + if $(debug) + { + ECHO " not matched" ; + } + return no-match ; + } + } + + # Takes a target reference, which might be either target id or a dependency + # property, and generates that target using 'property-set' as a build + # request. + # + # The results are added to the variable called 'result-var'. Usage + # requirements are added to the variable called 'usage-requirements-var'. + # + rule generate-dependencies ( dependencies * : property-set : result-var + usage-requirements-var ) + { + for local dependency in $(dependencies) + { + local grist = $(dependency:G) ; + local id = $(dependency:G=) ; + local result = [ targets.generate-from-reference $(id) : + $(self.project) : $(property-set) ] ; + + $(result-var) += $(result[2-]:G=$(grist)) ; + $(usage-requirements-var) += [ $(result[1]).raw ] ; + } + } + + # Determines final build properties, generates sources, and calls + # 'construct'. This method should not be overridden. + # + rule generate ( property-set ) + { + if [ modules.peek : .debug-building ] + { + ECHO ; + local fn = [ full-name ] ; + ECHO [ targets.indent ] "Building target '$(fn)'" ; + targets.increase-indent ; + ECHO [ targets.indent ] Build "request:" $(property-set) + [ $(property-set).raw ] ; + local cf = [ build-system.command-line-free-features ] ; + ECHO [ targets.indent ] Command line free "features:" [ $(cf).raw ] ; + ECHO [ targets.indent ] Target "requirements:" + [ $(self.requirements).raw ] ; + } + targets.push-target $(__name__) ; + + # Apply free features from the command line. If user said + # define=FOO + # he most likely wants this define to be set for all compiles. + # Make it before check for already built. + property-set = [ $(property-set).add + [ build-system.command-line-free-features ] ] ; + + if ! 
$(self.generated.$(property-set)) + { + local rproperties = [ targets.common-properties $(property-set) + $(self.requirements) ] ; + + if [ modules.peek : .debug-building ] + { + ECHO ; + ECHO [ targets.indent ] "Common properties: " + [ $(rproperties).raw ] ; + } + + local skip ; + local skip-downstream ; + if $(rproperties[1]) = "@error" + { + ECHO [ targets.indent ] "Skipping build of:" [ full-name ] + "cannot compute common properties" ; + skip = true ; + skip-downstream = true ; + } + else if [ $(rproperties).get ] = no + { + # If we just see no, we cannot produce any reasonable + # diagnostics. The code that adds this property is expected + # to explain why a target is not built, for example using + # the configure.log-component-configuration function. + skip = true ; + skip-downstream = true ; + } + else + { + local source-targets ; + local properties = [ $(rproperties).non-dependency ] ; + local usage-requirements ; + + generate-dependencies [ $(rproperties).dependency ] : + $(rproperties) : properties usage-requirements ; + + generate-dependencies $(self.sources) : $(rproperties) : + source-targets usage-requirements ; + + if [ modules.peek : .debug-building ] + { + ECHO ; + ECHO [ targets.indent ] "Usage requirements for" + $(self.name)": " $(usage-requirements) ; + } + + # Skipping this target if a dependency is skipped. + # Subclasses can override this behavior. E.g. + # alias-target-class overrides this to not be skipped if a + # dependency is skipped. + if no in $(usage-requirements) + { + skip = [ skip-from-usage-requirements ] ; + skip-downstream = true ; + } + + if ! $(skip) + { + rproperties = [ property-set.create $(properties) + $(usage-requirements) ] ; + usage-requirements = [ property-set.create $(usage-requirements) + ] ; + + if [ modules.peek : .debug-building ] + { + ECHO [ targets.indent ] "Build properties: " + [ $(rproperties).raw ] ; + } + + local extra = [ $(rproperties).get ] ; + source-targets += $(extra:G=) ; + # We might get duplicate sources, for example if we link to two + # libraries having the same usage requirement. Use + # stable sort, since for some targets the order is important, + # e.g. RUN_PY targets need a python source to come first. 
+ source-targets = [ sequence.unique $(source-targets) : stable ] + ; + + local result = [ construct $(self.name) : $(source-targets) : + $(rproperties) ] ; + + if $(result) + { + local gur = $(result[1]) ; + result = $(result[2-]) ; + + # Relevant is automatically applied to usage requirements + # and only applies for propagated features + local relevant = [ propagated-relevant + [ $(gur).get ] + [ $(rproperties).get ] ] ; + gur = [ property-set.create + [ property.change [ $(gur).raw ] : ] + $(relevant) ] ; + + local s = [ create-subvariant $(result) + : [ virtual-target.recent-targets ] + : $(property-set) : $(source-targets) + : $(rproperties) : $(usage-requirements) ] ; + virtual-target.clear-recent-targets ; + + if $(self.always) + { + for local t in [ $(s).created-targets ] + { + $(t).always ; + } + } + + local ur = [ compute-usage-requirements $(s) ] ; + if $(skip-downstream) + { + ur = [ $(ur).add [ property-set.create no ] + ] ; + } + ur = [ $(ur).add $(gur) ] ; + $(s).set-usage-requirements $(ur) ; + if [ modules.peek : .debug-building ] + { + ECHO [ targets.indent ] "Usage requirements from" + $(self.name)": " [ $(ur).raw ] ; + } + + self.generated.$(property-set) = $(ur) $(result) ; + } + } + } + + if $(skip) + { + # We are here either because there has been an error computing + # properties or there is no in properties or usage + # requirements. In the latter case we do not want any + # diagnostic. In the former case, we need diagnostics. FIXME + + # If this target fails to build, add no to properties to + # cause any parent target to fail to build. + self.generated.$(property-set) = [ property-set.create no + ] ; + } + } + else + { + if [ modules.peek : .debug-building ] + { + ECHO [ targets.indent ] "Already built" ; + local ur = $(self.generated.$(property-set)) ; + ur = $(ur[0]) ; + targets.increase-indent ; + ECHO [ targets.indent ] "Usage requirements from" + $(self.name)": " [ $(ur).raw ] ; + targets.decrease-indent ; + } + } + + targets.pop-target ; + targets.decrease-indent ; + return $(self.generated.$(property-set)) ; + } + + # Given the set of generated targets, and refined build properties, + # determines and sets appropriate usage requirements on those targets. + # + rule compute-usage-requirements ( subvariant ) + { + local rproperties = [ $(subvariant).build-properties ] ; + xusage-requirements = + [ $(self.usage-requirements).evaluate-conditionals + $(rproperties) ] ; + + # Filter out non-propagated properties + local relevant ; + for local r in [ $(xusage-requirements).get ] + { + local check = [ MATCH "(.*):(.*)" : $(r) ] ; + if $(check) { check = $(check[2]) ; } + else { check = $(r) ; } + if propagated in [ feature.attributes <$(check)> ] + { + relevant += $(r) ; + } + } + local raw = [ $(xusage-requirements).raw ] ; + local free = [ property.take free : $(raw) ] ; + if $(free) != $(raw) + { + if ! $(self.warned-usage-requirements) + { + self.warned-usage-requirements = true ; + ECHO "warning:" non-free usage requirements + [ set.difference $(raw) : $(free) ] ignored ; + ECHO "warning:" in main-target [ name ] at [ location ] ; + } + } + xusage-requirements = [ property-set.create + [ property.change $(free) : ] + $(relevant) ] ; + + # We generate all dependency properties and add them, as well as their + # usage requirements, to the result. 
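+        # Hypothetical example: a usage requirement such as
+        # <library>/util//logging is a dependency property, so the referenced
+        # metatarget is generated here and both its virtual targets and its own
+        # usage requirements are collected into 'extra' and merged into the
+        # result.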
+ local extra ; + generate-dependencies [ $(xusage-requirements).dependency ] : + $(rproperties) : extra extra ; + + local result = [ property-set.create + [ $(xusage-requirements).non-dependency ] $(extra) ] ; + + # Propagate usage requirements we got from sources, except for the + # and features. + # + # That feature specifies which pch file to use, and should apply only to + # direct dependents. Consider: + # + # pch pch1 : ... + # lib lib1 : ..... pch1 ; + # pch pch2 : + # lib lib2 : pch2 lib1 ; + # + # Here, lib2 should not get property from pch1. + # + # Essentially, when those two features are in usage requirements, they + # are propagated only to direct dependents. We might need a more general + # mechanism, but for now, only those two features are special. + # + # TODO - Actually there are more possible candidates like for instance + # when listing static library X as a source for another static library. + # Then static library X will be added as a property to the + # second library's usage requirements but those requirements should last + # only up to the first executable or shared library that actually links + # to it. + local raw = [ $(subvariant).sources-usage-requirements ] ; + raw = [ $(raw).raw ] ; + raw = [ property.change $(raw) : ] ; + raw = [ property.change $(raw) : ] ; + return [ $(result).add [ property-set.create $(raw) ] ] ; + } + + local rule propagated-relevant ( values * ) + { + local result ; + for local v in [ feature.expand-relevant $(values) ] + { + if propagated in [ feature.attributes <$(v)> ] + { + result += $(v) ; + } + } + return $(result) ; + } + + # Creates new subvariant instances for 'targets'. + # 'root-targets' - virtual targets to be returned to dependants + # 'all-targets' - virtual targets created while building this main target + # 'build-request' - property-set instance with requested build properties + # + local rule create-subvariant ( root-targets * : all-targets * : + build-request : sources * : rproperties : usage-requirements ) + { + for local e in $(root-targets) + { + $(e).root true ; + } + + # Process all virtual targets that will be created if this main target + # is created. + local s = [ new subvariant $(__name__) : $(build-request) : $(sources) : + $(rproperties) : $(usage-requirements) : $(all-targets) ] ; + for local v in $(all-targets) + { + if ! [ $(v).creating-subvariant ] + { + $(v).creating-subvariant $(s) ; + } + } + return $(s) ; + } + + # Constructs virtual targets for this abstract target and the dependency + # graph. Returns a usage-requirements property-set and a list of virtual + # targets. Should be overridden in derived classes. + # + rule construct ( name : source-targets * : properties * ) + { + import errors : error : errors.error ; + errors.error "method should be defined in derived classes" ; + } + + # Determines if build of this target should be skipped when there is + # no in usage requirements. This should usually be true, unless + # the target is some kind of grouping, e.g. alias targets. 
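+    # A minimal sketch of the kind of override this allows (hypothetical; the
+    # real grouping behaviour is implemented by the alias target class):
+    #
+    #     rule skip-from-usage-requirements ( )
+    #     {
+    #         # return nothing: keep building this alternative even though a
+    #         # dependency was skipped, while <build>no still propagates
+    #         # downstream via skip-downstream
+    #     }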
+ rule skip-from-usage-requirements ( ) + { + return true ; + } +} + + +class typed-target : basic-target +{ + import generators ; + + rule __init__ ( name : project : type : sources * : requirements * : + default-build * : usage-requirements * ) + { + basic-target.__init__ $(name) : $(project) : $(sources) : + $(requirements) : $(default-build) : $(usage-requirements) ; + + self.type = $(type) ; + } + + rule type ( ) + { + return $(self.type) ; + } + + rule construct ( name : source-targets * : property-set ) + { + local r = [ generators.construct $(self.project) $(name:S=) + : $(self.type) + : [ property-set.create [ $(property-set).raw ] + $(self.type) ] + : $(source-targets) : true ] ; + if ! $(r) + { + local viable-generators = [ generators.find-viable-generators + $(self.type) : $(property-set) ] ; + ECHO "WARNING: Unable to construct" [ full-name ] + "of type" $(self.type) + "with these properties:" [ $(property-set).raw ] ; + ECHO "WARNING: Considered these as possible generators:" ; + for local gen in $(viable-generators) + { + ECHO "WARNING:" [ $(gen).id ] + "with source types {" [ $(gen).source-types ] "}" + "and requirements {" [ $(gen).requirements ] "}" ; + } + + # Are there any top-level generators for this type/property set. + if ! [ generators.find-viable-generators $(self.type) : + $(property-set) ] + { + ECHO "error: no generators were found for type '$(self.type)'" ; + ECHO "error: and the requested properties" ; + ECHO "error: make sure you've configured the needed tools" ; + ECHO "See https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.overview.configuration" ; + EXIT "To debug this problem, try the --debug-generators option." + ; + } + } + return $(r) ; + } +} + + +# Return the list of sources to use, if main target rule is invoked with +# 'sources'. If there are any objects in 'sources', they are treated as main +# target instances, and the name of such targets are adjusted to be +# '__'. Such renaming is disabled if +# a non-empty value is passed as the 'no-renaming' parameter. +# +rule main-target-sources ( sources * : main-target-name : no-renaming ? ) +{ + local result ; + for local t in $(sources) + { + if [ class.is-instance $(t) ] + { + local name = [ $(t).name ] ; + if ! $(no-renaming) + { + name = $(main-target-name)__$(name) ; + $(t).rename $(name) ; + } + # Inline targets are not built by default. + local p = [ $(t).project ] ; + $(p).mark-target-as-explicit $(name) ; + result += $(name) ; + } + else + { + result += $(t) ; + } + } + return $(result) ; +} + + +# Returns the requirements to use when declaring a main target, obtained by +# translating all specified property paths and refining project requirements +# with the ones specified for the target. +# +rule main-target-requirements ( + specification * # Properties explicitly specified for the main target. + : project # Project where the main target is to be declared. 
+) +{ + local requirements = [ property-set.refine-from-user-input + [ $(project).get requirements ] : $(specification) : + [ $(project).project-module ] : [ $(project).get location ] ] ; + if $(requirements[1]) = "@error" + { + import errors ; + errors.error "Conflicting requirements for target:" $(requirements) ; + } + local result = [ $(requirements).add [ toolset.requirements ] ] ; + return [ $(result).add-raw [ property.evaluate-conditional-relevance [ $(result).raw ] ] ] ; +} + + +# Returns the usage requirements to use when declaring a main target, which are +# obtained by translating all specified property paths and adding project's +# usage requirements. +# +rule main-target-usage-requirements ( + specification * # Use-properties explicitly specified for a main target. + : project # Project where the main target is to be declared. +) +{ + local project-usage-requirements = [ $(project).get usage-requirements ] ; + + # We do not use 'refine-from-user-input' because: + # - I am not sure if removing parent's usage requirements makes sense + # - refining usage requirements is not needed, since usage requirements are + # always free. + local usage-requirements = [ property-set.create-from-user-input + $(specification) + : [ $(project).project-module ] [ $(project).get location ] ] ; + + local result = [ $(project-usage-requirements).add $(usage-requirements) ] ; + local relevant = + [ property.evaluate-conditional-relevance [ $(result).raw ] ] ; + return [ $(result).add-raw $(relevant) ] ; +} + + +# Return the default build value to use when declaring a main target, which is +# obtained by using the specified value if not empty and parent's default build +# attribute otherwise. +# +rule main-target-default-build ( + specification * # Default build explicitly specified for a main target. + : project # Project where the main target is to be declared. +) +{ + local result ; + if $(specification) + { + result = $(specification) ; + } + else + { + result = [ $(project).get default-build ] ; + } + return [ property-set.create-with-validation $(result) ] ; +} + + +# Registers the specified target as a main target alternative and returns it. +# +rule main-target-alternative ( target ) +{ + local ptarget = [ $(target).project ] ; + $(ptarget).add-alternative $(target) ; + return $(target) ; +} + + +# Creates a metatarget with the specified properties, using 'klass' as the +# class. The 'name', 'sources', 'requirements', 'default-build' and +# 'usage-requirements' are assumed to be in the form specified by the user in +# the Jamfile corresponding to 'project'. +# +rule create-metatarget ( klass : project : name : sources * : requirements * : + default-build * : usage-requirements * ) +{ + return [ targets.main-target-alternative [ new $(klass) $(name) : $(project) + : [ targets.main-target-sources $(sources) : $(name) ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + : [ targets.main-target-usage-requirements $(usage-requirements) : + $(project) ] ] ] ; +} + + +# Creates a typed-target with the specified properties. The 'name', 'sources', +# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in +# the form specified by the user in the Jamfile corresponding to 'project'. 
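+#
+# For illustration only, a hypothetical front-end rule built on top of it
+# (actual main target rules are registered through the type/generators
+# machinery):
+#
+#     rule exe ( name : sources * : requirements * : default-build * :
+#         usage-requirements * )
+#     {
+#         return [ targets.create-typed-target EXE : [ project.current ] :
+#             $(name) : $(sources) : $(requirements) : $(default-build) :
+#             $(usage-requirements) ] ;
+#     }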
+# +rule create-typed-target ( type : project : name : sources * : requirements * : + default-build * : usage-requirements * ) +{ + return [ targets.main-target-alternative [ new typed-target $(name) : + $(project) : $(type) + : [ targets.main-target-sources $(sources) : $(name) ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + : [ targets.main-target-usage-requirements $(usage-requirements) : + $(project) ] ] ] ; +} diff --git a/src/boost/tools/build/src/build/targets.py b/src/boost/tools/build/src/build/targets.py new file mode 100644 index 000000000..20e97bc6b --- /dev/null +++ b/src/boost/tools/build/src/build/targets.py @@ -0,0 +1,1523 @@ +# Status: ported. +# Base revision: 64488 + +# Copyright Vladimir Prus 2002-2007. +# Copyright Rene Rivera 2006. +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports 'abstract' targets, which are targets explicitly defined in Jamfile. +# +# Abstract targets are represented by classes derived from 'AbstractTarget' class. +# The first abstract target is 'project_target', which is created for each +# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module. +# (see project.jam). +# +# Project targets keep a list of 'MainTarget' instances. +# A main target is what the user explicitly defines in a Jamfile. It is +# possible to have several definitions for a main target, for example to have +# different lists of sources for different platforms. So, main targets +# keep a list of alternatives. +# +# Each alternative is an instance of 'AbstractTarget'. When a main target +# subvariant is defined by some rule, that rule will decide what class to +# use, create an instance of that class and add it to the list of alternatives +# for the main target. +# +# Rules supplied by the build system will use only targets derived +# from 'BasicTarget' class, which will provide some default behaviour. +# There will be two classes derived from it, 'make-target', created by the +# 'make' rule, and 'TypedTarget', created by rules such as 'exe' and 'dll'. + +# +# +------------------------+ +# |AbstractTarget | +# +========================+ +# |name | +# |project | +# | | +# |generate(properties) = 0| +# +-----------+------------+ +# | +# ^ +# / \ +# +-+-+ +# | +# | +# +------------------------+------+------------------------------+ +# | | | +# | | | +# +----------+-----------+ +------+------+ +------+-------+ +# | project_target | | MainTarget | | BasicTarget | +# +======================+ 1 * +=============+ alternatives +==============+ +# | generate(properties) |o-----------+ generate |<>------------->| generate | +# | main-target | +-------------+ | construct = 0| +# +----------------------+ +--------------+ +# | +# ^ +# / \ +# +-+-+ +# | +# | +# ...--+----------------+------------------+----------------+---+ +# | | | | +# | | | | +# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+ +# | | TypedTarget | | make-target | | stage-target | +# . +==============+ +=============+ +==============+ +# . 
| construct | | construct | | construct | +# +--------------+ +-------------+ +--------------+ + +import re +import os.path +import sys + +from b2.manager import get_manager + +from b2.util.utility import * +import property, project, virtual_target, property_set, feature, generators, toolset +from virtual_target import Subvariant +from b2.exceptions import * +from b2.util.sequence import unique +from b2.util import path, bjam_signature, safe_isinstance, is_iterable_typed +from b2.build import errors +from b2.build.errors import user_error_checkpoint + +import b2.build.build_request as build_request + +import b2.util.set +_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$') + +class TargetRegistry: + + def __init__ (self): + # All targets that are currently being built. + # Only the key is id (target), the value is the actual object. + self.targets_being_built_ = {} + + # Current indent for debugging messages + self.indent_ = "" + + self.debug_building_ = "--debug-building" in bjam.variable("ARGV") + + self.targets_ = [] + + def main_target_alternative (self, target): + """ Registers the specified target as a main target alternatives. + Returns 'target'. + """ + assert isinstance(target, AbstractTarget) + target.project ().add_alternative (target) + return target + + def main_target_sources (self, sources, main_target_name, no_renaming=0): + """Return the list of sources to use, if main target rule is invoked + with 'sources'. If there are any objects in 'sources', they are treated + as main target instances, and the name of such targets are adjusted to + be '__'. Such renaming + is disabled is non-empty value is passed for 'no-renaming' parameter.""" + assert is_iterable_typed(sources, basestring) + assert isinstance(main_target_name, basestring) + assert isinstance(no_renaming, (int, bool)) + result = [] + + for t in sources: + + t = b2.util.jam_to_value_maybe(t) + + if isinstance (t, AbstractTarget): + name = t.name () + + if not no_renaming: + name = main_target_name + '__' + name + t.rename (name) + + # Inline targets are not built by default. 
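+                # Sketch of the effect on a hypothetical Jamfile:
+                #     exe app : app.cpp [ lib helper : helper.cpp ] ;
+                # The inline 'helper' metatarget reaches this branch as an
+                # AbstractTarget, has already been renamed to 'app__helper'
+                # above, and is marked explicit below so it is only built as a
+                # dependency of 'app'.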
+ p = t.project() + p.mark_targets_as_explicit([name]) + result.append(name) + + else: + result.append (t) + + return result + + + def main_target_requirements(self, specification, project): + """Returns the requirement to use when declaring a main target, + which are obtained by + - translating all specified property paths, and + - refining project requirements with the one specified for the target + + 'specification' are the properties xplicitly specified for a + main target + 'project' is the project where the main taret is to be declared.""" + assert is_iterable_typed(specification, basestring) + assert isinstance(project, ProjectTarget) + # create a copy since the list is being modified + specification = list(specification) + specification.extend(toolset.requirements()) + + requirements = property_set.refine_from_user_input( + project.get("requirements"), specification, + project.project_module(), project.get("location")) + + return requirements + + def main_target_usage_requirements (self, specification, project): + """ Returns the use requirement to use when declaraing a main target, + which are obtained by + - translating all specified property paths, and + - adding project's usage requirements + specification: Use-properties explicitly specified for a main target + project: Project where the main target is to be declared + """ + assert is_iterable_typed(specification, basestring) + assert isinstance(project, ProjectTarget) + project_usage_requirements = project.get ('usage-requirements') + + # We don't use 'refine-from-user-input' because I'm not sure if: + # - removing of parent's usage requirements makes sense + # - refining of usage requirements is not needed, since usage requirements + # are always free. + usage_requirements = property_set.create_from_user_input( + specification, project.project_module(), project.get("location")) + + return project_usage_requirements.add (usage_requirements) + + def main_target_default_build (self, specification, project): + """ Return the default build value to use when declaring a main target, + which is obtained by using specified value if not empty and parent's + default build attribute otherwise. + specification: Default build explicitly specified for a main target + project: Project where the main target is to be declared + """ + assert is_iterable_typed(specification, basestring) + assert isinstance(project, ProjectTarget) + if specification: + return property_set.create_with_validation(specification) + else: + return project.get ('default-build') + + def start_building (self, main_target_instance): + """ Helper rules to detect cycles in main target references. + """ + assert isinstance(main_target_instance, MainTarget) + if id(main_target_instance) in self.targets_being_built_: + names = [] + for t in self.targets_being_built_.values() + [main_target_instance]: + names.append (t.full_name()) + + get_manager().errors()("Recursion in main target references\n") + + self.targets_being_built_[id(main_target_instance)] = main_target_instance + + def end_building (self, main_target_instance): + assert isinstance(main_target_instance, MainTarget) + assert (id(main_target_instance) in self.targets_being_built_) + del self.targets_being_built_ [id (main_target_instance)] + + def create_typed_target (self, type, project, name, sources, requirements, default_build, usage_requirements): + """ Creates a TypedTarget with the specified properties. 
+ The 'name', 'sources', 'requirements', 'default_build' and + 'usage_requirements' are assumed to be in the form specified + by the user in Jamfile corresponding to 'project'. + """ + assert isinstance(type, basestring) + assert isinstance(project, ProjectTarget) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + return self.main_target_alternative (TypedTarget (name, project, type, + self.main_target_sources (sources, name), + self.main_target_requirements (requirements, project), + self.main_target_default_build (default_build, project), + self.main_target_usage_requirements (usage_requirements, project))) + + def increase_indent(self): + self.indent_ += " " + + def decrease_indent(self): + self.indent_ = self.indent_[0:-4] + + def logging(self): + return self.debug_building_ + + def log(self, message): + if self.debug_building_: + print self.indent_ + message + + def push_target(self, target): + assert isinstance(target, AbstractTarget) + self.targets_.append(target) + + def pop_target(self): + self.targets_ = self.targets_[:-1] + + def current(self): + return self.targets_[0] + + +class GenerateResult: + + def __init__ (self, ur=None, targets=None): + if not targets: + targets = [] + assert isinstance(ur, property_set.PropertySet) or ur is None + assert is_iterable_typed(targets, virtual_target.VirtualTarget) + + self.__usage_requirements = ur + self.__targets = targets + + if not self.__usage_requirements: + self.__usage_requirements = property_set.empty () + + def usage_requirements (self): + return self.__usage_requirements + + def targets (self): + return self.__targets + + def extend (self, other): + assert (isinstance (other, GenerateResult)) + + self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ()) + self.__targets.extend (other.targets ()) + +class AbstractTarget: + """ Base class for all abstract targets. + """ + def __init__ (self, name, project, manager = None): + """ manager: the Manager object + name: name of the target + project: the project target to which this one belongs + manager:the manager object. If none, uses project.manager () + """ + assert isinstance(name, basestring) + assert (isinstance (project, ProjectTarget)) + # Note: it might seem that we don't need either name or project at all. + # However, there are places where we really need it. One example is error + # messages which should name problematic targets. Another is setting correct + # paths for sources and generated files. + + # Why allow manager to be specified? Because otherwise project target could not derive + # from this class. + if manager: + self.manager_ = manager + else: + self.manager_ = project.manager () + + self.name_ = name + self.project_ = project + self.location_ = errors.nearest_user_location() + + def manager (self): + return self.manager_ + + def name (self): + """ Returns the name of this target. + """ + return self.name_ + + def project (self): + """ Returns the project for this target. + """ + return self.project_ + + def location (self): + """ Return the location where the target was declared. + """ + return self.location_ + + def full_name (self): + """ Returns a user-readable name for this target. + """ + location = self.project ().get ('location') + return location + '/' + self.name_ + + def generate (self, property_set): + """ Takes a property set. 
Generates virtual targets for this abstract + target, using the specified properties, unless a different value of some + feature is required by the target. + On success, returns a GenerateResult instance with: + - a property_set with the usage requirements to be + applied to dependents + - a list of produced virtual targets, which may be + empty. + If 'property_set' is empty, performs default build of this + target, in a way specific to derived class. + """ + raise BaseException ("method should be defined in derived classes") + + def rename (self, new_name): + assert isinstance(new_name, basestring) + self.name_ = new_name + +class ProjectTarget (AbstractTarget): + """ Project target class (derived from 'AbstractTarget') + + This class these responsibilities: + - maintaining a list of main target in this project and + building it + + Main targets are constructed in two stages: + - When Jamfile is read, a number of calls to 'add_alternative' is made. + At that time, alternatives can also be renamed to account for inline + targets. + - The first time 'main-target' or 'has-main-target' rule is called, + all alternatives are enumerated an main targets are created. + """ + def __init__ (self, manager, name, project_module, parent_project, requirements, default_build): + assert isinstance(project_module, basestring) + assert isinstance(parent_project, (ProjectTarget, type(None))) + assert isinstance(requirements, (type(None), property_set.PropertySet)) + assert isinstance(default_build, (type(None), property_set.PropertySet)) + AbstractTarget.__init__ (self, name, self, manager) + + self.project_module_ = project_module + self.location_ = manager.projects().attribute (project_module, 'location') + self.requirements_ = requirements + self.default_build_ = default_build + + self.build_dir_ = None + + # A cache of IDs + self.ids_cache_ = {} + + # True is main targets have already been built. + self.built_main_targets_ = False + + # A list of the registered alternatives for this project. + self.alternatives_ = [] + + # A map from main target name to the target corresponding + # to it. + self.main_target_ = {} + + # Targets marked as explicit. + self.explicit_targets_ = set() + + # Targets marked as always + self.always_targets_ = set() + + # The constants defined for this project. + self.constants_ = {} + + # Whether targets for all main target are already created. + self.built_main_targets_ = 0 + + if parent_project: + self.inherit (parent_project) + + + # TODO: This is needed only by the 'make' rule. Need to find the + # way to make 'make' work without this method. + def project_module (self): + return self.project_module_ + + def get (self, attribute): + assert isinstance(attribute, basestring) + return self.manager().projects().attribute( + self.project_module_, attribute) + + def build_dir (self): + if not self.build_dir_: + self.build_dir_ = self.get ('build-dir') + if not self.build_dir_: + self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin') + + return self.build_dir_ + + def generate (self, ps): + """ Generates all possible targets contained in this project. 
+ """ + assert isinstance(ps, property_set.PropertySet) + self.manager_.targets().log( + "Building project '%s' with '%s'" % (self.name (), str(ps))) + self.manager_.targets().increase_indent () + + result = GenerateResult () + + for t in self.targets_to_build (): + g = t.generate (ps) + result.extend (g) + + self.manager_.targets().decrease_indent () + return result + + def targets_to_build (self): + """ Computes and returns a list of AbstractTarget instances which + must be built when this project is built. + """ + result = [] + + if not self.built_main_targets_: + self.build_main_targets () + + # Collect all main targets here, except for "explicit" ones. + for n, t in self.main_target_.iteritems (): + if not t.name () in self.explicit_targets_: + result.append (t) + + # Collect all projects referenced via "projects-to-build" attribute. + self_location = self.get ('location') + for pn in self.get ('projects-to-build'): + result.append (self.find(pn + "/")) + + return result + + def mark_targets_as_explicit (self, target_names): + """Add 'target' to the list of targets in this project + that should be build only by explicit request.""" + + # Record the name of the target, not instance, since this + # rule is called before main target instances are created. + assert is_iterable_typed(target_names, basestring) + self.explicit_targets_.update(target_names) + + def mark_targets_as_always(self, target_names): + assert is_iterable_typed(target_names, basestring) + self.always_targets_.update(target_names) + + def add_alternative (self, target_instance): + """ Add new target alternative. + """ + assert isinstance(target_instance, AbstractTarget) + if self.built_main_targets_: + raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ()) + + self.alternatives_.append (target_instance) + + def main_target (self, name): + assert isinstance(name, basestring) + if not self.built_main_targets_: + self.build_main_targets() + + return self.main_target_[name] + + def has_main_target (self, name): + """Tells if a main target with the specified name exists.""" + assert isinstance(name, basestring) + if not self.built_main_targets_: + self.build_main_targets() + + return name in self.main_target_ + + def create_main_target (self, name): + """ Returns a 'MainTarget' class instance corresponding to the 'name'. + """ + assert isinstance(name, basestring) + if not self.built_main_targets_: + self.build_main_targets () + + return self.main_targets_.get (name, None) + + + def find_really(self, id): + """ Find and return the target with the specified id, treated + relative to self. + """ + assert isinstance(id, basestring) + + result = None + current_location = self.get ('location') + + __re_split_project_target = re.compile (r'(.*)//(.*)') + split = __re_split_project_target.match (id) + + project_part = None + target_part = None + + if split: + project_part = split.group(1) + target_part = split.group(2) + if not target_part: + get_manager().errors()( + 'Project ID, "{}", is not a valid target reference. There should ' + 'be either a target name after the "//" or the "//" should be removed ' + 'from the target reference.' + .format(id) + ) + + + project_registry = self.project_.manager ().projects () + + extra_error_message = '' + if project_part: + # There's explicit project part in id. Looks up the + # project and pass the request to it. 
+ pm = project_registry.find (project_part, current_location) + + if pm: + project_target = project_registry.target (pm) + result = project_target.find (target_part, no_error=1) + + else: + extra_error_message = "error: could not find project '$(project_part)'" + + else: + # Interpret target-name as name of main target + # Need to do this before checking for file. Consider this: + # + # exe test : test.cpp ; + # install s : test : . ; + # + # After first build we'll have target 'test' in Jamfile and file + # 'test' on the disk. We need target to override the file. + + result = None + if self.has_main_target(id): + result = self.main_target(id) + + if not result: + result = FileReference (self.manager_, id, self.project_) + if not result.exists (): + # File actually does not exist. + # Reset 'target' so that an error is issued. + result = None + + + if not result: + # Interpret id as project-id + project_module = project_registry.find (id, current_location) + if project_module: + result = project_registry.target (project_module) + + return result + + def find (self, id, no_error = False): + assert isinstance(id, basestring) + assert isinstance(no_error, int) # also matches bools + v = self.ids_cache_.get (id, None) + + if not v: + v = self.find_really (id) + self.ids_cache_ [id] = v + + if v or no_error: + return v + + raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location'))) + + + def build_main_targets (self): + self.built_main_targets_ = True + + for a in self.alternatives_: + name = a.name () + if name not in self.main_target_: + t = MainTarget (name, self.project_) + self.main_target_ [name] = t + + if name in self.always_targets_: + a.always() + + self.main_target_ [name].add_alternative (a) + + def add_constant(self, name, value, path=0): + """Adds a new constant for this project. + + The constant will be available for use in Jamfile + module for this project. If 'path' is true, + the constant will be interpreted relatively + to the location of project. + """ + assert isinstance(name, basestring) + assert is_iterable_typed(value, basestring) + assert isinstance(path, int) # will also match bools + if path: + l = self.location_ + if not l: + # Project corresponding to config files do not have + # 'location' attribute, but do have source location. + # It might be more reasonable to make every project have + # a location and use some other approach to prevent buildable + # targets in config files, but that's for later. + l = self.get('source-location') + + value = os.path.join(l, value[0]) + # Now make the value absolute path. Constants should be in + # platform-native form. + value = [os.path.normpath(os.path.join(os.getcwd(), value))] + + self.constants_[name] = value + bjam.call("set-variable", self.project_module(), name, value) + + def inherit(self, parent_project): + assert isinstance(parent_project, ProjectTarget) + for c in parent_project.constants_: + # No need to pass the type. Path constants were converted to + # absolute paths already by parent. 
+ self.add_constant(c, parent_project.constants_[c]) + + # Import rules from parent + this_module = self.project_module() + parent_module = parent_project.project_module() + + rules = bjam.call("RULENAMES", parent_module) + if not rules: + rules = [] + user_rules = [x for x in rules + if x not in self.manager().projects().project_rules().all_names()] + if user_rules: + bjam.call("import-rules-from-parent", parent_module, this_module, user_rules) + +class MainTarget (AbstractTarget): + """ A named top-level target in Jamfile. + """ + def __init__ (self, name, project): + AbstractTarget.__init__ (self, name, project) + self.alternatives_ = [] + self.best_alternative = None + self.default_build_ = property_set.empty () + + def add_alternative (self, target): + """ Add a new alternative for this target. + """ + assert isinstance(target, BasicTarget) + d = target.default_build () + + if self.alternatives_ and self.default_build_ != d: + get_manager().errors()("default build must be identical in all alternatives\n" + "main target is '%s'\n" + "with '%s'\n" + "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ())) + + else: + self.default_build_ = d + + self.alternatives_.append (target) + + def __select_alternatives (self, property_set_, debug): + """ Returns the best viable alternative for this property_set + See the documentation for selection rules. + # TODO: shouldn't this be 'alternative' (singular)? + """ + # When selecting alternatives we have to consider defaults, + # for example: + # lib l : l.cpp : debug ; + # lib l : l_opt.cpp : release ; + # won't work unless we add default value debug. + assert isinstance(property_set_, property_set.PropertySet) + assert isinstance(debug, int) # also matches bools + + property_set_ = property_set_.add_defaults () + + # The algorithm: we keep the current best viable alternative. + # When we've got new best viable alternative, we compare it + # with the current one. + best = None + best_properties = None + + if len (self.alternatives_) == 0: + return None + + if len (self.alternatives_) == 1: + return self.alternatives_ [0] + + if debug: + print "Property set for selection:", property_set_ + + for v in self.alternatives_: + properties = v.match (property_set_, debug) + + if properties is not None: + if not best: + best = v + best_properties = properties + + else: + if b2.util.set.equal (properties, best_properties): + return None + + elif b2.util.set.contains (properties, best_properties): + # Do nothing, this alternative is worse + pass + + elif b2.util.set.contains (best_properties, properties): + best = v + best_properties = properties + + else: + return None + + return best + + def apply_default_build (self, property_set_): + assert isinstance(property_set_, property_set.PropertySet) + return apply_default_build(property_set_, self.default_build_) + + def generate (self, ps): + """ Select an alternative for this main target, by finding all alternatives + which requirements are satisfied by 'properties' and picking the one with + longest requirements set. + Returns the result of calling 'generate' on that alternative. + """ + assert isinstance(ps, property_set.PropertySet) + self.manager_.targets ().start_building (self) + + # We want composite properties in build request act as if + # all the properties it expands too are explicitly specified. 
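+        # e.g., assuming the stock variant definitions, a request containing
+        # <variant>release behaves as if <optimization>speed, <inlining>full,
+        # <debug-symbols>off and friends had been written out explicitly, so
+        # alternative conditions on those features can be matched.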
+ ps = ps.expand () + + all_property_sets = self.apply_default_build (ps) + + result = GenerateResult () + + for p in all_property_sets: + result.extend (self.__generate_really (p)) + + self.manager_.targets ().end_building (self) + + return result + + def __generate_really (self, prop_set): + """ Generates the main target with the given property set + and returns a list which first element is property_set object + containing usage_requirements of generated target and with + generated virtual target in other elements. It's possible + that no targets are generated. + """ + assert isinstance(prop_set, property_set.PropertySet) + best_alternative = self.__select_alternatives (prop_set, debug=0) + self.best_alternative = best_alternative + + if not best_alternative: + # FIXME: revive. + # self.__select_alternatives(prop_set, debug=1) + self.manager_.errors()( + "No best alternative for '%s'.\n" + % (self.full_name(),)) + + result = best_alternative.generate (prop_set) + + # Now return virtual targets for the only alternative + return result + + def rename(self, new_name): + assert isinstance(new_name, basestring) + AbstractTarget.rename(self, new_name) + for a in self.alternatives_: + a.rename(new_name) + +class FileReference (AbstractTarget): + """ Abstract target which refers to a source file. + This is artificial creature; it's useful so that sources to + a target can be represented as list of abstract target instances. + """ + def __init__ (self, manager, file, project): + AbstractTarget.__init__ (self, file, project) + self.file_location_ = None + + def generate (self, properties): + return GenerateResult (None, [ + self.manager_.virtual_targets ().from_file ( + self.name_, self.location(), self.project_) ]) + + def exists (self): + """ Returns true if the referred file really exists. + """ + if self.location (): + return True + else: + return False + + def location (self): + # Returns the location of target. Needed by 'testing.jam' + if not self.file_location_: + source_location = self.project_.get('source-location') + + for src_dir in source_location: + location = os.path.join(src_dir, self.name()) + if os.path.isfile(location): + self.file_location_ = src_dir + self.file_path = location + break + + return self.file_location_ + +def resolve_reference(target_reference, project): + """ Given a target_reference, made in context of 'project', + returns the AbstractTarget instance that is referred to, as well + as properties explicitly specified for this reference. + """ + # Separate target name from properties override + assert isinstance(target_reference, basestring) + assert isinstance(project, ProjectTarget) + split = _re_separate_target_from_properties.match (target_reference) + if not split: + raise BaseException ("Invalid reference: '%s'" % target_reference) + + id = split.group (1) + + sproperties = [] + + if split.group (3): + sproperties = property.create_from_strings(feature.split(split.group(3))) + sproperties = feature.expand_composites(sproperties) + + # Find the target + target = project.find (id) + + return (target, property_set.create(sproperties)) + +def generate_from_reference(target_reference, project, property_set_): + """ Attempts to generate the target given by target reference, which + can refer both to a main target or to a file. 
+ Returns a list consisting of + - usage requirements + - generated virtual targets, if any + target_reference: Target reference + project: Project where the reference is made + property_set: Properties of the main target that makes the reference + """ + assert isinstance(target_reference, basestring) + assert isinstance(project, ProjectTarget) + assert isinstance(property_set_, property_set.PropertySet) + target, sproperties = resolve_reference(target_reference, project) + + # Take properties which should be propagated and refine them + # with source-specific requirements. + propagated = property_set_.propagated() + rproperties = propagated.refine(sproperties) + + return target.generate(rproperties) + + + +class BasicTarget (AbstractTarget): + """ Implements the most standard way of constructing main target + alternative from sources. Allows sources to be either file or + other main target and handles generation of those dependency + targets. + """ + def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None): + assert is_iterable_typed(sources, basestring) + assert isinstance(requirements, property_set.PropertySet) or requirements is None + assert isinstance(default_build, property_set.PropertySet) or default_build is None + assert isinstance(usage_requirements, property_set.PropertySet) or usage_requirements is None + AbstractTarget.__init__ (self, name, project) + + for s in sources: + if get_grist (s): + raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name)) + + self.sources_ = sources + + if not requirements: requirements = property_set.empty () + self.requirements_ = requirements + + if not default_build: default_build = property_set.empty () + self.default_build_ = default_build + + if not usage_requirements: usage_requirements = property_set.empty () + self.usage_requirements_ = usage_requirements + + # A cache for resolved references + self.source_targets_ = None + + # A cache for generated targets + self.generated_ = {} + + # A cache for build requests + self.request_cache = {} + + # Result of 'capture_user_context' has everything. For example, if this + # target is declare as result of loading Jamfile which was loaded when + # building target B which was requested from A, then we'll have A, B and + # Jamroot location in context. We only care about Jamroot location, most + # of the times. + self.user_context_ = self.manager_.errors().capture_user_context()[-1:] + + self.always_ = False + + def always(self): + self.always_ = True + + def sources (self): + """ Returns the list of AbstractTargets which are used as sources. + The extra properties specified for sources are not represented. + The only used of this rule at the moment is the '--dump-tests' + feature of the test system. + """ + if self.source_targets_ == None: + self.source_targets_ = [] + for s in self.sources_: + self.source_targets_.append(resolve_reference(s, self.project_)[0]) + + return self.source_targets_ + + def requirements (self): + return self.requirements_ + + def default_build (self): + return self.default_build_ + + def common_properties (self, build_request, requirements): + """ Given build request and requirements, return properties + common to dependency build request and target build + properties. + """ + # For optimization, we add free unconditional requirements directly, + # without using complex algorithsm. + # This gives the complex algorithm better chance of caching results. 
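+        # Sketch of the split below for hypothetical requirements
+        # <define>FOO <include>src <link>static: the free, unconditional
+        # <define>/<include> properties are re-added at the end via add_raw,
+        # while only <link>static takes part in __common_properties2 and in the
+        # (build_request, other) cache key.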
+ # The exact effect of this "optimization" is no longer clear + assert isinstance(build_request, property_set.PropertySet) + assert isinstance(requirements, property_set.PropertySet) + free_unconditional = [] + other = [] + for p in requirements.all(): + if p.feature.free and not p.condition and p.feature.name != 'conditional': + free_unconditional.append(p) + else: + other.append(p) + other = property_set.create(other) + + key = (build_request, other) + if key not in self.request_cache: + self.request_cache[key] = self.__common_properties2 (build_request, other) + + return self.request_cache[key].add_raw(free_unconditional) + + # Given 'context' -- a set of already present properties, and 'requirements', + # decide which extra properties should be applied to 'context'. + # For conditional requirements, this means evaluating condition. For + # indirect conditional requirements, this means calling a rule. Ordinary + # requirements are always applied. + # + # Handles situation where evaluating one conditional requirements affects + # condition of another conditional requirements, for example: + # + # gcc:release release:RELEASE + # + # If 'what' is 'refined' returns context refined with new requirements. + # If 'what' is 'added' returns just the requirements that must be applied. + def evaluate_requirements(self, requirements, context, what): + # Apply non-conditional requirements. + # It's possible that that further conditional requirement change + # a value set by non-conditional requirements. For example: + # + # exe a : a.cpp : single foo:multi ; + # + # I'm not sure if this should be an error, or not, especially given that + # + # single + # + # might come from project's requirements. + assert isinstance(requirements, property_set.PropertySet) + assert isinstance(context, property_set.PropertySet) + assert isinstance(what, basestring) + unconditional = feature.expand(requirements.non_conditional()) + + context = context.refine(property_set.create(unconditional)) + + # We've collected properties that surely must be present in common + # properties. We now try to figure out what other properties + # should be added in order to satisfy rules (4)-(6) from the docs. + + conditionals = property_set.create(requirements.conditional()) + + # It's supposed that #conditionals iterations + # should be enough for properties to propagate along conditions in any + # direction. + max_iterations = len(conditionals.all()) +\ + len(requirements.get("")) + 1 + + added_requirements = [] + current = context + + # It's assumed that ordinary conditional requirements can't add + # properties, and that rules referred + # by properties can't add new + # properties. So the list of indirect conditionals + # does not change. + indirect = requirements.get("") + + ok = 0 + for i in range(0, max_iterations): + + e = conditionals.evaluate_conditionals(current).all()[:] + + # Evaluate indirect conditionals. + for i in indirect: + new = None + i = b2.util.jam_to_value_maybe(i) + if callable(i): + # This is Python callable, yeah. + new = i(current) + else: + # Name of bjam function. Because bjam is unable to handle + # list of Property, pass list of strings. + br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()]) + if br: + new = property.create_from_strings(br) + if new: + new = property.translate_paths(new, self.project().location()) + e.extend(new) + + if e == added_requirements: + # If we got the same result, we've found final properties. 
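# --- Editor's note: illustrative sketch, not part of the patch. ---
# The surrounding evaluate_requirements loop iterates because
# satisfying one conditional requirement (say <toolset>gcc:<variant>release)
# can in turn satisfy the condition of another
# (<variant>release:<define>NDEBUG style rules), so it re-evaluates
# against the refined context until the set of added properties stops
# changing. A toy fixed-point loop in the same spirit, with conditions
# and properties modelled as plain sets of strings:
def evaluate_conditionals(context, rules):
    # rules: list of (condition, consequence); condition is a set of
    # "feature=value" strings, consequence a single such string.
    added = set()
    max_iterations = len(rules) + 1
    for _ in range(max_iterations):
        current = context | added
        new = {c for condition, c in rules if condition <= current}
        if new == added:              # nothing changed: fixed point
            return context | added
        added = new
    raise RuntimeError("Can't evaluate conditional properties")

# evaluate_conditionals({"toolset=gcc", "variant=release"},
#                       [({"variant=release"}, "define=NDEBUG")])
#   -> {"toolset=gcc", "variant=release", "define=NDEBUG"}
# --- end of editor's note ---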
+ ok = 1 + break + else: + # Oops, results of evaluation of conditionals has changed. + # Also 'current' contains leftover from previous evaluation. + # Recompute 'current' using initial properties and conditional + # requirements. + added_requirements = e + current = context.refine(property_set.create(feature.expand(e))) + + if not ok: + self.manager().errors()("Can't evaluate conditional properties " + + str(conditionals)) + + + if what == "added": + return property_set.create(unconditional + added_requirements) + elif what == "refined": + return current + else: + self.manager().errors("Invalid value of the 'what' parameter") + + def __common_properties2(self, build_request, requirements): + # This guarantees that default properties are present + # in result, unless they are overridden by some requirement. + # TODO: There is possibility that we've added bar, which is composite + # and expands to bar2, but default value of is not bar2, + # in which case it's not clear what to do. + # + assert isinstance(build_request, property_set.PropertySet) + assert isinstance(requirements, property_set.PropertySet) + build_request = build_request.add_defaults() + # Featured added by 'add-default' can be composite and expand + # to features without default values -- so they are not added yet. + # It could be clearer/faster to expand only newly added properties + # but that's not critical. + build_request = build_request.expand() + + return self.evaluate_requirements(requirements, build_request, + "refined") + + def match (self, property_set_, debug): + """ Returns the alternative condition for this alternative, if + the condition is satisfied by 'property_set'. + """ + # The condition is composed of all base non-conditional properties. + # It's not clear if we should expand 'self.requirements_' or not. + # For one thing, it would be nice to be able to put + # msvc-6.0 + # in requirements. + # On the other hand, if we have release in condition it + # does not make sense to require full to be in + # build request just to select this variant. + assert isinstance(property_set_, property_set.PropertySet) + bcondition = self.requirements_.base () + ccondition = self.requirements_.conditional () + condition = b2.util.set.difference (bcondition, ccondition) + + if debug: + print " next alternative: required properties:", [str(p) for p in condition] + + if b2.util.set.contains (condition, property_set_.all()): + + if debug: + print " matched" + + return condition + + else: + return None + + + def generate_dependency_targets (self, target_ids, property_set_): + assert is_iterable_typed(target_ids, basestring) + assert isinstance(property_set_, property_set.PropertySet) + targets = [] + usage_requirements = [] + for id in target_ids: + + result = generate_from_reference(id, self.project_, property_set_) + targets += result.targets() + usage_requirements += result.usage_requirements().all() + + return (targets, usage_requirements) + + def generate_dependency_properties(self, properties, ps): + """ Takes a target reference, which might be either target id + or a dependency property, and generates that target using + 'property_set' as build request. + + Returns a tuple (result, usage_requirements). 
+ """ + assert is_iterable_typed(properties, property.Property) + assert isinstance(ps, property_set.PropertySet) + result_properties = [] + usage_requirements = [] + for p in properties: + + result = generate_from_reference(p.value, self.project_, ps) + + for t in result.targets(): + result_properties.append(property.Property(p.feature, t)) + + usage_requirements += result.usage_requirements().all() + + return (result_properties, usage_requirements) + + + + + @user_error_checkpoint + def generate (self, ps): + """ Determines final build properties, generates sources, + and calls 'construct'. This method should not be + overridden. + """ + assert isinstance(ps, property_set.PropertySet) + self.manager_.errors().push_user_context( + "Generating target " + self.full_name(), self.user_context_) + + if self.manager().targets().logging(): + self.manager().targets().log( + "Building target '%s'" % self.name_) + self.manager().targets().increase_indent () + self.manager().targets().log( + "Build request: '%s'" % str (ps.raw ())) + cf = self.manager().command_line_free_features() + self.manager().targets().log( + "Command line free features: '%s'" % str (cf.raw ())) + self.manager().targets().log( + "Target requirements: %s'" % str (self.requirements().raw ())) + + self.manager().targets().push_target(self) + + if ps not in self.generated_: + + # Apply free features form the command line. If user + # said + # define=FOO + # he most likely want this define to be set for all compiles. + ps = ps.refine(self.manager().command_line_free_features()) + rproperties = self.common_properties (ps, self.requirements_) + + self.manager().targets().log( + "Common properties are '%s'" % str (rproperties)) + + if rproperties.get("") != ["no"]: + + result = GenerateResult () + + properties = rproperties.non_dependency () + + (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties) + properties += p + assert all(isinstance(p, property.Property) for p in properties) + usage_requirements = u + + (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties) + usage_requirements += u + + self.manager_.targets().log( + "Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements)) + + # FIXME: + + rproperties = property_set.create(properties + usage_requirements) + usage_requirements = property_set.create (usage_requirements) + + self.manager_.targets().log( + "Build properties: '%s'" % str(rproperties)) + + source_targets += rproperties.get('') + + # We might get duplicate sources, for example if + # we link to two library which have the same in + # usage requirements. + # Use stable sort, since for some targets the order is + # important. E.g. RUN_PY target need python source to come + # first. 
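# --- Editor's note: illustrative sketch, not part of the patch. ---
# As the comment above explains, duplicate sources must be removed
# while preserving order. The call to unique(..., stable=True) just
# below does that; an equivalent order-preserving deduplication in
# plain Python looks like this:
def stable_unique(items):
    seen = set()
    return [x for x in items if not (x in seen or seen.add(x))]

# stable_unique(['b', 'a', 'b', 'c']) -> ['b', 'a', 'c']
# --- end of editor's note ---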
+ source_targets = unique(source_targets, stable=True) + + # FIXME: figure why this call messes up source_targets in-place + result = self.construct (self.name_, source_targets[:], rproperties) + + if result: + assert len(result) == 2 + gur = result [0] + result = result [1] + + if self.always_: + for t in result: + t.always() + + s = self.create_subvariant ( + result, + self.manager().virtual_targets().recent_targets(), ps, + source_targets, rproperties, usage_requirements) + self.manager().virtual_targets().clear_recent_targets() + + ur = self.compute_usage_requirements (s) + ur = ur.add (gur) + s.set_usage_requirements (ur) + + self.manager_.targets().log ( + "Usage requirements from '%s' are '%s'" % + (self.name(), str(rproperties))) + + self.generated_[ps] = GenerateResult (ur, result) + else: + self.generated_[ps] = GenerateResult (property_set.empty(), []) + else: + # If we just see no, we cannot produce any reasonable + # diagnostics. The code that adds this property is expected + # to explain why a target is not built, for example using + # the configure.log-component-configuration function. + + # If this target fails to build, add no to properties + # to cause any parent target to fail to build. Except that it + # - does not work now, since we check for no only in + # common properties, but not in properties that came from + # dependencies + # - it's not clear if that's a good idea anyway. The alias + # target, for example, should not fail to build if a dependency + # fails. + self.generated_[ps] = GenerateResult( + property_set.create(["no"]), []) + else: + self.manager().targets().log ("Already built") + + self.manager().targets().pop_target() + self.manager().targets().decrease_indent() + + return self.generated_[ps] + + def compute_usage_requirements (self, subvariant): + """ Given the set of generated targets, and refined build + properties, determines and sets appripriate usage requirements + on those targets. + """ + assert isinstance(subvariant, virtual_target.Subvariant) + rproperties = subvariant.build_properties () + xusage_requirements =self.evaluate_requirements( + self.usage_requirements_, rproperties, "added") + + # We generate all dependency properties and add them, + # as well as their usage requirements, to result. + (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties) + extra = r1 + r2 + + result = property_set.create (xusage_requirements.non_dependency () + extra) + + # Propagate usage requirements we've got from sources, except + # for the and features. + # + # That feature specifies which pch file to use, and should apply + # only to direct dependents. Consider: + # + # pch pch1 : ... + # lib lib1 : ..... pch1 ; + # pch pch2 : + # lib lib2 : pch2 lib1 ; + # + # Here, lib2 should not get property from pch1. + # + # Essentially, when those two features are in usage requirements, + # they are propagated only to direct dependents. We might need + # a more general mechanism, but for now, only those two + # features are special. 
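# --- Editor's note: illustrative sketch, not part of the patch. ---
# The filtering described in the comment above: usage requirements
# coming from sources are forwarded, except that the pch-related
# features are dropped (they should only reach direct dependents), and
# with shared linking the 'library' properties are dropped too.
# Properties are modelled as (feature, value) pairs here.
def filter_propagated(source_usage, link):
    kept = [(f, v) for f, v in source_usage
            if f not in ('pch-header', 'pch-file')]
    if link == 'shared':
        kept = [(f, v) for f, v in kept if f != 'library']
    return kept

# filter_propagated([('pch-header', 'pch.hpp'), ('define', 'FOO'),
#                    ('library', 'lib1')], link='shared')
#   -> [('define', 'FOO')]
# --- end of editor's note ---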
+ properties = [] + for p in subvariant.sources_usage_requirements().all(): + if p.feature.name not in ('pch-header', 'pch-file'): + properties.append(p) + if 'shared' in rproperties.get('link'): + new_properties = [] + for p in properties: + if p.feature.name != 'library': + new_properties.append(p) + properties = new_properties + + result = result.add_raw(properties) + return result + + def create_subvariant (self, root_targets, all_targets, + build_request, sources, + rproperties, usage_requirements): + """Creates a new subvariant-dg instances for 'targets' + - 'root-targets' the virtual targets will be returned to dependents + - 'all-targets' all virtual + targets created while building this main target + - 'build-request' is property-set instance with + requested build properties""" + assert is_iterable_typed(root_targets, virtual_target.VirtualTarget) + assert is_iterable_typed(all_targets, virtual_target.VirtualTarget) + assert isinstance(build_request, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(rproperties, property_set.PropertySet) + assert isinstance(usage_requirements, property_set.PropertySet) + + for e in root_targets: + e.root (True) + + s = Subvariant (self, build_request, sources, + rproperties, usage_requirements, all_targets) + + for v in all_targets: + if not v.creating_subvariant(): + v.creating_subvariant(s) + + return s + + def construct (self, name, source_targets, properties): + """ Constructs the virtual targets for this abstract targets and + the dependency graph. Returns a tuple consisting of the properties and the list of virtual targets. + Should be overridden in derived classes. + """ + raise BaseException ("method should be defined in derived classes") + + +class TypedTarget (BasicTarget): + import generators + + def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements): + assert isinstance(type, basestring) + BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements) + self.type_ = type + + def __jam_repr__(self): + return b2.util.value_to_jam(self) + + def type (self): + return self.type_ + + def construct (self, name, source_targets, prop_set): + assert isinstance(name, basestring) + assert is_iterable_typed(source_targets, virtual_target.VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + r = generators.construct (self.project_, os.path.splitext(name)[0], + self.type_, + prop_set.add_raw(['' + self.type_]), + source_targets, True) + + if not r: + print "warning: Unable to construct '%s'" % self.full_name () + + # Are there any top-level generators for this type/property set. + if not generators.find_viable_generators (self.type_, prop_set): + print "error: no generators were found for type '" + self.type_ + "'" + print "error: and the requested properties" + print "error: make sure you've configured the needed tools" + print "See https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.overview.configuration" + + print "To debug this problem, try the --debug-generators option." + sys.exit(1) + + return r + +def apply_default_build(property_set_, default_build): + # 1. First, see what properties from default_build + # are already present in property_set. 
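# --- Editor's note: illustrative sketch, not part of the patch. ---
# A condensed stand-in for what apply_default_build does: a
# default-build property is added only when the build request does not
# already constrain that feature, and a default-build naming several
# values for one feature (e.g. debug and release variants) fans out
# into several resulting property sets. Properties are modelled as
# (feature, value) pairs with one value per feature per result.
from itertools import product

def apply_defaults(build_request, default_build):
    requested_features = {f for f, _ in build_request}
    # Group defaults for features the request leaves open.
    pending = {}
    for f, v in default_build:
        if f not in requested_features:
            pending.setdefault(f, []).append(v)
    if not pending:
        return [list(build_request)]
    # One resulting property set per combination of default values.
    features = sorted(pending)
    return [list(build_request) + list(zip(features, combo))
            for combo in product(*(pending[f] for f in features))]

# apply_defaults([('toolset', 'gcc')],
#                [('variant', 'debug'), ('variant', 'release')])
#   -> [[('toolset', 'gcc'), ('variant', 'debug')],
#       [('toolset', 'gcc'), ('variant', 'release')]]
# --- end of editor's note ---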
+ assert isinstance(property_set_, property_set.PropertySet) + assert isinstance(default_build, property_set.PropertySet) + + defaults_to_apply = [] + for d in default_build.all(): + if not property_set_.get(d.feature): + defaults_to_apply.append(d) + + # 2. If there's any defaults to be applied, form the new + # build request. Pass it throw 'expand-no-defaults', since + # default_build might contain "release debug", which will + # result in two property_sets. + result = [] + if defaults_to_apply: + + # We have to compress subproperties here to prevent + # property lists like: + # + # msvc 7.1 multi + # + # from being expanded into: + # + # 7.1/multi + # msvc/7.1/multi + # + # due to cross-product property combination. That may + # be an indication that + # build_request.expand-no-defaults is the wrong rule + # to use here. + properties = build_request.expand_no_defaults( + [property_set.create([p]) for p in + feature.compress_subproperties(property_set_.all()) + defaults_to_apply] + ) + + if properties: + for p in properties: + result.append(property_set.create(feature.expand(p.all()))) + else: + result = [property_set.empty()] + + else: + result.append (property_set_) + + return result + + +def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements): + assert isinstance(name, basestring) + assert isinstance(type, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) + + from b2.manager import get_manager + t = get_manager().targets() + + project = get_manager().projects().current() + + return t.main_target_alternative( + TypedTarget(name, project, type, + t.main_target_sources(sources, name), + t.main_target_requirements(requirements, project), + t.main_target_default_build(default_build, project), + t.main_target_usage_requirements(usage_requirements, project))) + + +def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]): + assert isinstance(name, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) + from b2.manager import get_manager + t = get_manager().targets() + + project = get_manager().projects().current() + + return t.main_target_alternative( + klass(name, project, + t.main_target_sources(sources, name), + t.main_target_requirements(requirements, project), + t.main_target_default_build(default_build, project), + t.main_target_usage_requirements(usage_requirements, project))) + +def metatarget_function_for_class(class_): + + @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"], + ["default_build", "*"], ["usage_requirements", "*"])) + def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []): + + from b2.manager import get_manager + t = get_manager().targets() + + project = get_manager().projects().current() + + return t.main_target_alternative( + class_(name, project, + t.main_target_sources(sources, name), + t.main_target_requirements(requirements, project), + t.main_target_default_build(default_build, project), + t.main_target_usage_requirements(usage_requirements, project))) + + return create_metatarget diff --git a/src/boost/tools/build/src/build/toolset.jam b/src/boost/tools/build/src/build/toolset.jam new file mode 100644 index 
000000000..a632605cc --- /dev/null +++ b/src/boost/tools/build/src/build/toolset.jam @@ -0,0 +1,703 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2005 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Support for toolset definition. + +import errors ; +import feature ; +import generators ; +import numbers ; +import path ; +import property ; +import regex ; +import sequence ; +import set ; +import property-set ; +import order ; +import "class" : new ; +import utility ; + + +.flag-no = 1 ; + +.ignore-requirements = ; + +# This is used only for testing, to make sure we do not get random extra +# elements in paths. +if --ignore-toolset-requirements in [ modules.peek : ARGV ] +{ + .ignore-requirements = 1 ; +} + + +# Initializes an additional toolset-like module. First load the 'toolset-module' +# and then calls its 'init' rule with trailing arguments. +# +rule using ( toolset-module : * ) +{ + import $(toolset-module) ; + $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) + ; +} + + +# Expands subfeatures in each property sets, e.g. 'gcc-3.2' will be +# converted to 'gcc/3.2'. +# +local rule normalize-condition ( property-sets * ) +{ + local result ; + for local p in $(property-sets) + { + local split = [ feature.split $(p) ] ; + local expanded = [ feature.expand-subfeatures [ feature.split $(p) ] ] ; + result += $(expanded:J=/) ; + } + return $(result) ; +} + + +# Specifies if the 'flags' rule should check that the invoking module is the +# same as the module we are setting the flag for. 'v' can be either 'checked' or +# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore +# the setting that was in effect before calling this rule. +# +rule push-checking-for-flags-module ( v ) +{ + .flags-module-checking = $(v) $(.flags-module-checking) ; +} + +rule pop-checking-for-flags-module ( ) +{ + .flags-module-checking = $(.flags-module-checking[2-]) ; +} + + +# Specifies features that are referenced by the action rule. +# This is necessary in order to detect that these features +# are relevant. +# +rule uses-features ( rule-or-module : features * : unchecked ? ) +{ + local caller = [ CALLER_MODULE ] ; + if ! [ MATCH ".*([.]).*" : $(rule-or-module) ] + && [ MATCH "(Jamfile<.*)" : $(caller) ] + { + # Unqualified rule name, used inside Jamfile. Most likely used with + # 'make' or 'notfile' rules. This prevents setting flags on the entire + # Jamfile module (this will be considered as rule), but who cares? + # Probably, 'flags' rule should be split into 'flags' and + # 'flags-on-module'. + rule-or-module = $(caller).$(rule-or-module) ; + } + else + { + local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ; + if $(unchecked) != unchecked + && $(.flags-module-checking[1]) != unchecked + && $(module_) != $(caller) + { + errors.error "Module $(caller) attempted to set flags for module $(module_)" ; + } + } + .uses-features.$(rule-or-module) += $(features) ; +} + +# Specifies the flags (variables) that must be set on targets under certain +# conditions, described by arguments. +# +rule flags ( + rule-or-module # If contains a dot, should be a rule name. The flags will + # be applied when that rule is used to set up build + # actions. + # + # If does not contain dot, should be a module name. The + # flag will be applied for all rules in that module. 
If + # module for rule is different from the calling module, an + # error is issued. + + variable-name # Variable that should be set on target. + condition * : # A condition when this flag should be applied. Should be a + # set of property sets. If one of those property sets is + # contained in the build properties, the flag will be used. + # Implied values are not allowed: "gcc" should be + # used, not just "gcc". Subfeatures, like in + # "gcc-3.2" are allowed. If left empty, the flag + # will be used unconditionally. + # + # Property sets may use value-less properties ('' vs. + # 'value') to match absent properties. This allows to + # separately match: + # + # /64 + # ia64/ + # + # Where both features are optional. Without this syntax + # we would be forced to define "default" values. + + values * : # The value to add to variable. If is specified, + # then the value of 'feature' will be added. + unchecked ? # If value 'unchecked' is passed, will not test that flags + # are set for the calling module. + : hack-hack ? # For + # flags rule OPTIONS : -model ansi + # Treat as condition + # FIXME: ugly hack. +) +{ + local caller = [ CALLER_MODULE ] ; + if ! [ MATCH ".*([.]).*" : $(rule-or-module) ] + && [ MATCH "(Jamfile<.*)" : $(caller) ] + { + # Unqualified rule name, used inside Jamfile. Most likely used with + # 'make' or 'notfile' rules. This prevents setting flags on the entire + # Jamfile module (this will be considered as rule), but who cares? + # Probably, 'flags' rule should be split into 'flags' and + # 'flags-on-module'. + rule-or-module = $(caller).$(rule-or-module) ; + } + else + { + local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ; + if $(unchecked) != unchecked + && $(.flags-module-checking[1]) != unchecked + && $(module_) != $(caller) + { + errors.error "Module $(caller) attempted to set flags for module $(module_)" ; + } + } + + if $(condition) && ! $(condition:G=) && ! $(hack-hack) + { + # We have condition in the form '', that is, without value. + # That is an older syntax: + # flags gcc.link RPATH ; + # for compatibility, convert it to + # flags gcc.link RPATH : ; + values = $(condition) ; + condition = ; + } + + if $(condition) + { + property.validate-property-sets $(condition) ; + condition = [ normalize-condition $(condition) ] ; + } + + add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ; +} + + +# Adds a new flag setting with the specified values. Does no checking. +# +local rule add-flag ( rule-or-module : variable-name : condition * : values * ) +{ + .$(rule-or-module).flags += $(.flag-no) ; + + # Store all flags for a module. + local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ; + .module-flags.$(module_) += $(.flag-no) ; + # Store flag-no -> rule-or-module mapping. + .rule-or-module.$(.flag-no) = $(rule-or-module) ; + + .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ; + .$(rule-or-module).values.$(.flag-no) += $(values) ; + .$(rule-or-module).condition.$(.flag-no) += $(condition) ; + + .flag-no = [ numbers.increment $(.flag-no) ] ; +} + + +# Returns the first element of 'property-sets' which is a subset of +# 'properties' or an empty list if no such element exists. +# +rule find-property-subset ( property-sets * : properties * ) +{ + # Cut property values off. + local prop-keys = $(properties:G) ; + + local result ; + for local s in $(property-sets) + { + if ! $(result) + { + # Handle value-less properties like '' (compare with + # 'x86'). 
+ + local set = [ feature.split $(s) ] ; + + # Find the set of features that + # - have no property specified in required property set + # - are omitted in the build property set. + local default-props ; + for local i in $(set) + { + # If $(i) is a value-less property it should match default value + # of an optional property. See the first line in the example + # below: + # + # property set properties result + # foo foo match + # foo foo foo no match + # foo foo foo no match + # foo foo foo foo match + if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) ) + { + default-props += $(i) ; + } + } + + if $(set) in $(properties) $(default-props) + { + result = $(s) ; + } + } + } + return $(result) ; +} + + +# Returns a value to be added to some flag for some target based on the flag's +# value definition and the given target's property set. +# +rule handle-flag-value ( value * : properties * ) +{ + local result ; + if $(value:G) + { + local matches = [ property.select $(value) : $(properties) ] ; + local order ; + for local p in $(matches) + { + local att = [ feature.attributes $(p:G) ] ; + if dependency in $(att) + { + # The value of a dependency feature is a target and needs to be + # actualized. + result += [ $(p:G=).actualize ] ; + } + else if path in $(att) || free in $(att) + { + local values ; + # Treat features with && in the value specially -- each + # &&-separated element is considered a separate value. This is + # needed to handle searched libraries or include paths, which + # may need to be in a specific order. + if ! [ MATCH (&&) : $(p:G=) ] + { + values = $(p:G=) ; + } + else + { + values = [ regex.split $(p:G=) "&&" ] ; + } + if path in $(att) + { + values = [ sequence.transform path.native : $(values) ] ; + } + result += $(values) ; + if $(values[2]) + { + if ! $(order) + { + order = [ new order ] ; + } + local prev ; + for local v in $(values) + { + if $(prev) + { + $(order).add-pair $(prev) $(v) ; + } + prev = $(v) ; + } + } + } + else + { + result += $(p:G=) ; + } + } + if $(order) + { + result = [ $(order).order [ sequence.unique $(result) : stable ] ] ; + DELETE_MODULE $(order) ; + } + } + else + { + result += $(value) ; + } + return $(result) ; +} + + +# Given a rule name and a property set, returns a list of interleaved variables +# names and values which must be set on targets for that rule/property-set +# combination. +# +rule set-target-variables-aux ( rule-or-module : property-set ) +{ + local result ; + properties = [ $(property-set).raw ] ; + for local f in $(.$(rule-or-module).flags) + { + local variable = $(.$(rule-or-module).variable.$(f)) ; + local condition = $(.$(rule-or-module).condition.$(f)) ; + local values = $(.$(rule-or-module).values.$(f)) ; + + if ! $(condition) || + [ find-property-subset $(condition) : $(properties) ] + { + local processed ; + for local v in $(values) + { + # The value might be so needs special treatment. + processed += [ handle-flag-value $(v) : $(properties) ] ; + } + for local r in $(processed) + { + result += $(variable) $(r) ; + } + } + } + + # Strip away last dot separated part and recurse. + local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ; + if $(next) + { + result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ; + } + return $(result) ; +} + +rule relevant-features ( rule-or-module ) +{ + local result ; + if ! 
$(.relevant-features.$(rule-or-module)) + { + for local f in $(.$(rule-or-module).flags) + { + local condition = $(.$(rule-or-module).condition.$(f)) ; + local values = $(.$(rule-or-module).values.$(f)) ; + + for local c in $(condition) + { + for local p in [ feature.split $(c) ] + { + if $(p:G) + { + result += $(p:G) ; + } + else + { + local temp = [ feature.expand-subfeatures $(p) ] ; + result += $(temp:G) ; + } + } + } + + for local v in $(values) + { + if $(v:G) + { + result += $(v:G) ; + } + } + } + + # Strip away last dot separated part and recurse. + local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ; + if $(next) + { + result += [ relevant-features $(next[1]) ] ; + } + result = [ sequence.unique $(result) ] ; + if $(result[1]) = "" + { + result = $(result) ; + } + .relevant-features.$(rule-or-module) = $(result) ; + return $(result) ; + } + else + { + return $(.relevant-features.$(rule-or-module)) ; + } +} + +# Returns a list of all the features which were +# passed to uses-features. +local rule used-features ( rule-or-module ) +{ + if ! $(.used-features.$(rule-or-module)) + { + local result = $(.uses-features.$(rule-or-module)) ; + + # Strip away last dot separated part and recurse. + local next = [ MATCH "^(.+)\\.([^\\.])*" : $(rule-or-module) ] ; + if $(next) + { + result += [ used-features $(next[1]) ] ; + } + result = [ sequence.unique $(result) ] ; + if $(result[1]) = "" + { + result = $(result) ; + } + .used-features.$(rule-or-module) = $(result) ; + return $(result) ; + } + else + { + return $(.used-features.$(rule-or-module)) ; + } +} + +rule filter-property-set ( rule-or-module : property-set ) +{ + local key = .filtered.property-set.$(rule-or-module).$(property-set) ; + if ! $($(key)) + { + local relevant = [ relevant-features $(rule-or-module) ] ; + local result ; + for local p in [ $(property-set).raw ] + { + if $(p:G) in $(relevant) + { + result += $(p) ; + } + } + $(key) = [ property-set.create $(result) ] ; + } + return $($(key)) ; +} + +rule set-target-variables ( rule-or-module targets + : property-set ) +{ + property-set = [ filter-property-set $(rule-or-module) : $(property-set) ] ; + local key = .stv.$(rule-or-module).$(property-set) ; + local settings = $($(key)) ; + if ! $(settings) + { + settings = [ set-target-variables-aux $(rule-or-module) : + $(property-set) ] ; + + if ! $(settings) + { + settings = none ; + } + $(key) = $(settings) ; + } + + if $(settings) != none + { + local var-name = ; + for local name-or-value in $(settings) + { + if $(var-name) + { + $(var-name) on $(targets) += $(name-or-value) ; + var-name = ; + } + else + { + var-name = $(name-or-value) ; + } + } + } +} + + +# Returns a property-set indicating which features are relevant +# for the given rule. +# +rule relevant ( rule-name ) +{ + if ! $(.relevant-features-ps.$(rule-name)) + { + local features = [ sequence.transform utility.ungrist : + [ relevant-features $(rule-name) ] + [ used-features $(rule-name) ] ] ; + .relevant-features-ps.$(rule-name) = + [ property-set.create $(features) ] ; + } + return $(.relevant-features-ps.$(rule-name)) ; +} + + +# Make toolset 'toolset', defined in a module of the same name, inherit from +# 'base'. +# 1. The 'init' rule from 'base' is imported into 'toolset' with full name. +# Another 'init' is called, which forwards to the base one. +# 2. All generators from 'base' are cloned. The ids are adjusted and +# property in requires is adjusted too. +# 3. All flags are inherited. +# 4. All rules are imported. 
+# +rule inherit ( toolset : base ) +{ + import $(base) ; + inherit-generators $(toolset) : $(base) ; + inherit-flags $(toolset) : $(base) ; + inherit-rules $(toolset) : $(base) ; +} + + +rule inherit-generators ( toolset properties * : base : generators-to-ignore * ) +{ + properties ?= $(toolset) ; + local base-generators = [ generators.generators-for-toolset $(base) ] ; + for local g in $(base-generators) + { + local id = [ $(g).id ] ; + + if ! $(id) in $(generators-to-ignore) + { + # Some generator names have multiple periods in their name, so + # $(id:B=$(toolset)) does not generate the right new-id name. E.g. + # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is + # not what we want. Manually parse the base and suffix. If there is + # a better way to do this, I would love to see it. See also the + # register() rule in the generators module. + local base = $(id) ; + local suffix = "" ; + while $(base:S) + { + suffix = $(base:S)$(suffix) ; + base = $(base:B) ; + } + local new-id = $(toolset)$(suffix) ; + + generators.register [ $(g).clone $(new-id) : $(properties) ] ; + } + } +} + + +# Brings all flag definitions from the 'base' toolset into the 'toolset' +# toolset. Flag definitions whose conditions make use of properties in +# 'prohibited-properties' are ignored. Do not confuse property and feature, for +# example on and off, so blocking one of them does +# not block the other one. +# +# The flag conditions are not altered at all, so if a condition includes a name, +# or version of a base toolset, it will not ever match the inheriting toolset. +# When such flag settings must be inherited, define a rule in base toolset +# module and call it as needed. +# +rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * ) +{ + for local f in $(.module-flags.$(base)) + { + local rule-or-module = $(.rule-or-module.$(f)) ; + if ( [ set.difference + $(.$(rule-or-module).condition.$(f)) : + $(prohibited-properties) ] + || ! $(.$(rule-or-module).condition.$(f)) + ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) ) + { + local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ; + local new-rule-or-module ; + if $(rule_) + { + new-rule-or-module = $(toolset).$(rule_) ; + } + else + { + new-rule-or-module = $(toolset) ; + } + + add-flag + $(new-rule-or-module) + : $(.$(rule-or-module).variable.$(f)) + : $(.$(rule-or-module).condition.$(f)) + : $(.$(rule-or-module).values.$(f)) ; + } + } +} + + +rule inherit-rules ( toolset : base : localize ? ) +{ + # It appears that "action" creates a local rule. + local base-generators = [ generators.generators-for-toolset $(base) ] ; + local rules ; + for local g in $(base-generators) + { + rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ; + } + rules = [ sequence.unique $(rules) ] ; + IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ; + IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ; +} + +.requirements = [ property-set.empty ] ; + +# Return the list of global 'toolset requirements'. Those requirements will be +# automatically added to the requirements of any main target. +# +rule requirements ( ) +{ + return $(.requirements) ; +} + + +# Adds elements to the list of global 'toolset requirements'. The requirements +# will be automatically added to the requirements for all main targets, as if +# they were specified literally. For best results, all requirements added should +# be conditional or indirect conditional. +# +rule add-requirements ( requirements * ) +{ + if ! 
$(.ignore-requirements) + { + requirements = [ property.translate-indirect $(requirements) : [ CALLER_MODULE ] ] ; + requirements = [ property.expand-subfeatures-in-conditions $(requirements) ] ; + requirements = [ property.make $(requirements) ] ; + .requirements = [ $(.requirements).add-raw $(requirements) ] ; + } +} + +# Returns the global toolset defaults. +# +.defaults = [ property-set.empty ] ; + +rule defaults ( ) +{ + return $(.defaults) ; +} + +# Add elements to the list of global toolset defaults. These properties +# should be conditional and will override the default value of the feature. +# Do not use this for non-conditionals. Use feature.set-default instead. +# +rule add-defaults ( properties * ) +{ + if ! $(.ignore-requirements) + { + properties = [ property.translate-indirect $(properties) : [ CALLER_MODULE ] ] ; + properties = [ property.expand-subfeatures-in-conditions $(properties) ] ; + properties = [ property.make $(properties) ] ; + .defaults = [ $(.defaults).add-raw $(properties) ] ; + } +} + + +rule __test__ ( ) +{ + import assert ; + local p = 0 1 2 3 4 ; + assert.result 1/2/3 : find-property-subset 1/2/3 0/0/1 2/5 9 : $(p) ; + assert.result : find-property-subset 0/0/9/9/5 9 : $(p) ; + + local p-set = / 0/ /1 0/1 ; + assert.result / : find-property-subset $(p-set) : ; + assert.result 0/ : find-property-subset $(p-set) : 0 2 ; + assert.result /1 : find-property-subset $(p-set) : 1 2 ; + assert.result 0/1 : find-property-subset $(p-set) : 0 1 ; +} diff --git a/src/boost/tools/build/src/build/toolset.py b/src/boost/tools/build/src/build/toolset.py new file mode 100644 index 000000000..6e68cecb7 --- /dev/null +++ b/src/boost/tools/build/src/build/toolset.py @@ -0,0 +1,417 @@ +# Status: being ported by Vladimir Prus +# Base revision: 40958 +# +# Copyright 2003 Dave Abrahams +# Copyright 2005 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +""" Support for toolset definition. +""" +import sys + +import feature, property, generators, property_set +import b2.util.set +import bjam + +from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable +from b2.util.utility import * +from b2.util import bjam_signature, sequence +from b2.manager import get_manager + +__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*') +__re_two_ampersands = re.compile ('(&&)') +__re_first_segment = re.compile ('([^.]*).*') +__re_first_group = re.compile (r'[^.]*\.(.*)') +_ignore_toolset_requirements = '--ignore-toolset-requirements' not in sys.argv + +# Flag is a mechanism to set a value +# A single toolset flag. Specifies that when certain +# properties are in build property set, certain values +# should be appended to some variable. +# +# A flag applies to a specific action in specific module. 
+# The list of all flags for a module is stored, and each +# flag further contains the name of the rule it applies +# for, +class Flag: + + def __init__(self, variable_name, values, condition, rule = None): + assert isinstance(variable_name, basestring) + assert is_iterable(values) and all( + isinstance(v, (basestring, type(None))) for v in values) + assert is_iterable_typed(condition, property_set.PropertySet) + assert isinstance(rule, (basestring, type(None))) + self.variable_name = variable_name + self.values = values + self.condition = condition + self.rule = rule + + def __str__(self): + return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\ + ", " + str(self.condition) + ", " + str(self.rule) + ")") + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + """ + global __module_flags, __flags, __stv + + # Mapping from module name to a list of all flags that apply + # to either that module directly, or to any rule in that module. + # Each element of the list is Flag instance. + # So, for module named xxx this might contain flags for 'xxx', + # for 'xxx.compile', for 'xxx.compile.c++', etc. + __module_flags = {} + + # Mapping from specific rule or module name to a list of Flag instances + # that apply to that name. + # Say, it might contain flags for 'xxx.compile.c++'. If there are + # entries for module name 'xxx', they are flags for 'xxx' itself, + # not including any rules in that module. + __flags = {} + + # A cache for variable settings. The key is generated from the rule name and the properties. + __stv = {} + +reset () + +# FIXME: --ignore-toolset-requirements +def using(toolset_module, *args): + if isinstance(toolset_module, (list, tuple)): + toolset_module = toolset_module[0] + loaded_toolset_module= get_manager().projects().load_module(toolset_module, [os.getcwd()]); + loaded_toolset_module.init(*args) + +# FIXME push-checking-for-flags-module .... +# FIXME: investigate existing uses of 'hack-hack' parameter +# in jam code. + +@bjam_signature((["rule_or_module", "variable_name", "condition", "*"], + ["values", "*"])) +def flags(rule_or_module, variable_name, condition, values = []): + """ Specifies the flags (variables) that must be set on targets under certain + conditions, described by arguments. + rule_or_module: If contains dot, should be a rule name. + The flags will be applied when that rule is + used to set up build actions. + + If does not contain dot, should be a module name. + The flags will be applied for all rules in that + module. + If module for rule is different from the calling + module, an error is issued. + + variable_name: Variable that should be set on target + + condition A condition when this flag should be applied. + Should be set of property sets. If one of + those property sets is contained in build + properties, the flag will be used. + Implied values are not allowed: + "gcc" should be used, not just + "gcc". Subfeatures, like in "gcc-3.2" + are allowed. If left empty, the flag will + always used. + + Property sets may use value-less properties + ('' vs. 'value') to match absent + properties. This allows to separately match + + /64 + ia64/ + + Where both features are optional. Without this + syntax we'd be forced to define "default" value. + + values: The value to add to variable. If + is specified, then the value of 'feature' + will be added. 
+ """ + assert isinstance(rule_or_module, basestring) + assert isinstance(variable_name, basestring) + assert is_iterable_typed(condition, basestring) + assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values) + caller = bjam.caller() + if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"): + # Unqualified rule name, used inside Jamfile. Most likely used with + # 'make' or 'notfile' rules. This prevents setting flags on the entire + # Jamfile module (this will be considered as rule), but who cares? + # Probably, 'flags' rule should be split into 'flags' and + # 'flags-on-module'. + rule_or_module = qualify_jam_action(rule_or_module, caller) + else: + # FIXME: revive checking that we don't set flags for a different + # module unintentionally + pass + + if condition and not replace_grist (condition, ''): + # We have condition in the form '', that is, without + # value. That's a previous syntax: + # + # flags gcc.link RPATH ; + # for compatibility, convert it to + # flags gcc.link RPATH : ; + values = [ condition ] + condition = None + + if condition: + transformed = [] + for c in condition: + # FIXME: 'split' might be a too raw tool here. + pl = [property.create_from_string(s,False,True) for s in c.split('/')] + pl = feature.expand_subfeatures(pl); + transformed.append(property_set.create(pl)) + condition = transformed + + property.validate_property_sets(condition) + + __add_flag (rule_or_module, variable_name, condition, values) + +def set_target_variables (manager, rule_or_module, targets, ps): + """ + """ + assert isinstance(rule_or_module, basestring) + assert is_iterable_typed(targets, basestring) + assert isinstance(ps, property_set.PropertySet) + settings = __set_target_variables_aux(manager, rule_or_module, ps) + + if settings: + for s in settings: + for target in targets: + manager.engine ().set_target_variable (target, s [0], s[1], True) + +def find_satisfied_condition(conditions, ps): + """Returns the first element of 'property-sets' which is a subset of + 'properties', or an empty list if no such element exists.""" + assert is_iterable_typed(conditions, property_set.PropertySet) + assert isinstance(ps, property_set.PropertySet) + + for condition in conditions: + + found_all = True + for i in condition.all(): + + if i.value: + found = i.value in ps.get(i.feature) + else: + # Handle value-less properties like '' (compare with + # 'x86'). + # If $(i) is a value-less property it should match default + # value of an optional property. See the first line in the + # example below: + # + # property set properties result + # foo foo match + # foo foo foo no match + # foo foo foo no match + # foo foo foo foo match + found = not ps.get(i.feature) + + found_all = found_all and found + + if found_all: + return condition + + return None + + +def register (toolset): + """ Registers a new toolset. 
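# --- Editor's note: illustrative sketch, not part of the patch. ---
# find_satisfied_condition above returns the first condition whose
# every property is satisfied: a property carrying a value must be
# present in the build properties, while a value-less property (the
# bare "<address-model>" style syntax) matches only when that feature
# is absent. A toy version with the property set as a dict from
# feature name to list of values:
def first_satisfied_condition(conditions, properties):
    for condition in conditions:
        ok = True
        for feature, value in condition:
            if value is None:
                ok = ok and not properties.get(feature)
            else:
                ok = ok and value in properties.get(feature, [])
            if not ok:
                break
        if ok:
            return condition
    return None

# first_satisfied_condition(
#     [[('architecture', 'ia64'), ('address-model', None)]],
#     {'architecture': ['ia64']})
#   -> matches, because address-model is unset in the build properties
# --- end of editor's note ---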
+ """ + assert isinstance(toolset, basestring) + feature.extend('toolset', [toolset]) + +def inherit_generators (toolset, properties, base, generators_to_ignore = []): + assert isinstance(toolset, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(base, basestring) + assert is_iterable_typed(generators_to_ignore, basestring) + if not properties: + properties = [replace_grist (toolset, '')] + + base_generators = generators.generators_for_toolset(base) + + for g in base_generators: + id = g.id() + + if not id in generators_to_ignore: + # Some generator names have multiple periods in their name, so + # $(id:B=$(toolset)) doesn't generate the right new_id name. + # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++, + # which is not what we want. Manually parse the base and suffix + # (if there's a better way to do this, I'd love to see it.) + # See also register in module generators. + (base, suffix) = split_action_id(id) + + new_id = toolset + '.' + suffix + + generators.register(g.clone(new_id, properties)) + +def inherit_flags(toolset, base, prohibited_properties = []): + """Brings all flag definitions from the 'base' toolset into the 'toolset' + toolset. Flag definitions whose conditions make use of properties in + 'prohibited-properties' are ignored. Don't confuse property and feature, for + example on and off, so blocking one of them does + not block the other one. + + The flag conditions are not altered at all, so if a condition includes a name, + or version of a base toolset, it won't ever match the inheriting toolset. When + such flag settings must be inherited, define a rule in base toolset module and + call it as needed.""" + assert isinstance(toolset, basestring) + assert isinstance(base, basestring) + assert is_iterable_typed(prohibited_properties, basestring) + for f in __module_flags.get(base, []): + + if not f.condition or b2.util.set.difference(f.condition, prohibited_properties): + match = __re_first_group.match(f.rule) + rule_ = None + if match: + rule_ = match.group(1) + + new_rule_or_module = '' + + if rule_: + new_rule_or_module = toolset + '.' + rule_ + else: + new_rule_or_module = toolset + + __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values) + + +def inherit_rules(toolset, base): + engine = get_manager().engine() + new_actions = {} + for action_name, action in engine.actions.iteritems(): + module, id = split_action_id(action_name) + if module == base: + new_action_name = toolset + '.' + id + # make sure not to override any existing actions + # that may have been declared already + if new_action_name not in engine.actions: + new_actions[new_action_name] = action + + engine.actions.update(new_actions) + +###################################################################################### +# Private functions + +@cached +def __set_target_variables_aux (manager, rule_or_module, ps): + """ Given a rule name and a property set, returns a list of tuples of + variables names and values, which must be set on targets for that + rule/properties combination. + """ + assert isinstance(rule_or_module, basestring) + assert isinstance(ps, property_set.PropertySet) + result = [] + + for f in __flags.get(rule_or_module, []): + + if not f.condition or find_satisfied_condition (f.condition, ps): + processed = [] + for v in f.values: + # The value might be so needs special + # treatment. 
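# --- Editor's note: illustrative sketch, not part of the patch. ---
# The "special treatment" mentioned above is performed by
# __handle_flag_value: for free and path features, '&&' inside a value
# acts as an ordered-list separator (searched libraries, include
# paths), and the expanded values are deduplicated while preserving
# order. The '&&' handling in isolation:
def expand_flag_values(values):
    result, seen = [], set()
    for value in values:
        parts = value.split('&&') if '&&' in value else [value]
        for part in parts:
            if part not in seen:
                seen.add(part)
                result.append(part)
    return result

# expand_flag_values(['a&&b', 'b', 'c']) -> ['a', 'b', 'c']
# --- end of editor's note ---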
+ processed += __handle_flag_value (manager, v, ps) + + for r in processed: + result.append ((f.variable_name, r)) + + # strip away last dot separated part and recurse. + next = __re_split_last_segment.match(rule_or_module) + + if next: + result.extend(__set_target_variables_aux( + manager, next.group(1), ps)) + + return result + +def __handle_flag_value (manager, value, ps): + assert isinstance(value, basestring) + assert isinstance(ps, property_set.PropertySet) + result = [] + + if get_grist (value): + f = feature.get(value) + values = ps.get(f) + + for value in values: + + if f.dependency: + # the value of a dependency feature is a target + # and must be actualized + result.append(value.actualize()) + + elif f.path or f.free: + + # Treat features with && in the value + # specially -- each &&-separated element is considered + # separate value. This is needed to handle searched + # libraries, which must be in specific order. + if not __re_two_ampersands.search(value): + result.append(value) + + else: + result.extend(value.split ('&&')) + else: + result.append (value) + else: + result.append (value) + + return sequence.unique(result, stable=True) + +def __add_flag (rule_or_module, variable_name, condition, values): + """ Adds a new flag setting with the specified values. + Does no checking. + """ + assert isinstance(rule_or_module, basestring) + assert isinstance(variable_name, basestring) + assert is_iterable_typed(condition, property_set.PropertySet) + assert is_iterable(values) and all( + isinstance(v, (basestring, type(None))) for v in values) + f = Flag(variable_name, values, condition, rule_or_module) + + # Grab the name of the module + m = __re_first_segment.match (rule_or_module) + assert m + module = m.group(1) + + __module_flags.setdefault(module, []).append(f) + __flags.setdefault(rule_or_module, []).append(f) + +__requirements = [] + +def requirements(): + """Return the list of global 'toolset requirements'. + Those requirements will be automatically added to the requirements of any main target.""" + return __requirements + +def add_requirements(requirements): + """Adds elements to the list of global 'toolset requirements'. The requirements + will be automatically added to the requirements for all main targets, as if + they were specified literally. For best results, all requirements added should + be conditional or indirect conditional.""" + assert is_iterable_typed(requirements, basestring) + + if _ignore_toolset_requirements: + __requirements.extend(requirements) + + +# Make toolset 'toolset', defined in a module of the same name, +# inherit from 'base' +# 1. The 'init' rule from 'base' is imported into 'toolset' with full +# name. Another 'init' is called, which forwards to the base one. +# 2. All generators from 'base' are cloned. The ids are adjusted and +# property in requires is adjusted too +# 3. All flags are inherited +# 4. All rules are imported. 
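# --- Editor's note: illustrative sketch, not part of the patch. ---
# Step 4 above ("All rules are imported") is what inherit_rules does:
# every engine action registered under the base toolset is cloned
# under the derived toolset's name, without overriding anything the
# derived toolset already declares. A minimal dictionary-based
# illustration of that renaming:
def inherit_actions(actions, derived, base):
    new_actions = {}
    for name, action in actions.items():
        module, _, rule = name.partition('.')
        if module == base:
            new_name = derived + '.' + rule
            if new_name not in actions:
                new_actions[new_name] = action
    actions.update(new_actions)
    return actions

# inherit_actions({'gcc.compile.c++': 'g++ ...'}, 'darwin', 'gcc')
#   -> also maps 'darwin.compile.c++' to the same action
# --- end of editor's note ---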
+def inherit(toolset, base): + assert isinstance(toolset, basestring) + assert isinstance(base, basestring) + get_manager().projects().load_module(base, ['.']); + + inherit_generators(toolset, [], base) + inherit_flags(toolset, base) + inherit_rules(toolset, base) diff --git a/src/boost/tools/build/src/build/type.jam b/src/boost/tools/build/src/build/type.jam new file mode 100644 index 000000000..365aaf8d1 --- /dev/null +++ b/src/boost/tools/build/src/build/type.jam @@ -0,0 +1,410 @@ +# Copyright 2002, 2003 Dave Abrahams +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Deals with target type declaration and defines target class which supports +# typed targets. + +import "class" : new ; +import feature ; +import generators : * ; +import os ; +import param ; +import project ; +import property ; +import scanner ; + +# The following import would create a circular dependency: +# project -> project-root -> builtin -> type -> targets -> project +# import targets ; + +# The feature is optional so it would never get added implicitly. It is used +# only for internal purposes and in all cases we want to use it explicitly. +feature.feature target-type : : composite optional ; + +feature.feature main-target-type : : optional incidental ; +feature.feature base-target-type : : composite optional free ; + + +# Registers a target type, possible derived from a 'base-type'. Providing a list +# of 'suffixes' here is a shortcut for separately calling the register-suffixes +# rule with the given suffixes and the set-generated-target-suffix rule with the +# first given suffix. +# +rule register ( type : suffixes * : base-type ? ) +{ + # Type names cannot contain hyphens, because when used as feature-values + # they would be interpreted as composite features which need to be + # decomposed. + switch $(type) + { + case *-* : + import errors ; + errors.error "type name \"$(type)\" contains a hyphen" ; + } + + if $(type) in $(.types) + { + import errors ; + errors.error "Type $(type) is already registered." ; + } + + if $(base-type) && ! $(base-type) in $(.types) + { + import errors ; + errors.error "Type $(base-type) is not registered." ; + } + + { + .types += $(type) ; + .base.$(type) = $(base-type) ; + .derived.$(base-type) += $(type) ; + .bases.$(type) = $(type) $(.bases.$(base-type)) ; + + # Store suffixes for generated targets. + .suffixes.$(type) = [ new property-map ] ; + + # Store prefixes for generated targets (e.g. "lib" for library). + .prefixes.$(type) = [ new property-map ] ; + + if $(suffixes)-is-defined + { + # Specify mapping from suffixes to type. + register-suffixes $(suffixes) : $(type) ; + # By default generated targets of 'type' will use the first of + #'suffixes'. This may be overridden. + set-generated-target-suffix $(type) : : $(suffixes[1]) ; + } + + feature.extend target-type : $(type) ; + feature.extend main-target-type : $(type) ; + feature.extend base-target-type : $(type) ; + + feature.compose $(type) : $(base-type:G=) ; + feature.compose $(type) : $(base-type) ; + + # We used to declare the main target rule only when a 'main' parameter + # has been specified. However, it is hard to decide that a type will + # *never* need a main target rule and so from time to time we needed to + # make yet another type 'main'. So now a main target rule is defined for + # each type. 
+ main-rule-name = [ type-to-rule-name $(type) ] ; + .main-target-type.$(main-rule-name) = $(type) ; + IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ; + + # Adding a new derived type affects generator selection so we need to + # make the generator selection module update any of its cached + # information related to a new derived type being defined. + generators.update-cached-information-with-a-new-type $(type) ; + } +} + + +# Given a type, returns the name of the main target rule which creates targets +# of that type. +# +rule type-to-rule-name ( type ) +{ + # Lowercase everything. Convert underscores to dashes. + import regex ; + local n = [ regex.split $(type:L) "_" ] ; + return $(n:J=-) ; +} + + +# Given a main target rule name, returns the type for which it creates targets. +# +rule type-from-rule-name ( rule-name ) +{ + return $(.main-target-type.$(rule-name)) ; +} + + +# Specifies that files with suffix from 'suffixes' be recognized as targets of +# type 'type'. Issues an error if a different type is already specified for any +# of the suffixes. +# +rule register-suffixes ( suffixes + : type ) +{ + for local s in $(suffixes) + { + if ! $(.type.$(s)) + { + .type.$(s) = $(type) ; + } + else if $(.type.$(s)) != $(type) + { + import errors ; + errors.error Attempting to specify multiple types for suffix + \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ; + } + } +} + + +# Returns true iff type has been registered. +# +rule registered ( type ) +{ + if $(type) in $(.types) + { + return true ; + } +} + + +# Issues an error if 'type' is unknown. +# +rule validate ( type ) +{ + if ! [ registered $(type) ] + { + import errors ; + errors.error "Unknown target type $(type)" ; + } +} + + +# Sets a scanner class that will be used for this 'type'. +# +rule set-scanner ( type : scanner ) +{ + validate $(type) ; + .scanner.$(type) = $(scanner) ; +} + + +# Returns a scanner instance appropriate to 'type' and 'properties'. +# +rule get-scanner ( type : property-set ) +{ + if $(.scanner.$(type)) + { + return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ; + } +} + + +# Returns a base type for the given type or nothing in case the given type is +# not derived. +# +rule base ( type ) +{ + return $(.base.$(type)) ; +} + + +# Returns the given type and all of its base types in order of their distance +# from type. +# +rule all-bases ( type ) +{ + return $(.bases.$(type)) ; +} + + +# Returns the given type and all of its derived types in order of their distance +# from type. +# +rule all-derived ( type ) +{ + local result = $(type) ; + for local d in $(.derived.$(type)) + { + result += [ all-derived $(d) ] ; + } + return $(result) ; +} + + +# Returns true if 'type' is equal to 'base' or has 'base' as its direct or +# indirect base. +# +rule is-derived ( type base ) +{ + if $(base) in $(.bases.$(type)) + { + return true ; + } +} + +# Returns true if 'type' is either derived from or is equal to 'base'. +# +# TODO: It might be that is-derived and is-subtype were meant to be different +# rules - one returning true for type = base and one not, but as currently +# implemented they are actually the same. Clean this up. +# +rule is-subtype ( type base ) +{ + return [ is-derived $(type) $(base) ] ; +} + + + + +# Sets a file suffix to be used when generating a target of 'type' with the +# specified properties. Can be called with no properties if no suffix has +# already been specified for the 'type'. 
The 'suffix' parameter can be an empty +# string ("") to indicate that no suffix should be used. +# +# Note that this does not cause files with 'suffix' to be automatically +# recognized as being of 'type'. Two different types can use the same suffix for +# their generated files but only one type can be auto-detected for a file with +# that suffix. User should explicitly specify which one using the +# register-suffixes rule. +# +rule set-generated-target-suffix ( type : properties * : suffix ) +{ + set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ; +} + + +# Change the suffix previously registered for this type/properties combination. +# If suffix is not yet specified, sets it. +# +rule change-generated-target-suffix ( type : properties * : suffix ) +{ + change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ; +} + + +# Returns the suffix used when generating a file of 'type' with the given +# properties. +# +rule generated-target-suffix ( type : property-set ) +{ + return [ generated-target-ps suffix : $(type) : $(property-set) ] ; +} + + +# Sets a target prefix that should be used when generating targets of 'type' +# with the specified properties. Can be called with empty properties if no +# prefix for 'type' has been specified yet. +# +# The 'prefix' parameter can be empty string ("") to indicate that no prefix +# should be used. +# +# Usage example: library names use the "lib" prefix on unix. +# +rule set-generated-target-prefix ( type : properties * : prefix ) +{ + set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ; +} + + +# Change the prefix previously registered for this type/properties combination. +# If prefix is not yet specified, sets it. +# +rule change-generated-target-prefix ( type : properties * : prefix ) +{ + change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ; +} + + +rule generated-target-prefix ( type : property-set ) +{ + return [ generated-target-ps prefix : $(type) : $(property-set) ] ; +} + + +# Common rules for prefix/suffix provisioning follow. + +local rule set-generated-target-ps ( ps : type : properties * : psval ) +{ + $(.$(ps)es.$(type)).insert $(properties) : $(psval) ; +} + + +local rule change-generated-target-ps ( ps : type : properties * : psval ) +{ + local prev = [ $(.$(ps)es.$(type)).find-replace $(properties) : $(psval) ] ; + if ! $(prev) + { + set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ; + } +} + + +# Returns either prefix or suffix (as indicated by 'ps') that should be used +# when generating a target of 'type' with the specified properties. Parameter +# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for +# 'type', returns prefix/suffix for base type, if any. +# +local rule generated-target-ps ( ps : type : property-set ) +{ + local result ; + local found ; + while $(type) && ! $(found) + { + result = [ $(.$(ps)es.$(type)).find $(property-set) ] ; + # If the prefix/suffix is explicitly set to an empty string, we consider + # prefix/suffix to be found. If we were not to compare with "", there + # would be no way to specify an empty prefix/suffix. + if $(result)-is-defined + { + found = true ; + } + type = $(.base.$(type)) ; + } + if $(result) = "" + { + result = ; + } + return $(result) ; +} + + +# Returns file type given its name. If there are several dots in filename, tries +# each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so" will +# be tried. 
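[Editor's note] The suffix handling described above is easiest to see in a small, self-contained Python sketch (illustrative only, not part of b2): it registers a type with a base and some suffixes, walks the base chain, and resolves a multi-dot name such as "file.so.1.2" by trying "2", "1" and "so" in turn, the same lookup the Jam rule below performs.

    import os

    _types = {}            # type name -> its base type (or None)
    _suffix_to_type = {}   # suffix without the dot -> type name

    def register(type_name, suffixes=(), base=None):
        if '-' in type_name:
            raise ValueError('type name "%s" contains a hyphen' % type_name)
        if type_name in _types:
            raise ValueError('type "%s" is already registered' % type_name)
        _types[type_name] = base
        for s in suffixes:
            _suffix_to_type.setdefault(s, type_name)

    def all_bases(type_name):
        # The type itself first, then its bases, nearest first.
        result = []
        while type_name:
            result.append(type_name)
            type_name = _types[type_name]
        return result

    def type_of(filename):
        # Try suffixes right to left: "file.so.1.2" -> "2", then "1", then "so".
        # (The Jam rule below additionally lower-cases the name on Windows.)
        while True:
            filename, suffix = os.path.splitext(filename)
            if not suffix:
                return None
            if suffix[1:] in _suffix_to_type:
                return _suffix_to_type[suffix[1:]]

    register('LIB')
    register('SHARED_LIB', ['so'], base='LIB')
    print(type_of('file.so.1.2'))      # SHARED_LIB
    print(all_bases('SHARED_LIB'))     # ['SHARED_LIB', 'LIB']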
+# +rule type ( filename ) +{ + if [ os.name ] in NT CYGWIN + { + filename = $(filename:L) ; + } + local type ; + while ! $(type) && $(filename:S) + { + local suffix = $(filename:S) ; + type = $(.type$(suffix)) ; + filename = $(filename:S=) ; + } + return $(type) ; +} + + +# Rule used to construct all main targets. Note that this rule gets imported +# into the global namespace under different alias names and the exact target +# type to construct is selected based on the alias used to actually invoke this +# rule. +# +rule main-target-rule ( name : sources * : requirements * : default-build * : + usage-requirements * ) +{ + param.handle-named-params + sources requirements default-build usage-requirements ; + # First discover the required target type based on the exact alias used to + # invoke this rule. + local bt = [ BACKTRACE 1 ] ; + local rulename = $(bt[4]) ; + local target-type = [ type-from-rule-name $(rulename) ] ; + + # This is a circular module dependency and so must be imported here. + import targets ; + + return [ targets.create-typed-target $(target-type) : [ project.current ] : + $(name) : $(sources) : $(requirements) : $(default-build) : + $(usage-requirements) ] ; +} + + +rule __test__ ( ) +{ + import assert ; + + # TODO: Add tests for all the is-derived, is-base & related type relation + # checking rules. +} diff --git a/src/boost/tools/build/src/build/type.py b/src/boost/tools/build/src/build/type.py new file mode 100644 index 000000000..9f6237d7d --- /dev/null +++ b/src/boost/tools/build/src/build/type.py @@ -0,0 +1,381 @@ +# Status: ported. +# Base revision: 45462. + +# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. + + + +import re +import os +import os.path +from b2.util.utility import replace_grist, os_name +from b2.exceptions import * +from b2.build import feature, property, scanner +from b2.util import bjam_signature, is_iterable_typed + + +__re_hyphen = re.compile ('-') + +def __register_features (): + """ Register features need by this module. + """ + # The feature is optional so that it is never implicitly added. + # It's used only for internal purposes, and in all cases we + # want to explicitly use it. + feature.feature ('target-type', [], ['composite', 'optional']) + feature.feature ('main-target-type', [], ['optional', 'incidental']) + feature.feature ('base-target-type', [], ['composite', 'optional', 'free']) + +def reset (): + """ Clear the module state. This is mainly for testing purposes. + Note that this must be called _after_ resetting the module 'feature'. + """ + global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache + + __register_features () + + # Stores suffixes for generated targets. + __prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()] + + # Maps suffixes to types + __suffixes_to_types = {} + + # A map with all the registered types, indexed by the type name + # Each entry is a dictionary with following values: + # 'base': the name of base type or None if type has no base + # 'derived': a list of names of type which derive from this one + # 'scanner': the scanner class registered for this type, if any + __types = {} + + # Caches suffixes for targets with certain properties. 
+ __target_suffixes_cache = {} + +reset () + +@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"])) +def register (type, suffixes = [], base_type = None): + """ Registers a target type, possibly derived from a 'base-type'. + If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'. + Also, the first element gives the suffix to be used when constructing and object of + 'type'. + type: a string + suffixes: None or a sequence of strings + base_type: None or a string + """ + # Type names cannot contain hyphens, because when used as + # feature-values they will be interpreted as composite features + # which need to be decomposed. + if __re_hyphen.search (type): + raise BaseException ('type name "%s" contains a hyphen' % type) + + # it's possible for a type to be registered with a + # base type that hasn't been registered yet. in the + # check for base_type below and the following calls to setdefault() + # the key `type` will be added to __types. When the base type + # actually gets registered, it would fail after the simple check + # of "type in __types"; thus the check for "'base' in __types[type]" + if type in __types and 'base' in __types[type]: + raise BaseException ('Type "%s" is already registered.' % type) + + entry = __types.setdefault(type, {}) + entry['base'] = base_type + entry.setdefault('derived', []) + entry.setdefault('scanner', None) + + if base_type: + __types.setdefault(base_type, {}).setdefault('derived', []).append(type) + + if len (suffixes) > 0: + # Generated targets of 'type' will use the first of 'suffixes' + # (this may be overridden) + set_generated_target_suffix (type, [], suffixes [0]) + + # Specify mapping from suffixes to type + register_suffixes (suffixes, type) + + feature.extend('target-type', [type]) + feature.extend('main-target-type', [type]) + feature.extend('base-target-type', [type]) + + if base_type: + feature.compose ('' + type, [replace_grist (base_type, '')]) + feature.compose ('' + type, ['' + base_type]) + + import b2.build.generators as generators + # Adding a new derived type affects generator selection so we need to + # make the generator selection module update any of its cached + # information related to a new derived type being defined. + generators.update_cached_information_with_a_new_type(type) + + # FIXME: resolving recursive dependency. + from b2.manager import get_manager + get_manager().projects().project_rules().add_rule_for_type(type) + +# FIXME: quick hack. +def type_from_rule_name(rule_name): + assert isinstance(rule_name, basestring) + return rule_name.upper().replace("-", "_") + + +def register_suffixes (suffixes, type): + """ Specifies that targets with suffix from 'suffixes' have the type 'type'. + If a different type is already specified for any of syffixes, issues an error. + """ + assert is_iterable_typed(suffixes, basestring) + assert isinstance(type, basestring) + for s in suffixes: + if s in __suffixes_to_types: + old_type = __suffixes_to_types [s] + if old_type != type: + raise BaseException ('Attempting to specify type for suffix "%s"\nOld type: "%s", New type "%s"' % (s, old_type, type)) + else: + __suffixes_to_types [s] = type + +def registered (type): + """ Returns true iff type has been registered. + """ + assert isinstance(type, basestring) + return type in __types + +def validate (type): + """ Issues an error if 'type' is unknown. 
+ """ + assert isinstance(type, basestring) + if not registered (type): + raise BaseException ("Unknown target type '%s'" % type) + +def set_scanner (type, scanner): + """ Sets a scanner class that will be used for this 'type'. + """ + if __debug__: + from .scanner import Scanner + assert isinstance(type, basestring) + assert issubclass(scanner, Scanner) + validate (type) + __types [type]['scanner'] = scanner + +def get_scanner (type, prop_set): + """ Returns a scanner instance appropriate to 'type' and 'property_set'. + """ + if __debug__: + from .property_set import PropertySet + assert isinstance(type, basestring) + assert isinstance(prop_set, PropertySet) + if registered (type): + scanner_type = __types [type]['scanner'] + if scanner_type: + return scanner.get (scanner_type, prop_set.raw ()) + pass + + return None + +def base(type): + """Returns a base type for the given type or nothing in case the given type is + not derived.""" + assert isinstance(type, basestring) + return __types[type]['base'] + +def all_bases (type): + """ Returns type and all of its bases, in the order of their distance from type. + """ + assert isinstance(type, basestring) + result = [] + while type: + result.append (type) + type = __types [type]['base'] + + return result + +def all_derived (type): + """ Returns type and all classes that derive from it, in the order of their distance from type. + """ + assert isinstance(type, basestring) + result = [type] + for d in __types [type]['derived']: + result.extend (all_derived (d)) + + return result + +def is_derived (type, base): + """ Returns true if 'type' is 'base' or has 'base' as its direct or indirect base. + """ + assert isinstance(type, basestring) + assert isinstance(base, basestring) + # TODO: this isn't very efficient, especially for bases close to type + if base in all_bases (type): + return True + else: + return False + +def is_subtype (type, base): + """ Same as is_derived. Should be removed. + """ + assert isinstance(type, basestring) + assert isinstance(base, basestring) + # TODO: remove this method + return is_derived (type, base) + +@bjam_signature((["type"], ["properties", "*"], ["suffix"])) +def set_generated_target_suffix (type, properties, suffix): + """ Sets a target suffix that should be used when generating target + of 'type' with the specified properties. Can be called with + empty properties if no suffix for 'type' was specified yet. + This does not automatically specify that files 'suffix' have + 'type' --- two different types can use the same suffix for + generating, but only one type should be auto-detected for + a file with that suffix. User should explicitly specify which + one. + + The 'suffix' parameter can be empty string ("") to indicate that + no suffix should be used. + """ + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(suffix, basestring) + set_generated_target_ps(1, type, properties, suffix) + + + +def change_generated_target_suffix (type, properties, suffix): + """ Change the suffix previously registered for this type/properties + combination. If suffix is not yet specified, sets it. 
+ """ + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(suffix, basestring) + change_generated_target_ps(1, type, properties, suffix) + +def generated_target_suffix(type, properties): + if __debug__: + from .property_set import PropertySet + assert isinstance(type, basestring) + assert isinstance(properties, PropertySet) + return generated_target_ps(1, type, properties) + + +@bjam_signature((["type"], ["properties", "*"], ["prefix"])) +def set_generated_target_prefix(type, properties, prefix): + """ + Sets a file prefix to be used when generating a target of 'type' with the + specified properties. Can be called with no properties if no prefix has + already been specified for the 'type'. The 'prefix' parameter can be an empty + string ("") to indicate that no prefix should be used. + + Note that this does not cause files with 'prefix' to be automatically + recognized as being of 'type'. Two different types can use the same prefix for + their generated files but only one type can be auto-detected for a file with + that prefix. User should explicitly specify which one using the + register-prefixes rule. + + Usage example: library names use the "lib" prefix on unix. + """ + set_generated_target_ps(0, type, properties, prefix) + +# Change the prefix previously registered for this type/properties combination. +# If prefix is not yet specified, sets it. +def change_generated_target_prefix(type, properties, prefix): + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(prefix, basestring) + change_generated_target_ps(0, type, properties, prefix) + +def generated_target_prefix(type, properties): + if __debug__: + from .property_set import PropertySet + assert isinstance(type, basestring) + assert isinstance(properties, PropertySet) + return generated_target_ps(0, type, properties) + +def set_generated_target_ps(is_suffix, type, properties, val): + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(val, basestring) + properties.append ('' + type) + __prefixes_suffixes[is_suffix].insert (properties, val) + +def change_generated_target_ps(is_suffix, type, properties, val): + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(val, basestring) + properties.append ('' + type) + prev = __prefixes_suffixes[is_suffix].find_replace(properties, val) + if not prev: + set_generated_target_ps(is_suffix, type, properties, val) + +# Returns either prefix or suffix (as indicated by 'is_suffix') that should be used +# when generating a target of 'type' with the specified properties. +# If no prefix/suffix is specified for 'type', returns prefix/suffix for +# base type, if any. +def generated_target_ps_real(is_suffix, type, properties): + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + result = '' + found = False + while type and not found: + result = __prefixes_suffixes[is_suffix].find (['' + type] + properties) + + # Note that if the string is empty (""), but not null, we consider + # suffix found. Setting prefix or suffix to empty string is fine. 
+ if result is not None: + found = True + + type = __types [type]['base'] + + if not result: + result = '' + return result + +def generated_target_ps(is_suffix, type, prop_set): + """ Returns suffix that should be used when generating target of 'type', + with the specified properties. If not suffix were specified for + 'type', returns suffix for base type, if any. + """ + if __debug__: + from .property_set import PropertySet + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert isinstance(prop_set, PropertySet) + key = (is_suffix, type, prop_set) + v = __target_suffixes_cache.get(key, None) + + if not v: + v = generated_target_ps_real(is_suffix, type, prop_set.raw()) + __target_suffixes_cache [key] = v + + return v + +def type(filename): + """ Returns file type given it's name. If there are several dots in filename, + tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and + "so" will be tried. + """ + assert isinstance(filename, basestring) + while 1: + filename, suffix = os.path.splitext (filename) + if not suffix: return None + suffix = suffix[1:] + + if suffix in __suffixes_to_types: + return __suffixes_to_types[suffix] + +# NOTE: moved from tools/types/register +def register_type (type, suffixes, base_type = None, os = []): + """ Register the given type on the specified OSes, or on remaining OSes + if os is not specified. This rule is injected into each of the type + modules for the sake of convenience. + """ + assert isinstance(type, basestring) + assert is_iterable_typed(suffixes, basestring) + assert isinstance(base_type, basestring) or base_type is None + assert is_iterable_typed(os, basestring) + if registered (type): + return + + if not os or os_name () in os: + register (type, suffixes, base_type) diff --git a/src/boost/tools/build/src/build/version.jam b/src/boost/tools/build/src/build/version.jam new file mode 100644 index 000000000..cc9a09ad0 --- /dev/null +++ b/src/boost/tools/build/src/build/version.jam @@ -0,0 +1,225 @@ +# Copyright 2021 Nikita Kniazev +# Copyright 2002, 2003, 2004, 2006 Vladimir Prus +# Copyright 2008, 2012 Jurko Gospodnetic +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import numbers ; + + +# Mirror engine JAM_VERSION +.major = "4" ; +.minor = "8" ; + + +rule boost-build ( ) +{ + return "$(.major).$(.minor)-git" ; +} + + +rule print ( ) +{ + if [ verify-engine-version ] + { + ECHO "B2" [ boost-build ] ; + } +} + + +rule verify-engine-version ( ) +{ + local v = [ modules.peek : JAM_VERSION ] ; + + if $(v[1]) != $(.major) || $(v[2]) != $(.minor) + { + local argv = [ modules.peek : ARGV ] ; + local e = $(argv[1]) ; + local l = [ modules.binding version ] ; + l = $(l:D) ; + l = $(l:D) ; + ECHO "warning: mismatched versions of B2 engine and core" ; + ECHO "warning: B2 engine ($(e)) is $(v:J=.)" ; + ECHO "warning: B2 core (at $(l)) is" [ boost-build ] ; + } + else + { + return true ; + } +} + + +# Utility rule for testing whether all elements in a sequence are equal to 0. +# +local rule is-all-zeroes ( sequence * ) +{ + local result = "true" ; + for local e in $(sequence) + { + if $(e) != "0" + { + result = "" ; + } + } + return $(result) ; +} + + +# Returns "true" if the first version is less than the second one. +# +rule version-less ( lhs + : rhs + ) +{ + numbers.check $(lhs) ; + numbers.check $(rhs) ; + + local done ; + local result ; + + while ! 
$(done) && $(lhs) && $(rhs) + { + if [ numbers.less $(lhs[1]) $(rhs[1]) ] + { + done = "true" ; + result = "true" ; + } + else if [ numbers.less $(rhs[1]) $(lhs[1]) ] + { + done = "true" ; + } + else + { + lhs = $(lhs[2-]) ; + rhs = $(rhs[2-]) ; + } + } + if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] ) + { + result = "true" ; + } + + return $(result) ; +} + +# Returns "true" if the required version is compatible with the having one. +# This uses sematic versioning where (major.x.y) is compatible with +# (major.n.m) and (major.x.z). And is incompatible for other values. +# +rule version-compatible ( req + : has + ) +{ + numbers.check $(req) ; + numbers.check $(has) ; + + if $(req) = $(has) + { + return true ; + } + + while $(req) && [ numbers.equal $(req[1]) $(has[1]:E=0) ] + { + req = $(req[2-]) ; + has = $(has[2-]) ; + } + + if $(req) + { + return ; + } + + return true ; +} + + +# Returns "true" if the current JAM version version is at least the given +# version. +# +rule check-jam-version ( version + ) +{ + local version-tag = $(version:J=.) ; + if ! $(version-tag) + { + import errors ; + errors.error Invalid version "specifier:" : $(version:E="(undefined)") ; + } + + if ! $(.jam-version-check.$(version-tag))-is-defined + { + local jam-version = [ modules.peek : JAM_VERSION ] ; + if ! $(jam-version) + { + import errors ; + errors.error "Unable to deduce Boost Jam version. Your Boost Jam" + "installation is most likely terribly outdated." ; + } + .jam-version-check.$(version-tag) = "true" ; + if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ] + { + .jam-version-check.$(version-tag) = "" ; + } + } + return $(.jam-version-check.$(version-tag)) ; +} + + +rule __test__ ( ) +{ + import assert ; + + local jam-version = [ modules.peek : JAM_VERSION ] ; + local future-version = $(jam-version) ; + future-version += "1" ; + + assert.true check-jam-version $(jam-version) ; + assert.false check-jam-version $(future-version) ; + + assert.true version-less 0 : 1 ; + assert.false version-less 0 : 0 ; + assert.true version-less 1 : 2 ; + assert.false version-less 1 : 1 ; + assert.false version-less 2 : 1 ; + assert.true version-less 3 1 20 : 3 4 10 ; + assert.false version-less 3 1 10 : 3 1 10 ; + assert.false version-less 3 4 10 : 3 1 20 ; + assert.true version-less 3 1 20 5 1 : 3 4 10 ; + assert.false version-less 3 1 10 5 1 : 3 1 10 ; + assert.false version-less 3 4 10 5 1 : 3 1 20 ; + assert.true version-less 3 1 20 : 3 4 10 5 1 ; + assert.true version-less 3 1 10 : 3 1 10 5 1 ; + assert.false version-less 3 4 10 : 3 1 20 5 1 ; + assert.false version-less 3 1 10 : 3 1 10 0 0 ; + assert.false version-less 3 1 10 0 0 : 3 1 10 ; + assert.false version-less 3 1 10 0 : 3 1 10 0 0 ; + assert.false version-less 3 1 10 0 : 03 1 10 0 0 ; + assert.false version-less 03 1 10 0 : 3 1 10 0 0 ; + + # TODO: Add tests for invalid input data being sent to version-less. 
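[Editor's note] The two comparison rules above are easy to misread, so here is a standalone Python sketch of their semantics (illustrative only, operating on integer lists rather than Jam string tokens); the asserts mirror a few of the cases exercised in __test__.

    def version_less(lhs, rhs):
        lhs, rhs = list(lhs), list(rhs)
        while lhs and rhs:
            if lhs[0] < rhs[0]:
                return True
            if rhs[0] < lhs[0]:
                return False
            lhs, rhs = lhs[1:], rhs[1:]
        # "3 1 10" is not less than "3 1 10 0 0", but it is less than "3 1 10 5".
        return not lhs and any(r != 0 for r in rhs)

    def version_compatible(req, has):
        req, has = list(req), list(has)
        # The required version must be a prefix of the version we have;
        # missing trailing components count as zero.
        while req and req[0] == (has[0] if has else 0):
            req, has = req[1:], has[1:]
        return not req

    assert version_less([3, 1, 20], [3, 4, 10])
    assert not version_less([3, 1, 10], [3, 1, 10, 0, 0])
    assert version_compatible([4], [4, 9])
    assert not version_compatible([4, 9], [4])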
+ + + assert.true version-compatible 4 : 4 ; + assert.true version-compatible 4 : 4 9 ; + assert.false version-compatible 4 9 : 4 ; + assert.true version-compatible 4 9 : 4 9 ; + assert.false version-compatible 4 9 1 : 4 9 ; + assert.true version-compatible 4 9 1 : 4 9 1 ; + assert.false version-compatible 4 8 : 4 9 ; + assert.false version-compatible 4 8 1 : 4 9 ; + assert.false version-compatible 4 8 1 : 4 9 1 ; + assert.true version-compatible 5 : 5 ; + assert.false version-compatible 5 : 4 ; + assert.false version-compatible 5 : 4 9 ; + assert.false version-compatible 5 1 : 5 ; + assert.true version-compatible 5 1 : 5 1 ; + assert.false version-compatible 5 1 : 5 2 ; + assert.false version-compatible 5 1 1 : 5 ; + assert.false version-compatible 5 1 1 : 5 1 ; + assert.false version-compatible 5 2 : 5 ; + assert.false version-compatible 5 2 : 5 1 ; + assert.true version-compatible 5 2 : 5 2 ; + assert.true version-compatible 4 : 4 0 ; + assert.true version-compatible 4 0 : 4 ; + assert.true version-compatible 04 : 4 ; + assert.true version-compatible 04 : 04 ; + assert.true version-compatible 04 : 4 ; + assert.true version-compatible 04 00 : 04 ; + assert.true version-compatible 04 : 04 00 ; +} + diff --git a/src/boost/tools/build/src/build/version.py b/src/boost/tools/build/src/build/version.py new file mode 100644 index 000000000..88299060e --- /dev/null +++ b/src/boost/tools/build/src/build/version.py @@ -0,0 +1,38 @@ +import os +import sys + +import bjam + + +from b2.manager import get_manager + + +MANAGER = get_manager() +ERROR_HANDLER = MANAGER.errors() + +_major = "2015" +_minor = "07" + + +def boost_build(): + return "{}.{}-git".format(_major, _minor) + + +def verify_engine_version(): + major, minor, _ = v = bjam.variable('JAM_VERSION') + if major != _major or minor != _minor: + from textwrap import dedent + engine = sys.argv[0] + core = os.path.dirname(os.path.dirname(__file__)) + print dedent("""\ + warning: mismatched version of Boost.Build engine core + warning: Boost.Build engine "{}" is "{}" + warning: Boost.Build core at {} is {} + """.format(engine, '.'.join(v), core, boost_build())) + return False + return True + + +def report(): + if verify_engine_version(): + print "Boost.Build " + boost_build() diff --git a/src/boost/tools/build/src/build/virtual-target.jam b/src/boost/tools/build/src/build/virtual-target.jam new file mode 100644 index 000000000..85ef46cdc --- /dev/null +++ b/src/boost/tools/build/src/build/virtual-target.jam @@ -0,0 +1,1394 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2005, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Implements virtual targets, which correspond to actual files created during a +# build, but are not yet targets in Jam sense. They are needed, for example, +# when searching for possible transformation sequences, when it is not yet known +# whether a particular target should be created at all. 
+# +# +--------------------------+ +# | virtual-target | +# +==========================+ +# | actualize | +# +--------------------------+ +# | actualize-action() = 0 | +# | actualize-location() = 0 | +# +----------------+---------+ +# | +# ^ +# / \ +# +-+-+ +# | +# +---------------------+ +-------+--------------+ +# | action | | abstract-file-target | +# +=====================| * +======================+ +# | action-name | +--+ action | +# | properties | | +----------------------+ +# +---------------------+--+ | actualize-action() | +# | actualize() |0..1 +-----------+----------+ +# | path() | | +# | adjust-properties() | sources | +# | actualize-sources() | targets | +# +------+--------------+ ^ +# | / \ +# ^ +-+-+ +# / \ | +# +-+-+ +-------------+-------------+ +# | | | +# | +------+---------------+ +--------+-------------+ +# | | file-target | | searched-lib-target | +# | +======================+ +======================+ +# | | actualize-location() | | actualize-location() | +# | +----------------------+ +----------------------+ +# | +# +-+------------------------------+ +# | | +# +----+----------------+ +---------+-----------+ +# | compile-action | | link-action | +# +=====================+ +=====================+ +# | adjust-properties() | | adjust-properties() | +# +---------------------+ | actualize-sources() | +# +---------------------+ +# +# The 'compile-action' and 'link-action' classes are not defined here but in +# builtin.jam modules. They are shown in the diagram to give the big picture. + +import "class" : new ; +import feature ; +import path ; +import property-set ; +import sequence ; +import set ; +import toolset ; +import type ; +import utility ; + + +# Models a potential target. It can be converted into a Jam target and used in +# building, if needed. However, it can be also dropped, which allows us to +# search for different transformations and select only one. +# +class virtual-target +{ + import scanner ; + import sequence ; + import utility ; + import virtual-target ; + + rule __init__ ( + name # Target/project name. + : project # Project to which this target belongs. + ) + { + self.name = $(name) ; + self.project = $(project) ; + self.dependencies = ; + } + + # Name of this target. + # + rule name ( ) + { + return $(self.name) ; + } + + # Project of this target. + # + rule project ( ) + { + return $(self.project) ; + } + + # Adds additional 'virtual-target' instances this one depends on. + # + rule depends ( d + ) + { + self.dependencies = [ sequence.merge $(self.dependencies) : + [ sequence.insertion-sort $(d) ] ] ; + } + + rule dependencies ( ) + { + return $(self.dependencies) ; + } + + rule always ( ) + { + .always = 1 ; + } + + rule fail-expected ( ) + { + .fail-expected = 1 ; + } + + # Generates all the actual targets and sets up build actions for this + # target. + # + # If 'scanner' is specified, creates an additional target with the same + # location as the actual target, which will depend on the actual target and + # be associated with a 'scanner'. That additional target is returned. See + # the docs (#dependency_scanning) for rationale. Target must correspond to a + # file if 'scanner' is specified. + # + # If scanner is not specified then the actual target is returned. + # + rule actualize ( scanner ? ) + { + local actual-name = [ actualize-no-scanner ] ; + + if $(.always) + { + ALWAYS $(actual-name) ; + } + + if $(.fail-expected) + { + FAIL_EXPECTED $(actual-name) ; + } + + if ! 
$(scanner) + { + return $(actual-name) ; + } + else + { + # Add the scanner instance to the grist for name. + local g = [ sequence.join [ utility.ungrist $(actual-name:G) ] + $(scanner) : - ] ; + local name = $(actual-name:G=$(g)) ; + + if ! $(self.made.$(scanner)) + { + self.made.$(scanner) = true ; + actualize-location $(name) ; + scanner.install $(scanner) : $(name) ; + } + return $(name) ; + } + } + +# private: (overridables) + + # Sets/gets the 'root' flag. Target is root if it directly corresponds to + # some variant of a main target. + # + rule root ( set ? ) + { + if $(set) + { + self.root = true ; + } + return $(self.root) ; + } + + + # Sets up build actions for 'target'. Should call appropriate rules and set + # target variables. + # + rule actualize-action ( target ) + { + import errors : error : errors.error ; + errors.error "method should be defined in derived classes" ; + } + + # Sets up variables on 'target' which specify its location. + # + rule actualize-location ( target ) + { + import errors : error : errors.error ; + errors.error "method should be defined in derived classes" ; + } + + # If the target is a generated one, returns the path where it will be + # generated. Otherwise, returns an empty list. + # + rule path ( ) + { + import errors : error : errors.error ; + errors.error "method should be defined in derived classes" ; + } + + # Returns the actual target name to be used in case when no scanner is + # involved. + # + rule actual-name ( ) + { + import errors : error : errors.error ; + errors.error "method should be defined in derived classes" ; + } + + # Returns additional properties that are relevant for this target + # beyond those required by the action. + # + rule relevant ( ) + { + return [ property-set.empty ] ; + } + +# implementation + rule actualize-no-scanner ( ) + { + # In fact, we just need to merge virtual-target with + # abstract-file-target as the latter is the only class derived from the + # former. But that has been left for later. + + import errors : error : errors.error ; + errors.error "method should be defined in derived classes" ; + } +} + + +# Target corresponding to a file. The exact mapping for file is not yet +# specified in this class. (TODO: Actually, the class name could be better...) +# +# May be a source file (when no action is specified) or a derived file +# (otherwise). +# +# The target's grist is a concatenation of its project's location, action +# properties (for derived targets) and, optionally, value identifying the main +# target. +# +class abstract-file-target : virtual-target +{ + import project ; + import regex ; + import sequence ; + import path ; + import type ; + import property-set ; + import indirect ; + + rule __init__ ( + name # Target's name. + exact ? # If non-empty, the name is exactly the name created file + # should have. Otherwise, the '__init__' method will add a + # suffix obtained from 'type' by calling + # 'type.generated-target-suffix'. + : type ? # Target's type. + : project + : action ? + ) + { + virtual-target.__init__ $(name) : $(project) ; + + self.type = $(type) ; + self.action = $(action) ; + if $(action) + { + $(action).add-targets $(__name__) ; + + if $(self.type) && ! $(exact) + { + _adjust-name $(name) ; + } + } + } + + rule type ( ) + { + return $(self.type) ; + } + + # Sets the path. When generating target name, it will override any path + # computation from properties. + # + rule set-path ( path ) + { + self.path = [ path.native $(path) ] ; + } + + # Returns the currently set action. 
+ # + rule action ( ) + { + return $(self.action) ; + } + + # Gets or sets the subvariant which created this target. Subvariant is set + # when target is brought into existence and is never changed after that. In + # particular, if a target is shared by multiple subvariants, only the first + # one is stored. + # + rule creating-subvariant ( s ? # If specified, specifies the value to set, + # which should be a 'subvariant' class + # instance. + ) + { + if $(s) && ! $(self.creating-subvariant) + { + self.creating-subvariant = $(s) ; + } + return $(self.creating-subvariant) ; + } + + rule actualize-action ( target ) + { + if $(self.action) + { + $(self.action).actualize ; + } + } + + # Return a human-readable representation of this target. If this target has + # an action, that is: + # + # { -. ... } + # + # otherwise, it is: + # + # { . } + # + rule str ( ) + { + local action = [ action ] ; + local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ; + + if $(action) + { + local sources = [ $(action).sources ] ; + local action-name = [ $(action).action-name ] ; + + local ss ; + for local s in $(sources) + { + ss += [ $(s).str ] ; + } + + return "{" $(action-name)-$(name-dot-type) $(ss) "}" ; + } + else + { + return "{" $(name-dot-type) "}" ; + } + } + + rule less ( a ) + { + if [ str ] < [ $(a).str ] + { + return true ; + } + } + + rule equal ( a ) + { + if [ str ] = [ $(a).str ] + { + return true ; + } + } + +# private: + rule actual-name ( ) + { + if ! $(self.actual-name) + { + local grist = [ grist ] ; + local basename = [ path.native $(self.name) ] ; + self.actual-name = <$(grist)>$(basename) ; + } + return $(self.actual-name) ; + } + + # Helper to 'actual-name', above. Computes a unique prefix used to + # distinguish this target from other targets with the same name creating + # different files. + # + rule grist ( ) + { + # Depending on target, there may be different approaches to generating + # unique prefixes. We generate prefixes in the form: + # + local path = [ path ] ; + if $(path) + { + # The target will be generated to a known path. Just use the path + # for identification, since path is as unique as it can get. + return p$(path) ; + } + else + { + # File is either source, which will be searched for, or is not a + # file at all. Use the location of project for distinguishing. + local project-location = [ $(self.project).get location ] ; + local location-grist = [ sequence.join [ regex.split + $(project-location) "/" ] : "!" ] ; + + if $(self.action) + { + local ps = [ $(self.action).properties ] ; + local property-grist = [ $(ps).as-path ] ; + # 'property-grist' can be empty when 'ps' is an empty property + # set. + if $(property-grist) + { + location-grist = $(location-grist)/$(property-grist) ; + } + } + + return l$(location-grist) ; + } + } + + # Given the target name specified in constructor, returns the name which + # should be really used, by looking at the properties. Tag properties + # need to be specified as @rule-name. This makes Boost Build call the + # specified rule with the target name, type and properties to get the new + # name. If no property is specified or the rule specified by + # returns nothing, returns the result of calling + # virtual-target.add-prefix-and-suffix. + # + rule _adjust-name ( specified-name ) + { + local ps ; + if $(self.action) + { + ps = [ $(self.action).properties ] ; + } + else + { + ps = [ property-set.empty ] ; + } + + # Add this target object for use in getting additional information + # when tagging. 
+ ps = [ property-set.create [ $(ps).raw ] $(__name__) ] ; + + local tag = [ $(ps).get ] ; + + if $(tag) + { + local rule-name = [ MATCH ^@(.*) : $(tag) ] ; + if $(rule-name) + { + if $(tag[2]) + { + import errors : error : errors.error ; + errors.error @rulename is present but is not the only + feature. ; + } + + self.name = [ indirect.call $(rule-name) $(specified-name) + : $(self.type) : $(ps) ] ; + } + else + { + import errors : error : errors.error ; + errors.error property value must be '@rule-name'. ; + } + } + + # If there is no tag or the tag rule returned nothing. + if ! $(tag) || ! $(self.name) + { + self.name = [ virtual-target.add-prefix-and-suffix $(specified-name) + : $(self.type) : $(ps) ] ; + } + } + + rule actualize-no-scanner ( ) + { + local name = [ actual-name ] ; + + # Do anything only on the first invocation. + if ! $(self.made-no-scanner) + { + self.made-no-scanner = true ; + + if $(self.action) + { + # For non-derived target, we do not care if there are several + # virtual targets that refer to the same name. One case when + # this is unavoidable is when the file name is main.cpp and two + # targets have types CPP (for compiling) and MOCCABLE_CPP (for + # conversion to H via Qt tools). + virtual-target.register-actual-name $(name) : $(__name__) ; + } + + for local i in $(self.dependencies) + { + DEPENDS $(name) : [ $(i).actualize ] ; + } + + actualize-location $(name) ; + actualize-action $(name) ; + } + return $(name) ; + } +} + + +# Appends the suffix appropriate to 'type/property-set' combination to the +# specified name and returns the result. +# +rule add-prefix-and-suffix ( specified-name : type ? : property-set ) +{ + local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ; + + # Handle suffixes for which no leading dot is desired. Those are specified + # by enclosing them in <...>. Needed by python so it can create "_d.so" + # extensions, for example. + if $(suffix:G) + { + suffix = [ utility.ungrist $(suffix) ] ; + } + else + { + suffix = .$(suffix) ; + } + + local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ; + + if [ MATCH ^($(prefix)) : $(specified-name) ] + { + prefix = ; + } + return $(prefix:E="")$(specified-name)$(suffix:E="") ; +} + + +# File targets with explicitly known location. +# +# The file path is determined as +# * Value passed to the 'set-path' method, if any. +# * For derived files, project's build dir, joined with components that +# describe action properties. If free properties are not equal to the +# project's reference properties an element with the name of the main +# target is added. +# * For source files, project's source dir. +# +# The file suffix is determined as: +# * The value passed to the 'suffix' method, if any. +# * The suffix corresponding to the target's type. +# +class file-target : abstract-file-target +{ + import "class" : new ; + import common ; + + rule __init__ ( + name exact ? + : type ? # Optional type for this target. + : project + : action ? + : path ? + ) + { + abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) : + $(action) ; + + self.path = $(path) ; + } + + rule clone-with-different-type ( new-type ) + { + return [ new file-target $(self.name) exact : $(new-type) : + $(self.project) : $(self.action) : $(self.path) ] ; + } + + rule actualize-location ( target ) + { + # Scanner targets are always bound to already existing files in already + # existing folder. They need to be marked as depending on their base + # target (i.e. 
the target being scanned) but, unlike regular + # dependencies set up by the DEPENDS rule, they must not depend on any + # targets already marked as included by the base target. Otherwise such + # an included file being newer than the file being scanned would cause + # the scanner target to be updated, further causing any target depending + # on that scanner target to be rebuilt. This is the exact relationship + # as set up by Boost Jam's SEARCH binding method (needed to support + # searching for generated targets) so we want to bind scanner targets + # using this method instead of explicitly specifying their location + # using LOCATE. + # + # FIXME: We recognize scanner targets by their given name being + # different from this target's actual name. This is a hack and should be + # cleaned up by reorganizing who knows about scanners in the + # virtual-target/abstract-file-target/file-target/notfile-target/ + # searched-lib-target/... class hierarchy. + local is-scanner-target ; + if $(target) != [ actual-name ] + { + is-scanner-target = true ; + } + + if $(self.action) && ! $(is-scanner-target) + { + # This is a derived file. + local path = [ path ] ; + LOCATE on $(target) = $(path) ; + + # Make sure the path exists. + DEPENDS $(target) : $(path) ; + common.MkDir $(path) ; + + # It is possible that the target name includes a directory too, for + # example when installing headers. Create that directory. + if $(target:D) + { + local d = $(target:D) ; + d = $(d:R=$(path)) ; + DEPENDS $(target) : $(d) ; + common.MkDir $(d) ; + } + + # For a real file target, we create a fake target depending on the + # real target. This allows us to run + # + # b2 hello.o + # + # without trying to guess the name of the real target. Note that the + # target has no directory name and uses a special grist. + # + # First, that means that "b2 hello.o" will build all known hello.o + # targets. Second, the grist makes sure this target will not be + # confused with other targets, for example, if we have subdir 'test' + # with target 'test' in it that includes a 'test.o' file, then the + # target for directory will be just 'test' the target for test.o + # will be test.o and the target we create below + # will be test.o + DEPENDS $(target:G=e) : $(target) ; + # Allow b2 / to work. This will not catch all + # possible ways to refer to the path (relative/absolute, extra ".", + # various "..", but should help in obvious cases. + DEPENDS $(target:G=e:R=$(path)) : $(target) ; + } + else + { + SEARCH on $(target) = [ path.native $(self.path) ] ; + } + } + + # Returns the directory for this target. + # + rule path ( ) + { + if ! $(self.path) + { + if $(self.action) + { + local p = [ $(self.action).properties ] ; + local path,relative-to-build-dir = [ $(p).target-path ] ; + local path = $(path,relative-to-build-dir[1]) ; + local relative-to-build-dir = $(path,relative-to-build-dir[2]) ; + + if $(relative-to-build-dir) + { + path = [ path.join [ $(self.project).build-dir ] $(path) ] ; + } + + self.path = [ path.native $(path) ] ; + } + } + return $(self.path) ; + } +} + + +class notfile-target : abstract-file-target +{ + rule __init__ ( name : project : action ? ) + { + abstract-file-target.__init__ $(name) : : $(project) : $(action) ; + } + + # Returns nothing to indicate that the target's path is not known. + # + rule path ( ) + { + return ; + } + + rule actualize-location ( target ) + { + NOTFILE $(target) ; + ALWAYS $(target) ; + # TEMPORARY $(target) ; + NOUPDATE $(target) ; + } +} + + +# Class representing an action. 
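[Editor's note] The add-prefix-and-suffix rule defined earlier is worth a concrete example before the action class is described. A minimal Python sketch of the same conventions (illustrative only; the helper name is hypothetical): the suffix is appended with or without a leading dot, and the prefix is prepended unless the name already starts with it.

    def add_prefix_and_suffix(name, prefix='', suffix='', literal_suffix=False):
        # In the Jam rule a suffix wrapped in <...> means "append literally,
        # no leading dot"; literal_suffix stands in for that convention here.
        if suffix:
            name = name + (suffix if literal_suffix else '.' + suffix)
        # Do not double the prefix if the name already starts with it.
        if prefix and not name.startswith(prefix):
            name = prefix + name
        return name

    print(add_prefix_and_suffix('hello', prefix='lib', suffix='a'))           # libhello.a
    print(add_prefix_and_suffix('libfoo', prefix='lib', suffix='so'))         # libfoo.so
    print(add_prefix_and_suffix('ext', suffix='_d.so', literal_suffix=True))  # ext_d.so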
Both 'targets' and 'sources' should list +# instances of 'virtual-target'. Action name should name a rule with this +# prototype: +# rule action-name ( targets + : sources * : properties * ) +# Targets and sources are passed as actual Jam targets. The rule may not +# establish additional dependency relationships. +# +class action +{ + import "class" ; + import indirect ; + import path ; + import property-set ; + import set : difference ; + import toolset ; + import type ; + + rule __init__ ( sources * : action-name + : property-set ? ) + { + self.sources = $(sources) ; + + self.action-name = [ indirect.make-qualified $(action-name) ] ; + + if ! $(property-set) + { + property-set = [ property-set.empty ] ; + } + + if ! [ class.is-instance $(property-set) ] + { + import errors : error : errors.error ; + errors.error "Property set instance required" ; + } + + self.properties = $(property-set) ; + } + + rule add-targets ( targets * ) + { + self.targets += $(targets) ; + } + + rule replace-targets ( old-targets * : new-targets * ) + { + self.targets = [ set.difference $(self.targets) : $(old-targets) ] ; + self.targets += $(new-targets) ; + } + + rule targets ( ) + { + return $(self.targets) ; + } + + rule sources ( ) + { + return $(self.sources) ; + } + + rule action-name ( ) + { + return $(self.action-name) ; + } + + rule properties ( ) + { + return $(self.properties) ; + } + + # Generates actual build instructions. + # + rule actualize ( ) + { + if ! $(self.actualized) + { + self.actualized = true ; + + local ps = [ properties ] ; + local properties = [ adjust-properties $(ps) ] ; + + local actual-targets ; + for local i in [ targets ] + { + actual-targets += [ $(i).actualize ] ; + } + + actualize-sources [ sources ] : $(properties) ; + + DEPENDS $(actual-targets) : $(self.actual-sources) + $(self.dependency-only-sources) ; + + # Action name can include additional rule arguments, which should + # not be passed to 'set-target-variables'. + toolset.set-target-variables + [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets) + : $(properties) ; + + # Reflect ourselves in a variable for the target. This allows + # looking up additional info for the action given the raw target. + # For example to debug or output action information from action + # rules. + .action on $(actual-targets) = $(__name__) ; + + #indirect.call $(self.action-name) $(actual-targets) + # : $(self.actual-sources) : [ $(properties).raw ] ; + execute $(self.action-name) $(actual-targets) + : $(self.actual-sources) : [ $(properties).raw ] ; + + # Since we set up the creating action here, we set up the action for + # cleaning up as well. + common.Clean clean-all : $(actual-targets) ; + } + } + + # Helper for 'actualize-sources'. For each passed source, actualizes it with + # the appropriate scanner. Returns the actualized virtual targets. + # + rule actualize-source-type ( sources * : property-set ) + { + local result = ; + for local i in $(sources) + { + local scanner ; + if [ $(i).type ] + { + scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ; + } + result += [ $(i).actualize $(scanner) ] ; + } + return $(result) ; + } + + # Creates actual Jam targets for sources. Initializes the following member + # variables: + # 'self.actual-sources' -- sources passed to the updating action. + # 'self.dependency-only-sources' -- sources marked as dependencies, but + # are not used otherwise. + # + # New values will be *appended* to the variables. They may be non-empty if + # caller wants it. 
+ # + rule actualize-sources ( sources * : property-set ) + { + local dependencies = [ $(self.properties).get ] ; + + self.dependency-only-sources += + [ actualize-source-type $(dependencies) : $(property-set) ] ; + self.actual-sources += + [ actualize-source-type $(sources) : $(property-set) ] ; + + # This is used to help b2 find dependencies in generated headers and + # other main targets, e.g. in: + # + # make a.h : ....... ; + # exe hello : hello.cpp : a.h ; + # + # For b2 to find the dependency the generated target must be + # actualized (i.e. have its Jam target constructed). In the above case, + # if we are building just hello ("b2 hello"), 'a.h' will not be + # actualized unless we do it here. + local implicit = [ $(self.properties).get ] ; + for local i in $(implicit) + { + $(i:G=).actualize ; + } + } + + # Determines real properties when trying to build with 'properties'. This is + # the last chance to fix properties, for example to adjust includes to get + # generated headers correctly. Default implementation simply returns its + # argument. + # + rule adjust-properties ( property-set ) + { + return $(property-set) ; + } + + # Execute the action rule on the given targets, sources, and properties. + # Since this does the final call to the engine action rule this takes + # engine level targets and raw properties. One could override this, for + # example, to set additional variables on the target that might be + # difficult to determine just using toolset flags. + # Note, you must call this base rule when overriding as otherwise the + # actions will not execute and the engine will not run commands. + # + rule execute ( action-name targets + : sources * : properties * ) + { + indirect.call $(action-name) $(targets) : $(sources) : $(properties) ; + } +} + + +# Action class which does nothing --- it produces the targets with specific +# properties out of nowhere. It is needed to distinguish virtual targets with +# different properties that are known to exist and have no actions which create +# them. +# +class null-action : action +{ + rule __init__ ( property-set ? ) + { + action.__init__ : .no-action : $(property-set) ; + } + + rule actualize ( ) + { + if ! $(self.actualized) + { + self.actualized = true ; + for local i in [ targets ] + { + $(i).actualize ; + } + } + } +} + + +# Class which acts exactly like 'action', except that its sources are not +# scanned for dependencies. +# +class non-scanning-action : action +{ + rule __init__ ( sources * : action-name + : property-set ? ) + { + action.__init__ $(sources) : $(action-name) : $(property-set) ; + } + + rule actualize-source-type ( sources * : property-set ) + { + local result ; + for local i in $(sources) + { + result += [ $(i).actualize ] ; + } + return $(result) ; + } +} + + +# Creates a virtual target with an appropriate name and type from 'file'. If a +# target with that name in that project already exists, returns that already +# created target. +# +# FIXME: a more correct way would be to compute the path to the file, based on +# name and source location for the project, and use that path to determine if +# the target has already been created. This logic should be shared with how we +# usually find targets identified by a specific target id. It should also be +# updated to work correctly when the file is specified using both relative and +# absolute paths. +# +# TODO: passing a project with all virtual targets is starting to be annoying. 
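[Editor's note] The caching idea described in the comment above can be illustrated with a few lines of standalone Python (names here are stand-ins, not the real classes): the rooted path serves as the cache key, so asking for the same file twice yields the same virtual-target object.

    import os

    _files = {}   # rooted path -> previously created virtual target

    def from_file(file, file_loc, project, make_target):
        # Root the name against its source location so the same file always
        # maps to the same cache key, however it was spelled.
        path = os.path.abspath(os.path.join(file_loc, file))
        target = _files.get(path)
        if target is None:
            target = make_target(file, file_loc, project)
            _files[path] = target
        return target

    t1 = from_file('a.cpp', 'src', 'proj', lambda f, l, p: object())
    t2 = from_file('./a.cpp', 'src', 'proj', lambda f, l, p: object())
    assert t1 is t2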
+# +rule from-file ( file : file-loc : project ) +{ + import type ; # Had to do this here to break a circular dependency. + + # Check whether we already created a target corresponding to this file. + local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ; + + if $(.files.$(path)) + { + return $(.files.$(path)) ; + } + else + { + local name = [ path.make $(file) ] ; + local type = [ type.type $(file) ] ; + local result ; + + result = [ new file-target $(file) : $(type) : $(project) : : + $(file-loc) ] ; + + .files.$(path) = $(result) ; + return $(result) ; + } +} + + +# Registers a new virtual target. Checks if there is already a registered target +# with the same name, type, project and subvariant properties as well as the +# same sources and equal action. If such target is found it is returned and a +# new 'target' is not registered. Otherwise, 'target' is registered and +# returned. +# +rule register ( target ) +{ + local signature = [ sequence.join [ $(target).path ] [ $(target).name ] : - + ] ; + + local result ; + for local t in $(.cache.$(signature)) + { + local a1 = [ $(t).action ] ; + local a2 = [ $(target).action ] ; + + if ! $(result) + { + if ! $(a1) && ! $(a2) + { + result = $(t) ; + } + else if $(a1) && $(a2) && + ( [ $(a1).action-name ] = [ $(a2).action-name ] ) && + ( [ $(a1).sources ] = [ $(a2).sources ] ) + { + local ps1 = [ $(a1).properties ] ; + local ps2 = [ $(a2).properties ] ; + local relevant = [ toolset.relevant [ $(a1).action-name ] ] ; + relevant = [ $(relevant).add [ $(target).relevant ] ] ; + local p1 = [ $(ps1).relevant $(relevant) ] ; + local p2 = [ $(ps2).relevant $(relevant) ] ; + if $(p1) = $(p2) + { + result = $(t) ; + } + } + } + } + + if ! $(result) + { + .cache.$(signature) += $(target) ; + result = $(target) ; + } + + .recent-targets += $(result) ; + .all-targets += $(result) ; + + return $(result) ; +} + + +# Each target returned by 'register' is added to the .recent-targets list, +# returned by this function. This allows us to find all virtual targets created +# when building a specific main target, even those constructed only as +# intermediate targets. +# +rule recent-targets ( ) +{ + return $(.recent-targets) ; +} + + +rule clear-recent-targets ( ) +{ + .recent-targets = ; +} + + +# Returns all virtual targets ever created. +# +rule all-targets ( ) +{ + return $(.all-targets) ; +} + + +# Returns all targets from 'targets' with types equal to 'type' or derived from +# it. +# +rule select-by-type ( type : targets * ) +{ + local result ; + for local t in $(targets) + { + if [ type.is-subtype [ $(t).type ] $(type) ] + { + result += $(t) ; + } + } + return $(result) ; +} + + +rule register-actual-name ( actual-name : virtual-target ) +{ + if $(.actual.$(actual-name)) + { + local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ; + local cmt1-name ; + if $(cs1)-is-defined + { + local cmt1 = [ $(cs1).main-target ] ; + cmt1-name = [ $(cmt1).full-name ] ; + } + local cs2 = [ $(virtual-target).creating-subvariant ] ; + local cmt2-name ; + if $(cs2)-is-defined + { + local cmt2 = [ $(cs2).main-target ] ; + cmt2-name = [ $(cmt2).full-name ] ; + } + local extra-error-information ; + if ! $(cs1)-is-defined || ! $(cs2)-is-defined + { + extra-error-information = Encountered a virtual-target without a + creating subvariant. It could be the virtual target has not been + registered via the virtual-target.register rule. 
; + } + + local action1 = [ $(.actual.$(actual-name)).action ] ; + local action2 = [ $(virtual-target).action ] ; + local properties-added ; + local properties-removed ; + if $(action1) && $(action2) + { + local p1 = [ $(action1).properties ] ; + local p2 = [ $(action2).properties ] ; + # Only show features that are relevant for either target. + local relevant = [ $(p1).get ] [ $(p2).get ] ; + relevant = [ feature.expand-relevant $(relevant) ] ; + # The presence of relevant can potentially mess things up, + # so we always need to show it. + relevant += relevant ; + relevant = [ property-set.create $(relevant) ] ; + p1 = [ $(p1).relevant $(relevant) ] ; + p2 = [ $(p2).relevant $(relevant) ] ; + p1 = [ $(p1).raw ] ; + p2 = [ $(p2).raw ] ; + properties-removed = [ set.difference $(p1) : $(p2) ] ; + properties-removed ?= "none" ; + properties-added = [ set.difference $(p2) : $(p1) ] ; + properties-added ?= "none" ; + } + import errors : user-error : errors.user-error ; + errors.user-error "Name clash for '$(actual-name)'" + : "" + : "Tried to build the target twice, with property sets having " + : "these incompatible properties:" + : "" + : " - " $(properties-removed) + : " - " $(properties-added) + : "" + : "Please make sure to have consistent requirements for these " + : "properties everywhere in your project, especially for install" + : "targets." + ; + } + else + { + .actual.$(actual-name) = $(virtual-target) ; + } +} + + +# Traverses the dependency graph of 'target' and return all targets that will be +# created before this one is created. If the root of some dependency graph is +# found during traversal, it is either included or not, depending on the +# 'include-roots' value. In either case traversal stops at root targets, i.e. +# root target sources are not traversed. +# +rule traverse ( target : include-roots ? : include-sources ? ) +{ + local result ; + if [ $(target).action ] + { + local action = [ $(target).action ] ; + # This includes the 'target' as well. + result += [ $(action).targets ] ; + + for local t in [ $(action).sources ] + { + if ! [ $(t).root ] + { + result += [ traverse $(t) : $(include-roots) : + $(include-sources) ] ; + } + else if $(include-roots) + { + result += $(t) ; + } + } + } + else if $(include-sources) + { + result = $(target) ; + } + return $(result) ; +} + + +# Takes an 'action' instance and creates a new instance of it and all targets +# produced by the action. The rule-name and properties are set to +# 'new-rule-name' and 'new-properties', if those are specified. Returns the +# cloned action. +# +rule clone-action ( action : new-project : new-action-name ? : new-properties ? + ) +{ + if ! $(new-action-name) + { + new-action-name = [ $(action).action-name ] ; + } + if ! $(new-properties) + { + new-properties = [ $(action).properties ] ; + } + + local action-class = [ modules.peek $(action) : __class__ ] ; + local cloned-action = [ class.new $(action-class) + [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ; + + local cloned-targets ; + for local target in [ $(action).targets ] + { + local n = [ $(target).name ] ; + # Do not modify produced target names. 
+ local cloned-target = [ class.new file-target $(n) exact : + [ $(target).type ] : $(new-project) : $(cloned-action) ] ; + local d = [ $(target).dependencies ] ; + if $(d) + { + $(cloned-target).depends $(d) ; + } + $(cloned-target).root [ $(target).root ] ; + $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ; + + cloned-targets += $(cloned-target) ; + } + + return $(cloned-action) ; +} + + +class subvariant +{ + import sequence ; + import type ; + + rule __init__ ( main-target # The instance of main-target class. + : property-set # Properties requested for this target. + : sources * + : build-properties # Actually used properties. + : sources-usage-requirements # Properties propagated from sources. + : created-targets * ) # Top-level created targets. + { + self.main-target = $(main-target) ; + self.properties = $(property-set) ; + self.sources = $(sources) ; + self.build-properties = $(build-properties) ; + self.sources-usage-requirements = $(sources-usage-requirements) ; + self.created-targets = $(created-targets) ; + + # Pre-compose a list of other dependency graphs this one depends on. + local deps = [ $(build-properties).get ] ; + for local d in $(deps) + { + self.other-dg += [ $(d:G=).creating-subvariant ] ; + } + + self.other-dg = [ sequence.unique $(self.other-dg) ] ; + } + + rule main-target ( ) + { + return $(self.main-target) ; + } + + rule created-targets ( ) + { + return $(self.created-targets) ; + } + + rule requested-properties ( ) + { + return $(self.properties) ; + } + + rule build-properties ( ) + { + return $(self.build-properties) ; + } + + rule sources-usage-requirements ( ) + { + return $(self.sources-usage-requirements) ; + } + + rule set-usage-requirements ( usage-requirements ) + { + self.usage-requirements = $(usage-requirements) ; + } + + rule usage-requirements ( ) + { + return $(self.usage-requirements) ; + } + + # Returns all targets referenced by this subvariant, either directly or + # indirectly, and either as sources, or as dependency properties. Targets + # referred to using the dependency property are returned as properties, not + # targets. + # + rule all-referenced-targets ( theset ) + { + # Find directly referenced targets. + local deps = [ $(self.build-properties).dependency ] ; + local all-targets = $(self.sources) $(deps) ; + + # Find other subvariants. + local r ; + for local t in $(all-targets) + { + if ! [ $(theset).contains $(t) ] + { + $(theset).add $(t) ; + r += [ $(t:G=).creating-subvariant ] ; + } + } + r = [ sequence.unique $(r) ] ; + for local s in $(r) + { + if $(s) != $(__name__) + { + $(s).all-referenced-targets $(theset) ; + } + } + } + + # Returns the properties specifying implicit include paths to generated + # headers. This traverses all targets in this subvariant and subvariants + # referred by properties. For all targets of type + # 'target-type' (or for all targets, if 'target-type' is not specified), the + # result will contain <$(feature)>path-to-that-target. + # + rule implicit-includes ( feature : target-type ? ) + { + local key = ii$(feature)-$(target-type:E="") ; + if ! $($(key))-is-not-empty + { + local target-paths = [ all-target-directories $(target-type) ] ; + target-paths = [ sequence.unique $(target-paths) ] ; + local result = $(target-paths:G=$(feature)) ; + if ! $(result) + { + result = "" ; + } + $(key) = $(result) ; + } + if $($(key)) = "" + { + return ; + } + else + { + return $($(key)) ; + } + } + + rule all-target-directories ( target-type ? ) + { + if ! 
$(self.target-directories.$(target-type:E=)) + { + compute-target-directories $(target-type) ; + } + return $(self.target-directories.$(target-type:E=)) ; + } + + rule compute-target-directories ( target-type ? ) + { + local result ; + for local t in $(self.created-targets) + { + # Skip targets of the wrong type. + local type = [ $(t).type ] ; + if ! $(target-type) || + ( $(type) && [ type.is-derived $(type) $(target-type) ] ) + { + result = [ sequence.merge $(result) : [ $(t).path ] ] ; + } + } + for local d in $(self.other-dg) + { + result += [ $(d).all-target-directories $(target-type) ] ; + } + self.target-directories.$(target-type:E=) = $(result) ; + } +} diff --git a/src/boost/tools/build/src/build/virtual_target.py b/src/boost/tools/build/src/build/virtual_target.py new file mode 100644 index 000000000..8dfd9fbf0 --- /dev/null +++ b/src/boost/tools/build/src/build/virtual_target.py @@ -0,0 +1,1175 @@ +# Status: ported. +# Base revision: 64488. +# +# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. + +# Implements virtual targets, which correspond to actual files created during +# build, but are not yet targets in Jam sense. They are needed, for example, +# when searching for possible transormation sequences, when it's not known +# if particular target should be created at all. +# +# +# +--------------------------+ +# | VirtualTarget | +# +==========================+ +# | actualize | +# +--------------------------+ +# | actualize_action() = 0 | +# | actualize_location() = 0 | +# +----------------+---------+ +# | +# ^ +# / \ +# +-+-+ +# | +# +---------------------+ +-------+--------------+ +# | Action | | AbstractFileTarget | +# +=====================| * +======================+ +# | action_name | +--+ action | +# | properties | | +----------------------+ +# +---------------------+--+ | actualize_action() | +# | actualize() |0..1 +-----------+----------+ +# | path() | | +# | adjust_properties() | sources | +# | actualize_sources() | targets | +# +------+--------------+ ^ +# | / \ +# ^ +-+-+ +# / \ | +# +-+-+ +-------------+-------------+ +# | | | +# | +------+---------------+ +--------+-------------+ +# | | FileTarget | | SearchedLibTarget | +# | +======================+ +======================+ +# | | actualize-location() | | actualize-location() | +# | +----------------------+ +----------------------+ +# | +# +-+------------------------------+ +# | | +# +----+----------------+ +---------+-----------+ +# | CompileAction | | LinkAction | +# +=====================+ +=====================+ +# | adjust_properties() | | adjust_properties() | +# +---------------------+ | actualize_sources() | +# +---------------------+ +# +# The 'CompileAction' and 'LinkAction' classes are defined not here, +# but in builtin.jam modules. They are shown in the diagram to give +# the big picture. 
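# --------------------------------------------------------------------------
# Editor's illustration (not part of this patch): a self-contained toy model
# of the relationships in the diagram above. A derived target points at the
# action that produces it, and the action lists both its sources and the
# targets it produces, so the dependency graph can be walked in either
# direction. The names below are simplified stand-ins, not the b2 classes
# defined later in this file.
class ToyAction:
    """Produces one or more targets from a list of source targets."""
    def __init__(self, sources, action_name, properties):
        self.sources = sources          # inputs (ToyTarget instances)
        self.action_name = action_name  # e.g. a hypothetical "compile"
        self.properties = properties    # build properties used by the action
        self.targets = []               # outputs, filled in by ToyTarget

class ToyTarget:
    """A virtual file: a plain source if action is None, derived otherwise."""
    def __init__(self, name, action=None):
        self.name = name
        self.action = action
        if action is not None:
            action.targets.append(self)

src = ToyTarget("hello.cpp")                                # source file
obj = ToyTarget("hello.o", ToyAction([src], "compile", {})) # derived file
assert obj.action.sources == [src] and src.action is None
# --------------------------------------------------------------------------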
+ +import bjam + +import re +import os.path +import string +import types + +from b2.util import path, utility, set, is_iterable_typed +from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value +from b2.util.sequence import unique +from b2.tools import common +from b2.exceptions import * +import b2.build.type +import b2.build.property_set as property_set + +import b2.build.property as property + +from b2.manager import get_manager +from b2.util import bjam_signature + +__re_starts_with_at = re.compile ('^@(.*)') + +class VirtualTargetRegistry: + def __init__ (self, manager): + self.manager_ = manager + + # A cache for FileTargets + self.files_ = {} + + # A cache for targets. + self.cache_ = {} + + # A map of actual names to virtual targets. + # Used to make sure we don't associate same + # actual target to two virtual targets. + self.actual_ = {} + + self.recent_targets_ = [] + + # All targets ever registered + self.all_targets_ = [] + + self.next_id_ = 0 + + def register (self, target): + """ Registers a new virtual target. Checks if there's already registered target, with the same + name, type, project and subvariant properties, and also with the same sources + and equal action. If such target is found it is returned and 'target' is not registered. + Otherwise, 'target' is registered and returned. + """ + assert isinstance(target, VirtualTarget) + if target.path(): + signature = target.path() + "-" + target.name() + else: + signature = "-" + target.name() + + result = None + if signature not in self.cache_: + self.cache_ [signature] = [] + + for t in self.cache_ [signature]: + a1 = t.action () + a2 = target.action () + + # TODO: why are we checking for not result? + if not result: + if not a1 and not a2: + result = t + else: + if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources (): + ps1 = a1.properties () + ps2 = a2.properties () + p1 = ps1.base () + ps1.free () +\ + b2.util.set.difference(ps1.dependency(), ps1.incidental()) + p2 = ps2.base () + ps2.free () +\ + b2.util.set.difference(ps2.dependency(), ps2.incidental()) + if p1 == p2: + result = t + + if not result: + self.cache_ [signature].append (target) + result = target + + # TODO: Don't append if we found pre-existing target? + self.recent_targets_.append(result) + self.all_targets_.append(result) + + return result + + def from_file (self, file, file_location, project): + """ Creates a virtual target with appropriate name and type from 'file'. + If a target with that name in that project was already created, returns that already + created target. + TODO: more correct way would be to compute path to the file, based on name and source location + for the project, and use that path to determine if the target was already created. + TODO: passing project with all virtual targets starts to be annoying. + """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(file, basestring) + assert isinstance(file_location, basestring) + assert isinstance(project, ProjectTarget) + # Check if we've created a target corresponding to this file. 
+ path = os.path.join(os.getcwd(), file_location, file) + path = os.path.normpath(path) + + if path in self.files_: + return self.files_ [path] + + file_type = b2.build.type.type (file) + + result = FileTarget (file, file_type, project, + None, file_location) + self.files_ [path] = result + + return result + + def recent_targets(self): + """Each target returned by 'register' is added to a list of + 'recent-target', returned by this function. So, this allows + us to find all targets created when building a given main + target, even if the target.""" + + return self.recent_targets_ + + def clear_recent_targets(self): + self.recent_targets_ = [] + + def all_targets(self): + # Returns all virtual targets ever created + return self.all_targets_ + + # Returns all targets from 'targets' with types + # equal to 'type' or derived from it. + def select_by_type(self, type, targets): + return [t for t in targets if b2.build.type.is_sybtype(t.type(), type)] + + def register_actual_name (self, actual_name, virtual_target): + assert isinstance(actual_name, basestring) + assert isinstance(virtual_target, VirtualTarget) + if actual_name in self.actual_: + cs1 = self.actual_ [actual_name].creating_subvariant () + cs2 = virtual_target.creating_subvariant () + cmt1 = cs1.main_target () + cmt2 = cs2.main_target () + + action1 = self.actual_ [actual_name].action () + action2 = virtual_target.action () + + properties_added = [] + properties_removed = [] + if action1 and action2: + p1 = action1.properties () + p1 = p1.raw () + p2 = action2.properties () + p2 = p2.raw () + + properties_removed = set.difference (p1, p2) + if not properties_removed: + properties_removed = ["none"] + + properties_added = set.difference (p2, p1) + if not properties_added: + properties_added = ["none"] + + # FIXME: Revive printing of real location. + get_manager().errors()( + "Duplicate name of actual target: '%s'\n" + "previous virtual target '%s'\n" + "created from '%s'\n" + "another virtual target '%s'\n" + "created from '%s'\n" + "added properties:\n%s\n" + "removed properties:\n%s\n" + % (actual_name, + self.actual_ [actual_name], cmt1.project().location(), + virtual_target, + cmt2.project().location(), + '\n'.join('\t' + p for p in properties_added), + '\n'.join('\t' + p for p in properties_removed))) + + else: + self.actual_ [actual_name] = virtual_target + + + def add_suffix (self, specified_name, file_type, prop_set): + """ Appends the suffix appropriate to 'type/property_set' combination + to the specified name and returns the result. + """ + assert isinstance(specified_name, basestring) + assert isinstance(file_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + suffix = b2.build.type.generated_target_suffix (file_type, prop_set) + + if suffix: + return specified_name + '.' + suffix + + else: + return specified_name + +class VirtualTarget: + """ Potential target. It can be converted into jam target and used in + building, if needed. However, it can be also dropped, which allows + to search for different transformation and select only one. + name: name of this target. + project: project to which this target belongs. + """ + def __init__ (self, name, project): + if __debug__: + from .targets import ProjectTarget + assert isinstance(name, basestring) + assert isinstance(project, ProjectTarget) + self.name_ = name + self.project_ = project + self.dependencies_ = [] + self.always_ = False + + # Caches if dapendencies for scanners have already been set. 
+ self.made_ = {} + + def manager(self): + return self.project_.manager() + + def virtual_targets(self): + return self.manager().virtual_targets() + + def name (self): + """ Name of this target. + """ + return self.name_ + + def project (self): + """ Project of this target. + """ + return self.project_ + + def depends (self, d): + """ Adds additional instances of 'VirtualTarget' that this + one depends on. + """ + self.dependencies_ = unique (self.dependencies_ + d).sort () + + def dependencies (self): + return self.dependencies_ + + def always(self): + self.always_ = True + + def actualize (self, scanner = None): + """ Generates all the actual targets and sets up build actions for + this target. + + If 'scanner' is specified, creates an additional target + with the same location as actual target, which will depend on the + actual target and be associated with 'scanner'. That additional + target is returned. See the docs (#dependency_scanning) for rationale. + Target must correspond to a file if 'scanner' is specified. + + If scanner is not specified, then actual target is returned. + """ + if __debug__: + from .scanner import Scanner + assert scanner is None or isinstance(scanner, Scanner) + actual_name = self.actualize_no_scanner () + + if self.always_: + bjam.call("ALWAYS", actual_name) + + if not scanner: + return actual_name + + else: + # Add the scanner instance to the grist for name. + g = '-'.join ([ungrist(get_grist(actual_name)), str(id(scanner))]) + + name = replace_grist (actual_name, '<' + g + '>') + + if name not in self.made_: + self.made_ [name] = True + + self.project_.manager ().engine ().add_dependency (name, actual_name) + + self.actualize_location (name) + + self.project_.manager ().scanners ().install (scanner, name, str (self)) + + return name + +# private: (overridables) + + def actualize_action (self, target): + """ Sets up build actions for 'target'. Should call appropriate rules + and set target variables. + """ + raise BaseException ("method should be defined in derived classes") + + def actualize_location (self, target): + """ Sets up variables on 'target' which specify its location. + """ + raise BaseException ("method should be defined in derived classes") + + def path (self): + """ If the target is generated one, returns the path where it will be + generated. Otherwise, returns empty list. + """ + raise BaseException ("method should be defined in derived classes") + + def actual_name (self): + """ Return that actual target name that should be used + (for the case where no scanner is involved) + """ + raise BaseException ("method should be defined in derived classes") + + +class AbstractFileTarget (VirtualTarget): + """ Target which correspond to a file. The exact mapping for file + is not yet specified in this class. (TODO: Actually, the class name + could be better...) + + May be a source file (when no action is specified), or + derived file (otherwise). + + The target's grist is concatenation of project's location, + properties of action (for derived files), and, optionally, + value identifying the main target. + + exact: If non-empty, the name is exactly the name + created file should have. Otherwise, the '__init__' + method will add suffix obtained from 'type' by + calling 'type.generated-target-suffix'. + + type: optional type of this target. 
+ """ + def __init__ (self, name, type, project, action = None, exact=False): + assert isinstance(type, basestring) or type is None + assert action is None or isinstance(action, Action) + assert isinstance(exact, (int, bool)) + VirtualTarget.__init__ (self, name, project) + + self.type_ = type + + self.action_ = action + self.exact_ = exact + + if action: + action.add_targets ([self]) + + if self.type and not exact: + self.__adjust_name (name) + + + self.actual_name_ = None + self.path_ = None + self.intermediate_ = False + self.creating_subvariant_ = None + + # True if this is a root target. + self.root_ = False + + def type (self): + return self.type_ + + def set_path (self, path): + """ Sets the path. When generating target name, it will override any path + computation from properties. + """ + assert isinstance(path, basestring) + self.path_ = os.path.normpath(path) + + def action (self): + """ Returns the action. + """ + return self.action_ + + def root (self, set = None): + """ Sets/gets the 'root' flag. Target is root is it directly correspods to some + variant of a main target. + """ + assert isinstance(set, (int, bool, type(None))) + if set: + self.root_ = True + return self.root_ + + def creating_subvariant (self, s = None): + """ Gets or sets the subvariant which created this target. Subvariant + is set when target is brought into existence, and is never changed + after that. In particual, if target is shared by subvariant, only + the first is stored. + s: If specified, specified the value to set, + which should be instance of 'subvariant' class. + """ + assert s is None or isinstance(s, Subvariant) + if s and not self.creating_subvariant (): + if self.creating_subvariant (): + raise BaseException ("Attempt to change 'dg'") + + else: + self.creating_subvariant_ = s + + return self.creating_subvariant_ + + def actualize_action (self, target): + assert isinstance(target, basestring) + if self.action_: + self.action_.actualize () + + # Return a human-readable representation of this target + # + # If this target has an action, that's: + # + # { -. ... } + # + # otherwise, it's: + # + # { . } + # + def str(self): + a = self.action() + + name_dot_type = self.name_ + "." + self.type_ + + if a: + action_name = a.action_name() + ss = [ s.str() for s in a.sources()] + + return "{ %s-%s %s}" % (action_name, name_dot_type, str(ss)) + else: + return "{ " + name_dot_type + " }" + +# private: + + def actual_name (self): + if not self.actual_name_: + self.actual_name_ = '<' + self.grist() + '>' + os.path.normpath(self.name_) + + return self.actual_name_ + + def grist (self): + """Helper to 'actual_name', above. Compute unique prefix used to distinguish + this target from other targets with the same name which create different + file. + """ + # Depending on target, there may be different approaches to generating + # unique prefixes. We'll generate prefixes in the form + # + path = self.path () + + if path: + # The target will be generated to a known path. Just use the path + # for identification, since path is as unique as it can get. + return 'p' + path + + else: + # File is either source, which will be searched for, or is not a file at + # all. Use the location of project for distinguishing. 
+ project_location = self.project_.get ('location') + path_components = b2.util.path.split(project_location) + location_grist = '!'.join (path_components) + + if self.action_: + ps = self.action_.properties () + property_grist = ps.as_path () + # 'property_grist' can be empty when 'ps' is an empty + # property set. + if property_grist: + location_grist = location_grist + '/' + property_grist + + return 'l' + location_grist + + def __adjust_name(self, specified_name): + """Given the target name specified in constructor, returns the + name which should be really used, by looking at the properties. + The tag properties come in two flavour: + - value, + - @rule-name + In the first case, value is just added to name + In the second case, the specified rule is called with specified name, + target type and properties and should return the new name. + If not property is specified, or the rule specified by + returns nothing, returns the result of calling + virtual-target.add-suffix""" + assert isinstance(specified_name, basestring) + if self.action_: + ps = self.action_.properties() + else: + ps = property_set.empty() + + # FIXME: I'm not sure how this is used, need to check with + # Rene to figure out how to implement + #~ We add ourselves to the properties so that any tag rule can get + #~ more direct information about the target than just that available + #~ through the properties. This is useful in implementing + #~ name changes based on the sources of the target. For example to + #~ make unique names of object files based on the source file. + #~ --grafik + #ps = property_set.create(ps.raw() + ["%s" % "XXXX"]) + #ps = [ property-set.create [ $(ps).raw ] $(__name__) ] ; + + tag = ps.get("") + + if tag: + + if len(tag) > 1: + get_manager().errors()( + """@rulename is present but is not the only feature""") + + tag = tag[0] + if callable(tag): + self.name_ = tag(specified_name, self.type_, ps) + else: + if not tag[0] == '@': + self.manager_.errors()("""The value of the feature must be '@rule-nane'""") + + exported_ps = b2.util.value_to_jam(ps, methods=True) + self.name_ = b2.util.call_jam_function( + tag[1:], specified_name, self.type_, exported_ps) + if self.name_: + self.name_ = self.name_[0] + + # If there's no tag or the tag rule returned nothing. + if not tag or not self.name_: + self.name_ = add_prefix_and_suffix(specified_name, self.type_, ps) + + def actualize_no_scanner(self): + name = self.actual_name() + + # Do anything only on the first invocation + if not self.made_: + self.made_[name] = True + + if self.action_: + # For non-derived target, we don't care if there + # are several virtual targets that refer to the same name. + # One case when this is unavoidable is when file name is + # main.cpp and two targets have types CPP (for compiling) + # and MOCCABLE_CPP (for conversion to H via Qt tools). + self.virtual_targets().register_actual_name(name, self) + + for i in self.dependencies_: + self.manager_.engine().add_dependency(name, i.actualize()) + + self.actualize_location(name) + self.actualize_action(name) + + return name + +@bjam_signature((["specified_name"], ["type"], ["property_set"])) +def add_prefix_and_suffix(specified_name, type, property_set): + """Appends the suffix appropriate to 'type/property-set' combination + to the specified name and returns the result.""" + + property_set = b2.util.jam_to_value_maybe(property_set) + + suffix = "" + if type: + suffix = b2.build.type.generated_target_suffix(type, property_set) + + # Handle suffixes for which no leading dot is desired. 
Those are + # specified by enclosing them in <...>. Needed by python so it + # can create "_d.so" extensions, for example. + if get_grist(suffix): + suffix = ungrist(suffix) + elif suffix: + suffix = "." + suffix + + prefix = "" + if type: + prefix = b2.build.type.generated_target_prefix(type, property_set) + + if specified_name.startswith(prefix): + prefix = "" + + if not prefix: + prefix = "" + if not suffix: + suffix = "" + return prefix + specified_name + suffix + + +class FileTarget (AbstractFileTarget): + """ File target with explicitly known location. + + The file path is determined as + - value passed to the 'set_path' method, if any + - for derived files, project's build dir, joined with components + that describe action's properties. If the free properties + are not equal to the project's reference properties + an element with name of main target is added. + - for source files, project's source dir + + The file suffix is + - the value passed to the 'suffix' method, if any, or + - the suffix which correspond to the target's type. + """ + def __init__ (self, name, type, project, action = None, path=None, exact=False): + assert isinstance(type, basestring) or type is None + assert action is None or isinstance(action, Action) + assert isinstance(exact, (int, bool)) + AbstractFileTarget.__init__ (self, name, type, project, action, exact) + + self.path_ = path + + def __str__(self): + if self.type_: + return self.name_ + "." + self.type_ + else: + return self.name_ + + def clone_with_different_type(self, new_type): + assert isinstance(new_type, basestring) + return FileTarget(self.name_, new_type, self.project_, + self.action_, self.path_, exact=True) + + def actualize_location (self, target): + assert isinstance(target, basestring) + engine = self.project_.manager_.engine () + + if self.action_: + # This is a derived file. + path = self.path () + engine.set_target_variable (target, 'LOCATE', path) + + # Make sure the path exists. + engine.add_dependency (target, path) + common.mkdir(engine, path) + + # It's possible that the target name includes a directory + # too, for example when installing headers. Create that + # directory. + d = os.path.dirname(get_value(target)) + if d: + d = os.path.join(path, d) + engine.add_dependency(target, d) + common.mkdir(engine, d) + + # For real file target, we create a fake target that + # depends on the real target. This allows to run + # + # bjam hello.o + # + # without trying to guess the name of the real target. + # Note the that target has no directory name, and a special + # grist . + # + # First, that means that "bjam hello.o" will build all + # known hello.o targets. + # Second, the grist makes sure this target won't be confused + # with other targets, for example, if we have subdir 'test' + # with target 'test' in it that includes 'test.o' file, + # then the target for directory will be just 'test' the target + # for test.o will be test.o and the target + # we create below will be test.o + engine.add_dependency("%s" % get_value(target), target) + + # Allow bjam / to work. This won't catch all + # possible ways to refer to the path (relative/absolute, extra ".", + # various "..", but should help in obvious cases. + engine.add_dependency("%s" % (os.path.join(path, get_value(target))), target) + + else: + # This is a source file. + engine.set_target_variable (target, 'SEARCH', self.project_.get ('source-location')) + + + def path (self): + """ Returns the directory for this target. 
+ """ + if not self.path_: + if self.action_: + p = self.action_.properties () + (target_path, relative_to_build_dir) = p.target_path () + + if relative_to_build_dir: + # Indicates that the path is relative to + # build dir. + target_path = os.path.join (self.project_.build_dir (), target_path) + + # Store the computed path, so that it's not recomputed + # any more + self.path_ = target_path + + return os.path.normpath(self.path_) + + +class NotFileTarget(AbstractFileTarget): + + def __init__(self, name, project, action): + assert isinstance(action, Action) + AbstractFileTarget.__init__(self, name, None, project, action) + + def path(self): + """Returns nothing, to indicate that target path is not known.""" + return None + + def actualize_location(self, target): + assert isinstance(target, basestring) + bjam.call("NOTFILE", target) + bjam.call("ALWAYS", target) + bjam.call("NOUPDATE", target) + + +class Action: + """ Class which represents an action. + Both 'targets' and 'sources' should list instances of 'VirtualTarget'. + Action name should name a rule with this prototype + rule action_name ( targets + : sources * : properties * ) + Targets and sources are passed as actual jam targets. The rule may + not establish dependency relationship, but should do everything else. + """ + def __init__ (self, manager, sources, action_name, prop_set): + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(action_name, basestring) or action_name is None + assert(isinstance(prop_set, property_set.PropertySet)) + self.sources_ = sources + self.action_name_ = action_name + if not prop_set: + prop_set = property_set.empty() + self.properties_ = prop_set + if not all(isinstance(v, VirtualTarget) for v in prop_set.get('implicit-dependency')): + import pdb + pdb.set_trace() + + self.manager_ = manager + self.engine_ = self.manager_.engine () + self.targets_ = [] + + # Indicates whether this has been actualized or not. + self.actualized_ = False + + self.dependency_only_sources_ = [] + self.actual_sources_ = [] + + + def add_targets (self, targets): + assert is_iterable_typed(targets, VirtualTarget) + self.targets_ += targets + + + def replace_targets(self, old_targets, new_targets): + assert is_iterable_typed(old_targets, VirtualTarget) + assert is_iterable_typed(new_targets, VirtualTarget) + self.targets_ = [t for t in self.targets_ if not t in old_targets] + new_targets + + def targets (self): + return self.targets_ + + def sources (self): + return self.sources_ + + def action_name (self): + return self.action_name_ + + def properties (self): + return self.properties_ + + def actualize (self): + """ Generates actual build instructions. + """ + if self.actualized_: + return + + self.actualized_ = True + + ps = self.properties () + properties = self.adjust_properties (ps) + + + actual_targets = [] + + for i in self.targets (): + actual_targets.append (i.actualize ()) + + self.actualize_sources (self.sources (), properties) + + self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_) + + # FIXME: check the comment below. Was self.action_name_ [1] + # Action name can include additional rule arguments, which should not + # be passed to 'set-target-variables'. 
+ # FIXME: breaking circular dependency + import toolset + toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties) + + engine = self.manager_.engine () + + # FIXME: this is supposed to help --out-xml option, but we don't + # implement that now, and anyway, we should handle it in Python, + # not but putting variables on bjam-level targets. + bjam.call("set-target-variable", actual_targets, ".action", repr(self)) + + self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_, + properties) + + # Since we set up creating action here, we also set up + # action for cleaning up + self.manager_.engine ().set_update_action ('common.Clean', 'clean-all', + actual_targets) + + return actual_targets + + def actualize_source_type (self, sources, prop_set): + """ Helper for 'actualize_sources'. + For each passed source, actualizes it with the appropriate scanner. + Returns the actualized virtual targets. + """ + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + result = [] + for i in sources: + scanner = None + +# FIXME: what's this? +# if isinstance (i, str): +# i = self.manager_.get_object (i) + + if i.type (): + scanner = b2.build.type.get_scanner (i.type (), prop_set) + + r = i.actualize (scanner) + result.append (r) + + return result + + def actualize_sources (self, sources, prop_set): + """ Creates actual jam targets for sources. Initializes two member + variables: + 'self.actual_sources_' -- sources which are passed to updating action + 'self.dependency_only_sources_' -- sources which are made dependencies, but + are not used otherwise. + + New values will be *appended* to the variables. They may be non-empty, + if caller wants it. + """ + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + dependencies = self.properties_.get ('') + + self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set) + self.actual_sources_ += self.actualize_source_type (sources, prop_set) + + # This is used to help bjam find dependencies in generated headers + # in other main targets. + # Say: + # + # make a.h : ....... ; + # exe hello : hello.cpp : a.h ; + # + # However, for bjam to find the dependency the generated target must + # be actualized (i.e. have the jam target). In the above case, + # if we're building just hello ("bjam hello"), 'a.h' won't be + # actualized unless we do it here. + implicit = self.properties_.get("") + + for i in implicit: + i.actualize() + + def adjust_properties (self, prop_set): + """ Determines real properties when trying building with 'properties'. + This is last chance to fix properties, for example to adjust includes + to get generated headers correctly. Default implementation returns + its argument. + """ + assert isinstance(prop_set, property_set.PropertySet) + return prop_set + + +class NullAction (Action): + """ Action class which does nothing --- it produces the targets with + specific properties out of nowhere. It's needed to distinguish virtual + targets with different properties that are known to exist, and have no + actions which create them. 
+ """ + def __init__ (self, manager, prop_set): + assert isinstance(prop_set, property_set.PropertySet) + Action.__init__ (self, manager, [], None, prop_set) + + def actualize (self): + if not self.actualized_: + self.actualized_ = True + + for i in self.targets (): + i.actualize () + +class NonScanningAction(Action): + """Class which acts exactly like 'action', except that the sources + are not scanned for dependencies.""" + + def __init__(self, sources, action_name, property_set): + #FIXME: should the manager parameter of Action.__init__ + #be removed? -- Steven Watanabe + Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set) + + def actualize_source_type(self, sources, ps=None): + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(ps, property_set.PropertySet) or ps is None + result = [] + for s in sources: + result.append(s.actualize()) + return result + +def traverse (target, include_roots = False, include_sources = False): + """ Traverses the dependency graph of 'target' and return all targets that will + be created before this one is created. If root of some dependency graph is + found during traversal, it's either included or not, dependencing of the + value of 'include_roots'. In either case, sources of root are not traversed. + """ + assert isinstance(target, VirtualTarget) + assert isinstance(include_roots, (int, bool)) + assert isinstance(include_sources, (int, bool)) + result = [] + + if target.action (): + action = target.action () + + # This includes 'target' as well + result += action.targets () + + for t in action.sources (): + + # FIXME: + # TODO: see comment in Manager.register_object () + #if not isinstance (t, VirtualTarget): + # t = target.project_.manager_.get_object (t) + + if not t.root (): + result += traverse (t, include_roots, include_sources) + + elif include_roots: + result.append (t) + + elif include_sources: + result.append (target) + + return result + +def clone_action (action, new_project, new_action_name, new_properties): + """Takes an 'action' instances and creates new instance of it + and all produced target. The rule-name and properties are set + to 'new-rule-name' and 'new-properties', if those are specified. + Returns the cloned action.""" + if __debug__: + from .targets import ProjectTarget + assert isinstance(action, Action) + assert isinstance(new_project, ProjectTarget) + assert isinstance(new_action_name, basestring) + assert isinstance(new_properties, property_set.PropertySet) + if not new_action_name: + new_action_name = action.action_name() + + if not new_properties: + new_properties = action.properties() + + cloned_action = action.__class__(action.manager_, action.sources(), new_action_name, + new_properties) + + cloned_targets = [] + for target in action.targets(): + + n = target.name() + # Don't modify the name of the produced targets. 
Strip the directory f + cloned_target = FileTarget(n, target.type(), new_project, + cloned_action, exact=True) + + d = target.dependencies() + if d: + cloned_target.depends(d) + cloned_target.root(target.root()) + cloned_target.creating_subvariant(target.creating_subvariant()) + + cloned_targets.append(cloned_target) + + return cloned_action + +class Subvariant: + + def __init__ (self, main_target, prop_set, sources, build_properties, sources_usage_requirements, created_targets): + """ + main_target: The instance of MainTarget class + prop_set: Properties requested for this target + sources: + build_properties: Actually used properties + sources_usage_requirements: Properties propagated from sources + created_targets: Top-level created targets + """ + if __debug__: + from .targets import AbstractTarget + assert isinstance(main_target, AbstractTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(build_properties, property_set.PropertySet) + assert isinstance(sources_usage_requirements, property_set.PropertySet) + assert is_iterable_typed(created_targets, VirtualTarget) + self.main_target_ = main_target + self.properties_ = prop_set + self.sources_ = sources + self.build_properties_ = build_properties + self.sources_usage_requirements_ = sources_usage_requirements + self.created_targets_ = created_targets + + self.usage_requirements_ = None + + # Pre-compose the list of other dependency graphs, on which this one + # depends + deps = build_properties.get('') + + self.other_dg_ = [] + for d in deps: + self.other_dg_.append(d.creating_subvariant ()) + + self.other_dg_ = unique (self.other_dg_) + + self.implicit_includes_cache_ = {} + self.target_directories_ = None + + def main_target (self): + return self.main_target_ + + def created_targets (self): + return self.created_targets_ + + def requested_properties (self): + return self.properties_ + + def build_properties (self): + return self.build_properties_ + + def sources_usage_requirements (self): + return self.sources_usage_requirements_ + + def set_usage_requirements (self, usage_requirements): + assert isinstance(usage_requirements, property_set.PropertySet) + self.usage_requirements_ = usage_requirements + + def usage_requirements (self): + return self.usage_requirements_ + + def all_referenced_targets(self, result): + """Returns all targets referenced by this subvariant, + either directly or indirectly, and either as sources, + or as dependency properties. Targets referred with + dependency property are returned a properties, not targets.""" + if __debug__: + from .property import Property + assert is_iterable_typed(result, (VirtualTarget, Property)) + # Find directly referenced targets. + deps = self.build_properties().dependency() + all_targets = self.sources_ + deps + + # Find other subvariants. + r = [] + for e in all_targets: + if not e in result: + result.add(e) + if isinstance(e, property.Property): + t = e.value + else: + t = e + + # FIXME: how can this be? + cs = t.creating_subvariant() + if cs: + r.append(cs) + r = unique(r) + for s in r: + if s != self: + s.all_referenced_targets(result) + + + def implicit_includes (self, feature, target_type): + """ Returns the properties which specify implicit include paths to + generated headers. This traverses all targets in this subvariant, + and subvariants referred by properties. 
+ For all targets which are of type 'target-type' (or for all targets, + if 'target_type' is not specified), the result will contain + <$(feature)>path-to-that-target. + """ + assert isinstance(feature, basestring) + assert isinstance(target_type, basestring) + if not target_type: + key = feature + else: + key = feature + "-" + target_type + + + result = self.implicit_includes_cache_.get(key) + if not result: + target_paths = self.all_target_directories(target_type) + target_paths = unique(target_paths) + result = ["<%s>%s" % (feature, p) for p in target_paths] + self.implicit_includes_cache_[key] = result + + return result + + def all_target_directories(self, target_type = None): + assert isinstance(target_type, (basestring, type(None))) + # TODO: does not appear to use target_type in deciding + # if we've computed this already. + if not self.target_directories_: + self.target_directories_ = self.compute_target_directories(target_type) + return self.target_directories_ + + def compute_target_directories(self, target_type=None): + assert isinstance(target_type, (basestring, type(None))) + result = [] + for t in self.created_targets(): + if not target_type or b2.build.type.is_derived(t.type(), target_type): + result.append(t.path()) + + for d in self.other_dg_: + result.extend(d.all_target_directories(target_type)) + + result = unique(result) + return result diff --git a/src/boost/tools/build/src/build_system.py b/src/boost/tools/build/src/build_system.py new file mode 100644 index 000000000..1702acf5a --- /dev/null +++ b/src/boost/tools/build/src/build_system.py @@ -0,0 +1,682 @@ +# Status: mostly ported. Missing is --out-xml support, 'configure' integration +# and some FIXME. +# Base revision: 64351 + +# Copyright 2003, 2005 Dave Abrahams +# Copyright 2006 Rene Rivera +# Copyright 2003, 2004, 2005, 2006, 2007 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) +import os +import sys +import re + +import bjam + +# set this early on since some of the following modules +# require looking at the sys.argv +sys.argv = bjam.variable("ARGV") + + +from b2.build.engine import Engine +from b2.manager import Manager +from b2.util.path import glob +from b2.build import feature, property_set +import b2.build.virtual_target +from b2.build.targets import ProjectTarget +import b2.build.build_request +from b2.build.errors import ExceptionWithUserContext +import b2.tools.common +from b2.build.toolset import using + +import b2.build.virtual_target as virtual_target +import b2.build.build_request as build_request + +import b2.util.regex + +from b2.manager import get_manager +from b2.util import cached +from b2.util import option + +################################################################################ +# +# Module global data. +# +################################################################################ + +# Flag indicating we should display additional debugging information related to +# locating and loading Boost Build configuration files. +debug_config = False + +# The cleaning is tricky. Say, if user says 'bjam --clean foo' where 'foo' is a +# directory, then we want to clean targets which are in 'foo' as well as those +# in any children Jamfiles under foo but not in any unrelated Jamfiles. To +# achieve this we collect a list of projects under which cleaning is allowed. 
+project_targets = [] + +# Virtual targets obtained when building main targets references on the command +# line. When running 'bjam --clean main_target' we want to clean only files +# belonging to that main target so we need to record which targets are produced +# for it. +results_of_main_targets = [] + +# Was an XML dump requested? +out_xml = False + +# Default toolset & version to be used in case no other toolset has been used +# explicitly by either the loaded configuration files, the loaded project build +# scripts or an explicit toolset request on the command line. If not specified, +# an arbitrary default will be used based on the current host OS. This value, +# while not strictly necessary, has been added to allow testing Boost-Build's +# default toolset usage functionality. +default_toolset = None +default_toolset_version = None + +################################################################################ +# +# Public rules. +# +################################################################################ + +# Returns the property set with the free features from the currently processed +# build request. +# +def command_line_free_features(): + return command_line_free_features + +# Sets the default toolset & version to be used in case no other toolset has +# been used explicitly by either the loaded configuration files, the loaded +# project build scripts or an explicit toolset request on the command line. For +# more detailed information see the comment related to used global variables. +# +def set_default_toolset(toolset, version=None): + default_toolset = toolset + default_toolset_version = version + + +pre_build_hook = [] + +def add_pre_build_hook(callable): + pre_build_hook.append(callable) + +post_build_hook = None + +def set_post_build_hook(callable): + post_build_hook = callable + +################################################################################ +# +# Local rules. +# +################################################################################ + +# Returns actual Jam targets to be used for executing a clean request. +# +def actual_clean_targets(targets): + + # Construct a list of projects explicitly detected as targets on this build + # system run. These are the projects under which cleaning is allowed. + for t in targets: + if isinstance(t, b2.build.targets.ProjectTarget): + project_targets.append(t.project_module()) + + # Construct a list of targets explicitly detected on this build system run + # as a result of building main targets. + targets_to_clean = set() + for t in results_of_main_targets: + # Do not include roots or sources. + targets_to_clean.update(virtual_target.traverse(t)) + + to_clean = [] + for t in get_manager().virtual_targets().all_targets(): + + # Remove only derived targets. + if t.action(): + p = t.project() + if t in targets_to_clean or should_clean_project(p.project_module()): + to_clean.append(t) + + return [t.actualize() for t in to_clean] + +_target_id_split = re.compile("(.*)//(.*)") + +# Given a target id, try to find and return the corresponding target. This is +# only invoked when there is no Jamfile in ".". This code somewhat duplicates +# code in project-target.find but we can not reuse that code without a +# project-targets instance. 
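# Editor's sketch (not part of this patch): how the _target_id_split pattern
# defined above behaves. A command-line id of the form
# "project-path//main-target" is split into its project and main-target
# parts; a plain id has no "//" and does not match. The id used here is
# made up for illustration. find_target below applies exactly this split
# before looking up the project and, when a main-target part is present,
# resolving it within that project.
import re

_target_id_split_demo = re.compile("(.*)//(.*)")
m = _target_id_split_demo.match("libs/system//boost_system")
assert m.group(1) == "libs/system" and m.group(2) == "boost_system"
assert _target_id_split_demo.match("boost_system") is None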
+# +def find_target(target_id): + + projects = get_manager().projects() + m = _target_id_split.match(target_id) + if m: + pm = projects.find(m.group(1), ".") + else: + pm = projects.find(target_id, ".") + + if pm: + result = projects.target(pm) + + if m: + result = result.find(m.group(2)) + + return result + +def initialize_config_module(module_name, location=None): + + get_manager().projects().initialize(module_name, location) + +# Helper rule used to load configuration files. Loads the first configuration +# file with the given 'filename' at 'path' into module with name 'module-name'. +# Not finding the requested file may or may not be treated as an error depending +# on the must-find parameter. Returns a normalized path to the loaded +# configuration file or nothing if no file was loaded. +# +def load_config(module_name, filename, paths, must_find=False): + + if debug_config: + print "notice: Searching '%s' for '%s' configuration file '%s." \ + % (paths, module_name, filename) + + where = None + for path in paths: + t = os.path.join(path, filename) + if os.path.exists(t): + where = t + break + + if where: + where = os.path.realpath(where) + + if debug_config: + print "notice: Loading '%s' configuration file '%s' from '%s'." \ + % (module_name, filename, where) + + # Set source location so that path-constant in config files + # with relative paths work. This is of most importance + # for project-config.jam, but may be used in other + # config files as well. + attributes = get_manager().projects().attributes(module_name) ; + attributes.set('source-location', os.path.dirname(where), True) + get_manager().projects().load_standalone(module_name, where) + + else: + msg = "Configuration file '%s' not found in '%s'." % (filename, path) + if must_find: + get_manager().errors()(msg) + + elif debug_config: + print msg + + return where + +# Loads all the configuration files used by Boost Build in the following order: +# +# -- test-config -- +# Loaded only if specified on the command-line using the --test-config +# command-line parameter. It is ok for this file not to exist even if +# specified. If this configuration file is loaded, regular site and user +# configuration files will not be. If a relative path is specified, file is +# searched for in the current folder. +# +# -- site-config -- +# Always named site-config.jam. Will only be found if located on the system +# root path (Windows), /etc (non-Windows), user's home folder or the Boost +# Build path, in that order. Not loaded in case the test-config configuration +# file is loaded or the --ignore-site-config command-line option is specified. +# +# -- user-config -- +# Named user-config.jam by default or may be named explicitly using the +# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment +# variable. If named explicitly the file is looked for from the current working +# directory and if the default one is used then it is searched for in the +# user's home directory and the Boost Build path, in that order. Not loaded in +# case either the test-config configuration file is loaded or an empty file +# name is explicitly specified. If the file name has been given explicitly then +# the file must exist. +# +# Test configurations have been added primarily for use by Boost Build's +# internal unit testing system but may be used freely in other places as well. +# +def load_configuration_files(): + + # Flag indicating that site configuration should not be loaded. 
+ ignore_site_config = "--ignore-site-config" in sys.argv + + initialize_config_module("test-config") + test_config = None + for a in sys.argv: + m = re.match("--test-config=(.*)$", a) + if m: + test_config = b2.util.unquote(m.group(1)) + break + + if test_config: + where = load_config("test-config", os.path.basename(test_config), [os.path.dirname(test_config)]) + if where: + if debug_config: + print "notice: Regular site and user configuration files will" + print "notice: be ignored due to the test configuration being loaded." + + user_path = [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH") + site_path = ["/etc"] + user_path + if os.name in ["nt"]: + site_path = [os.getenv("SystemRoot")] + user_path + + if debug_config and not test_config and ignore_site_config: + print "notice: Site configuration files will be ignored due to the" + print "notice: --ignore-site-config command-line option." + + initialize_config_module("site-config") + if not test_config and not ignore_site_config: + load_config('site-config', 'site-config.jam', site_path) + + initialize_config_module('user-config') + if not test_config: + + # Here, user_config has value of None if nothing is explicitly + # specified, and value of '' if user explicitly does not want + # to load any user config. + user_config = None + for a in sys.argv: + m = re.match("--user-config=(.*)$", a) + if m: + user_config = m.group(1) + break + + if user_config is None: + user_config = os.getenv("BOOST_BUILD_USER_CONFIG") + + # Special handling for the case when the OS does not strip the quotes + # around the file name, as is the case when using Cygwin bash. + user_config = b2.util.unquote(user_config) + explicitly_requested = user_config + + if user_config is None: + user_config = "user-config.jam" + + if user_config: + if explicitly_requested: + + user_config = os.path.abspath(user_config) + + if debug_config: + print "notice: Loading explicitly specified user configuration file:" + print " " + user_config + + load_config('user-config', os.path.basename(user_config), [os.path.dirname(user_config)], True) + else: + load_config('user-config', os.path.basename(user_config), user_path) + else: + if debug_config: + print "notice: User configuration file loading explicitly disabled." + + # We look for project-config.jam from "." upward. I am not sure this is + # 100% right decision, we might as well check for it only alongside the + # Jamroot file. However: + # - We need to load project-config.jam before Jamroot + # - We probably need to load project-config.jam even if there is no Jamroot + # - e.g. to implement automake-style out-of-tree builds. + if os.path.exists("project-config.jam"): + file = ["project-config.jam"] + else: + file = b2.util.path.glob_in_parents(".", ["project-config.jam"]) + + if file: + initialize_config_module('project-config', os.path.dirname(file[0])) + load_config('project-config', "project-config.jam", [os.path.dirname(file[0])], True) + + get_manager().projects().end_load() + + +# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or +# toolset=xx,yy,...zz in the command line. May return additional properties to +# be processed as if they had been specified by the user. 
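# Editor's sketch (not part of this patch): the toolset requests are parsed by
# the function defined next with the regular expression shown in its body. A
# request such as "gcc-9" is decomposed into the combined toolset-version id
# plus its toolset and version parts, while a bare "msvc" yields no version.
# The requests and the helper name below are hypothetical.
import re

_toolset_request_pattern = "(([^-/]+)-?([^/]+)?)/?.*"   # same pattern as used below
assert re.match(_toolset_request_pattern, "gcc-9").groups() == ("gcc-9", "gcc", "9")
assert re.match(_toolset_request_pattern, "msvc").groups() == ("msvc", "msvc", None)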
+# +def process_explicit_toolset_requests(): + + extra_properties = [] + + option_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^--toolset=(.*)$") + for e in option.split(',')] + feature_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^toolset=(.*)$") + for e in option.split(',')] + + for t in option_toolsets + feature_toolsets: + + # Parse toolset-version/properties. + (toolset_version, toolset, version) = re.match("(([^-/]+)-?([^/]+)?)/?.*", t).groups() + + if debug_config: + print "notice: [cmdline-cfg] Detected command-line request for '%s': toolset= %s version=%s" \ + % (toolset_version, toolset, version) + + # If the toolset is not known, configure it now. + known = False + if toolset in feature.values("toolset"): + known = True + + if known and version and not feature.is_subvalue("toolset", toolset, "version", version): + known = False + # TODO: we should do 'using $(toolset)' in case no version has been + # specified and there are no versions defined for the given toolset to + # allow the toolset to configure its default version. For this we need + # to know how to detect whether a given toolset has any versions + # defined. An alternative would be to do this whenever version is not + # specified but that would require that toolsets correctly handle the + # case when their default version is configured multiple times which + # should be checked for all existing toolsets first. + + if not known: + + if debug_config: + print "notice: [cmdline-cfg] toolset '%s' not previously configured; attempting to auto-configure now" % toolset_version + if version is not None: + using(toolset, version) + else: + using(toolset) + + else: + + if debug_config: + + print "notice: [cmdline-cfg] toolset '%s' already configured" % toolset_version + + # Make sure we get an appropriate property into the build request in + # case toolset has been specified using the "--toolset=..." command-line + # option form. + if not t in sys.argv and not t in feature_toolsets: + + if debug_config: + print "notice: [cmdline-cfg] adding toolset=%s) to the build request." % t ; + extra_properties += "toolset=%s" % t + + return extra_properties + + + +# Returns 'true' if the given 'project' is equal to or is a (possibly indirect) +# child to any of the projects requested to be cleaned in this build system run. +# Returns 'false' otherwise. Expects the .project-targets list to have already +# been constructed. +# +@cached +def should_clean_project(project): + + if project in project_targets: + return True + else: + + parent = get_manager().projects().attribute(project, "parent-module") + if parent and parent != "user-config": + return should_clean_project(parent) + else: + return False + +################################################################################ +# +# main() +# ------ +# +################################################################################ + +def main(): + + # FIXME: document this option. 
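    # (Editor's note, addressing the FIXME above: judging from the block below,
    # "--profiling" simply re-runs main_real() under cProfile, writes the data
    # to "stones.prof" and prints the top callers; it does not change the build.)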
+ if "--profiling" in sys.argv: + import cProfile + r = cProfile.runctx('main_real()', globals(), locals(), "stones.prof") + + import pstats + stats = pstats.Stats("stones.prof") + stats.strip_dirs() + stats.sort_stats('time', 'calls') + stats.print_callers(20) + return r + else: + try: + return main_real() + except ExceptionWithUserContext, e: + e.report() + +def main_real(): + + global debug_config, out_xml + + debug_config = "--debug-configuration" in sys.argv + out_xml = any(re.match("^--out-xml=(.*)$", a) for a in sys.argv) + + engine = Engine() + + global_build_dir = option.get("build-dir") + manager = Manager(engine, global_build_dir) + + import b2.build.configure as configure + + if "--version" in sys.argv: + from b2.build import version + version.report() + return + + # This module defines types and generator and what not, + # and depends on manager's existence + import b2.tools.builtin + + b2.tools.common.init(manager) + + load_configuration_files() + + # Load explicitly specified toolset modules. + extra_properties = process_explicit_toolset_requests() + + # Load the actual project build script modules. We always load the project + # in the current folder so 'use-project' directives have any chance of + # being seen. Otherwise, we would not be able to refer to subprojects using + # target ids. + current_project = None + projects = get_manager().projects() + if projects.find(".", "."): + current_project = projects.target(projects.load(".")) + + # Load the default toolset module if no other has already been specified. + if not feature.values("toolset"): + + dt = default_toolset + dtv = None + if default_toolset: + dtv = default_toolset_version + else: + dt = "gcc" + if os.name == 'nt': + dt = "msvc" + # FIXME: + #else if [ os.name ] = MACOSX + #{ + # default-toolset = darwin ; + #} + + print "warning: No toolsets are configured." + print "warning: Configuring default toolset '%s'." % dt + print "warning: If the default is wrong, your build may not work correctly." + print "warning: Use the \"toolset=xxxxx\" option to override our guess." + print "warning: For more configuration options, please consult" + print "warning: https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.overview.configuration" + + using(dt, dtv) + + # Parse command line for targets and properties. Note that this requires + # that all project files already be loaded. + (target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties) + + # Check that we actually found something to build. + if not current_project and not target_ids: + get_manager().errors()("no Jamfile in current directory found, and no target references specified.") + # FIXME: + # EXIT + + # Flags indicating that this build system run has been started in order to + # clean existing instead of create new targets. Note that these are not the + # final flag values as they may get changed later on due to some special + # targets being specified on the command line. + clean = "--clean" in sys.argv + cleanall = "--clean-all" in sys.argv + + # List of explicitly requested files to build. Any target references read + # from the command line parameter not recognized as one of the targets + # defined in the loaded Jamfiles will be interpreted as an explicitly + # requested file to build. If any such files are explicitly requested then + # only those files and the targets they depend on will be built and they + # will be searched for among targets that would have been built had there + # been no explicitly requested files. 
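    # (Editor's illustration: given "b2 hello extra.o", "hello" resolves to a
    # declared main target, while "extra.o" matches nothing and therefore ends
    # up in the list below, to be built only via the final UPDATE on exact
    # file names rather than through the normal target-generation path.)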
+ explicitly_requested_files = [] + + # List of Boost Build meta-targets, virtual-targets and actual Jam targets + # constructed in this build system run. + targets = [] + virtual_targets = [] + actual_targets = [] + + explicitly_requested_files = [] + + # Process each target specified on the command-line and convert it into + # internal Boost Build target objects. Detect special clean target. If no + # main Boost Build targets were explicitly requested use the current project + # as the target. + for id in target_ids: + if id == "clean": + clean = 1 + else: + t = None + if current_project: + t = current_project.find(id, no_error=1) + else: + t = find_target(id) + + if not t: + print "notice: could not find main target '%s'" % id + print "notice: assuming it's a name of file to create " ; + explicitly_requested_files.append(id) + else: + targets.append(t) + + if not targets: + targets = [projects.target(projects.module_name("."))] + + # FIXME: put this BACK. + + ## if [ option.get dump-generators : : true ] + ## { + ## generators.dump ; + ## } + + + # We wish to put config.log in the build directory corresponding + # to Jamroot, so that the location does not differ depending on + # directory where we do build. The amount of indirection necessary + # here is scary. + first_project = targets[0].project() + first_project_root_location = first_project.get('project-root') + first_project_root_module = manager.projects().load(first_project_root_location) + first_project_root = manager.projects().target(first_project_root_module) + first_build_build_dir = first_project_root.build_dir() + configure.set_log_file(os.path.join(first_build_build_dir, "config.log")) + + virtual_targets = [] + + global results_of_main_targets + + # Expand properties specified on the command line into multiple property + # sets consisting of all legal property combinations. Each expanded property + # set will be used for a single build run. E.g. if multiple toolsets are + # specified then requested targets will be built with each of them. + # The expansion is being performed as late as possible so that the feature + # validation is performed after all necessary modules (including project targets + # on the command line) have been loaded. + if properties: + expanded = [] + for p in properties: + expanded.extend(build_request.convert_command_line_element(p)) + + expanded = build_request.expand_no_defaults(expanded) + else: + expanded = [property_set.empty()] + + # Now that we have a set of targets to build and a set of property sets to + # build the targets with, we can start the main build process by using each + # property set to generate virtual targets from all of our listed targets + # and any of their dependants. + for p in expanded: + manager.set_command_line_free_features(property_set.create(p.free())) + + for t in targets: + try: + g = t.generate(p) + if not isinstance(t, ProjectTarget): + results_of_main_targets.extend(g.targets()) + virtual_targets.extend(g.targets()) + except ExceptionWithUserContext, e: + e.report() + except Exception: + raise + + # Convert collected virtual targets into actual raw Jam targets. 
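+    # (Each virtual target's actualize() call sets up the corresponding
+    # low-level Jam target and returns its name; the resulting list is what
+    # the DEPENDS/UPDATE calls further below operate on.)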
+ for t in virtual_targets: + actual_targets.append(t.actualize()) + + j = option.get("jobs") + if j: + bjam.call("set-variable", 'PARALLELISM', j) + + k = option.get("keep-going", "true", "true") + if k in ["on", "yes", "true"]: + bjam.call("set-variable", "KEEP_GOING", "1") + elif k in ["off", "no", "false"]: + bjam.call("set-variable", "KEEP_GOING", "0") + else: + print "error: Invalid value for the --keep-going option" + sys.exit() + + # The 'all' pseudo target is not strictly needed expect in the case when we + # use it below but people often assume they always have this target + # available and do not declare it themselves before use which may cause + # build failures with an error message about not being able to build the + # 'all' target. + bjam.call("NOTFILE", "all") + + # And now that all the actual raw Jam targets and all the dependencies + # between them have been prepared all that is left is to tell Jam to update + # those targets. + if explicitly_requested_files: + # Note that this case can not be joined with the regular one when only + # exact Boost Build targets are requested as here we do not build those + # requested targets but only use them to construct the dependency tree + # needed to build the explicitly requested files. + # FIXME: add $(.out-xml) + bjam.call("UPDATE", ["%s" % x for x in explicitly_requested_files]) + elif cleanall: + bjam.call("UPDATE", "clean-all") + elif clean: + manager.engine().set_update_action("common.Clean", "clean", + actual_clean_targets(targets)) + bjam.call("UPDATE", "clean") + else: + # FIXME: + #configure.print-configure-checks-summary ; + + if pre_build_hook: + for h in pre_build_hook: + h() + + bjam.call("DEPENDS", "all", actual_targets) + ok = bjam.call("UPDATE_NOW", "all") # FIXME: add out-xml + if post_build_hook: + post_build_hook(ok) + # Prevent automatic update of the 'all' target, now that + # we have explicitly updated what we wanted. + bjam.call("UPDATE") + + if manager.errors().count() == 0: + return ["ok"] + else: + return [] diff --git a/src/boost/tools/build/src/contrib/__init__.py b/src/boost/tools/build/src/contrib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/src/contrib/boost.jam b/src/boost/tools/build/src/contrib/boost.jam new file mode 100644 index 000000000..6bf28d197 --- /dev/null +++ b/src/boost/tools/build/src/contrib/boost.jam @@ -0,0 +1,309 @@ +# Copyright 2008 - 2013 Roland Schwarz +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Boost library support module. +# +# This module allows to use the boost library from boost-build projects. The +# location of a boost source tree or the path to a pre-built version of the +# library can be configured from either site-config.jam or user-config.jam. If +# no location is configured the module looks for a BOOST_ROOT environment +# variable, which should point to a boost source tree. As a last resort it tries +# to use pre-built libraries from the standard search path of the compiler. +# +# If the location to a source tree is known, the module can be configured from +# the *-config.jam files: +# +# using boost : 1.35 : /path-to-boost-root ; +# +# If the location to a pre-built version is known: +# +# using boost : 1.34 +# : /usr/local/include/boost_1_34 +# /usr/local/lib +# ; +# +# It is legal to configure more than one boost library version in the config +# files. 
The version identifier is used to disambiguate between them. The first +# configured version becomes the default. +# +# To use a boost library you need to put a 'use' statement into your Jamfile: +# +# import boost ; +# +# boost.use-project 1.35 ; +# +# If you do not care about a specific version you just can omit the version +# part, in which case the default is picked up: +# +# boost.use-project ; +# +# The library can be referenced with the project identifier '/boost'. To +# reference the program_options you would specify: +# +# exe myexe : mysrc.cpp : /boost//program_options ; +# +# Note that the requirements are automatically transformed into suitable tags to +# find the correct pre-built library. +# + +import common ; +import modules ; +import numbers ; +import project ; +import property-set ; +import regex ; +import toolset ; + +.boost.auto_config = [ property-set.create system ] ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +# Configuration of the boost library to use. +# +# This can either be a boost source tree or pre-built libraries. The 'version' +# parameter must be a valid boost version number, e.g. 1.35, if specifying a +# pre-built version with versioned layout. It may be a symbolic name, e.g. +# 'trunk' if specifying a source tree. The options are specified as named +# parameters (like properties). The following parameters are available: +# +# /path-to-boost-root : Specify a source tree. +# /path-to-include : The include directory to search. +# /path-to-library : The library directory to search. +# system or versioned : Built library layout. +# my_build_id : The custom build id to use. +# +rule init +( + version # Version identifier. + : options * # Set the option properties. +) +{ + if $(.boost.$(version)) + { + import errors ; + errors.user-error Boost $(version) already configured. ; + } + else + { + if $(.debug-configuration) + { + if ! $(.boost_default) + { + echo notice\: configuring default boost library $(version) ; + } + echo notice\: configuring boost library $(version) ; + } + .boost_default ?= $(version) ; # the first configured is default + .boost.$(version) = [ property-set.create $(options) ] ; + } +} + +# Use a certain version of the library. +# +# The use-project rule causes the module to define a boost project of searchable +# pre-built boost libraries, or references a source tree of the boost library. +# If the 'version' parameter is omitted either the configured default (first in +# config files) is used or an auto configuration will be attempted. +# +rule use-project +( + version ? # The version of the library to use. +) +{ + project.push-current [ project.current ] ; + version ?= $(.boost_default) ; + version ?= auto_config ; + + if $(.initialized) + { + if $(.initialized) != $(version) + { + import errors ; + errors.user-error Attempt to use $(__name__) with different + parameters. ; + } + } + else + { + if $(.boost.$(version)) + { + local opt = $(.boost.$(version)) ; + local root = [ $(opt).get ] ; + local inc = [ $(opt).get ] ; + local lib = [ $(opt).get ] ; + + if $(.debug-configuration) + { + echo notice\: using boost library $(version) [ $(opt).raw ] ; + } + + .layout = [ $(opt).get ] ; + .layout ?= versioned ; + .build_id = [ $(opt).get ] ; + .version_tag = [ regex.replace $(version) "[*\\/:.\"\' ]" "_" ] ; + .initialized = $(version) ; + + if ( $(root) && $(inc) ) + || ( $(root) && $(lib) ) + || ( $(lib) && ! $(inc) ) + || ( ! 
$(lib) && $(inc) ) + { + import errors ; + errors.user-error Ambiguous parameters, use either or + with . ; + } + else if ! $(root) && ! $(inc) + { + root = [ modules.peek : BOOST_ROOT ] ; + } + + local prj = [ project.current ] ; + local mod = [ $(prj).project-module ] ; + + if $(root) + { + modules.call-in $(mod) : use-project boost : $(root) ; + } + else + { + project.initialize $(__name__) ; + # It is possible to override the setup of the searched libraries + # per version. The (unlikely) 0.0.1 tag is meant as an example + # template only. + switch $(version) + { + case 0.0.1 : boost_0_0_1 $(inc) $(lib) ; + case * : boost_std $(inc) $(lib) ; + } + } + } + else + { + import errors ; + errors.user-error Reference to unconfigured boost version. ; + } + } + project.pop-current ; +} + +local rule boost_lib_std ( id : shared-lib-define ) +{ + lib $(id) : : : : shared:$(shared-lib-define) ; +} + +rule boost_std ( inc ? lib ? ) +{ +# The default definitions for pre-built libraries. + + project boost + : usage-requirements $(inc) BOOST_ALL_NO_LIB + : requirements @tag_std $(lib) + ; + + alias headers ; + boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ; + boost_lib_std container : BOOST_CONTAINER_DYN_LINK ; + boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ; + boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ; + boost_lib_std graph : BOOST_GRAPH_DYN_LINK ; + boost_lib_std graph_parallel : BOOST_GRAPH_DYN_LINK ; + boost_lib_std iostreams : BOOST_IOSTREAMS_DYN_LINK ; + boost_lib_std json : BOOST_JSON_DYN_LINK ; + boost_lib_std locale : BOOST_LOCALE_DYN_LINK ; + boost_lib_std log : BOOST_LOG_DYN_LINK ; + boost_lib_std log_setup : BOOST_LOG_SETUP_DYN_LINK ; + boost_lib_std math_c99 : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_c99f : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_c99l : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std mpi : BOOST_MPI_DYN_LINK ; + boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ; + boost_lib_std program_options : BOOST_PROGRAM_OPTIONS_DYN_LINK ; + boost_lib_std python : BOOST_PYTHON_DYN_LINK ; + boost_lib_std python3 : BOOST_PYTHON_DYN_LINK ; + boost_lib_std random : BOOST_RANDOM_DYN_LINK ; + boost_lib_std regex : BOOST_REGEX_DYN_LINK ; + boost_lib_std serialization : BOOST_SERIALIZATION_DYN_LINK ; + boost_lib_std signals : BOOST_SIGNALS_DYN_LINK ; + boost_lib_std system : BOOST_SYSTEM_DYN_LINK ; + boost_lib_std test_exec_monitor : BOOST_TEST_DYN_LINK ; + boost_lib_std thread : BOOST_THREAD_DYN_DLL ; + boost_lib_std timer : BOOST_TIMER_DYN_DLL ; + boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ; + boost_lib_std wave : BOOST_WAVE_DYN_LINK ; + boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ; +} + +# Example placeholder for rules defining Boost library project & library targets +# for a specific Boost library version. Copy under a different name and model +# after the boost_std rule. Please note that it is also possible to have a per +# version taging rule in case the tagging algorithm changes between versions. +# +rule boost_0_0_1 ( inc ? lib ? ) +{ + echo "You are trying to use an example placeholder for boost libs." ; +} + +rule tag_std ( name : type ? 
: property-set ) +{ + name = boost_$(name) ; + if ( [ $(property-set).get ] in static ) && + ( [ $(property-set).get ] in windows ) + { + name = lib$(name) ; + } + + local result ; + if $(.layout) = system + { + local version = [ MATCH "^([0-9]+)_([0-9]+)" : $(.version_tag) ] ; + if $(version[1]) = "1" && [ numbers.less $(version[2]) 39 ] + { + result = [ tag_tagged $(name) : $(type) : $(property-set) ] ; + } + else + { + result = [ tag_system $(name) : $(type) : $(property-set) ] ; + } + } + else if $(.layout) = tagged + { + result = [ tag_tagged $(name) : $(type) : $(property-set) ] ; + } + else if $(.layout) = versioned + { + result = [ tag_versioned $(name) : $(type) : $(property-set) ] ; + } + else + { + import errors ; + errors.error Missing layout. ; + } + + return $(result) ; +} + +rule tag_system ( name : type ? : property-set ) +{ + return [ common.format-name -$(.build_id) : $(name) : $(type) : + $(property-set) ] ; +} + +rule tag_tagged ( name : type ? : property-set ) +{ + return [ common.format-name -$(.build_id) : + $(name) : $(type) : $(property-set) ] ; +} + +rule tag_versioned ( name : type ? : property-set ) +{ + return [ common.format-name + -$(.version_tag) -$(.build_id) : $(name) : $(type) : $(property-set) ] ; +} diff --git a/src/boost/tools/build/src/contrib/boost.py b/src/boost/tools/build/src/contrib/boost.py new file mode 100644 index 000000000..6f4d6b4ab --- /dev/null +++ b/src/boost/tools/build/src/contrib/boost.py @@ -0,0 +1,280 @@ +# $Id: boost.jam 62249 2010-05-26 19:05:19Z steven_watanabe $ +# Copyright 2008 Roland Schwarz +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Boost library support module. +# +# This module allows to use the boost library from boost-build projects. +# The location of a boost source tree or the path to a pre-built +# version of the library can be configured from either site-config.jam +# or user-config.jam. If no location is configured the module looks for +# a BOOST_ROOT environment variable, which should point to a boost source +# tree. As a last resort it tries to use pre-built libraries from the standard +# search path of the compiler. +# +# If the location to a source tree is known, the module can be configured +# from the *-config.jam files: +# +# using boost : 1.35 : /path-to-boost-root ; +# +# If the location to a pre-built version is known: +# +# using boost : 1.34 +# : /usr/local/include/boost_1_34 +# /usr/local/lib +# ; +# +# It is legal to configure more than one boost library version in the config +# files. The version identifier is used to disambiguate between them. +# The first configured version becomes the default. +# +# To use a boost library you need to put a 'use' statement into your +# Jamfile: +# +# import boost ; +# +# boost.use-project 1.35 ; +# +# If you don't care about a specific version you just can omit the version +# part, in which case the default is picked up: +# +# boost.use-project ; +# +# The library can be referenced with the project identifier '/boost'. To +# reference the program_options you would specify: +# +# exe myexe : mysrc.cpp : /boost//program_options ; +# +# Note that the requirements are automatically transformed into suitable +# tags to find the correct pre-built library. 
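+# For instance (illustrative), a per-target request for a statically linked
+# Boost library would be written as:
+#
+#   exe myexe : mysrc.cpp : <library>/boost//program_options <link>static ;
+#
+# and the expected pre-built library name is then derived from those
+# properties.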
+# + +import re + +import bjam + +from b2.build import alias, property, property_set, feature +from b2.manager import get_manager +from b2.tools import builtin, common +from b2.util import bjam_signature, regex + + +# TODO: This is currently necessary in Python Port, but was not in Jam. +feature.feature('layout', ['system', 'versioned', 'tag'], ['optional']) +feature.feature('root', [], ['optional', 'free']) +feature.feature('build-id', [], ['optional', 'free']) + +__initialized = None +__boost_auto_config = property_set.create([property.Property('layout', 'system')]) +__boost_configured = {} +__boost_default = None +__build_id = None + +__debug = None + +def debug(): + global __debug + if __debug is None: + __debug = "--debug-configuration" in bjam.variable("ARGV") + return __debug + + +# Configuration of the boost library to use. +# +# This can either be a boost source tree or +# pre-built libraries. The 'version' parameter must be a valid boost +# version number, e.g. 1.35, if specifying a pre-built version with +# versioned layout. It may be a symbolic name, e.g. 'trunk' if specifying +# a source tree. The options are specified as named parameters (like +# properties). The following parameters are available: +# +# /path-to-boost-root: Specify a source tree. +# +# /path-to-include: The include directory to search. +# +# /path-to-library: The library directory to search. +# +# system or versioned. +# +# my_build_id: The custom build id to use. +# +def init(version, options = None): + assert(isinstance(version,list)) + assert(len(version)==1) + version = version[0] + if version in __boost_configured: + get_manager().errors()("Boost {} already configured.".format(version)); + else: + global __boost_default + if debug(): + if not __boost_default: + print "notice: configuring default boost library {}".format(version) + print "notice: configuring boost library {}".format(version) + + if not __boost_default: + __boost_default = version + properties = [] + for option in options: + properties.append(property.create_from_string(option)) + __boost_configured[ version ] = property_set.PropertySet(properties) + +projects = get_manager().projects() +rules = projects.project_rules() + + +# Use a certain version of the library. +# +# The use-project rule causes the module to define a boost project of +# searchable pre-built boost libraries, or references a source tree +# of the boost library. If the 'version' parameter is omitted either +# the configured default (first in config files) is used or an auto +# configuration will be attempted. +# +@bjam_signature(([ "version", "?" 
], ))
+def use_project(version = None):
+    projects.push_current( projects.current() )
+    if not version:
+        version = __boost_default
+    if not version:
+        version = "auto_config"
+
+    global __initialized
+    if __initialized:
+        if __initialized != version:
+            get_manager().errors()('Attempt to use {} with different parameters'.format('boost'))
+    else:
+        if version in __boost_configured:
+            opts = __boost_configured[ version ]
+            root = opts.get('<root>')
+            inc = opts.get('<include>')
+            lib = opts.get('<library>')
+
+            if debug():
+                print "notice: using boost library {} {}".format( version, opts.raw() )
+
+            global __layout
+            global __version_tag
+            global __build_id
+            __layout = opts.get('<layout>')
+            if not __layout:
+                __layout = 'versioned'
+            __build_id = opts.get('<build-id>')
+            __version_tag = re.sub("[*\\/:.\"\' ]", "_", version)
+            __initialized = version
+
+            if ( root and inc ) or \
+               ( root and lib ) or \
+               ( lib and not inc ) or \
+               ( not lib and inc ):
+                get_manager().errors()("Ambiguous parameters, use either <root> or <include> with <library>.")
+            elif not root and not inc:
+                root = bjam.variable("BOOST_ROOT")
+
+            module = projects.current().project_module()
+
+            if root:
+                bjam.call('call-in-module', module, 'use-project', ['boost', root])
+            else:
+                projects.initialize(__name__)
+                if version == '0.0.1':
+                    boost_0_0_1( inc, lib )
+                else:
+                    boost_std( inc, lib )
+        else:
+            get_manager().errors()("Reference to unconfigured boost version.")
+    projects.pop_current()
+
+
+rules.add_rule( 'boost.use-project', use_project )
+
+def boost_std(inc = None, lib = None):
+    # The default definitions for pre-built libraries.
+    rules.project(
+        ['boost'],
+        ['usage-requirements'] + ['<include>{}'.format(i) for i in inc] + ['<define>BOOST_ALL_NO_LIB'],
+        ['requirements'] + ['<search>{}'.format(l) for l in lib])
+
+    # TODO: There should be a better way to add a Python function into a
+    # project requirements property set.
+    tag_prop_set = property_set.create([property.Property('<tag>', tag_std)])
+    attributes = projects.attributes(projects.current().project_module())
+    attributes.requirements = attributes.requirements.refine(tag_prop_set)
+
+    alias('headers')
+
+    def boost_lib(lib_name, dyn_link_macro):
+        if (isinstance(lib_name,str)):
+            lib_name = [lib_name]
+        builtin.lib(lib_name, usage_requirements=['<link>shared:<define>{}'.format(dyn_link_macro)])
+
+    boost_lib('container'           , 'BOOST_CONTAINER_DYN_LINK'      )
+    boost_lib('date_time'           , 'BOOST_DATE_TIME_DYN_LINK'      )
+    boost_lib('filesystem'          , 'BOOST_FILE_SYSTEM_DYN_LINK'    )
+    boost_lib('graph'               , 'BOOST_GRAPH_DYN_LINK'          )
+    boost_lib('graph_parallel'      , 'BOOST_GRAPH_DYN_LINK'          )
+    boost_lib('iostreams'           , 'BOOST_IOSTREAMS_DYN_LINK'      )
+    boost_lib('locale'              , 'BOOST_LOCALE_DYN_LINK'         )
+    boost_lib('log'                 , 'BOOST_LOG_DYN_LINK'            )
+    boost_lib('log_setup'           , 'BOOST_LOG_DYN_LINK'            )
+    boost_lib('math_tr1'            , 'BOOST_MATH_TR1_DYN_LINK'       )
+    boost_lib('math_tr1f'           , 'BOOST_MATH_TR1_DYN_LINK'       )
+    boost_lib('math_tr1l'           , 'BOOST_MATH_TR1_DYN_LINK'       )
+    boost_lib('math_c99'            , 'BOOST_MATH_TR1_DYN_LINK'       )
+    boost_lib('math_c99f'           , 'BOOST_MATH_TR1_DYN_LINK'       )
+    boost_lib('math_c99l'           , 'BOOST_MATH_TR1_DYN_LINK'       )
+    boost_lib('mpi'                 , 'BOOST_MPI_DYN_LINK'            )
+    boost_lib('program_options'     , 'BOOST_PROGRAM_OPTIONS_DYN_LINK')
+    boost_lib('python'              , 'BOOST_PYTHON_DYN_LINK'         )
+    boost_lib('python3'             , 'BOOST_PYTHON_DYN_LINK'         )
+    boost_lib('random'              , 'BOOST_RANDOM_DYN_LINK'         )
+    boost_lib('regex'               , 'BOOST_REGEX_DYN_LINK'          )
+    boost_lib('serialization'       , 'BOOST_SERIALIZATION_DYN_LINK'  )
+    boost_lib('wserialization'      , 'BOOST_SERIALIZATION_DYN_LINK'  )
+    boost_lib('signals'             , 'BOOST_SIGNALS_DYN_LINK'        )
+    boost_lib('system'              , 'BOOST_SYSTEM_DYN_LINK'         )
+    boost_lib('unit_test_framework' , 'BOOST_TEST_DYN_LINK'           )
+    boost_lib('prg_exec_monitor'    , 'BOOST_TEST_DYN_LINK'           )
+    boost_lib('test_exec_monitor'   , 'BOOST_TEST_DYN_LINK'           )
+    boost_lib('thread'              , 'BOOST_THREAD_DYN_DLL'          )
+    boost_lib('wave'                , 'BOOST_WAVE_DYN_LINK'           )
+
+def boost_0_0_1( inc, lib ):
+    print "You are trying to use an example placeholder for boost libs."
+    # Copy this template to another place (in the file boost.jam)
+    # and define a project and libraries modelled after the
+    # boost_std rule. Please note that it is also possible to have
+    # a per version tagging rule in case they are different between
+    # versions.
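+# The tag_* helpers below compute the expected file name of a pre-built Boost
+# library from its base name, target type and build properties, following the
+# configured layout: 'system' uses the plain name (except for Boost < 1.39,
+# which falls back to the tagged scheme), 'tagged' appends threading/runtime
+# tags, and 'versioned' additionally appends the toolset and version tag. For
+# example (illustrative), 'regex' under a versioned layout may come out as
+# something like 'boost_regex-gcc43-mt-1_38'.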
+
+def tag_std(name, type, prop_set):
+    name = 'boost_' + name
+    if 'static' in prop_set.get('<link>') and 'windows' in prop_set.get('<target-os>'):
+        name = 'lib' + name
+    result = None
+
+    if __layout == 'system':
+        versionRe = re.search('^([0-9]+)_([0-9]+)', __version_tag)
+        if versionRe and versionRe.group(1) == '1' and int(versionRe.group(2)) < 39:
+            result = tag_tagged(name, type, prop_set)
+        else:
+            result = tag_system(name, type, prop_set)
+    elif __layout == 'tagged':
+        result = tag_tagged(name, type, prop_set)
+    elif __layout == 'versioned':
+        result = tag_versioned(name, type, prop_set)
+    else:
+        get_manager().errors()("Missing layout")
+    return result
+
+def tag_maybe(param):
+    return ['-{}'.format(param)] if param else []
+
+def tag_system(name, type, prop_set):
+    return common.format_name(['<base>'] + tag_maybe(__build_id), name, type, prop_set)
+
+def tag_tagged(name, type, prop_set):
+    return common.format_name(['<base>', '<threading>', '<runtime>'] + tag_maybe(__build_id), name, type, prop_set)
+
+def tag_versioned(name, type, prop_set):
+    return common.format_name(['<base>', '<toolset>', '<threading>', '<runtime>'] + tag_maybe(__version_tag) + tag_maybe(__build_id),
+                              name, type, prop_set)
diff --git a/src/boost/tools/build/src/contrib/modular.jam b/src/boost/tools/build/src/contrib/modular.jam
new file mode 100644
index 000000000..31d78d6ba
--- /dev/null
+++ b/src/boost/tools/build/src/contrib/modular.jam
@@ -0,0 +1,288 @@
+# Copyright Rene Rivera 2015
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE.txt or copy at
+# https://www.bfgroup.xyz/b2/LICENSE.txt)
+
+import path ;
+import project ;
+import modules ;
+import regex ;
+import type ;
+
+# Add a location, i.e. directory, where to search for libraries.
+# The optional 'prefix' indicates which rooted-prefixes the new
+# search dir applies to. The prefix defaults to '/'.
+rule add-location ( dir prefix ? : base-dir ? )
+{
+    process-args ;
+
+    prefix ?= "/" ;
+
+    # Dir path of caller to base paths from.
+    caller-module ?= [ CALLER_MODULE ] ;
+    local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
+    caller-dir = $(caller-dir:D) ;
+
+    base-dir ?= $(caller-dir) ;
+
+    .search-path-prefix += $(prefix) ;
+    .search-path.$(prefix) += [ path.root [ path.root $(dir) $(base-dir) ] [ path.pwd ] ] ;
+}
+
+# Declares additional definitions of a modular library target external
+# to the modular library build itself. This makes it possible to externally
+# define modular libraries without modifying the library. The passed in
+# values are added on demand when the named library is first declared.
+rule external (
+    name : sources * : requirements * : default-build * :
+    usage-requirements * )
+{
+    .external.($(name)).sources = $(sources) ;
+    .external.($(name)).requirements = $(requirements) ;
+    .external.($(name)).default-build = $(default-build) ;
+    .external.($(name)).usage-requirements = $(usage-requirements) ;
+}
+
+# Find, and declare, any modular libraries referenced in the target-refs.
+# This will both load the modular libraries, and declare/manufacture
+# the modular libraries as needed.
+rule find ( target-refs + ) +{ + process-args ; + + local caller-mod = [ CALLER_MODULE ] ; + local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ; + caller-dir = $(caller-dir:D) ; + caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ; + + local result-refs ; + for local target-ref in $(target-refs) + { + result-refs += [ resolve-reference $(target-ref) + : $(caller-mod) $(caller-dir) ] ; + } + + return $(result-refs) ; +} + +############################################################################## + +local rule resolve-reference ( target-ref : caller-mod caller-dir ? ) +{ + # ECHO %%% modular.resolve-target-ref $(target-ref) :: $(caller-mod) $(caller-dir) ; + if ! $(caller-dir) + { + caller-dir = [ modules.peek $(caller-mod) : __file__ ] ; + caller-dir = $(caller-dir:D) ; + caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ; + } + local result-ref = $(target-ref) ; + local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ; + # if ! ( $(ref) in $(.target-refs) ) + { + # .target-refs += $(ref) ; + local search-prefix ; + local search-sub ; + for local prefix in $(.search-path-prefix) + { + if ! $(search-prefix) + { + local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ; + search-prefix = $(search-match[1]) ; + search-sub = $(search-match[2]) ; + } + } + + if $(search-prefix) + { + local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ; + found = $(found[1]) ; + if $(found) + { + local lib-ref = [ regex.split $(search-sub) / ] ; + lib-ref = $(search-prefix)/$(lib-ref[1]) ; + local lib-path = [ path.relative-to $(caller-dir) $(found) ] ; + define-library $(lib-ref) $(caller-mod) : $(lib-path) ; + } + } + } + return $(result-ref) ; +} + +local rule define-library ( name caller-module ? : root ) +{ + # ECHO ~~~ modular.library $(name) $(caller-module) :: $(root) :: $(depends) ; + + process-args ; + + # Dir path of caller to base paths from. + caller-module ?= [ CALLER_MODULE ] ; + local caller-dir = [ modules.peek $(caller-module) : __file__ ] ; + caller-dir = $(caller-dir:D) ; + + # Find the various parts of the library. + local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ; + local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ; + lib-contents = $(lib-contents:D=) ; + + # "include" dir for library.. + local include-dir ; + if "include" in $(lib-contents) + { + include-dir = $(root)/include ; + } + + # If it has a build dir, i.e. it has targets to build, + # we root the project at the build dir to make it easy + # to refer to the build targets. This mirrors the regular + # Boost organization of the project aliases. + if "build" in $(lib-contents) + { + root = $(root)/build ; + build-dir = "." ; + } + + # Shadow target declarations so that we can alter build targets + # to work in the standalone modular structure. + local lib-location = [ path.root [ path.make $(root) ] $(caller-dir) ] ; + local lib-module-name = [ project.module-name $(lib-location) ] ; + local modular-rules = [ RULENAMES modular-rules ] ; + IMPORT modular-rules : $(modular-rules) : $(lib-module-name) : $(modular-rules) ; + + # Load/create/declare library project. + local lib-module = [ project.find $(root) : $(caller-dir) ] ; + if ! $(lib-module) + { + # If the find was unable to load the project we synthesize it. + lib-module = [ project.load $(lib-location) : synthesize ] ; + } + local lib-target = [ project.target $(lib-module) ] ; + if ! 
[ modules.peek $(lib-module) : __library__ ] + { + modules.poke $(lib-module) : __library__ : $(name) ; + for local type in [ modules.peek type : .types ] + { + main-rule-name = [ type.type-to-rule-name $(type) ] ; + IMPORT modular-rules : main-target-rule : $(lib-module-name) : $(main-rule-name) ; + } + } + + # Declare project alternate ID. + modules.call-in $(caller-module) : use-project $(name) : $(root) ; + + # Create a "library" target that has basic usage info if needed. + if ! [ $(lib-target).has-alternative-for-target library ] + { + include-dir = [ path.relative-to $(root) $(include-dir) ] ; + + project.push-current $(lib-target) ; + + # Declare the library alias. + modules.call-in $(lib-module) : library + : # Sources + : # Requirements + : # Default Build + : # Usage Requirements + $(include-dir) + ; + + project.pop-current ; + } +} + +local rule process-args ( ) +{ + if ! $(.did-process-args) + { + .did-process-args = yes ; + local argv = [ modules.peek : ARGV ] ; + local dirs = [ MATCH ^--modular-search-dir=(.*)$ : $(argv) ] ; + for local dir in $(dirs) + { + add-location $(dir) : [ path.pwd ] ; + } + } +} + +rule apply-external ( + mod : field : values * ) +{ + local result ; + local name = [ modules.peek $(mod) : __library__ ] ; + values += $(.external.($(name)).$(field)) ; + for local value in $(values) + { + result += [ resolve-reference $(value) : $(mod) ] ; + } + return $(result) ; +} + +module modular-rules +{ + import type ; + import targets ; + import builtin ; + import alias ; + + # Avoids any form of installation for Boost modules. + rule boost-install ( libraries * ) { } + + # Generic typed target rule to pre-process main target + # declarations to make them work within the standalone + # modular structure. + rule main-target-rule ( + name : sources * : requirements * : default-build * : + usage-requirements * ) + { + local mod = [ CALLER_MODULE ] ; + + # ECHO @@@ [[$(mod)]] modular-rules.main-target-rule $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + + # First discover the required target type based on the exact alias used to + # invoke this rule. + local bt = [ BACKTRACE 1 ] ; + local rulename = $(bt[4]) ; + local target-type = [ type.type-from-rule-name $(rulename) ] ; + return [ targets.create-typed-target $(target-type) : [ project.current ] : + $(name) : $(sources) : $(requirements) : $(default-build) : + $(usage-requirements) ] ; + } + + rule lib ( names + : sources * : requirements * : default-build * : + usage-requirements * ) + { + local mod = [ CALLER_MODULE ] ; + requirements += library ; + usage-requirements += library ; + + # ECHO @@@ [[$(mod)]] modular-rules.lib $(names) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + return [ builtin.lib $(names) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ; + } + + rule alias ( name : sources * : requirements * : default-build * : + usage-requirements * ) + { + local mod = [ CALLER_MODULE ] ; + + # ECHO @@@ [[$(mod)]] modular-rules.alias $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ; + } + + rule library ( name ? 
: sources * : requirements * : default-build * : + usage-requirements * ) + { + import modular ; + + local mod = [ CALLER_MODULE ] ; + sources = [ modular.apply-external $(mod) : sources : $(sources) ] ; + requirements = [ modular.apply-external $(mod) : requirements : $(requirements) ] ; + default-build = [ modular.apply-external $(mod) : default-build : $(default-build) ] ; + usage-requirements = [ modular.apply-external $(mod) : usage-requirements : $(usage-requirements) ] ; + + name ?= library ; + + # ECHO @@@ [[$(mod)]] modular-rules.library $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ; + } +} + diff --git a/src/boost/tools/build/src/contrib/tntnet.jam b/src/boost/tools/build/src/contrib/tntnet.jam new file mode 100644 index 000000000..1dfa37f7e --- /dev/null +++ b/src/boost/tools/build/src/contrib/tntnet.jam @@ -0,0 +1,208 @@ +# Copyright 2008 Eduardo Gurgel +# +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) +# + +# Support for creating components for the Tntnet web application +# server (http://tntnet.org) +# +# Example: +# +# using tntnet : /usr ; +# lib index : index.png index.js index.css index.ecpp otherclass.cpp +# /tntnnet//tntnet /tntnet//cxxtools ; +# +# + +import modules ; +import feature ; +import errors ; +import "class" : new ; +import generators ; +import project ; +import toolset : flags ; +import os ; +import virtual-target ; +import scanner ; +import type ; + +type.register ECPP : ecpp ; +type.register JPEG : jpeg ; +type.register JPG : jpg ; +type.register PNG : png ; +type.register JS : js ; +type.register CSS : css ; +type.register GIF : gif ; + +project.initialize $(__name__) ; +project tntnet ; + +# Save the project so that we tolerate 'import + using' combo. +.project = [ project.current ] ; +# Initialized the Tntnet support module. The 'prefix' parameter +# tells where Tntnet is installed. +rule init ( prefix : full_bin ? : full_inc ? : full_lib ? ) +{ + project.push-current $(.project) ; + + # pre-build paths to detect reinitializations changes + local inc_prefix lib_prefix bin_prefix ; + if $(full_inc) + { + inc_prefix = $(full_inc) ; + } + else + { + inc_prefix = $(prefix)/include ; + } + if $(full_lib) + { + lib_prefix = $(full_lib) ; + } + else + { + lib_prefix = $(prefix)/lib ; + } + if $(full_bin) + { + bin_prefix = $(full_bin) ; + } + else + { + bin_prefix = $(prefix)/bin ; + } + + if $(.initialized) + { + if $(prefix) != $(.prefix) + { + errors.error + "Attempt the reinitialize Tntnet with different installation prefix" ; + } + if $(inc_prefix) != $(.incprefix) + { + errors.error + "Attempt the reinitialize Tntnet with different include path" ; + } + if $(lib_prefix) != $(.libprefix) + { + errors.error + "Attempt the reinitialize Tntnet with different library path" ; + } + if $(bin_prefix) != $(.binprefix) + { + errors.error + "Attempt the reinitialize Tntnet with different bin path" ; + } + } + else + { + .initialized = true ; + .prefix = $(prefix) ; + + # Setup prefixes for include, binaries and libs. 
+ .incprefix = $(.prefix)/include ; + .libprefix = $(.prefix)/lib ; + .binprefix = $(.prefix)/bin ; + + # Generates cpp files from ecpp files using "ecppc" tool + generators.register-standard tntnet.ecpp : ECPP : CPP ; + # Generates cpp files from jpeg files using "ecppc" tool + generators.register-standard tntnet.jpeg : JPEG : CPP ; + # Generates cpp files from jpg files using "ecppc" tool + generators.register-standard tntnet.jpg : JPG : CPP ; + # Generates cpp files from png files using "ecppc" tool + generators.register-standard tntnet.png : PNG : CPP ; + # Generates cpp files from js files using "ecppc" tool + generators.register-standard tntnet.js : JS : CPP ; + # Generates cpp files from gif files using "ecppc" tool + generators.register-standard tntnet.gif : GIF : CPP ; + # Generates cpp files from css files using "ecppc" tool + generators.register-standard tntnet.css : CSS : CPP ; + # Scanner for ecpp includes + type.set-scanner ECPP : ecpp-scanner ; + + + local usage-requirements = + $(.incprefix) + $(.libprefix) + $(.libprefix) + multi + tntnet ; + lib cxxtools : $(main) + : + : + : + $(.incprefix)/cxxtools + $(usage-requiriments) + ; + lib tntnet : $(main) + : + : + : + $(.incprefix)/tntnet + $(usage-requiriments) + ; + + } + project.pop-current ; + +} + +rule directory +{ + return $(.prefix) ; +} + +rule initialized ( ) +{ + return $(.initialized) ; +} + +# Get from current toolset. +flags tntnet.ecpp INCLUDES ; + +actions ecpp +{ + $(.binprefix)/ecppc -I " $(INCLUDES) " -o $(<) $(>) +} + +actions jpeg +{ + $(.binprefix)/ecppc -b -m image/jpeg -o $(<) $(>) +} + +actions jpg +{ + $(.binprefix)/ecppc -b -m image/jpeg -o $(<) $(>) +} + +actions js +{ + $(.binprefix)/ecppc -b -m application/x-javascript -o $(<) $(>) +} + +actions png +{ + $(.binprefix)/ecppc -b -m image/png -o $(<) $(>) +} +actions gif +{ + $(.binprefix)/ecppc -b -m image/gif -o $(<) $(>) +} +actions css +{ + $(.binprefix)/ecppc -b -m text/css -o $(<) $(>) +} + +class ecpp-scanner : common-scanner +{ + rule pattern ( ) + { + return "<%include.*>(.*)" ; + } +} + +scanner.register ecpp-scanner : include ; diff --git a/src/boost/tools/build/src/contrib/wxFormBuilder.jam b/src/boost/tools/build/src/contrib/wxFormBuilder.jam new file mode 100644 index 000000000..d3d6544c4 --- /dev/null +++ b/src/boost/tools/build/src/contrib/wxFormBuilder.jam @@ -0,0 +1,195 @@ +################################################################################ +# +# Copyright (c) 2007-2008 Dario Senic, Jurko Gospodnetic. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) +# +################################################################################ + +################################################################################ +# +# Boost Build wxFormBuilder generator tool module. +# +# wxFormBuilder is a GUI designer tool for the wxWidgets library. It can then +# generate C++ sources modeling the designed GUI using the wxWidgets library +# APIs. +# +# This module defines a wxFormBuilder project file type and rules needed to +# generate C++ source files from those projects. With it you can simply list +# wxFormBuilder projects as sources for some target and Boost Build will +# automatically convert them to C++ sources and process from there. +# +# The wxFormBuilder executable location may be provided as a parameter when +# configuring this toolset. 
Otherwise the default wxFormBuilder.exe executable +# name is used located in the folder pointed to by the WXFORMBUILDER environment +# variable. +# +# Current limitations: +# +# * Works only on Windows. +# * Works only when run via Boost Jam using the native Windows cmd.exe command +# interpreter, i.e. the default native Windows Boost Jam build. +# * Used wxFormBuilder projects need to have their output file names defined +# consistently with target names assumed by this build script. This means +# that their target names must use the prefix 'wxFormBuilderGenerated_' and +# have no output folder defined where the base name is equal to the .fpb +# project file's name. +# +################################################################################ + +################################################################################ +# +# Implementation note: +# +# Avoiding the limitation on the generated target file names can be done but +# would require depending on external tools to copy the wxFormBuilder project to +# a temp location and then modify it in-place to set its target file names. On +# the other hand wxFormBuilder is expected to add command-line options for +# choosing the target file names from the command line which will allow us to +# remove this limitation in a much cleaner way. +# (23.08.2008.) (Jurko) +# +################################################################################ + +import generators ; +import os ; +import path ; +import toolset ; +import type ; + + +################################################################################ +# +# wxFormBuilder.generate() +# ------------------------ +# +# Action for processing WX_FORM_BUILDER_PROJECT types. +# +################################################################################ +# +# Implementation notes: +# +# wxFormBuilder generated CPP and H files need to be moved to the location +# where the Boost Build target system expects them so that the generated CPP +# file can be included into the compile process and that the clean rule +# successfully deletes both CPP and H files. We expect wxFormBuilder to generate +# files in the same location where the provided WX_FORM_BUILDER_PROJECT file is +# located. +# (15.05.2007.) (Dario) +# +################################################################################ + +actions generate +{ + start "" /wait "$(EXECUTABLE)" /g "$(2)" + move "$(1[1]:BSR=$(2:P))" "$(1[1]:P)" + move "$(1[2]:BSR=$(2:P))" "$(1[2]:P)" +} + + +################################################################################ +# +# wxFormBuilder.init() +# -------------------- +# +# Main toolset initialization rule called via the toolset.using rule. +# +################################################################################ + +rule init ( executable ? ) +{ + if $(.initialized) + { + if $(.debug-configuration) + { + ECHO notice: [wxFormBuilder-cfg] Repeated initialization request + (executable \"$(executable:E="")\") detected and ignored. ; + } + } + else + { + local environmentVariable = WXFORMBUILDER ; + + if $(.debug-configuration) + { + ECHO notice: [wxFormBuilder-cfg] Configuring wxFormBuilder... ; + } + + # Deduce the path to the used wxFormBuilder executable. + if ! 
$(executable) + { + executable = "wxFormBuilder.exe" ; + local executable-path = [ os.environ $(environmentVariable) ] ; + if $(executable-path)-is-not-empty + { + executable = [ path.root $(executable) $(executable-path) ] ; + } + else if $(.debug-configuration) + { + ECHO notice: [wxFormBuilder-cfg] No wxFormBuilder path + configured either explicitly or using the + $(environmentVariable) environment variable. ; + ECHO notice: [wxFormBuilder-cfg] To avoid complications please + update your configuration to includes a correct path to the + wxFormBuilder executable. ; + ECHO notice: [wxFormBuilder-cfg] wxFormBuilder executable will + be searched for on the system path. ; + } + } + if $(.debug-configuration) + { + ECHO notice: [wxFormBuilder-cfg] Will use wxFormBuilder executable + \"$(executable)\". ; + } + + # Now we are sure we have everything we need to initialize this toolset. + .initialized = true ; + + # Store the path to the used wxFormBuilder executable. + .executable = $(executable) ; + + # Type registration. + type.register WX_FORM_BUILDER_PROJECT : fbp ; + + # Parameters to be forwarded to the action rule. + toolset.flags wxFormBuilder.generate EXECUTABLE : $(.executable) ; + + # Generator definition and registration. + generators.register-standard wxFormBuilder.generate : + WX_FORM_BUILDER_PROJECT : CPP(wxFormBuilderGenerated_%) + H(wxFormBuilderGenerated_%) ; + } +} + + +################################################################################ +# +# wxFormBuilder.is-initialized() +# ------------------------------ +# +# Returns whether this toolset has been initialized. +# +################################################################################ + +rule is-initialized ( ) +{ + return $(.initialized) ; +} + + +################################################################################ +# +# Startup code executed when loading this module. +# +################################################################################ + +# Global variables for this module. +.executable = ; +.initialized = ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} diff --git a/src/boost/tools/build/src/engine/boost-jam.spec b/src/boost/tools/build/src/engine/boost-jam.spec new file mode 100644 index 000000000..bc572fc96 --- /dev/null +++ b/src/boost/tools/build/src/engine/boost-jam.spec @@ -0,0 +1,64 @@ +Name: boost-jam +Version: 3.1.19 +Summary: Build tool +Release: 1 +Source: %{name}-%{version}.tgz + +License: Boost Software License, Version 1.0 +Group: Development/Tools +URL: http://www.boost.org +Packager: Rene Rivera +BuildRoot: /var/tmp/%{name}-%{version}.root + +%description +Boost Jam is a build tool based on FTJam, which in turn is based on +Perforce Jam. It contains significant improvements made to facilitate +its use in the Boost Build System, but should be backward compatible +with Perforce Jam. + +Authors: + Perforce Jam : Cristopher Seiwald + FT Jam : David Turner + Boost Jam : David Abrahams + +Copyright: + /+\ + +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + \+/ + License is hereby granted to use this software and distribute it + freely, as long as this copyright notice is retained and modifications + are clearly marked. + ALL WARRANTIES ARE HEREBY DISCLAIMED. + +Also: + Copyright 2001-2006 David Abrahams. + Copyright 2002-2006 Rene Rivera. + Copyright 2003-2006 Vladimir Prus. + + Distributed under the Boost Software License, Version 1.0. 
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +%prep +%setup -n %{name}-%{version} + +%build +LOCATE_TARGET=bin ./build.sh $BOOST_JAM_TOOLSET + +%install +rm -rf $RPM_BUILD_ROOT +mkdir -p $RPM_BUILD_ROOT%{_bindir} +mkdir -p $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version} +install -m 755 bin/bjam $RPM_BUILD_ROOT%{_bindir}/bjam-%{version} +ln -sf bjam-%{version} $RPM_BUILD_ROOT%{_bindir}/bjam +cp -R *.html *.png *.css LICENSE*.txt images jam $RPM_BUILD_ROOT%{_docdir}/%{name}-%{version} + +find $RPM_BUILD_ROOT -name CVS -type d -exec rm -r {} \; + +%files +%defattr(-,root,root) +%attr(755,root,root) /usr/bin/* +%doc %{_docdir}/%{name}-%{version} + + +%clean +rm -rf $RPM_BUILD_ROOT diff --git a/src/boost/tools/build/src/engine/boost-no-inspect b/src/boost/tools/build/src/engine/boost-no-inspect new file mode 100644 index 000000000..8a06f3a70 --- /dev/null +++ b/src/boost/tools/build/src/engine/boost-no-inspect @@ -0,0 +1 @@ +this really out of our hands, so tell inspect to ignore directory \ No newline at end of file diff --git a/src/boost/tools/build/src/engine/build.bat b/src/boost/tools/build/src/engine/build.bat new file mode 100644 index 000000000..d2d5f1aaf --- /dev/null +++ b/src/boost/tools/build/src/engine/build.bat @@ -0,0 +1,195 @@ +@ECHO OFF + +REM ~ Copyright 2002-2007 Rene Rivera. +REM ~ Distributed under the Boost Software License, Version 1.0. +REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +setlocal +goto Start + + +:Set_Error +color 00 +goto :eof + + +:Clear_Error +ver >nul +goto :eof + + +:Error_Print +REM Output an error message and set the errorlevel to indicate failure. +setlocal +ECHO ### +ECHO ### %1 +ECHO ### +ECHO ### You can specify the toolset as the argument, i.e.: +ECHO ### .\build.bat msvc +ECHO ### +ECHO ### Toolsets supported by this script are: borland, como, gcc, +ECHO ### gcc-nocygwin, intel-win32, mingw, +ECHO ### vc12, vc14, vc141, vc142, vc143 +ECHO ### +ECHO ### If you have Visual Studio 2017 installed you will need to either update +ECHO ### the Visual Studio 2017 installer or run from VS 2017 Command Prompt +ECHO ### as we where unable to detect your toolset installation. +ECHO ### +call :Set_Error +endlocal +goto :eof + + +:Test_Option +REM Tests whether the given string is in the form of an option: "--*" +call :Clear_Error +setlocal +set test=%1 +if not defined test ( + call :Set_Error + goto Test_Option_End +) +set test=###%test%### +set test=%test:"###=% +set test=%test:###"=% +set test=%test:###=% +if not "-" == "%test:~1,1%" call :Set_Error +:Test_Option_End +endlocal +goto :eof + + +:Test_Empty +REM Tests whether the given string is not empty +call :Clear_Error +setlocal +set test=%1 +if not defined test ( + call :Clear_Error + goto Test_Empty_End +) +set test=###%test%### +set test=%test:"###=% +set test=%test:###"=% +set test=%test:###=% +if not "" == "%test%" call :Set_Error +:Test_Empty_End +endlocal +goto :eof + + +:Guess_Toolset +set local +REM Try and guess the toolset to bootstrap the build with... +REM Sets B2_TOOLSET to the first found toolset. +REM May also set B2_TOOLSET_ROOT to the +REM location of the found toolset. + +call :Clear_Error +call :Test_Empty "%ProgramFiles%" +if not errorlevel 1 set "ProgramFiles=C:\Program Files" + +REM Visual Studio is by default installed to %ProgramFiles% on 32-bit machines and +REM %ProgramFiles(x86)% on 64-bit machines. Making a common variable for both. 
+call :Clear_Error +call :Test_Empty "%ProgramFiles(x86)%" +if errorlevel 1 ( + set "VS_ProgramFiles=%ProgramFiles(x86)%" +) else ( + set "VS_ProgramFiles=%ProgramFiles%" +) + +call guess_toolset.bat +if errorlevel 1 ( + call :Error_Print "Could not find a suitable toolset.") +goto :eof + +endlocal +goto :eof + + +:Start +set B2_TOOLSET= +set B2_BUILD_ARGS= + +REM If no arguments guess the toolset; +REM or if first argument is an option guess the toolset; +REM otherwise the argument is the toolset to use. +call :Clear_Error +call :Test_Empty %1 +if not errorlevel 1 ( + call :Guess_Toolset + if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish ) +) + +call :Clear_Error +call :Test_Option %1 +if not errorlevel 1 ( + call :Guess_Toolset + if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish ) +) + +call :Clear_Error +set B2_TOOLSET=%1 +shift +goto Setup_Toolset + + +:Setup_Toolset +REM Setup the toolset command and options. This bit of code +REM needs to be flexible enough to handle both when +REM the toolset was guessed at and found, or when the toolset +REM was indicated in the command arguments. +REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way +REM because in BAT variables are subsituted only once during a single +REM command. A complete "if ... else ..." +REM is a single command, even though it's in multiple lines here. +:Setup_Args +call :Clear_Error +call :Test_Empty %1 +if not errorlevel 1 goto Config_Toolset +call :Clear_Error +call :Test_Option %1 +if errorlevel 1 ( + set B2_BUILD_ARGS=%B2_BUILD_ARGS% %1 + shift + goto Setup_Args +) +:Config_Toolset +call config_toolset.bat +if "_%_known_%_" == "__" ( + call :Error_Print "Unknown toolset: %B2_TOOLSET%" +) +if errorlevel 1 goto Finish + +echo ### +echo ### Using '%B2_TOOLSET%' toolset. +echo ### + +set B2_SOURCES= +set B2_SOURCES=%B2_SOURCES% builtins.cpp class.cpp +set B2_SOURCES=%B2_SOURCES% command.cpp compile.cpp constants.cpp cwd.cpp +set B2_SOURCES=%B2_SOURCES% debug.cpp debugger.cpp +set B2_SOURCES=%B2_SOURCES% execcmd.cpp execnt.cpp execunix.cpp filent.cpp filesys.cpp fileunix.cpp frames.cpp function.cpp +set B2_SOURCES=%B2_SOURCES% glob.cpp hash.cpp hcache.cpp hdrmacro.cpp headers.cpp jam.cpp +set B2_SOURCES=%B2_SOURCES% jamgram.cpp lists.cpp make.cpp make1.cpp md5.cpp mem.cpp modules.cpp +set B2_SOURCES=%B2_SOURCES% native.cpp object.cpp option.cpp output.cpp parse.cpp pathnt.cpp +set B2_SOURCES=%B2_SOURCES% pathsys.cpp pathunix.cpp regexp.cpp rules.cpp scan.cpp search.cpp jam_strings.cpp +set B2_SOURCES=%B2_SOURCES% startup.cpp subst.cpp sysinfo.cpp +set B2_SOURCES=%B2_SOURCES% timestamp.cpp variable.cpp w32_getreg.cpp +set B2_SOURCES=%B2_SOURCES% modules/order.cpp +set B2_SOURCES=%B2_SOURCES% modules/path.cpp +set B2_SOURCES=%B2_SOURCES% modules/property-set.cpp +set B2_SOURCES=%B2_SOURCES% modules/regex.cpp +set B2_SOURCES=%B2_SOURCES% modules/sequence.cpp +set B2_SOURCES=%B2_SOURCES% modules/set.cpp + +set B2_CXXFLAGS=%B2_CXXFLAGS% -DNDEBUG + +@echo ON +%B2_CXX% %CXXFLAGS% %B2_CXXFLAGS% %B2_SOURCES% %B2_CXX_LINK% +dir *.exe +copy /b .\b2.exe .\bjam.exe + +:Finish +@exit /b %ERRORLEVEL% diff --git a/src/boost/tools/build/src/engine/build.sh b/src/boost/tools/build/src/engine/build.sh new file mode 100755 index 000000000..42723ac5c --- /dev/null +++ b/src/boost/tools/build/src/engine/build.sh @@ -0,0 +1,511 @@ +#!/bin/sh + +#~ Copyright 2002-2020 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. 
+#~ (See accompanying file LICENSE_1_0.txt or copy at +#~ http://www.boost.org/LICENSE_1_0.txt) + +FALSE=1 +TRUE=0 + +# Reset the toolset. +B2_TOOLSET= +B2_SETUP= + +# Internal options. +B2_VERBOSE_OPT=${B2_VERBOSE_OPT:=${FALSE}} +B2_DEBUG_OPT=${B2_DEBUG_OPT:=${FALSE}} +B2_GUESS_TOOLSET_OPT=${FALSE} +B2_HELP_OPT=${FALSE} +B2_CXX_OPT= +B2_CXXFLAGS_OPT= + +# We need to calculate and set SCRIPT_PATH and SCRIPT_DIR to reference this +# script so that we can refer to file relative to it. +SCRIPT_PATH="" +if test "${BASH_SOURCE}" ; then + SCRIPT_PATH=${BASH_SOURCE} +fi +if test "${SCRIPT_PATH}" = "" ; then + SCRIPT_PATH=$0 +fi +SCRIPT_DIR="$( cd "$( dirname "${SCRIPT_PATH}" )" && pwd )" + +# This script needs to operate at engine source directory. +SAVED_PWD="${PWD}" +cd "${SCRIPT_DIR}" + +test_true () +{ + if test $1 -eq ${TRUE} ; then + return ${TRUE} + fi + return ${FALSE} +} + +# Run a command, and echo before doing so. Also checks the exit status and quits +# if there was an error. +echo_run () +{ + if test_true ${B2_VERBOSE_OPT} ; then echo "> $@" ; fi + $@ + r=$? + if test $r -ne ${TRUE} ; then + exit $r + fi +} + +# Print an error message, and exit with a status of 1. +error_exit () +{ + echo " +${@} + +You can specify the toolset as the argument, i.e.: + ./build.sh [options] gcc + +Toolsets supported by this script are: + acc, clang, como, gcc, intel-darwin, intel-linux, kcc, kylix, mipspro, + pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp + +For any toolset you can override the path to the compiler with the '--cxx' +option. You can also use additional flags for the compiler with the +'--cxxflags' option. + +A special toolset; cxx, is available which is used as a fallback when a more +specific toolset is not found and the cxx command is detected. The 'cxx' +toolset will use the '--cxx' and '--cxxflags' options, if present. + +Options: + --help Show this help message. + --verbose Show messages about what this script is doing. + --debug Build b2 with debug information, and no + optimizations. + --guess-toolset Print the toolset we can detect for building. + --cxx=CXX The compiler exec to use instead of the detected + compiler exec. + --cxxflags=CXXFLAGS The compiler flags to use in addition to the + flags for the detected compiler. + +" 1>&2 + exit 1 +} + +# Check that a command is in the PATH. +test_path () +{ + if `command -v command 1>/dev/null 2>/dev/null`; then + command -v $1 1>/dev/null 2>/dev/null + else + hash $1 1>/dev/null 2>/dev/null + fi +} + +# Check that the OS name, as returned by "uname", is as given. +test_uname () +{ + if test_path uname; then + test `uname` = $* + fi +} + +test_compiler () +{ + EXE="${B2_CXX_OPT:-$1}" + shift + CMD="${EXE} $@ ${B2_CXXFLAGS_OPT:-}" + SETUP=${B2_SETUP:-true} + if test_true ${B2_VERBOSE_OPT} ; then + echo "> ${CMD} check_cxx11.cpp" + ( ${SETUP} ; ${CMD} check_clib.cpp check_cxx11.cpp ) + else + ( ${SETUP} ; ${CMD} check_clib.cpp check_cxx11.cpp ) 1>/dev/null 2>/dev/null + fi + CHECK_RESULT=$? + if test_true ${CHECK_RESULT} ; then + B2_CXX=${CMD} + fi + rm -rf check_cxx11.o* a.out a.exe 1>/dev/null 2>/dev/null + return ${CHECK_RESULT} +} + +test_toolset () +{ + if test "${TOOLSET}" = "" ; then return ${TRUE} ; fi + if test "${TOOLSET}" = "$1" -o "${TOOLSET}" = "$2" -o "${TOOLSET}" = "$3" ; then return ${TRUE} ; fi + return 1 +} + +# Check the toolset to bootstrap the build with. The one optional argument to +# the function is a toolset name. 
This operates as follows based on these +# contextual vars, if set, and if an arg is given: +# +# No vars set: +# Checks, in some priority order, possible toolset commands. Upon finding the +# first working command sets B2_TOOLSET to the toolset and B2_CXX to the +# compile command with any base options. +# +# B2_TOOLSET set: +# Checks that toolset for possible compile commands and sets B2_CXX to the +# command that works for the toolset. +# +check_toolset () +{ + TOOLSET=${B2_TOOLSET%%-[0-9]*} + TOOLSET_SUFFIX=${B2_TOOLSET##$TOOLSET} + + # Prefer Clang (clang) on macOS.. + if test_toolset clang && test_uname Darwin && test_compiler clang++$TOOLSET_SUFFIX -x c++ -std=c++11 ; then B2_TOOLSET=clang$TOOLSET_SUFFIX ; return ${TRUE} ; fi + # GCC (gcc).. + if test_toolset gcc && test_compiler g++$TOOLSET_SUFFIX -x c++ -std=c++11 ; then B2_TOOLSET=gcc$TOOLSET_SUFFIX ; return ${TRUE} ; fi + if test_toolset gcc && test_compiler g++$TOOLSET_SUFFIX -x c++ -std=c++11 -D_GNU_SOURCE ; then B2_TOOLSET=gcc$TOOLSET_SUFFIX ; return ${TRUE} ; fi + # GCC (gcc) with -pthread arg (for AIX).. + if test_toolset gcc && test_compiler g++$TOOLSET_SUFFIX -x c++ -std=c++11 -pthread ; then B2_TOOLSET=gcc$TOOLSET_SUFFIX ; return ${TRUE} ; fi + # Clang (clang).. + if test_toolset clang && test_compiler clang++$TOOLSET_SUFFIX -x c++ -std=c++11 ; then B2_TOOLSET=clang$TOOLSET_SUFFIX ; return ${TRUE} ; fi + # Intel macOS (intel-darwin) + if test_toolset intel-darwin && test -r "${HOME}/intel/oneapi/setvars.sh" && test_uname Darwin ; then + B2_SETUP="source ${HOME}/intel/oneapi/setvars.sh" + if test_toolset intel-darwin && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi + if test_toolset intel-darwin && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi + B2_SETUP= + fi + if test_toolset intel-darwin && test -r "/opt/intel/oneapi/setvars.sh" && test_uname Darwin ; then + B2_SETUP="source /opt/intel/oneapi/setvars.sh" + if test_toolset intel-darwin && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi + if test_toolset intel-darwin && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-darwin ; return ${TRUE} ; fi + B2_SETUP= + fi + # Intel oneAPI (intel-linux) + if test_toolset intel-linux && test_path icpx ; then + if test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + fi + if test_toolset xyz && test_path icc ; then + if test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + fi + if test_toolset intel-linux && test -r "${HOME}/intel/oneapi/setvars.sh" ; then + B2_SETUP="source ${HOME}/intel/oneapi/setvars.sh" + if test_toolset intel-linux && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + if test_toolset intel-linux && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + B2_SETUP= + fi + if test_toolset intel-linux && test -r "/opt/intel/oneapi/setvars.sh" ; then + B2_SETUP="source /opt/intel/oneapi/setvars.sh" + if test_toolset intel-linux && test_compiler icpx -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + if test_toolset intel-linux && test_compiler icc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + B2_SETUP= + fi + # Intel Pro (intel-linux) + if test_toolset intel-linux && test_path icpc ; then + if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + fi + 
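+  # Each probe in this chain goes through test_compiler(), which accepts a
+  # candidate only if it can compile check_clib.cpp and check_cxx11.cpp with
+  # the flags given; the first probe that succeeds records the working
+  # command line in B2_CXX, sets B2_TOOLSET to the matching toolset name and
+  # returns.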
if test_toolset intel-linux && test -r "/opt/intel/inteloneapi/setvars.sh" ; then + B2_SETUP="source /opt/intel/inteloneapi/setvars.sh" + if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + B2_SETUP= + fi + if test_toolset intel-linux && test -r "/opt/intel/cc/9.0/bin/iccvars.sh" ; then + B2_SETUP="source /opt/intel/cc/9.0/bin/iccvars.sh" + if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + B2_SETUP= + fi + if test_toolset intel-linux && test -r "/opt/intel_cc_80/bin/iccvars.sh" ; then + B2_SETUP="source /opt/intel_cc_80/bin/iccvars.sh" + if test_compiler icpc -x c++ -std=c++11 ; then B2_TOOLSET=intel-linux ; return ${TRUE} ; fi + B2_SETUP= + fi + # Mips Pro (mipspro) + if test_toolset mipspro && test_uname IRIX && test_compiler CC -FE:template_in_elf_section -ptused ; then B2_TOOLSET=mipspro ; return ${TRUE} ; fi + if test_toolset mipspro && test_uname IRIX64 && test_compiler CC -FE:template_in_elf_section -ptused ; then B2_TOOLSET=mipspro ; return ${TRUE} ; fi + # OSF Tru64 C++ (tru64cxx) + if test_toolset tru64cxx && test_uname OSF1 && test_compiler cc ; then B2_TOOLSET=mipspro ; return ${TRUE} ; fi + # QNX (qcc) + if test_toolset qcc && test_uname QNX && test_compiler QCC ; then B2_TOOLSET=mipspro ; return ${TRUE} ; fi + # Linux XL/VA C++ (xlcpp, vacpp) + if test_toolset xlcpp vacpp && test_uname Linux && test_compiler xlC_r ; then + if /usr/bin/lscpu | grep Byte | grep Little > /dev/null 2>&1 ; then + # Little endian linux + B2_TOOLSET=xlcpp + return ${TRUE} + else + # Big endian linux + B2_TOOLSET=vacpp + return ${TRUE} + fi + fi + # AIX VA C++ (vacpp) + if test_toolset vacpp && test_uname AIX && test_compiler xlC_r ; then B2_TOOLSET=vacpp ; return ${TRUE} ; fi + # PGI (pgi) + if test_toolset pgi && test_compiler pgc++ -std=c++11 ; then B2_TOOLSET=pgi ; return ${TRUE} ; fi + # Pathscale C++ (pathscale) + if test_toolset pathscale && test_compiler pathCC ; then B2_TOOLSET=pathscale ; return ${TRUE} ; fi + # Como (como) + if test_toolset como && test_compiler como ; then B2_TOOLSET=como ; return ${TRUE} ; fi + # Borland C++ (kylix) + if test_toolset kylix && test_compiler bc++ -tC -q ; then B2_TOOLSET=kylix ; return ${TRUE} ; fi + # aCC (acc) + if test_toolset acc && test_compiler aCC -AA ; then B2_TOOLSET=acc ; return ${TRUE} ; fi + # Sun Pro C++ (sunpro) + if test_toolset sunpro && test_compiler /opt/SUNWspro/bin/CC -std=c++11 ; then B2_TOOLSET=sunpro ; return ${TRUE} ; fi + # Generic (cxx) + if test_toolset cxx && test_compiler cxx ; then B2_TOOLSET=cxx ; return ${TRUE} ; fi + if test_toolset cxx && test_compiler cpp ; then B2_TOOLSET=cxx ; return ${TRUE} ; fi + if test_toolset cxx && test_compiler CC ; then B2_TOOLSET=cxx ; return ${TRUE} ; fi + + # Nothing found. + if test "${B2_TOOLSET}" = "" ; then + error_exit "Could not find a suitable toolset." + fi + return ${FALSE} +} + +# Handle command options and args. +while test $# -gt 0 +do + case "$1" in + --verbose) B2_VERBOSE_OPT=${TRUE} ;; + --debug) B2_DEBUG_OPT=${TRUE} ;; + --guess-toolset) B2_GUESS_TOOLSET_OPT=${TRUE} ;; + --help) B2_HELP_OPT=${TRUE} ;; + --cxx=*) B2_CXX_OPT=`expr "x$1" : "x--cxx=\(.*\)"` ;; + --cxxflags=*) B2_CXXFLAGS_OPT=`expr "x$1" : "x--cxxflags=\(.*\)"` ;; + -*) ;; + ?*) B2_TOOLSET=$1 ;; + esac + shift +done + +# Show some help, if requested. +if test_true ${B2_HELP_OPT} ; then + error_exit +fi + +# If we have a CXX but no B2_TOOLSET specified by the user we assume they meant +# "cxx" as the toolset. 
+if test "${B2_CXX_OPT}" != "" -a "${B2_TOOLSET}" = "" ; then + B2_TOOLSET=cxx +fi + +# If we have B2_TOOLSET=cxx but no B2_CXX_OPT nor B2_CXXFLAGS_OPT specified by the user +# we assume they meant $CXX and $CXXFLAGS. +if test "${B2_TOOLSET}" = "cxx" -a "${B2_CXX_OPT}" = "" -a "${B2_CXXFLAGS_OPT}" = "" ; then + B2_CXX_OPT="${CXX}" + B2_CXXFLAGS_OPT="${CXXFLAGS}" +fi + +# Guess toolset, or toolset commands. +check_toolset +TOOLSET_CHECK=$? + +# We can bail from the rest of the checks and build if we are just guessing +# the toolset. +if test_true ${B2_GUESS_TOOLSET_OPT} ; then + echo "${B2_TOOLSET}" + exit 0 +fi + +# We need a viable compiler. Check here and give some feedback about it. +if ! test_true ${TOOLSET_CHECK} ; then + echo " +A C++11 capable compiler is required for building the B2 engine. +Toolset '${B2_TOOLSET}' does not appear to support C++11. +" + (B2_VERBOSE_OPT=${TRUE} check_toolset) + error_exit " +** Note, the C++11 capable compiler is _only_ required for building the B2 +** engine. The B2 build system allows for using any C++ level and any other +** supported language and resource in your projects. +" +fi + +# Set the additional options needed to build the engine based on the toolset. +case "${B2_TOOLSET}" in + + gcc|gcc-*) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O2 -s" + B2_CXXFLAGS_DEBUG="-O0 -g" + ;; + + intel-*) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O3 -static-intel" + B2_CXXFLAGS_DEBUG="-O0 -g -static-intel" + ;; + + vacpp) + CXX_VERSION_OPT=${CXX_VERSION_OPT:--qversion} + B2_CXXFLAGS_RELEASE="-O3 -s -qstrict -qinline" + B2_CXXFLAGS_DEBUG="-g -qNOOPTimize -qnoinline -pg" + ;; + + xlcpp) + CXX_VERSION_OPT=${CXX_VERSION_OPT:--qversion} + B2_CXXFLAGS_RELEASE="-s -O3 -qstrict -qinline" + B2_CXXFLAGS_DEBUG="-g -qNOOPTimize -qnoinline -pg" + ;; + + como) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O3 --inlining" + B2_CXXFLAGS_DEBUG="-O0 -g --no_inlining --long_long" + ;; + + kcc) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="+K2 -s" + B2_CXXFLAGS_DEBUG="+K0 -g" + ;; + + kylix) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O2 -vi -w-inl -s" + B2_CXXFLAGS_DEBUG="-Od -v -vi-" + ;; + + mipspro) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-Ofast -g0 \"-INLINE:none\" -s" + B2_CXXFLAGS_DEBUG="-O0 -INLINE -g" + ;; + + pathscale) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O3 -inline -s" + B2_CXXFLAGS_DEBUG="-O0 -noinline -ggdb" + ;; + + pgi) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-fast -s" + B2_CXXFLAGS_DEBUG="-O0 -gopt" + ;; + + sun*) + CXX_VERSION_OPT=${CXX_VERSION_OPT:--V} + B2_CXXFLAGS_RELEASE="-xO4 -s" + B2_CXXFLAGS_DEBUG="-g" + ;; + + clang|clang-*) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O3 -s" + B2_CXXFLAGS_DEBUG="-O0 -fno-inline -g" + ;; + + tru64cxx) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O5 -inline speed -s" + B2_CXXFLAGS_DEBUG="-O0 -pg -g" + ;; + + acc) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O3 -s" + B2_CXXFLAGS_DEBUG="+d -g" + ;; + + qcc) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + B2_CXXFLAGS_RELEASE="-O3 -Wc,-finline-functions" + B2_CXXFLAGS_DEBUG="O0 -Wc,-fno-inline -gstabs+" + ;; + + cxx) + CXX_VERSION_OPT=${CXX_VERSION_OPT:---version} + ;; + + *) + error_exit "Unknown toolset: ${B2_TOOLSET}" + ;; +esac + +build_b2 () +{ + echo " 
+### +### +### Using '${B2_TOOLSET}' toolset. +### +### +" + echo_run ${B2_CXX} ${CXX_VERSION_OPT} +echo " +### +### +" + B2_SOURCES="\ +builtins.cpp \ +class.cpp \ +command.cpp \ +compile.cpp \ +constants.cpp \ +cwd.cpp \ +debug.cpp \ +debugger.cpp \ +execcmd.cpp \ +execnt.cpp \ +execunix.cpp \ +filesys.cpp \ +filent.cpp \ +fileunix.cpp \ +frames.cpp \ +function.cpp \ +glob.cpp \ +hash.cpp \ +hcache.cpp \ +hdrmacro.cpp \ +headers.cpp \ +jam_strings.cpp \ +jam.cpp \ +jamgram.cpp \ +lists.cpp \ +make.cpp \ +make1.cpp \ +md5.cpp \ +mem.cpp \ +modules.cpp \ +native.cpp \ +object.cpp \ +option.cpp \ +output.cpp \ +parse.cpp \ +pathnt.cpp \ +pathsys.cpp \ +pathunix.cpp \ +regexp.cpp \ +rules.cpp \ +scan.cpp \ +search.cpp \ +startup.cpp \ +subst.cpp \ +sysinfo.cpp \ +timestamp.cpp \ +variable.cpp \ +w32_getreg.cpp \ +modules/order.cpp \ +modules/path.cpp \ +modules/property-set.cpp \ +modules/regex.cpp \ +modules/sequence.cpp \ +modules/set.cpp \ +" + + if test_true ${B2_DEBUG_OPT} ; then B2_CXXFLAGS="${B2_CXXFLAGS_DEBUG}" + else B2_CXXFLAGS="${B2_CXXFLAGS_RELEASE} -DNDEBUG" + fi + ( B2_VERBOSE_OPT=${TRUE} echo_run ${B2_CXX} ${B2_CXXFLAGS} ${B2_SOURCES} -o b2 ) + ( B2_VERBOSE_OPT=${TRUE} echo_run cp b2 bjam ) +} + +if test_true ${B2_VERBOSE_OPT} ; then + ( + ${B2_SETUP} + build_b2 + ) +else + ( + ${B2_SETUP} 1>/dev/null 2>/dev/null + build_b2 + ) +fi diff --git a/src/boost/tools/build/src/engine/build_vms.com b/src/boost/tools/build/src/engine/build_vms.com new file mode 100644 index 000000000..6f73512d0 --- /dev/null +++ b/src/boost/tools/build/src/engine/build_vms.com @@ -0,0 +1,153 @@ +$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson. +$ ! +$ ! 8-APR-2004 Boris Gubenko +$ ! Miscellaneous improvements. +$ ! +$ ! 20-JAN-2015 Artur Shepilko +$ ! Adapt for jam 3.1.19 +$ ! +$ ! Distributed under the Boost Software License, Version 1.0. +$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +$ ! +$ ! bootstrap build script for Jam +$ ! +$ THIS_FACILITY = "BUILDJAM" +$ +$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'") +$ save_verify = f$verify(verify) +$ +$ SAY := WRITE SYS$OUTPUT +$ ! +$ ON WARNING THEN CONTINUE +$ ON ERROR THEN GOTO EXIT +$ +$ BOOST_JAM_TOOLSET = "vmsdecc" +$ BOOST_JAM_CC = "CC" +$ BJAM_UPDATE = "" +$ +$ ARGS = F$EDIT("''p1' ''p2' ''p3' ''p4'","TRIM,LOWERCASE") +$ ARGS_LEN = F$LENGTH(ARGS) +$ +$ IF F$LOCATE("--update", ARGS) .NE. F$LENGTH(ARGS) THEN BJAM_UPDATE = "update" +$ IF BJAM_UPDATE .EQS. "update" - + .AND. F$SEARCH("[.bootstrap_vms]jam0.exe") .EQS. "" THEN BJAM_UPDATE = "" +$ +$ IF BJAM_UPDATE .NES. "update" +$ THEN +$ GOSUB CLEAN +$ +$ SAY "I|Creating bootstrap directory..." +$ CREATE /DIR [.bootstrap_vms] +$ +$ !------------------ +$ ! NOTE: Assume jamgram and jambase have been generated (true for fresh release). +$ ! Otherwise these need to be re-generated manually. +$ !------------------ +$ +$ SAY "I|Building bootstrap jam..." +$ ! 
+$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC " + - + "/PREFIX_LIBRARY_ENTRIES=(ALL_ENTRIES) " + - + "/WARNING=DISABLE=(LONGEXTERN)" + - + "/OBJ=[.bootstrap_vms] " +$ +$ CC_INCLUDE="" +$ +$ SAY "I|Using compile flags: ", CC_FLAGS +$ +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE command.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE compile.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE constants.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE debug.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execcmd.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE frames.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE function.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE glob.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hash.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hdrmacro.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE headers.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jam.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jambase.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jamgram.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE lists.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make1.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE object.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE option.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE output.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE parse.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathsys.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE regexp.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE rules.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE scan.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE search.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE subst.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE timestamp.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE variable.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE modules.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE strings.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filesys.c +$ +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execvms.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathvms.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filevms.c +$ +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE builtins.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE class.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE cwd.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE native.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE md5.c +$ +$ CC_INCLUDE = "/INCLUDE=(""./modules"")" +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]set.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]path.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]regex.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]property-set.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]sequence.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]order.c +$ +$ LIB /CREATE [.bootstrap_vms]jam0.olb [.bootstrap_vms]*.obj +$ LINK /EXEC=[.bootstrap_vms]jam0.exe - + [.bootstrap_vms]jam0.olb/INCLUDE=JAM/LIB +$ +$ IF F$SEARCH("[.bootstrap_vms]*.obj") .NES. "" THEN - + DELETE /NOCONF /NOLOG [.bootstrap_vms]*.obj;*, *.olb;* +$ ENDIF +$ +$ IF F$SEARCH("[.bootstrap_vms]jam0.exe") .NES. "" +$ THEN +$ IF BJAM_UPDATE .NES. "update" +$ THEN +$ SAY "I|Cleaning previous build..." +$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' clean +$ ENDIF +$ +$ SAY "I|Building Boost.Jam..." +$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' +$ ENDIF +$ +$ +$EXIT: +$ sts = $STATUS +$ exit 'sts' + (0 * f$verify(save_verify)) + + +$CLEAN: !GOSUB +$ ! +$ IF F$SEARCH("[.bootstrap_vms]*.*") .NES. "" +$ THEN +$ SAY "I|Cleaning previous bootstrap files..." +$ ! 
+$ SET FILE /PROT=(W:RWED) [.bootstrap_vms]*.*;*
+$ DELETE /NOCONF /NOLOG [.bootstrap_vms]*.*;*
+$ ENDIF
+$ !
+$ IF F$SEARCH("bootstrap_vms.dir") .NES. ""
+$ THEN
+$ SAY "I|Removing previous bootstrap directory..."
+$ !
+$ SET FILE /PROT=(W:RWED) bootstrap_vms.dir
+$ DELETE /NOCONF /NOLOG bootstrap_vms.dir;
+$ ENDIF
+$ !
+$ RETURN
diff --git a/src/boost/tools/build/src/engine/builtins.cpp b/src/boost/tools/build/src/engine/builtins.cpp
new file mode 100644
index 000000000..c61e77a4f
--- /dev/null
+++ b/src/boost/tools/build/src/engine/builtins.cpp
@@ -0,0 +1,2631 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "builtins.h"
+
+#include "compile.h"
+#include "constants.h"
+#include "cwd.h"
+#include "debugger.h"
+#include "filesys.h"
+#include "frames.h"
+#include "hash.h"
+#include "hdrmacro.h"
+#include "lists.h"
+#include "make.h"
+#include "md5.h"
+#include "native.h"
+#include "object.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "rules.h"
+#include "jam_strings.h"
+#include "startup.h"
+#include "subst.h"
+#include "timestamp.h"
+#include "variable.h"
+#include "output.h"
+
+#include <stdlib.h>
+#include <ctype.h>
+
+#ifdef OS_NT
+#include <windows.h>
+#ifndef FSCTL_GET_REPARSE_POINT
+/* MinGW's version of windows.h is missing this, so we need
+ * to include winioctl.h directly
+ */
+#include <winioctl.h>
+#endif
+
+/* With VC8 (VS2005) these are not defined:
+ * FSCTL_GET_REPARSE_POINT (expects WINVER >= 0x0500 _WIN32_WINNT >= 0x0500 )
+ * IO_REPARSE_TAG_SYMLINK (is part of a separate Driver SDK)
+ * So define them explicitly to their expected values.
+ */
+#ifndef FSCTL_GET_REPARSE_POINT
+# define FSCTL_GET_REPARSE_POINT 0x000900a8
+#endif
+#ifndef IO_REPARSE_TAG_SYMLINK
+# define IO_REPARSE_TAG_SYMLINK (0xA000000CL)
+#endif
+
+#include <io.h>
+#if !defined(__BORLANDC__)
+#define dup _dup
+#define dup2 _dup2
+#define open _open
+#define close _close
+#endif /* __BORLANDC__ */
+#endif /* OS_NT */
+
+#if defined(USE_EXECUNIX)
+# include <sys/types.h>
+# include <sys/wait.h>
+#elif defined(OS_VMS)
+# include <wait.h>
+#else
+/*
+ * NT does not have wait() and associated macros and uses the system() return
+ * value instead. Status code group are documented at:
+ * http://msdn.microsoft.com/en-gb/library/ff565436.aspx
+ */
+# define WIFEXITED(w) (((w) & 0XFFFFFF00) == 0)
+# define WEXITSTATUS(w)(w)
+#endif
+
+/*
+ * builtins.c - builtin jam rules
+ *
+ * External routines:
+ * load_builtins() - define builtin rules
+ * unknown_rule() - reports an unknown rule occurrence to the
+ * user and exits
+ *
+ * Internal routines:
+ * append_if_exists() - if file exists, append it to the list
+ * builtin_calc() - CALC rule
+ * builtin_delete_module() - DELETE_MODULE ( MODULE ? )
+ * builtin_depends() - DEPENDS/INCLUDES rule
+ * builtin_echo() - ECHO rule
+ * builtin_exit() - EXIT rule
+ * builtin_export() - EXPORT ( MODULE ? : RULES * )
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
+ * builtin_glob() - GLOB rule
+ * builtin_glob_recursive() - ???
+ * builtin_hdrmacro() - ???
+ * builtin_import() - IMPORT rule
+ * builtin_match() - MATCH rule, regexp matching
+ * builtin_rebuilds() - REBUILDS rule
+ * builtin_rulenames() - RULENAMES ( MODULE ? )
+ * builtin_split_by_characters() - splits the given string into tokens
+ * builtin_varnames() - VARNAMES ( MODULE ?
) + * get_source_line() - get a frame's file and line number + * information + */ + + +/* + * compile_builtin() - define builtin rules + */ + +#define P0 (PARSE *)0 +#define C0 (OBJECT *)0 + +#if defined( OS_NT ) || defined( OS_CYGWIN ) + LIST * builtin_system_registry ( FRAME *, int ); + LIST * builtin_system_registry_names( FRAME *, int ); +#endif + +int glob( char const * s, char const * c ); + +void backtrace ( FRAME * ); +void backtrace_line ( FRAME * ); +void print_source_line( FRAME * ); + + +RULE * bind_builtin( char const * name_, LIST * (* f)( FRAME *, int flags ), + int flags, char const * * args ) +{ + FUNCTION * func; + RULE * result; + OBJECT * name = object_new( name_ ); + + func = function_builtin( f, flags, args ); + + result = new_rule_body( root_module(), name, func, 1 ); + + function_free( func ); + + object_free( name ); + + return result; +} + + +RULE * duplicate_rule( char const * name_, RULE * other ) +{ + OBJECT * name = object_new( name_ ); + RULE * result = import_rule( other, root_module(), name ); + object_free( name ); + return result; +} + + +/* + * load_builtins() - define builtin rules + */ + +void load_builtins() +{ + duplicate_rule( "Always", + bind_builtin( "ALWAYS", + builtin_flags, T_FLAG_TOUCHED, 0 ) ); + + duplicate_rule( "Depends", + bind_builtin( "DEPENDS", + builtin_depends, 0, 0 ) ); + + duplicate_rule( "echo", + duplicate_rule( "Echo", + bind_builtin( "ECHO", + builtin_echo, 0, 0 ) ) ); + + { + char const * args[] = { "message", "*", ":", "result-value", "?", 0 }; + duplicate_rule( "exit", + duplicate_rule( "Exit", + bind_builtin( "EXIT", + builtin_exit, 0, args ) ) ); + } + + { + char const * args[] = { "directories", "*", ":", "patterns", "*", ":", + "case-insensitive", "?", 0 }; + duplicate_rule( "Glob", + bind_builtin( "GLOB", builtin_glob, 0, args ) ); + } + + { + char const * args[] = { "patterns", "*", 0 }; + bind_builtin( "GLOB-RECURSIVELY", + builtin_glob_recursive, 0, args ); + } + + duplicate_rule( "Includes", + bind_builtin( "INCLUDES", + builtin_depends, 1, 0 ) ); + + { + char const * args[] = { "targets", "*", ":", "targets-to-rebuild", "*", + 0 }; + bind_builtin( "REBUILDS", + builtin_rebuilds, 0, args ); + } + + duplicate_rule( "Leaves", + bind_builtin( "LEAVES", + builtin_flags, T_FLAG_LEAVES, 0 ) ); + + duplicate_rule( "Match", + bind_builtin( "MATCH", + builtin_match, 0, 0 ) ); + + { + char const * args[] = { "string", ":", "delimiters", 0 }; + bind_builtin( "SPLIT_BY_CHARACTERS", + builtin_split_by_characters, 0, args ); + } + + duplicate_rule( "NoCare", + bind_builtin( "NOCARE", + builtin_flags, T_FLAG_NOCARE, 0 ) ); + + duplicate_rule( "NOTIME", + duplicate_rule( "NotFile", + bind_builtin( "NOTFILE", + builtin_flags, T_FLAG_NOTFILE, 0 ) ) ); + + duplicate_rule( "NoUpdate", + bind_builtin( "NOUPDATE", + builtin_flags, T_FLAG_NOUPDATE, 0 ) ); + + duplicate_rule( "Temporary", + bind_builtin( "TEMPORARY", + builtin_flags, T_FLAG_TEMP, 0 ) ); + + bind_builtin( "ISFILE", + builtin_flags, T_FLAG_ISFILE, 0 ); + + duplicate_rule( "HdrMacro", + bind_builtin( "HDRMACRO", + builtin_hdrmacro, 0, 0 ) ); + + /* FAIL_EXPECTED is used to indicate that the result of a target build + * action should be inverted (ok <=> fail) this can be useful when + * performing test runs from Jamfiles. 
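+ *
+ * For illustration: marking a compile-fail test's object target with
+ *
+ *     FAIL_EXPECTED obj ;
+ *
+ * makes a failing build action on that target count as success (and a
+ * successful one as a failure), which is how expected-failure tests are
+ * typically expressed.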
+ */ + bind_builtin( "FAIL_EXPECTED", + builtin_flags, T_FLAG_FAIL_EXPECTED, 0 ); + + bind_builtin( "RMOLD", + builtin_flags, T_FLAG_RMOLD, 0 ); + + { + char const * args[] = { "targets", "*", 0 }; + bind_builtin( "UPDATE", + builtin_update, 0, args ); + } + + { + char const * args[] = { "targets", "*", + ":", "log", "?", + ":", "ignore-minus-n", "?", + ":", "ignore-minus-q", "?", 0 }; + bind_builtin( "UPDATE_NOW", + builtin_update_now, 0, args ); + } + + { + char const * args[] = { "string", "pattern", "replacements", "+", 0 }; + duplicate_rule( "subst", + bind_builtin( "SUBST", + builtin_subst, 0, args ) ); + } + + { + char const * args[] = { "module", "?", 0 }; + bind_builtin( "RULENAMES", + builtin_rulenames, 0, args ); + } + + { + char const * args[] = { "module", "?", 0 }; + bind_builtin( "VARNAMES", + builtin_varnames, 0, args ); + } + + { + char const * args[] = { "module", "?", 0 }; + bind_builtin( "DELETE_MODULE", + builtin_delete_module, 0, args ); + } + + { + char const * args[] = { "source_module", "?", + ":", "source_rules", "*", + ":", "target_module", "?", + ":", "target_rules", "*", + ":", "localize", "?", 0 }; + bind_builtin( "IMPORT", + builtin_import, 0, args ); + } + + { + char const * args[] = { "module", "?", ":", "rules", "*", 0 }; + bind_builtin( "EXPORT", + builtin_export, 0, args ); + } + + { + char const * args[] = { "levels", "?", 0 }; + bind_builtin( "CALLER_MODULE", + builtin_caller_module, 0, args ); + } + + { + char const * args[] = { "levels", "?", 0 }; + bind_builtin( "BACKTRACE", + builtin_backtrace, 0, args ); + } + + { + char const * args[] = { 0 }; + bind_builtin( "PWD", + builtin_pwd, 0, args ); + } + + { + char const * args[] = { "modules_to_import", "+", + ":", "target_module", "?", 0 }; + bind_builtin( "IMPORT_MODULE", + builtin_import_module, 0, args ); + } + + { + char const * args[] = { "module", "?", 0 }; + bind_builtin( "IMPORTED_MODULES", + builtin_imported_modules, 0, args ); + } + + { + char const * args[] = { "instance_module", ":", "class_module", 0 }; + bind_builtin( "INSTANCE", + builtin_instance, 0, args ); + } + + { + char const * args[] = { "sequence", "*", 0 }; + bind_builtin( "SORT", + builtin_sort, 0, args ); + } + + { + char const * args[] = { "path_parts", "*", 0 }; + bind_builtin( "NORMALIZE_PATH", + builtin_normalize_path, 0, args ); + } + + { + char const * args[] = { "args", "*", 0 }; + bind_builtin( "CALC", + builtin_calc, 0, args ); + } + + { + char const * args[] = { "module", ":", "rule", 0 }; + bind_builtin( "NATIVE_RULE", + builtin_native_rule, 0, args ); + } + + { + char const * args[] = { "module", ":", "rule", ":", "version", 0 }; + bind_builtin( "HAS_NATIVE_RULE", + builtin_has_native_rule, 0, args ); + } + + { + char const * args[] = { "module", "*", 0 }; + bind_builtin( "USER_MODULE", + builtin_user_module, 0, args ); + } + + { + char const * args[] = { 0 }; + bind_builtin( "NEAREST_USER_LOCATION", + builtin_nearest_user_location, 0, args ); + } + + { + char const * args[] = { "file", 0 }; + bind_builtin( "CHECK_IF_FILE", + builtin_check_if_file, 0, args ); + } + +#ifdef HAVE_PYTHON + { + char const * args[] = { "python-module", + ":", "function", + ":", "jam-module", + ":", "rule-name", 0 }; + bind_builtin( "PYTHON_IMPORT_RULE", + builtin_python_import_rule, 0, args ); + } +#endif + +# if defined( OS_NT ) || defined( OS_CYGWIN ) + { + char const * args[] = { "key_path", ":", "data", "?", 0 }; + bind_builtin( "W32_GETREG", + builtin_system_registry, 0, args ); + } + + { + char const * args[] = { "key_path", ":", 
"result-type", 0 }; + bind_builtin( "W32_GETREGNAMES", + builtin_system_registry_names, 0, args ); + } +# endif + + { + char const * args[] = { "command", ":", "*", 0 }; + duplicate_rule( "SHELL", + bind_builtin( "COMMAND", + builtin_shell, 0, args ) ); + } + + { + char const * args[] = { "string", 0 }; + bind_builtin( "MD5", + builtin_md5, 0, args ); + } + + { + char const * args[] = { "name", ":", "mode", 0 }; + bind_builtin( "FILE_OPEN", + builtin_file_open, 0, args ); + } + + { + char const * args[] = { "string", ":", "width", 0 }; + bind_builtin( "PAD", + builtin_pad, 0, args ); + } + + { + char const * args[] = { "targets", "*", 0 }; + bind_builtin( "PRECIOUS", + builtin_precious, 0, args ); + } + + { + char const * args [] = { 0 }; + bind_builtin( "SELF_PATH", builtin_self_path, 0, args ); + } + + { + char const * args [] = { "path", 0 }; + bind_builtin( "MAKEDIR", builtin_makedir, 0, args ); + } + + { + const char * args [] = { "path", 0 }; + bind_builtin( "READLINK", builtin_readlink, 0, args ); + } + + { + char const * args[] = { "archives", "*", + ":", "member-patterns", "*", + ":", "case-insensitive", "?", + ":", "symbol-patterns", "*", 0 }; + bind_builtin( "GLOB_ARCHIVE", builtin_glob_archive, 0, args ); + } + +#ifdef JAM_DEBUGGER + + { + const char * args[] = { "list", "*", 0 }; + bind_builtin("__DEBUG_PRINT_HELPER__", builtin_debug_print_helper, 0, args); + } + +#endif + + /* Initialize builtin modules. */ + init_set(); + init_path(); + init_regex(); + init_property_set(); + init_sequence(); + init_order(); +} + + +/* + * builtin_calc() - CALC rule + * + * Performs simple mathematical operations on two arguments. + */ + +LIST * builtin_calc( FRAME * frame, int flags ) +{ + LIST * arg = lol_get( frame->args, 0 ); + + LIST * result = L0; + long lhs_value; + long rhs_value; + long result_value; + char buffer[ 16 ]; + char const * lhs; + char const * op; + char const * rhs; + LISTITER iter = list_begin( arg ); + LISTITER const end = list_end( arg ); + + if ( iter == end ) return L0; + lhs = object_str( list_item( iter ) ); + + iter = list_next( iter ); + if ( iter == end ) return L0; + op = object_str( list_item( iter ) ); + + iter = list_next( iter ); + if ( iter == end ) return L0; + rhs = object_str( list_item( iter ) ); + + lhs_value = atoi( lhs ); + rhs_value = atoi( rhs ); + + if ( !strcmp( "+", op ) ) + result_value = lhs_value + rhs_value; + else if ( !strcmp( "-", op ) ) + result_value = lhs_value - rhs_value; + else + return L0; + + sprintf( buffer, "%ld", result_value ); + result = list_push_back( result, object_new( buffer ) ); + return result; +} + + +/* + * builtin_depends() - DEPENDS/INCLUDES rule + * + * The DEPENDS/INCLUDES builtin rule appends each of the listed sources on the + * dependency/includes list of each of the listed targets. It binds both the + * targets and sources as TARGETs. 
+ */ + +LIST * builtin_depends( FRAME * frame, int flags ) +{ + LIST * const targets = lol_get( frame->args, 0 ); + LIST * const sources = lol_get( frame->args, 1 ); + + LISTITER iter = list_begin( targets ); + LISTITER end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * const t = bindtarget( list_item( iter ) ); + + if ( flags ) + target_include_many( t, sources ); + else + targetlist( t->depends, sources ); + } + + /* Enter reverse links */ + iter = list_begin( sources ); + end = list_end( sources ); + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * const s = bindtarget( list_item( iter ) ); + if ( flags ) + { + LISTITER t_iter = list_begin( targets ); + LISTITER const t_end = list_end( targets ); + for ( ; t_iter != t_end; t_iter = list_next( t_iter ) ) + targetentry( s->dependants, bindtarget( + list_item( t_iter ) )->includes ); + } + else + targetlist( s->dependants, targets ); + } + + return L0; +} + + +/* + * builtin_rebuilds() - REBUILDS rule + * + * Appends each of the rebuild-targets listed in its second argument to the + * rebuilds list for each of the targets listed in its first argument. + */ + +LIST * builtin_rebuilds( FRAME * frame, int flags ) +{ + LIST * targets = lol_get( frame->args, 0 ); + LIST * rebuilds = lol_get( frame->args, 1 ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * const t = bindtarget( list_item( iter ) ); + targetlist( t->rebuilds, rebuilds ); + } + return L0; +} + + +/* + * builtin_echo() - ECHO rule + * + * Echoes the targets to the user. No other actions are taken. + */ + +LIST * builtin_echo( FRAME * frame, int flags ) +{ + list_print( lol_get( frame->args, 0 ) ); + out_printf( "\n" ); + out_flush(); + return L0; +} + + +/* + * builtin_exit() - EXIT rule + * + * Echoes the targets to the user and exits the program with a failure status. + */ + +LIST * builtin_exit( FRAME * frame, int flags ) +{ + LIST * const code = lol_get( frame->args, 1 ); + list_print( lol_get( frame->args, 0 ) ); + out_printf( "\n" ); + if ( !list_empty( code ) ) + { + int status = atoi( object_str( list_front( code ) ) ); +#ifdef OS_VMS + switch( status ) + { + case 0: + status = EXITOK; + break; + case 1: + status = EXITBAD; + break; + } +#endif + b2::clean_exit( status ); + } + else + b2::clean_exit( EXITBAD ); /* yeech */ + return L0; +} + + +/* + * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule + * + * Marks the target with the appropriate flag, for use by make0(). It binds each + * target as a TARGET. + */ + +LIST * builtin_flags( FRAME * frame, int flags ) +{ + LIST * const targets = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + bindtarget( list_item( iter ) )->flags |= flags; + return L0; +} + + +/* + * builtin_glob() - GLOB rule + */ + +struct globbing +{ + LIST * patterns; + LIST * results; + LIST * case_insensitive; +}; + + +static void downcase_inplace( char * p ) +{ + for ( ; *p; ++p ) + *p = tolower( *p ); +} + + +static void builtin_glob_back( void * closure, OBJECT * file, int status, + timestamp const * const time ) +{ + PROFILE_ENTER( BUILTIN_GLOB_BACK ); + + struct globbing * const globbing = (struct globbing *)closure; + PATHNAME f; + string buf[ 1 ]; + LISTITER iter; + LISTITER end; + + /* Null out directory for matching. We wish we had file_dirscan() pass up a + * PATHNAME. 
+ */ + path_parse( object_str( file ), &f ); + f.f_dir.len = 0; + + /* For globbing, we unconditionally ignore current and parent directory + * items. Since these items always exist, there is no reason why caller of + * GLOB would want to see them. We could also change file_dirscan(), but + * then paths with embedded "." and ".." would not work anywhere. + */ + if ( !strcmp( f.f_base.ptr, "." ) || !strcmp( f.f_base.ptr, ".." ) ) + { + PROFILE_EXIT( BUILTIN_GLOB_BACK ); + return; + } + + string_new( buf ); + path_build( &f, buf ); + + if ( globbing->case_insensitive ) + downcase_inplace( buf->value ); + + iter = list_begin( globbing->patterns ); + end = list_end( globbing->patterns ); + for ( ; iter != end; iter = list_next( iter ) ) + { + if ( !glob( object_str( list_item( iter ) ), buf->value ) ) + { + globbing->results = list_push_back( globbing->results, object_copy( + file ) ); + break; + } + } + + string_free( buf ); + + PROFILE_EXIT( BUILTIN_GLOB_BACK ); +} + + +static LIST * downcase_list( LIST * in ) +{ + LIST * result = L0; + LISTITER iter = list_begin( in ); + LISTITER const end = list_end( in ); + + string s[ 1 ]; + string_new( s ); + + for ( ; iter != end; iter = list_next( iter ) ) + { + string_append( s, object_str( list_item( iter ) ) ); + downcase_inplace( s->value ); + result = list_push_back( result, object_new( s->value ) ); + string_truncate( s, 0 ); + } + + string_free( s ); + return result; +} + + +LIST * builtin_glob( FRAME * frame, int flags ) +{ + LIST * const l = lol_get( frame->args, 0 ); + LIST * const r = lol_get( frame->args, 1 ); + + LISTITER iter; + LISTITER end; + struct globbing globbing; + + globbing.results = L0; + globbing.patterns = r; + + globbing.case_insensitive = +# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS ) + l; /* Always case-insensitive if any files can be found. */ +# else + lol_get( frame->args, 2 ); +# endif + + if ( globbing.case_insensitive ) + globbing.patterns = downcase_list( r ); + + iter = list_begin( l ); + end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + file_dirscan( list_item( iter ), builtin_glob_back, &globbing ); + + if ( globbing.case_insensitive ) + list_free( globbing.patterns ); + + return globbing.results; +} + + +static int has_wildcards( char const * const str ) +{ + return str[ strcspn( str, "[]*?" ) ] ? 1 : 0; +} + + +/* + * append_if_exists() - if file exists, append it to the list + */ + +static LIST * append_if_exists( LIST * list, OBJECT * file ) +{ + file_info_t * info = file_query( file ); + return info + ? list_push_back( list, object_copy( info->name ) ) + : list ; +} + + +LIST * glob1( OBJECT * dirname, OBJECT * pattern ) +{ + LIST * const plist = list_new( object_copy( pattern ) ); + struct globbing globbing; + + globbing.results = L0; + globbing.patterns = plist; + + globbing.case_insensitive +# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS ) + = plist; /* always case-insensitive if any files can be found */ +# else + = L0; +# endif + + if ( globbing.case_insensitive ) + globbing.patterns = downcase_list( plist ); + + file_dirscan( dirname, builtin_glob_back, &globbing ); + + if ( globbing.case_insensitive ) + list_free( globbing.patterns ); + + list_free( plist ); + + return globbing.results; +} + + +LIST * glob_recursive( char const * pattern ) +{ + LIST * result = L0; + + /* Check if there's metacharacters in pattern */ + if ( !has_wildcards( pattern ) ) + { + /* No metacharacters. Check if the path exists. 
*/ + OBJECT * const p = object_new( pattern ); + result = append_if_exists( result, p ); + object_free( p ); + } + else + { + /* Have metacharacters in the pattern. Split into dir/name. */ + PATHNAME path[ 1 ]; + path_parse( pattern, path ); + + if ( path->f_dir.ptr ) + { + LIST * dirs = L0; + string dirname[ 1 ]; + string basename[ 1 ]; + string_new( dirname ); + string_new( basename ); + + string_append_range( dirname, path->f_dir.ptr, + path->f_dir.ptr + path->f_dir.len ); + + path->f_grist.ptr = 0; + path->f_grist.len = 0; + path->f_dir.ptr = 0; + path->f_dir.len = 0; + path_build( path, basename ); + + dirs = has_wildcards( dirname->value ) + ? glob_recursive( dirname->value ) + : list_push_back( dirs, object_new( dirname->value ) ); + + if ( has_wildcards( basename->value ) ) + { + OBJECT * const b = object_new( basename->value ); + LISTITER iter = list_begin( dirs ); + LISTITER const end = list_end( dirs ); + for ( ; iter != end; iter = list_next( iter ) ) + result = list_append( result, glob1( list_item( iter ), b ) + ); + object_free( b ); + } + else + { + LISTITER iter = list_begin( dirs ); + LISTITER const end = list_end( dirs ); + string file_string[ 1 ]; + string_new( file_string ); + + /* No wildcard in basename. */ + for ( ; iter != end; iter = list_next( iter ) ) + { + OBJECT * p; + path->f_dir.ptr = object_str( list_item( iter ) ); + path->f_dir.len = int32_t(strlen( object_str( list_item( iter ) ) )); + path_build( path, file_string ); + + p = object_new( file_string->value ); + + result = append_if_exists( result, p ); + + object_free( p ); + + string_truncate( file_string, 0 ); + } + + string_free( file_string ); + } + + string_free( dirname ); + string_free( basename ); + + list_free( dirs ); + } + else + { + /* No directory, just a pattern. */ + OBJECT * const p = object_new( pattern ); + result = list_append( result, glob1( constant_dot, p ) ); + object_free( p ); + } + } + + return result; +} + + +/* + * builtin_glob_recursive() - ??? + */ + +LIST * builtin_glob_recursive( FRAME * frame, int flags ) +{ + LIST * result = L0; + LIST * const l = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( l ); + LISTITER const end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + result = list_append( result, glob_recursive( object_str( list_item( + iter ) ) ) ); + return result; +} + + +/* + * builtin_match() - MATCH rule, regexp matching + */ + +LIST * builtin_match( FRAME * frame, int flags ) +{ + LIST * l; + LIST * r; + LIST * result = L0; + LISTITER l_iter; + LISTITER l_end; + LISTITER r_iter; + LISTITER r_end; + + string buf[ 1 ]; + string_new( buf ); + + /* For each pattern */ + + l = lol_get( frame->args, 0 ); + l_iter = list_begin( l ); + l_end = list_end( l ); + for ( ; l_iter != l_end; l_iter = list_next( l_iter ) ) + { + /* Result is cached and intentionally never freed. */ + regexp * re = regex_compile( list_item( l_iter ) ); + + /* For each string to match against. */ + r = lol_get( frame->args, 1 ); + r_iter = list_begin( r ); + r_end = list_end( r ); + for ( ; r_iter != r_end; r_iter = list_next( r_iter ) ) + { + if ( regexec( re, object_str( list_item( r_iter ) ) ) ) + { + int i; + int top; + + /* Find highest parameter */ + + for ( top = NSUBEXP; top-- > 1; ) + if ( re->startp[ top ] ) + break; + + /* And add all parameters up to highest onto list. */ + /* Must have parameters to have results! 
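+ *
+ * For illustration: with the pattern ([a-z]+)-([0-9]+) and the strings
+ * "gcc-12" "clang-15" "readme", the loop below appends the captures of
+ * every matching string in order, so MATCH returns the flat list
+ *     gcc 12 clang 15
+ * while "readme", which does not match, contributes nothing. A pattern
+ * without any parenthesised group produces no results even when it
+ * matches.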
*/ + for ( i = 1; i <= top; ++i ) + { + string_append_range( buf, re->startp[ i ], re->endp[ i ] ); + result = list_push_back( result, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + } + } + } + + string_free( buf ); + return result; +} + + +/* + * builtin_split_by_characters() - splits the given string into tokens + */ + +LIST * builtin_split_by_characters( FRAME * frame, int flags ) +{ + LIST * l1 = lol_get( frame->args, 0 ); + LIST * l2 = lol_get( frame->args, 1 ); + + LIST * result = L0; + + string buf[ 1 ]; + + char const * delimiters = object_str( list_front( l2 ) ); + char * t; + + string_copy( buf, object_str( list_front( l1 ) ) ); + + t = strtok( buf->value, delimiters ); + while ( t ) + { + result = list_push_back( result, object_new( t ) ); + t = strtok( NULL, delimiters ); + } + + string_free( buf ); + + return result; +} + + +/* + * builtin_hdrmacro() - ??? + */ + +LIST * builtin_hdrmacro( FRAME * frame, int flags ) +{ + LIST * const l = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( l ); + LISTITER const end = list_end( l ); + + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * const t = bindtarget( list_item( iter ) ); + + /* Scan file for header filename macro definitions. */ + if ( DEBUG_HEADER ) + out_printf( "scanning '%s' for header file macro definitions\n", + object_str( list_item( iter ) ) ); + + macro_headers( t ); + } + + return L0; +} + + +/* + * builtin_rulenames() - RULENAMES ( MODULE ? ) + * + * Returns a list of the non-local rule names in the given MODULE. If MODULE is + * not supplied, returns the list of rule names in the global module. + */ + +static void add_rule_name( void * r_, void * result_ ) +{ + RULE * const r = (RULE *)r_; + LIST * * const result = (LIST * *)result_; + if ( r->exported ) + *result = list_push_back( *result, object_copy( r->name ) ); +} + + +LIST * builtin_rulenames( FRAME * frame, int flags ) +{ + LIST * arg0 = lol_get( frame->args, 0 ); + LIST * result = L0; + module_t * const source_module = bindmodule( list_empty( arg0 ) + ? 0 + : list_front( arg0 ) ); + + if ( source_module->rules ) + hashenumerate( source_module->rules, add_rule_name, &result ); + return result; +} + + +/* + * builtin_varnames() - VARNAMES ( MODULE ? ) + * + * Returns a list of the variable names in the given MODULE. If MODULE is not + * supplied, returns the list of variable names in the global module. + */ + +/* helper function for builtin_varnames(), below. Used with hashenumerate, will + * prepend the key of each element to the list + */ +static void add_hash_key( void * np, void * result_ ) +{ + LIST * * result = (LIST * *)result_; + *result = list_push_back( *result, object_copy( *(OBJECT * *)np ) ); +} + + +LIST * builtin_varnames( FRAME * frame, int flags ) +{ + LIST * arg0 = lol_get( frame->args, 0 ); + LIST * result = L0; + module_t * source_module = bindmodule( list_empty( arg0 ) + ? 0 + : list_front( arg0 ) ); + + struct hash * const vars = source_module->variables; + if ( vars ) + hashenumerate( vars, add_hash_key, &result ); + return result; +} + + +/* + * builtin_delete_module() - DELETE_MODULE ( MODULE ? ) + * + * Clears all rules and variables from the given module. + */ + +LIST * builtin_delete_module( FRAME * frame, int flags ) +{ + LIST * const arg0 = lol_get( frame->args, 0 ); + module_t * const source_module = bindmodule( list_empty( arg0 ) ? 
0 : + list_front( arg0 ) ); + delete_module( source_module ); + return L0; +} + + +/* + * unknown_rule() - reports an unknown rule occurrence to the user and exits + */ + +void unknown_rule( FRAME * frame, char const * key, module_t * module, + OBJECT * rule_name ) +{ + backtrace_line( frame->prev ); + if ( key ) + out_printf("%s error", key); + else + out_printf("ERROR"); + out_printf( ": rule \"%s\" unknown in ", object_str( rule_name ) ); + if ( module->name ) + out_printf( "module \"%s\".\n", object_str( module->name ) ); + else + out_printf( "root module.\n" ); + backtrace( frame->prev ); + b2::clean_exit( EXITBAD ); +} + + +/* + * builtin_import() - IMPORT rule + * + * IMPORT + * ( + * SOURCE_MODULE ? : + * SOURCE_RULES * : + * TARGET_MODULE ? : + * TARGET_RULES * : + * LOCALIZE ? + * ) + * + * Imports rules from the SOURCE_MODULE into the TARGET_MODULE as local rules. + * If either SOURCE_MODULE or TARGET_MODULE is not supplied, it refers to the + * global module. SOURCE_RULES specifies which rules from the SOURCE_MODULE to + * import; TARGET_RULES specifies the names to give those rules in + * TARGET_MODULE. If SOURCE_RULES contains a name that does not correspond to + * a rule in SOURCE_MODULE, or if it contains a different number of items than + * TARGET_RULES, an error is issued. If LOCALIZE is specified, the rules will be + * executed in TARGET_MODULE, with corresponding access to its module local + * variables. + */ + +LIST * builtin_import( FRAME * frame, int flags ) +{ + LIST * source_module_list = lol_get( frame->args, 0 ); + LIST * source_rules = lol_get( frame->args, 1 ); + LIST * target_module_list = lol_get( frame->args, 2 ); + LIST * target_rules = lol_get( frame->args, 3 ); + LIST * localize = lol_get( frame->args, 4 ); + + module_t * target_module = bindmodule( list_empty( target_module_list ) + ? 0 + : list_front( target_module_list ) ); + module_t * source_module = bindmodule( list_empty( source_module_list ) + ? 0 + : list_front( source_module_list ) ); + + LISTITER source_iter = list_begin( source_rules ); + LISTITER const source_end = list_end( source_rules ); + LISTITER target_iter = list_begin( target_rules ); + LISTITER const target_end = list_end( target_rules ); + + for ( ; + source_iter != source_end && target_iter != target_end; + source_iter = list_next( source_iter ), + target_iter = list_next( target_iter ) ) + { + RULE * r = nullptr; + RULE * imported = nullptr; + + if ( !source_module->rules || !(r = (RULE *)hash_find( + source_module->rules, list_item( source_iter ) ) ) ) + { + unknown_rule( frame, "IMPORT", source_module, list_item( source_iter + ) ); + } + + imported = import_rule( r, target_module, list_item( target_iter ) ); + if ( !list_empty( localize ) ) + rule_localize( imported, target_module ); + /* This rule is really part of some other module. Just refer to it here, + * but do not let it out. + */ + imported->exported = 0; + } + + if ( source_iter != source_end || target_iter != target_end ) + { + backtrace_line( frame->prev ); + out_printf( "import error: length of source and target rule name lists " + "don't match!\n" ); + out_printf( " source: " ); + list_print( source_rules ); + out_printf( "\n target: " ); + list_print( target_rules ); + out_printf( "\n" ); + backtrace( frame->prev ); + b2::clean_exit( EXITBAD ); + } + + return L0; +} + + +/* + * builtin_export() - EXPORT ( MODULE ? : RULES * ) + * + * The EXPORT rule marks RULES from the SOURCE_MODULE as non-local (and thus + * exportable). 
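+ * For illustration:
+ *
+ *     EXPORT my-module : my-rule other-rule ;
+ *
+ * marks those two rules in my-module as exported. Note that builtin_import()
+ * above deliberately clears the flag again on each imported copy, so a rule
+ * imported into another module is not automatically re-exported from there.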
If an element of RULES does not name a rule in MODULE, an error + * is issued. + */ + +LIST * builtin_export( FRAME * frame, int flags ) +{ + LIST * const module_list = lol_get( frame->args, 0 ); + LIST * const rules = lol_get( frame->args, 1 ); + module_t * const m = bindmodule( list_empty( module_list ) ? 0 : list_front( + module_list ) ); + + LISTITER iter = list_begin( rules ); + LISTITER const end = list_end( rules ); + for ( ; iter != end; iter = list_next( iter ) ) + { + RULE * r = nullptr; + if ( !m->rules || !( r = (RULE *)hash_find( m->rules, list_item( iter ) + ) ) ) + { + unknown_rule( frame, "EXPORT", m, list_item( iter ) ); + } + r->exported = 1; + } + return L0; +} + + +/* + * get_source_line() - get a frame's file and line number information + * + * This is the execution traceback information to be indicated for in debug + * output or an error backtrace. + */ + +static void get_source_line( FRAME * frame, char const * * file, int * line ) +{ + if ( frame->file ) + { + char const * f = object_str( frame->file ); + int l = frame->line; + *file = f; + *line = l; + } + else + { + *file = "(builtin)"; + *line = -1; + } +} + + +void print_source_line( FRAME * frame ) +{ + char const * file; + int line; + get_source_line( frame, &file, &line ); + if ( line < 0 ) + out_printf( "(builtin):" ); + else + out_printf( "%s:%d:", file, line ); +} + + +/* + * backtrace_line() - print a single line of error backtrace for the given + * frame. + */ + +void backtrace_line( FRAME * frame ) +{ + if ( frame == 0 ) + { + out_printf( "(no frame):" ); + } + else + { + print_source_line( frame ); + out_printf( " in %s\n", frame->rulename ); + } +} + + +/* + * backtrace() - Print the entire backtrace from the given frame to the Jambase + * which invoked it. + */ + +void backtrace( FRAME * frame ) +{ + if ( !frame ) return; + while ( ( frame = frame->prev ) ) + backtrace_line( frame ); +} + + +/* + * builtin_backtrace() - A Jam version of the backtrace function, taking no + * arguments and returning a list of quadruples: FILENAME LINE MODULE. RULENAME + * describing each frame. Note that the module-name is always followed by a + * period. + */ + +LIST * builtin_backtrace( FRAME * frame, int flags ) +{ + LIST * const levels_arg = lol_get( frame->args, 0 ); + int levels = list_empty( levels_arg ) + ? (int)( (unsigned int)(-1) >> 1 ) + : atoi( object_str( list_front( levels_arg ) ) ); + + LIST * result = L0; + for ( ; ( frame = frame->prev ) && levels; --levels ) + { + char const * file; + int line; + char buf[ 32 ]; + string module_name[ 1 ]; + get_source_line( frame, &file, &line ); + sprintf( buf, "%d", line ); + string_new( module_name ); + if ( frame->module->name ) + { + string_append( module_name, object_str( frame->module->name ) ); + string_append( module_name, "." ); + } + result = list_push_back( result, object_new( file ) ); + result = list_push_back( result, object_new( buf ) ); + result = list_push_back( result, object_new( module_name->value ) ); + result = list_push_back( result, object_new( frame->rulename ) ); + string_free( module_name ); + } + return result; +} + + +/* + * builtin_caller_module() - CALLER_MODULE ( levels ? ) + * + * If levels is not supplied, returns the name of the module of the rule which + * called the one calling this one. If levels is supplied, it is interpreted as + * an integer specifying a number of additional levels of call stack to traverse + * in order to locate the module in question. If no such module exists, returns + * the empty list. 
Also returns the empty list when the module in question is + * the global module. This rule is needed for implementing module import + * behavior. + */ + +LIST * builtin_caller_module( FRAME * frame, int flags ) +{ + LIST * const levels_arg = lol_get( frame->args, 0 ); + int const levels = list_empty( levels_arg ) + ? 0 + : atoi( object_str( list_front( levels_arg ) ) ); + + int i; + for ( i = 0; ( i < levels + 2 ) && frame->prev; ++i ) + frame = frame->prev; + + return frame->module == root_module() + ? L0 + : list_new( object_copy( frame->module->name ) ); +} + + +/* + * Return the current working directory. + * + * Usage: pwd = [ PWD ] ; + */ + +LIST * builtin_pwd( FRAME * frame, int flags ) +{ + return list_new( object_copy( cwd() ) ); +} + + +/* + * Adds targets to the list of target that jam will attempt to update. + */ + +LIST * builtin_update( FRAME * frame, int flags ) +{ + LIST * result = list_copy( targets_to_update() ); + LIST * arg1 = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( arg1 ), end = list_end( arg1 ); + clear_targets_to_update(); + for ( ; iter != end; iter = list_next( iter ) ) + mark_target_for_updating( object_copy( list_item( iter ) ) ); + return result; +} + +extern int anyhow; +int last_update_now_status; + +/* Takes a list of target names and immediately updates them. + * + * Parameters: + * 1. Target list. + * 2. Optional file descriptor (converted to a string) for a log file where all + * the related build output should be redirected. + * 3. If specified, makes the build temporarily disable the -n option, i.e. + * forces all needed out-of-date targets to be rebuilt. + * 4. If specified, makes the build temporarily disable the -q option, i.e. + * forces the build to continue even if one of the targets fails to build. + */ +LIST * builtin_update_now( FRAME * frame, int flags ) +{ + LIST * targets = lol_get( frame->args, 0 ); + LIST * log = lol_get( frame->args, 1 ); + LIST * force = lol_get( frame->args, 2 ); + LIST * continue_ = lol_get( frame->args, 3 ); + int status; + int original_stdout = 0; + int original_stderr = 0; + int original_noexec = 0; + int original_quitquick = 0; + + if ( !list_empty( log ) ) + { + /* Temporarily redirect stdout and stderr to the given log file. */ + int const fd = atoi( object_str( list_front( log ) ) ); + original_stdout = dup( 0 ); + original_stderr = dup( 1 ); + dup2( fd, 0 ); + dup2( fd, 1 ); + } + + if ( !list_empty( force ) ) + { + original_noexec = globs.noexec; + globs.noexec = 0; + } + + if ( !list_empty( continue_ ) ) + { + original_quitquick = globs.quitquick; + globs.quitquick = 0; + } + + status = make( targets, anyhow ); + + if ( !list_empty( force ) ) + { + globs.noexec = original_noexec; + } + + if ( !list_empty( continue_ ) ) + { + globs.quitquick = original_quitquick; + } + + if ( !list_empty( log ) ) + { + /* Flush whatever stdio might have buffered, while descriptions 0 and 1 + * still refer to the log file. + */ + out_flush( ); + err_flush( ); + dup2( original_stdout, 0 ); + dup2( original_stderr, 1 ); + close( original_stdout ); + close( original_stderr ); + } + + last_update_now_status = status; + + return status ? L0 : list_new( object_copy( constant_ok ) ); +} + + +LIST * builtin_import_module( FRAME * frame, int flags ) +{ + LIST * const arg1 = lol_get( frame->args, 0 ); + LIST * const arg2 = lol_get( frame->args, 1 ); + module_t * const m = list_empty( arg2 ) + ? 
root_module() + : bindmodule( list_front( arg2 ) ); + import_module( arg1, m ); + return L0; +} + + +LIST * builtin_imported_modules( FRAME * frame, int flags ) +{ + LIST * const arg0 = lol_get( frame->args, 0 ); + OBJECT * const module = list_empty( arg0 ) ? 0 : list_front( arg0 ); + return imported_modules( bindmodule( module ) ); +} + + +LIST * builtin_instance( FRAME * frame, int flags ) +{ + LIST * arg1 = lol_get( frame->args, 0 ); + LIST * arg2 = lol_get( frame->args, 1 ); + module_t * const instance = bindmodule( list_front( arg1 ) ); + module_t * const class_module = bindmodule( list_front( arg2 ) ); + instance->class_module = class_module; + module_set_fixed_variables( instance, class_module->num_fixed_variables ); + return L0; +} + + +LIST * builtin_sort( FRAME * frame, int flags ) +{ + return list_sort( lol_get( frame->args, 0 ) ); +} + + +namespace +{ + template + void replace_all(S &str, const S &from, const S &to) + { + const auto from_len = from.length(); + const auto to_len = to.length(); + auto pos = str.find(from, 0); + while (pos != S::npos) + { + str.replace(pos, from_len, to); + pos += to_len; + pos = str.find(from, pos); + } + } +} + + +LIST * builtin_normalize_path( FRAME * frame, int flags ) +{ + LIST * arg = lol_get( frame->args, 0 ); + LISTITER arg_iter = list_begin( arg ); + LISTITER arg_end = list_end( arg ); + std::string in; + for ( ; arg_iter != arg_end; arg_iter = list_next( arg_iter ) ) + { + auto arg_str = object_str( list_item( arg_iter ) ); + if (arg_str[ 0 ] == '\0') continue; + if (!in.empty()) in += "/"; + in += arg_str; + } + std::string out = b2::paths::normalize(in); + + if (out.empty()) return L0; + else return list_new(object_new(out.c_str())); +} + + +LIST * builtin_native_rule( FRAME * frame, int flags ) +{ + LIST * module_name = lol_get( frame->args, 0 ); + LIST * rule_name = lol_get( frame->args, 1 ); + + module_t * module = bindmodule( list_front( module_name ) ); + + native_rule_t * np; + if ( module->native_rules && (np = (native_rule_t *)hash_find( + module->native_rules, list_front( rule_name ) ) ) ) + { + new_rule_body( module, np->name, np->procedure, 1 ); + } + else + { + backtrace_line( frame->prev ); + out_printf( "error: no native rule \"%s\" defined in module \"%s.\"\n", + object_str( list_front( rule_name ) ), object_str( module->name ) ); + backtrace( frame->prev ); + b2::clean_exit( EXITBAD ); + } + return L0; +} + + +LIST * builtin_has_native_rule( FRAME * frame, int flags ) +{ + LIST * module_name = lol_get( frame->args, 0 ); + LIST * rule_name = lol_get( frame->args, 1 ); + LIST * version = lol_get( frame->args, 2 ); + + module_t * module = bindmodule( list_front( module_name ) ); + + native_rule_t * np; + if ( module->native_rules && (np = (native_rule_t *)hash_find( + module->native_rules, list_front( rule_name ) ) ) ) + { + int expected_version = atoi( object_str( list_front( version ) ) ); + if ( np->version == expected_version ) + return list_new( object_copy( constant_true ) ); + } + return L0; +} + + +LIST * builtin_user_module( FRAME * frame, int flags ) +{ + LIST * const module_name = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( module_name ); + LISTITER const end = list_end( module_name ); + for ( ; iter != end; iter = list_next( iter ) ) + bindmodule( list_item( iter ) )->user_module = 1; + return L0; +} + + +LIST * builtin_nearest_user_location( FRAME * frame, int flags ) +{ + FRAME * const nearest_user_frame = frame->module->user_module + ? 
frame + : frame->prev_user; + if ( !nearest_user_frame ) + return L0; + + { + LIST * result = L0; + char const * file; + int line; + char buf[ 32 ]; + + get_source_line( nearest_user_frame, &file, &line ); + sprintf( buf, "%d", line ); + result = list_push_back( result, object_new( file ) ); + result = list_push_back( result, object_new( buf ) ); + return result; + } +} + + +LIST * builtin_check_if_file( FRAME * frame, int flags ) +{ + LIST * const name = lol_get( frame->args, 0 ); + return file_is_file( list_front( name ) ) == 1 + ? list_new( object_copy( constant_true ) ) + : L0; +} + + +LIST * builtin_md5( FRAME * frame, int flags ) +{ + LIST * l = lol_get( frame->args, 0 ); + char const * s = object_str( list_front( l ) ); + + md5_state_t state; + md5_byte_t digest[ 16 ]; + char hex_output[ 16 * 2 + 1 ]; + + int di; + + md5_init( &state ); + md5_append( &state, (md5_byte_t const *)s, strlen( s ) ); + md5_finish( &state, digest ); + + for ( di = 0; di < 16; ++di ) + sprintf( hex_output + di * 2, "%02x", digest[ di ] ); + + return list_new( object_new( hex_output ) ); +} + + +LIST * builtin_file_open( FRAME * frame, int flags ) +{ + char const * name = object_str( list_front( lol_get( frame->args, 0 ) ) ); + char const * mode = object_str( list_front( lol_get( frame->args, 1 ) ) ); + int fd; + char buffer[ sizeof( "4294967295" ) ]; + + if ( strcmp(mode, "w") == 0 ) + fd = open( name, O_WRONLY|O_CREAT|O_TRUNC, 0666 ); + else + fd = open( name, O_RDONLY ); + + if ( fd != -1 ) + { + sprintf( buffer, "%d", fd ); + return list_new( object_new( buffer ) ); + } + return L0; +} + + +LIST * builtin_pad( FRAME * frame, int flags ) +{ + OBJECT * string = list_front( lol_get( frame->args, 0 ) ); + char const * width_s = object_str( list_front( lol_get( frame->args, 1 ) ) ); + + int32_t current = int32_t(strlen( object_str( string ) )); + int32_t desired = atoi( width_s ); + if ( current >= desired ) + return list_new( object_copy( string ) ); + else + { + char * buffer = (char *)BJAM_MALLOC( desired + 1 ); + int32_t i; + LIST * result; + + strcpy( buffer, object_str( string ) ); + for ( i = current; i < desired; ++i ) + buffer[ i ] = ' '; + buffer[ desired ] = '\0'; + result = list_new( object_new( buffer ) ); + BJAM_FREE( buffer ); + return result; + } +} + + +LIST * builtin_precious( FRAME * frame, int flags ) +{ + LIST * targets = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + bindtarget( list_item( iter ) )->flags |= T_FLAG_PRECIOUS; + return L0; +} + + +LIST * builtin_self_path( FRAME * frame, int flags ) +{ + extern char const * saved_argv0; + char * p = executable_path( saved_argv0 ); + if ( p ) + { + LIST * const result = list_new( object_new( p ) ); + free( p ); + return result; + } + return L0; +} + + +LIST * builtin_makedir( FRAME * frame, int flags ) +{ + LIST * const path = lol_get( frame->args, 0 ); + return file_mkdir( object_str( list_front( path ) ) ) + ? L0 + : list_new( object_copy( list_front( path ) ) ); +} + +LIST *builtin_readlink( FRAME * frame, int flags ) +{ + const char * path = object_str( list_front( lol_get( frame->args, 0 ) ) ); +#ifdef OS_NT + + /* This struct is declared in ntifs.h which is + * part of the Windows Driver Kit. 
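+     * It is declared locally here so the definition is available without
+     * requiring the Windows Driver Kit headers at build time.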
+ */ + typedef struct _REPARSE_DATA_BUFFER { + ULONG ReparseTag; + USHORT ReparseDataLength; + USHORT Reserved; + union { + struct { + USHORT SubstituteNameOffset; + USHORT SubstituteNameLength; + USHORT PrintNameOffset; + USHORT PrintNameLength; + ULONG Flags; + WCHAR PathBuffer[ 1 ]; + } SymbolicLinkReparseBuffer; + struct { + USHORT SubstituteNameOffset; + USHORT SubstituteNameLength; + USHORT PrintNameOffset; + USHORT PrintNameLength; + WCHAR PathBuffer[ 1 ]; + } MountPointReparseBuffer; + struct { + UCHAR DataBuffer[ 1 ]; + } GenericReparseBuffer; + }; + } REPARSE_DATA_BUFFER; + + HANDLE hLink = CreateFileA( path, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, NULL ); + DWORD n; + union { + REPARSE_DATA_BUFFER reparse; + char data[MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; + } buf; + int okay = DeviceIoControl(hLink, FSCTL_GET_REPARSE_POINT, NULL, 0, &buf, sizeof(buf), &n, NULL); + + CloseHandle( hLink ); + + if (okay && buf.reparse.ReparseTag == IO_REPARSE_TAG_SYMLINK ) + { + int index = buf.reparse.SymbolicLinkReparseBuffer.SubstituteNameOffset / 2; + int length = buf.reparse.SymbolicLinkReparseBuffer.SubstituteNameLength / 2; + char cbuf[MAX_PATH + 1]; + int numchars = WideCharToMultiByte( CP_ACP, 0, buf.reparse.SymbolicLinkReparseBuffer.PathBuffer + index, length, cbuf, sizeof(cbuf), NULL, NULL ); + if( numchars >= int(sizeof(cbuf)) ) + { + return 0; + } + cbuf[numchars] = '\0'; + return list_new( object_new( cbuf ) ); + } + else if( okay && buf.reparse.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT ) + { + int index = buf.reparse.MountPointReparseBuffer.SubstituteNameOffset / 2; + int length = buf.reparse.MountPointReparseBuffer.SubstituteNameLength / 2; + char cbuf[MAX_PATH + 1]; + const char * result; + int numchars = WideCharToMultiByte( CP_ACP, 0, buf.reparse.MountPointReparseBuffer.PathBuffer + index, length, cbuf, sizeof(cbuf), NULL, NULL ); + if( numchars >= int(sizeof(cbuf)) ) + { + return 0; + } + cbuf[numchars] = '\0'; + /* strip off the leading "\??\" */ + result = cbuf; + if ( cbuf[ 0 ] == '\\' && cbuf[ 1 ] == '?' && + cbuf[ 2 ] == '?' 
&& cbuf[ 3 ] == '\\' && + cbuf[ 4 ] != '\0' && cbuf[ 5 ] == ':' ) + { + result += 4; + } + return list_new( object_new( result ) ); + } + return 0; +#else + char static_buf[256]; + char * buf = static_buf; + int32_t bufsize = 256; + LIST * result = 0; + while (1) { + ssize_t len = readlink( path, buf, bufsize ); + if ( len < 0 ) + { + break; + } + else if ( int32_t(len) < bufsize ) + { + buf[ len ] = '\0'; + result = list_new( object_new( buf ) ); + break; + } + if ( buf != static_buf ) + BJAM_FREE( buf ); + bufsize *= 2; + buf = (char *)BJAM_MALLOC( bufsize ); + } + + if ( buf != static_buf ) + BJAM_FREE( buf ); + + return result; +#endif +} + +#ifdef JAM_DEBUGGER + +LIST *builtin_debug_print_helper( FRAME * frame, int flags ) +{ + debug_print_result = list_copy( lol_get( frame->args, 0 ) ); + return L0; +} + +#endif + +#ifdef HAVE_PYTHON + +LIST * builtin_python_import_rule( FRAME * frame, int flags ) +{ + static int first_time = 1; + char const * python_module = object_str( list_front( lol_get( frame->args, + 0 ) ) ); + char const * python_function = object_str( list_front( lol_get( frame->args, + 1 ) ) ); + OBJECT * jam_module = list_front( lol_get( frame->args, 2 ) ); + OBJECT * jam_rule = list_front( lol_get( frame->args, 3 ) ); + + PyObject * pName; + PyObject * pModule; + PyObject * pDict; + PyObject * pFunc; + + if ( first_time ) + { + /* At the first invocation, we add the value of the global + * EXTRA_PYTHONPATH to the sys.path Python variable. + */ + LIST * extra = 0; + module_t * outer_module = frame->module; + LISTITER iter, end; + + first_time = 0; + + extra = var_get( root_module(), constant_extra_pythonpath ); + + iter = list_begin( extra ), end = list_end( extra ); + for ( ; iter != end; iter = list_next( iter ) ) + { + string buf[ 1 ]; + string_new( buf ); + string_append( buf, "import sys\nsys.path.append(\"" ); + string_append( buf, object_str( list_item( iter ) ) ); + string_append( buf, "\")\n" ); + PyRun_SimpleString( buf->value ); + string_free( buf ); + } + } + + pName = PyString_FromString( python_module ); + pModule = PyImport_Import( pName ); + Py_DECREF( pName ); + + if ( pModule != NULL ) + { + pDict = PyModule_GetDict( pModule ); + pFunc = PyDict_GetItemString( pDict, python_function ); + + if ( pFunc && PyCallable_Check( pFunc ) ) + { + module_t * m = bindmodule( jam_module ); + new_rule_body( m, jam_rule, function_python( pFunc, 0 ), 0 ); + } + else + { + if ( PyErr_Occurred() ) + PyErr_Print(); + err_printf( "Cannot find function \"%s\"\n", python_function ); + } + Py_DECREF( pModule ); + } + else + { + PyErr_Print(); + err_printf( "Failed to load \"%s\"\n", python_module ); + } + return L0; + +} + +#endif /* #ifdef HAVE_PYTHON */ + + +void lol_build( LOL * lol, char const * * elements ) +{ + LIST * l = L0; + lol_init( lol ); + + while ( elements && *elements ) + { + if ( !strcmp( *elements, ":" ) ) + { + lol_add( lol, l ); + l = L0; + } + else + { + l = list_push_back( l, object_new( *elements ) ); + } + ++elements; + } + + if ( l != L0 ) + lol_add( lol, l ); +} + + +#ifdef HAVE_PYTHON + +static LIST *jam_list_from_string(PyObject *a) +{ + return list_new( object_new( PyString_AsString( a ) ) ); +} + +static LIST *jam_list_from_sequence(PyObject *a) +{ + LIST * l = 0; + + int i = 0; + int s = PySequence_Size( a ); + + for ( ; i < s; ++i ) + { + /* PySequence_GetItem returns new reference. 
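+         * It is released with Py_DECREF() below once its string value has
+         * been copied into the jam list.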
*/ + PyObject * e = PySequence_GetItem( a, i ); + char * s = PyString_AsString( e ); + if ( !s ) + { + /* try to get the repr() on the object */ + PyObject *repr = PyObject_Repr(e); + if (repr) + { + const char *str = PyString_AsString(repr); + PyErr_Format(PyExc_TypeError, "expecting type got %s", str); + } + /* fall back to a dumb error */ + else + { + PyErr_BadArgument(); + } + return NULL; + } + l = list_push_back( l, object_new( s ) ); + Py_DECREF( e ); + } + + return l; +} + +static void make_jam_arguments_from_python(FRAME* inner, PyObject *args) +{ + int i; + int size; + + /* Build up the list of arg lists. */ + frame_init( inner ); + inner->prev = 0; + inner->prev_user = 0; + inner->module = bindmodule( constant_python_interface ); + + size = PyTuple_Size( args ); + for (i = 0 ; i < size; ++i) + { + PyObject * a = PyTuple_GetItem( args, i ); + if ( PyString_Check( a ) ) + { + lol_add( inner->args, jam_list_from_string(a) ); + } + else if ( PySequence_Check( a ) ) + { + lol_add( inner->args, jam_list_from_sequence(a) ); + } + } +} + + +/* + * Calls the bjam rule specified by name passed in 'args'. The name is looked up + * in the context of bjam's 'python_interface' module. Returns the list of + * strings returned by the rule. + */ + +PyObject * bjam_call( PyObject * self, PyObject * args ) +{ + FRAME inner[ 1 ]; + LIST * result; + PARSE * p; + OBJECT * rulename; + PyObject *args_proper; + + /* PyTuple_GetItem returns borrowed reference. */ + rulename = object_new( PyString_AsString( PyTuple_GetItem( args, 0 ) ) ); + + args_proper = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); + make_jam_arguments_from_python (inner, args_proper); + if ( PyErr_Occurred() ) + { + return NULL; + } + Py_DECREF(args_proper); + + result = evaluate_rule( bindrule( rulename, inner->module), rulename, inner ); + object_free( rulename ); + + frame_free( inner ); + + /* Convert the bjam list into a Python list result. */ + { + PyObject * const pyResult = PyList_New( list_length( result ) ); + int i = 0; + LISTITER iter = list_begin( result ); + LISTITER const end = list_end( result ); + for ( ; iter != end; iter = list_next( iter ) ) + { + PyList_SetItem( pyResult, i, PyString_FromString( object_str( + list_item( iter ) ) ) ); + i += 1; + } + list_free( result ); + return pyResult; + } +} + + +/* + * Accepts four arguments: + * - module name + * - rule name, + * - Python callable. + * - (optional) bjam language function signature. + * Creates a bjam rule with the specified name in the specified module, which + * will invoke the Python callable. + */ + +PyObject * bjam_import_rule( PyObject * self, PyObject * args ) +{ + char * module; + char * rule; + PyObject * func; + PyObject * bjam_signature = NULL; + module_t * m; + RULE * r; + OBJECT * module_name; + OBJECT * rule_name; + + if ( !PyArg_ParseTuple( args, "ssO|O:import_rule", + &module, &rule, &func, &bjam_signature ) ) + return NULL; + + if ( !PyCallable_Check( func ) ) + { + PyErr_SetString( PyExc_RuntimeError, "Non-callable object passed to " + "bjam.import_rule" ); + return NULL; + } + + module_name = *module ? 
object_new( module ) : 0; + m = bindmodule( module_name ); + if ( module_name ) + object_free( module_name ); + rule_name = object_new( rule ); + new_rule_body( m, rule_name, function_python( func, bjam_signature ), 0 ); + object_free( rule_name ); + + Py_INCREF( Py_None ); + return Py_None; +} + + +/* + * Accepts four arguments: + * - an action name + * - an action body + * - a list of variable that will be bound inside the action + * - integer flags. + * Defines an action on bjam side. + */ + +PyObject * bjam_define_action( PyObject * self, PyObject * args ) +{ + char * name; + char * body; + module_t * m; + PyObject * bindlist_python; + int flags; + LIST * bindlist = L0; + int n; + int i; + OBJECT * name_str; + FUNCTION * body_func; + + if ( !PyArg_ParseTuple( args, "ssO!i:define_action", &name, &body, + &PyList_Type, &bindlist_python, &flags ) ) + return NULL; + + n = PyList_Size( bindlist_python ); + for ( i = 0; i < n; ++i ) + { + PyObject * next = PyList_GetItem( bindlist_python, i ); + if ( !PyString_Check( next ) ) + { + PyErr_SetString( PyExc_RuntimeError, "bind list has non-string " + "type" ); + return NULL; + } + bindlist = list_push_back( bindlist, object_new( PyString_AsString( next + ) ) ); + } + + name_str = object_new( name ); + body_func = function_compile_actions( body, constant_builtin, -1 ); + new_rule_actions( root_module(), name_str, body_func, bindlist, flags ); + function_free( body_func ); + object_free( name_str ); + + Py_INCREF( Py_None ); + return Py_None; +} + + +/* + * Returns the value of a variable in root Jam module. + */ + +PyObject * bjam_variable( PyObject * self, PyObject * args ) +{ + char * name; + LIST * value; + PyObject * result; + int i; + OBJECT * varname; + LISTITER iter; + LISTITER end; + + if ( !PyArg_ParseTuple( args, "s", &name ) ) + return NULL; + + varname = object_new( name ); + value = var_get( root_module(), varname ); + object_free( varname ); + iter = list_begin( value ); + end = list_end( value ); + + result = PyList_New( list_length( value ) ); + for ( i = 0; iter != end; iter = list_next( iter ), ++i ) + PyList_SetItem( result, i, PyString_FromString( object_str( list_item( + iter ) ) ) ); + + return result; +} + + +PyObject * bjam_backtrace( PyObject * self, PyObject * args ) +{ + PyObject * result = PyList_New( 0 ); + struct frame * f = frame_before_python_call; + + for ( ; (f = f->prev); ) + { + PyObject * tuple = PyTuple_New( 4 ); + char const * file; + int line; + char buf[ 32 ]; + string module_name[ 1 ]; + + get_source_line( f, &file, &line ); + sprintf( buf, "%d", line ); + string_new( module_name ); + if ( f->module->name ) + { + string_append( module_name, object_str( f->module->name ) ); + string_append( module_name, "." ); + } + + /* PyTuple_SetItem steals reference. */ + PyTuple_SetItem( tuple, 0, PyString_FromString( file ) ); + PyTuple_SetItem( tuple, 1, PyString_FromString( buf ) ); + PyTuple_SetItem( tuple, 2, PyString_FromString( module_name->value ) ); + PyTuple_SetItem( tuple, 3, PyString_FromString( f->rulename ) ); + + string_free( module_name ); + + PyList_Append( result, tuple ); + Py_DECREF( tuple ); + } + return result; +} + +PyObject * bjam_caller( PyObject * self, PyObject * args ) +{ + return PyString_FromString( frame_before_python_call->prev->module->name ? 
+                object_str( frame_before_python_call->prev->module->name ) : "" );
+}
+
+#endif /* #ifdef HAVE_PYTHON */
+
+
+#ifdef HAVE_POPEN
+
+#if defined(_MSC_VER) || defined(__BORLANDC__) || defined(__MINGW64__) || defined(__MINGW32__)
+    #undef popen
+    #define popen windows_popen_wrapper
+    #undef pclose
+    #define pclose _pclose
+
+    /*
+     * This wrapper is a workaround for a funny _popen() feature on Windows
+     * where it eats external quotes in some cases. The bug seems to be related
+     * to the quote stripping functionality used by the Windows cmd.exe
+     * interpreter when its /S is not specified.
+     *
+     * Cleaned up quote from the cmd.exe help screen as displayed on Windows XP
+     * SP3:
+     *
+     *   1. If all of the following conditions are met, then quote characters on
+     *      the command line are preserved:
+     *
+     *       - no /S switch
+     *       - exactly two quote characters
+     *       - no special characters between the two quote characters, where
+     *         special is one of: &<>()@^|
+     *       - there are one or more whitespace characters between the two quote
+     *         characters
+     *       - the string between the two quote characters is the name of an
+     *         executable file.
+     *
+     *   2. Otherwise, old behavior is to see if the first character is a quote
+     *      character and if so, strip the leading character and remove the last
+     *      quote character on the command line, preserving any text after the
+     *      last quote character.
+     *
+     * This causes some commands containing quotes not to be executed correctly.
+     * For example:
+     *
+     *   "\Long folder name\aaa.exe" --name="Jurko" --no-surname
+     *
+     * would get its outermost quotes stripped and would be executed as:
+     *
+     *   \Long folder name\aaa.exe" --name="Jurko --no-surname
+     *
+     * which would report an error about '\Long' not being a valid command.
+     *
+     * cmd.exe help seems to indicate it would be enough to add an extra space
+     * character in front of the command to avoid this but this does not work,
+     * most likely due to the shell first stripping all leading whitespace
+     * characters from the command.
+     *
+     * Solution implemented here is to quote the whole command in case it
+     * contains any quote characters. Note though this will not work correctly
+     * should Windows ever 'fix' this feature.
+     *                                                   (03.06.2008.) (Jurko)
+     */
+    static FILE * windows_popen_wrapper( char const * command,
+        char const * mode )
+    {
+        int const extra_command_quotes_needed = !!strchr( command, '"' );
+        string quoted_command;
+        FILE * result;
+
+        if ( extra_command_quotes_needed )
+        {
+            string_new( &quoted_command );
+            string_append( &quoted_command, "\"" );
+            string_append( &quoted_command, command );
+            string_append( &quoted_command, "\"" );
+            command = quoted_command.value;
+        }
+
+        result = _popen( command, "r" );
+
+        if ( extra_command_quotes_needed )
+            string_free( &quoted_command );
+
+        return result;
+    }
+#endif /* defined(_MSC_VER) || defined(__BORLANDC__) */
+
+
+LIST * builtin_shell( FRAME * frame, int flags )
+{
+    LIST * command = lol_get( frame->args, 0 );
+    LIST * result = L0;
+    string s;
+    int32_t ret;
+    char buffer[ 1024 ];
+    FILE * p = NULL;
+    int exit_status = -1;
+    int exit_status_opt = 0;
+    int no_output_opt = 0;
+    int strip_eol_opt = 0;
+
+    /* Process the variable args options.
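+     * The recognized options are "exit-status", "no-output" and "strip-eol".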
*/ + { + int a = 1; + LIST * arg = lol_get( frame->args, a ); + for ( ; !list_empty( arg ); arg = lol_get( frame->args, ++a ) ) + { + if ( !strcmp( "exit-status", object_str( list_front( arg ) ) ) ) + exit_status_opt = 1; + else if ( !strcmp( "no-output", object_str( list_front( arg ) ) ) ) + no_output_opt = 1; + else if ( !strcmp("strip-eol", object_str( list_front( arg ) ) ) ) + strip_eol_opt = 1; + } + } + + /* The following fflush() call seems to be indicated as a workaround for a + * popen() bug on POSIX implementations related to synhronizing input + * stream positions for the called and the calling process. + */ + fflush( NULL ); + + p = popen( object_str( list_front( command ) ), "r" ); + if ( p == NULL ) + return L0; + + string_new( &s ); + + while ( ( ret = int32_t(fread( buffer, sizeof( char ), sizeof( buffer ) - 1, p )) ) > + 0 ) + { + buffer[ ret ] = 0; + if ( !no_output_opt ) + { + string_append( &s, buffer ); + } + + /* Explicit EOF check for systems with broken fread */ + if ( feof( p ) ) break; + } + + if ( strip_eol_opt ) + string_rtrim( &s ); + + exit_status = pclose( p ); + + /* The command output is returned first. */ + result = list_new( object_new( s.value ) ); + string_free( &s ); + + /* The command exit result next. */ + if ( exit_status_opt ) + { + if ( WIFEXITED( exit_status ) ) + exit_status = WEXITSTATUS( exit_status ); + else + exit_status = -1; + +#ifdef OS_VMS + /* Harmonize VMS success status with POSIX */ + if ( exit_status == 1 ) exit_status = EXIT_SUCCESS; +#endif + sprintf( buffer, "%d", exit_status ); + result = list_push_back( result, object_new( buffer ) ); + } + + return result; +} + +#else /* #ifdef HAVE_POPEN */ + +LIST * builtin_shell( FRAME * frame, int flags ) +{ + return L0; +} + +#endif /* #ifdef HAVE_POPEN */ + + +/* + * builtin_glob_archive() - GLOB_ARCHIVE rule + */ + +struct globbing2 +{ + LIST * patterns[ 2 ]; + LIST * results; + LIST * case_insensitive; +}; + + +static void builtin_glob_archive_back( void * closure, OBJECT * member, + LIST * symbols, int status, timestamp const * const time ) +{ + PROFILE_ENTER( BUILTIN_GLOB_ARCHIVE_BACK ); + + struct globbing2 * const globbing = (struct globbing2 *)closure; + PATHNAME f; + string buf[ 1 ]; + LISTITER iter; + LISTITER end; + LISTITER iter_symbols; + LISTITER end_symbols; + int matched = 0; + + /* Match member name. + */ + path_parse( object_str( member ), &f ); + + if ( !strcmp( f.f_member.ptr, "" ) ) + { + PROFILE_EXIT( BUILTIN_GLOB_ARCHIVE_BACK ); + return; + } + + string_new( buf ); + string_append_range( buf, f.f_member.ptr, f.f_member.ptr + f.f_member.len ); + + if ( globbing->case_insensitive ) + downcase_inplace( buf->value ); + + /* Glob with member patterns. If not matched, then match symbols. + */ + matched = 0; + iter = list_begin( globbing->patterns[ 0 ] ); + end = list_end( globbing->patterns[ 0 ] ); + for ( ; !matched && iter != end; + iter = list_next( iter ) ) + { + const char * pattern = object_str( list_item( iter ) ); + int match_exact = ( !has_wildcards( pattern ) ); + matched = ( match_exact ? + ( !strcmp( pattern, buf->value ) ) : + ( !glob( pattern, buf->value ) ) ); + } + + + /* Glob with symbol patterns, if requested. 
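+     * When any symbol patterns are given, the member-name match above is
+     * discarded and only a matching symbol selects the member.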
+ */ + iter = list_begin( globbing->patterns[ 1 ] ); + end = list_end( globbing->patterns[ 1 ] ); + + if ( iter != end ) matched = 0; + + for ( ; !matched && iter != end; + iter = list_next( iter ) ) + { + const char * pattern = object_str( list_item( iter ) ); + int match_exact = ( !has_wildcards( pattern ) ); + + iter_symbols = list_begin( symbols ); + end_symbols = list_end( symbols ); + + for ( ; !matched && iter_symbols != end_symbols; + iter_symbols = list_next( iter_symbols ) ) + { + const char * symbol = object_str( list_item( iter_symbols ) ); + + string_copy( buf, symbol ); + if ( globbing->case_insensitive ) + downcase_inplace( buf->value ); + + matched = ( match_exact ? + ( !strcmp( pattern, buf->value ) ) : + ( !glob( pattern, buf->value ) ) ); + } + } + + if ( matched ) + { + globbing->results = list_push_back( globbing->results, + object_copy( member ) ); + } + + string_free( buf ); + + PROFILE_EXIT( BUILTIN_GLOB_ARCHIVE_BACK ); +} + + +LIST * builtin_glob_archive( FRAME * frame, int flags ) +{ + LIST * const l = lol_get( frame->args, 0 ); + LIST * const r1 = lol_get( frame->args, 1 ); + LIST * const r3 = lol_get( frame->args, 3 ); + + LISTITER iter; + LISTITER end; + struct globbing2 globbing; + + globbing.results = L0; + globbing.patterns[ 0 ] = r1; + globbing.patterns[ 1 ] = r3; + + globbing.case_insensitive = +# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS ) + l; /* Always case-insensitive. */ +# else + lol_get( frame->args, 2 ); // r2 +# endif + + if ( globbing.case_insensitive ) + { + globbing.patterns[ 0 ] = downcase_list( globbing.patterns[ 0 ] ); + globbing.patterns[ 1 ] = downcase_list( globbing.patterns[ 1 ] ); + } + + iter = list_begin( l ); + end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + file_archivescan( list_item( iter ), builtin_glob_archive_back, &globbing ); + + if ( globbing.case_insensitive ) + { + list_free( globbing.patterns[ 0 ] ); + list_free( globbing.patterns[ 1 ] ); + } + + return globbing.results; +} diff --git a/src/boost/tools/build/src/engine/builtins.h b/src/boost/tools/build/src/engine/builtins.h new file mode 100644 index 000000000..241a0d050 --- /dev/null +++ b/src/boost/tools/build/src/engine/builtins.h @@ -0,0 +1,74 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. 
+ */ + +#ifndef JAM_BUILTINS_H +# define JAM_BUILTINS_H + +# include "config.h" +# include "frames.h" + +/* + * builtins.h - compile parsed jam statements + */ + +void load_builtins(); +void init_set(); +void init_path(); +void init_regex(); +void init_property_set(); +void init_sequence(); +void init_order(); + +void property_set_done(); + +LIST *builtin_calc( FRAME * frame, int flags ); +LIST *builtin_depends( FRAME * frame, int flags ); +LIST *builtin_rebuilds( FRAME * frame, int flags ); +LIST *builtin_echo( FRAME * frame, int flags ); +LIST *builtin_exit( FRAME * frame, int flags ); +LIST *builtin_flags( FRAME * frame, int flags ); +LIST *builtin_glob( FRAME * frame, int flags ); +LIST *builtin_glob_recursive( FRAME * frame, int flags ); +LIST *builtin_subst( FRAME * frame, int flags ); +LIST *builtin_match( FRAME * frame, int flags ); +LIST *builtin_split_by_characters( FRAME * frame, int flags ); +LIST *builtin_hdrmacro( FRAME * frame, int flags ); +LIST *builtin_rulenames( FRAME * frame, int flags ); +LIST *builtin_varnames( FRAME * frame, int flags ); +LIST *builtin_delete_module( FRAME * frame, int flags ); +LIST *builtin_import( FRAME * frame, int flags ); +LIST *builtin_export( FRAME * frame, int flags ); +LIST *builtin_caller_module( FRAME * frame, int flags ); +LIST *builtin_backtrace( FRAME * frame, int flags ); +LIST *builtin_pwd( FRAME * frame, int flags ); +LIST *builtin_update( FRAME * frame, int flags ); +LIST *builtin_update_now( FRAME * frame, int flags ); +LIST *builtin_import_module( FRAME * frame, int flags ); +LIST *builtin_imported_modules( FRAME * frame, int flags ); +LIST *builtin_instance( FRAME * frame, int flags ); +LIST *builtin_sort( FRAME * frame, int flags ); +LIST *builtin_normalize_path( FRAME * frame, int flags ); +LIST *builtin_native_rule( FRAME * frame, int flags ); +LIST *builtin_has_native_rule( FRAME * frame, int flags ); +LIST *builtin_user_module( FRAME * frame, int flags ); +LIST *builtin_nearest_user_location( FRAME * frame, int flags ); +LIST *builtin_check_if_file( FRAME * frame, int flags ); +LIST *builtin_python_import_rule( FRAME * frame, int flags ); +LIST *builtin_shell( FRAME * frame, int flags ); +LIST *builtin_md5( FRAME * frame, int flags ); +LIST *builtin_file_open( FRAME * frame, int flags ); +LIST *builtin_pad( FRAME * frame, int flags ); +LIST *builtin_precious( FRAME * frame, int flags ); +LIST *builtin_self_path( FRAME * frame, int flags ); +LIST *builtin_makedir( FRAME * frame, int flags ); +LIST *builtin_readlink( FRAME * frame, int flags ); +LIST *builtin_glob_archive( FRAME * frame, int flags ); +LIST *builtin_debug_print_helper( FRAME * frame, int flags ); + +void backtrace( FRAME *frame ); +extern int last_update_now_status; + +#endif diff --git a/src/boost/tools/build/src/engine/bump_version.py b/src/boost/tools/build/src/engine/bump_version.py new file mode 100644 index 000000000..66f5f1c72 --- /dev/null +++ b/src/boost/tools/build/src/engine/bump_version.py @@ -0,0 +1,98 @@ +#!/usr/bin/python + +# This script is used to bump the bjam version. It takes a single argument, e.g +# +# ./bump_version.py 3.1.9 +# +# and updates all the necessary files. +# +# Copyright 2006 Rene Rivera. +# Copyright 2005-2006 Vladimir Prus. +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + + +import os +import os.path +import re +import string +import sys + +srcdir = os.path.abspath(os.path.dirname(__file__)) +docdir = os.path.abspath(os.path.join(srcdir, "..", "doc")) + + +def edit(file, *replacements): + print(" '%s'..." % file) + f = open(file, 'r') + text = f.read() + f.close() + for (source, target) in replacements: + text, n = re.compile(source, re.MULTILINE).subn(target, text) + assert n > 0 + f = open(file, 'w') + f.write(text) + f.close() + + +def make_edits(ver): + ver03 = (list(ver) + [0] * 3)[0:3] + ver02 = ver03[0:2] + + join = lambda v, s : s.join(str(x) for x in v) + dotJoin = lambda v : join(v, ".") + + print("Setting version to %s" % str(ver03)) + + edit(os.path.join(srcdir, "boost-jam.spec"), + ('^(Version:) .*$', '\\1 %s' % dotJoin(ver03))) + + edit(os.path.join(srcdir, "build.jam"), + ('^(_VERSION_ =).* ;$', '\\1 %s ;' % join(ver03, " "))) + + edit(os.path.join(docdir, "bjam.qbk"), + ('(\[version).*(\])', '\\1: %s\\2' % dotJoin(ver03)), + ('(\[def :version:).*(\])', '\\1 %s\\2' % dotJoin(ver03))) + + edit(os.path.join(srcdir, "patchlevel.h"), + ('^(#define VERSION_MAJOR) .*$', '\\1 %s' % ver03[0]), + ('^(#define VERSION_MINOR) .*$', '\\1 %s' % ver03[1]), + ('^(#define VERSION_PATCH) .*$', '\\1 %s' % ver03[2]), + ('^(#define VERSION_MAJOR_SYM) .*$', '\\1 "%02d"' % ver03[0]), + ('^(#define VERSION_MINOR_SYM) .*$', '\\1 "%02d"' % ver03[1]), + ('^(#define VERSION_PATCH_SYM) .*$', '\\1 "%02d"' % ver03[2]), + ('^(#define VERSION) .*$', '\\1 "%s"' % dotJoin(ver)), + ('^(#define JAMVERSYM) .*$', '\\1 "JAMVERSION=%s"' % dotJoin(ver02))) + + +def main(): + if len(sys.argv) < 2: + print("Expect new version as argument.") + sys.exit(1) + if len(sys.argv) > 3: + print("Too many arguments.") + sys.exit(1) + + version = sys.argv[1].split(".") + if len(version) > 3: + print("Expect version argument in the format: ..") + sys.exit(1) + + try: + version = list(int(x) for x in version) + except ValueError: + print("Version values must be valid integers.") + sys.exit(1) + + while version and version[-1] == 0: + version.pop() + + if not version: + print("At least one of the version values must be positive.") + sys.exit() + + make_edits(version) + + +if __name__ == '__main__': + main() diff --git a/src/boost/tools/build/src/engine/check_clib.cpp b/src/boost/tools/build/src/engine/check_clib.cpp new file mode 100644 index 000000000..158f81acf --- /dev/null +++ b/src/boost/tools/build/src/engine/check_clib.cpp @@ -0,0 +1,19 @@ +/* Copyright 2021 Rene Rivera + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* +This program is a compile test for support of clib use. +This is used by the build script to guess and check the compiler to build the engine with. +*/ + +// Some headers we depend on.. +#include + + +int check_clib() +{ + { auto _ = strdup("-"); } + return 0; +} diff --git a/src/boost/tools/build/src/engine/check_cxx11.cpp b/src/boost/tools/build/src/engine/check_cxx11.cpp new file mode 100644 index 000000000..085547b26 --- /dev/null +++ b/src/boost/tools/build/src/engine/check_cxx11.cpp @@ -0,0 +1,31 @@ +/* Copyright 2020 Rene Rivera + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* +This program is a compile test for support of C++11. 
If it compiles +successfully some key parts of C++11 the B2 engine requires are +available. This is used by the build script to guess and check the +compiler to build the engine with. +*/ + +// Some headers we test... +#include +#include + + +int main() +{ + // Check for basic thread calls. + // [2020-08-19] Mingw-w64 with win32 threading model (as opposed to posix + // threading model) does not really have std::thread etc. Please see comments + // in sysinfo.cpp. + #ifndef _WIN32 + { auto _ = std::thread::hardware_concurrency(); } + #endif + + // [2021-08-07] We check the following C++11 features: brace initialization, + // unique_ptr. Plus the author's ability to memorize some digits. + { const std::unique_ptr pf {new float {3.14159f}}; } +} diff --git a/src/boost/tools/build/src/engine/class.cpp b/src/boost/tools/build/src/engine/class.cpp new file mode 100644 index 000000000..09084c892 --- /dev/null +++ b/src/boost/tools/build/src/engine/class.cpp @@ -0,0 +1,192 @@ +/* + * Copyright Vladimir Prus 2003. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "class.h" + +#include "constants.h" +#include "frames.h" +#include "hash.h" +#include "lists.h" +#include "object.h" +#include "rules.h" +#include "jam_strings.h" +#include "variable.h" +#include "output.h" +#include "startup.h" + +#include +#include + + +static struct hash * classes = 0; + + +static void check_defined( LIST * class_names ) +{ + LISTITER iter = list_begin( class_names ); + LISTITER const end = list_end( class_names ); + for ( ; iter != end; iter = list_next( iter ) ) + { + if ( !hash_find( classes, list_item( iter ) ) ) + { + out_printf( "Class %s is not defined\n", object_str( list_item( iter ) ) + ); + b2::clean_exit( b2::exit_result::failure ); + } + } +} + + +static OBJECT * class_module_name( OBJECT * declared_name ) +{ + string name[ 1 ]; + OBJECT * result; + + string_new( name ); + string_append( name, "class@" ); + string_append( name, object_str( declared_name ) ); + + result = object_new( name->value ); + string_free( name ); + + return result; +} + + +struct import_base_data +{ + OBJECT * base_name; + module_t * base_module; + module_t * class_module; +}; + + +static void import_base_rule( void * r_, void * d_ ) +{ + RULE * r = (RULE *)r_; + RULE * ir1; + RULE * ir2; + struct import_base_data * d = (struct import_base_data *)d_; + OBJECT * qname; + + string qualified_name[ 1 ]; + string_new ( qualified_name ); + string_append ( qualified_name, object_str( d->base_name ) ); + string_push_back( qualified_name, '.' ); + string_append ( qualified_name, object_str( r->name ) ); + qname = object_new( qualified_name->value ); + string_free( qualified_name ); + + ir1 = import_rule( r, d->class_module, r->name ); + ir2 = import_rule( r, d->class_module, qname ); + + object_free( qname ); + + /* Copy 'exported' flag. */ + ir1->exported = ir2->exported = r->exported; + + /* If we are importing a class method, localize it. */ + if ( ( r->module == d->base_module ) || ( r->module->class_module && + ( r->module->class_module == d->base_module ) ) ) + { + rule_localize( ir1, d->class_module ); + rule_localize( ir2, d->class_module ); + } +} + + +/* + * For each exported rule 'n', declared in class module for base, imports that + * rule in 'class' as 'n' and as 'base.n'. Imported rules are localized and + * marked as exported. 
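+ * For example (illustrative names): if class 'x' derives from a base 'b'
+ * whose class module exports a rule 'run', the class module for 'x' ends up
+ * with both 'run' and 'b.run'.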
+ */ + +static void import_base_rules( module_t * class_, OBJECT * base ) +{ + OBJECT * module_name = class_module_name( base ); + module_t * base_module = bindmodule( module_name ); + LIST * imported; + struct import_base_data d; + d.base_name = base; + d.base_module = base_module; + d.class_module = class_; + object_free( module_name ); + + if ( base_module->rules ) + hashenumerate( base_module->rules, import_base_rule, &d ); + + imported = imported_modules( base_module ); + import_module( imported, class_ ); + list_free( imported ); +} + + +OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame ) +{ + OBJECT * name = class_module_name( list_front( xname ) ); + OBJECT * * pp; + module_t * class_module = 0; + int found; + + if ( !classes ) + classes = hashinit( sizeof( OBJECT * ), "classes" ); + + pp = (OBJECT * *)hash_insert( classes, list_front( xname ), &found ); + if ( !found ) + { + *pp = object_copy( list_front( xname ) ); + } + else + { + out_printf( "Class %s already defined\n", object_str( list_front( xname ) ) + ); + b2::clean_exit( b2::exit_result::failure ); + } + check_defined( bases ); + + class_module = bindmodule( name ); + + { + /* + Initialize variables that B2 inserts in every object. + We want to avoid creating the object's hash if it isn't needed. + */ + int num = class_module->num_fixed_variables; + module_add_fixed_var( class_module, constant_name, &num ); + module_add_fixed_var( class_module, constant_class, &num ); + module_set_fixed_variables( class_module, num ); + } + + var_set( class_module, constant_name, xname, VAR_SET ); + var_set( class_module, constant_bases, bases, VAR_SET ); + + { + LISTITER iter = list_begin( bases ); + LISTITER const end = list_end( bases ); + for ( ; iter != end; iter = list_next( iter ) ) + import_base_rules( class_module, list_item( iter ) ); + } + + return name; +} + + +static void free_class( void * xclass, void * data ) +{ + object_free( *(OBJECT * *)xclass ); +} + + +void class_done( void ) +{ + if ( classes ) + { + hashenumerate( classes, free_class, (void *)0 ); + hashdone( classes ); + classes = 0; + } +} diff --git a/src/boost/tools/build/src/engine/class.h b/src/boost/tools/build/src/engine/class.h new file mode 100644 index 000000000..6272ff454 --- /dev/null +++ b/src/boost/tools/build/src/engine/class.h @@ -0,0 +1,15 @@ +/* Copyright Vladimir Prus 2003. Distributed under the Boost */ +/* Software License, Version 1.0. (See accompanying */ +/* file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) */ + +#ifndef CLASS_H_VP_2003_08_01 +#define CLASS_H_VP_2003_08_01 + +#include "config.h" +#include "lists.h" +#include "frames.h" + +OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame ); +void class_done( void ); + +#endif diff --git a/src/boost/tools/build/src/engine/command.cpp b/src/boost/tools/build/src/engine/command.cpp new file mode 100644 index 000000000..75837d49d --- /dev/null +++ b/src/boost/tools/build/src/engine/command.cpp @@ -0,0 +1,120 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * command.c - maintain lists of commands + */ + +#include "jam.h" +#include "command.h" + +#include "lists.h" +#include "rules.h" + +#include + + +/* + * cmdlist_append_cmd + */ +CMDLIST * cmdlist_append_cmd( CMDLIST * l, CMD * cmd ) +{ + CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) ); + result->iscmd = 1; + result->next = l; + result->impl.cmd = cmd; + return result; +} + +CMDLIST * cmdlist_append_target( CMDLIST * l, TARGET * t ) +{ + CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) ); + result->iscmd = 0; + result->next = l; + result->impl.t = t; + return result; +} + +void cmdlist_free( CMDLIST * l ) +{ + while ( l ) + { + CMDLIST * tmp = l->next; + BJAM_FREE( l ); + l = tmp; + } +} + +/* + * cmd_new() - return a new CMD. + */ + +CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell ) +{ + CMD * cmd = b2::jam::make_ptr(); + FRAME frame[ 1 ]; + + assert( cmd ); + cmd->rule = rule; + cmd->shell = shell; + cmd->next = 0; + cmd->noop = 0; + cmd->asynccnt = 1; + cmd->status = 0; + cmd->lock = NULL; + cmd->unlock = NULL; + + lol_init( &cmd->args ); + lol_add( &cmd->args, targets ); + lol_add( &cmd->args, sources ); + string_new( cmd->buf ); + + frame_init( frame ); + frame->module = rule->module; + lol_init( frame->args ); + lol_add( frame->args, list_copy( targets ) ); + lol_add( frame->args, list_copy( sources ) ); + function_run_actions( rule->actions->command, frame, stack_global(), + cmd->buf ); + frame_free( frame ); + + return cmd; +} + + +/* + * cmd_free() - free a CMD + */ + +void cmd_free( CMD * cmd ) +{ + cmdlist_free( cmd->next ); + lol_free( &cmd->args ); + list_free( cmd->shell ); + string_free( cmd->buf ); + b2::jam::free_ptr( cmd ); +} + + +/* + * cmd_release_targets_and_shell() + * + * Makes the CMD release its hold on its targets & shell lists and forget + * about them. Useful in case caller still has references to those lists and + * wants to reuse them after freeing the CMD object. + */ + +void cmd_release_targets_and_shell( CMD * cmd ) +{ + cmd->args.list[ 0 ] = L0; /* targets */ + cmd->shell = L0; /* shell */ +} diff --git a/src/boost/tools/build/src/engine/command.h b/src/boost/tools/build/src/engine/command.h new file mode 100644 index 000000000..9163d91ac --- /dev/null +++ b/src/boost/tools/build/src/engine/command.h @@ -0,0 +1,108 @@ +/* + * Copyright 1994 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 René Ferdinand Rivera Morell + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * command.h - the CMD structure and routines to manipulate them + * + * Both ACTION and CMD contain a rule, targets, and sources. An + * ACTION describes a rule to be applied to the given targets and + * sources; a CMD is what actually gets executed by the shell. The + * differences are due to: + * + * ACTIONS must be combined if 'actions together' is given. + * ACTIONS must be split if 'actions piecemeal' is given. + * ACTIONS must have current sources omitted for 'actions updated'. + * + * The CMD datatype holds a single command that is to be executed + * against a target, and they can chain together to represent the + * full collection of commands used to update a target. 
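+ * (evaluate_rule() creates the ACTIONs when a rule with updating actions is
+ * invoked; the update phase later turns them into the CMDs that actually get
+ * run, following the rules above.)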
+ * + * Structures: + * + * CMD - an action, ready to be formatted into a buffer and executed. + * + * External routines: + * + * cmd_new() - return a new CMD or 0 if too many args. + * cmd_free() - delete CMD and its parts. + * cmd_next() - walk the CMD chain. + * cmd_release_targets_and_shell() - CMD forgets about its targets & shell. + */ + + +/* + * CMD - an action, ready to be formatted into a buffer and executed. + */ + +#ifndef COMMAND_SW20111118_H +#define COMMAND_SW20111118_H + +#include "config.h" +#include "lists.h" +#include "mem.h" +#include "rules.h" +#include "jam_strings.h" + + +typedef struct _cmd CMD; + +/* + * A list whose elements are either TARGETS or CMDS. + * CMDLIST is used only by CMD. A TARGET means that + * the CMD is the last updating action required to + * build the target. A CMD is the next CMD required + * to build the same target. (Note that a single action + * can update more than one target, so the CMDs form + * a DAG, not a straight linear list.) + */ +typedef struct _cmdlist { + struct _cmdlist * next; + union { + CMD * cmd; + TARGET * t; + } impl; + char iscmd; +} CMDLIST; + +CMDLIST * cmdlist_append_cmd( CMDLIST *, CMD * ); +CMDLIST * cmdlist_append_target( CMDLIST *, TARGET * ); +void cmd_list_free( CMDLIST * ); + +struct _cmd +{ + CMDLIST * next; + RULE * rule; /* rule->actions contains shell script */ + LIST * shell; /* $(JAMSHELL) value */ + LOL args; /* LISTs for $(<), $(>) */ + string buf[ 1 ]; /* actual commands */ + int noop; /* no-op commands should be faked instead of executed */ + int asynccnt; /* number of outstanding dependencies */ + targets_ptr lock; /* semaphores that are required by this cmd. */ + targets_uptr unlock; /* semaphores that are released when this cmd finishes. */ + char status; /* the command status */ +}; + +CMD * cmd_new +( + RULE * rule, /* rule (referenced) */ + LIST * targets, /* $(<) (ownership transferred) */ + LIST * sources, /* $(>) (ownership transferred) */ + LIST * shell /* $(JAMSHELL) (ownership transferred) */ +); + +void cmd_release_targets_and_shell( CMD * ); + +void cmd_free( CMD * ); + +#define cmd_next( c ) ((c)->next) + +#endif diff --git a/src/boost/tools/build/src/engine/compile.cpp b/src/boost/tools/build/src/engine/compile.cpp new file mode 100644 index 000000000..daeb3d2f9 --- /dev/null +++ b/src/boost/tools/build/src/engine/compile.cpp @@ -0,0 +1,231 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 René Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * compile.c - compile parsed jam statements + * + * External routines: + * evaluate_rule() - execute a rule invocation + * + * Internal routines: + * debug_compile() - printf with indent to show rule expansion + */ + +#include "jam.h" +#include "compile.h" + +#include "builtins.h" +#include "class.h" +#include "constants.h" +#include "hash.h" +#include "hdrmacro.h" +#include "make.h" +#include "modules.h" +#include "parse.h" +#include "rules.h" +#include "search.h" +#include "jam_strings.h" +#include "variable.h" +#include "output.h" + +#include +#include +#include + + +static void debug_compile( int which, char const * s, FRAME * ); + +/* Internal functions from builtins.c */ +void backtrace( FRAME * ); +void backtrace_line( FRAME * ); +void print_source_line( FRAME * ); +void unknown_rule( FRAME *, char const * key, module_t *, OBJECT * rule_name ); + + +/* + * evaluate_rule() - execute a rule invocation + */ + +LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * frame ) +{ + LIST * result = L0; + profile_frame prof[ 1 ]; + module_t * prev_module = frame->module; + + if ( DEBUG_COMPILE ) + { + /* Try hard to indicate in which module the rule is going to execute. */ + char buf[ 256 ] = ""; + if ( rule->module->name ) + { + strncat( buf, object_str( rule->module->name ), sizeof( buf ) - + 1 ); + strncat( buf, ".", sizeof( buf ) - 1 ); + if ( strncmp( buf, object_str( rule->name ), strlen( buf ) ) == 0 ) + { + buf[ 0 ] = 0; + } + } + strncat( buf, object_str( rule->name ), sizeof( buf ) - 1 ); + debug_compile( 1, buf, frame ); + + lol_print( frame->args ); + out_printf( "\n" ); + } + + if ( rule->procedure && rule->module != prev_module ) + { + /* Propagate current module to nested rule invocations. */ + frame->module = rule->module; + } + + /* Record current rule name in frame. */ + if ( rule->procedure ) + { + frame->rulename = object_str( rulename ); + /* And enter record profile info. */ + if ( DEBUG_PROFILE ) + profile_enter( function_rulename( rule->procedure ), prof ); + } + + /* Check traditional targets $(<) and sources $(>). */ + if ( !rule->actions && !rule->procedure ) + unknown_rule( frame, NULL, frame->module, rulename ); + + /* If this rule will be executed for updating the targets then construct the + * action for make(). + */ + if ( rule->actions ) + { + targets_ptr t; + + /* The action is associated with this instance of this rule. */ + ACTION * const action = b2::jam::make_ptr(); + + action->rule = rule; + action->targets.reset(); targetlist( action->targets, lol_get( frame->args, 0 ) ); + action->sources.reset(); targetlist( action->sources, lol_get( frame->args, 1 ) ); + action->refs = 1; + + /* If we have a group of targets all being built using the same action + * and any of these targets is updated, then we have to consider them + * all to be out-dated. We do this by adding a REBUILDS in both directions + * between the first target and all the other targets. + */ + if ( action->targets ) + { + TARGET * const t0 = action->targets->target; + for ( t = action->targets->next.get(); t; t = t->next.get() ) + { + targetentry( t->target->rebuilds, t0 ); + targetentry( t0->rebuilds, t->target ); + } + } + + /* Append this action to the actions of each target. 
*/ + for ( t = action->targets.get(); t; t = t->next.get() ) + t->target->actions = actionlist( t->target->actions, action ); + + action_free( action ); + } + + /* Now recursively compile any parse tree associated with this rule. + * function_refer()/function_free() call pair added to ensure the rule does + * not get freed while in use. + */ + if ( rule->procedure ) + { + auto function = b2::jam::make_unique_bare_jptr( rule->procedure, function_refer, function_free ); + result = function_run( function.get(), frame, stack_global() ); + } + + if ( DEBUG_PROFILE && rule->procedure ) + profile_exit( prof ); + + if ( DEBUG_COMPILE ) + debug_compile( -1, 0, frame ); + + return result; +} + + +/* + * Call the given rule with the specified parameters. The parameters should be + * of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule' + * in that frame for the called rule is prepared inside 'call_rule'. + * + * This function is useful when a builtin rule (in C) wants to call another rule + * which might be implemented in Jam. + */ + +LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... ) +{ + va_list va; + LIST * result; + + FRAME inner[ 1 ]; + frame_init( inner ); + inner->prev = caller_frame; + inner->prev_user = caller_frame->module->user_module + ? caller_frame + : caller_frame->prev_user; + inner->module = caller_frame->module; + + va_start( va, caller_frame ); + for ( ; ; ) + { + LIST * const l = va_arg( va, LIST * ); + if ( !l ) + break; + lol_add( inner->args, l ); + } + va_end( va ); + + result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner ); + + frame_free( inner ); + + return result; +} + + +/* + * debug_compile() - printf with indent to show rule expansion + */ + +static void debug_compile( int which, char const * s, FRAME * frame ) +{ + static int level = 0; + static char indent[ 36 ] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|"; + + if ( which >= 0 ) + { + int i; + + print_source_line( frame ); + + i = ( level + 1 ) * 2; + while ( i > 35 ) + { + out_puts( indent ); + i -= 35; + } + + out_printf( "%*.*s ", i, i, indent ); + } + + if ( s ) + out_printf( "%s ", s ); + + level += which; +} diff --git a/src/boost/tools/build/src/engine/compile.h b/src/boost/tools/build/src/engine/compile.h new file mode 100644 index 000000000..72927810e --- /dev/null +++ b/src/boost/tools/build/src/engine/compile.h @@ -0,0 +1,60 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * compile.h - compile parsed jam statements + */ + +#ifndef COMPILE_DWA20011022_H +#define COMPILE_DWA20011022_H + +#include "config.h" +#include "frames.h" +#include "lists.h" +#include "object.h" +#include "rules.h" + +void compile_builtins(); + +LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * ); +LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... 
); + +/* Flags for compile_set(), etc */ + +#define ASSIGN_SET 0x00 /* = assign variable */ +#define ASSIGN_APPEND 0x01 /* += append variable */ +#define ASSIGN_DEFAULT 0x02 /* set only if unset */ + +/* Flags for compile_setexec() */ + +#define EXEC_UPDATED 0x01 /* executes updated */ +#define EXEC_TOGETHER 0x02 /* executes together */ +#define EXEC_IGNORE 0x04 /* executes ignore */ +#define EXEC_QUIETLY 0x08 /* executes quietly */ +#define EXEC_PIECEMEAL 0x10 /* executes piecemeal */ +#define EXEC_EXISTING 0x20 /* executes existing */ + +/* Conditions for compile_if() */ + +#define EXPR_NOT 0 /* ! cond */ +#define EXPR_AND 1 /* cond && cond */ +#define EXPR_OR 2 /* cond || cond */ +#define EXPR_EXISTS 3 /* arg */ +#define EXPR_EQUALS 4 /* arg = arg */ +#define EXPR_NOTEQ 5 /* arg != arg */ +#define EXPR_LESS 6 /* arg < arg */ +#define EXPR_LESSEQ 7 /* arg <= arg */ +#define EXPR_MORE 8 /* arg > arg */ +#define EXPR_MOREEQ 9 /* arg >= arg */ +#define EXPR_IN 10 /* arg in arg */ + +#endif diff --git a/src/boost/tools/build/src/engine/config.h b/src/boost/tools/build/src/engine/config.h new file mode 100644 index 000000000..863f1aa77 --- /dev/null +++ b/src/boost/tools/build/src/engine/config.h @@ -0,0 +1,60 @@ +#ifndef B2_CONFIG_H +#define B2_CONFIG_H + +/* +Copyright 2002-2021 Rene Rivera. +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#define OPT_HEADER_CACHE_EXT 1 +#define OPT_GRAPH_DEBUG_EXT 1 +#define OPT_SEMAPHORE 1 +#define OPT_AT_FILES 1 +#define OPT_DEBUG_PROFILE 1 +#define JAM_DEBUGGER 1 +#define OPT_FIX_TARGET_VARIABLES_EXT 1 +#define OPT_IMPROVED_PATIENCE_EXT 1 +// #define BJAM_NO_MEM_CACHE 1 + +// Autodetect various operating systems.. + +#if defined(_WIN32) || defined(_WIN64) || \ + defined(__WIN32__) || defined(__TOS_WIN__) || \ + defined(__WINDOWS__) + #define NT 1 +#endif + +#if defined(__VMS) || defined(__VMS_VER) + #if !defined(VMS) + #define VMS 1 + #endif +#endif + +// To work around QEMU failures on mixed mode situations (32 vs 64) we need to +// enable partial LFS support in system headers. And we need to do this before +// any system headers are included. + +#if !defined(NT) && !defined(VMS) +# define _FILE_OFFSET_BITS 64 +#endif + +// Correct missing types in some earlier compilers.. + +#include +#ifndef INT32_MIN + +// VS 2013 is barely C++11/C99. And opts to not provide specific sized int types. +// Provide a generic implementation of the sizes we use. +#if UINT_MAX == 0xffffffff +typedef int int32_t; +#elif (USHRT_MAX == 0xffffffff) +typedef short int32_t; +#elif ULONG_MAX == 0xffffffff +typedef long int32_t; +#endif + +#endif + +#endif diff --git a/src/boost/tools/build/src/engine/config_toolset.bat b/src/boost/tools/build/src/engine/config_toolset.bat new file mode 100644 index 000000000..48ccd3aaf --- /dev/null +++ b/src/boost/tools/build/src/engine/config_toolset.bat @@ -0,0 +1,238 @@ +@ECHO OFF + +REM ~ Copyright 2002-2018 Rene Rivera. +REM ~ Distributed under the Boost Software License, Version 1.0. +REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +:Start +REM Setup the toolset command and options. 
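+REM Each :Config_* routine below sets B2_CXX (and, for the MSVC-style
+REM toolsets, B2_CXX_LINK) and marks the toolset as recognized via _known_=1.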
+if "_%B2_TOOLSET%_" == "_msvc_" call :Config_MSVC +if "_%B2_TOOLSET%_" == "_vc12_" call :Config_VC12 +if "_%B2_TOOLSET%_" == "_vc14_" call :Config_VC14 +if "_%B2_TOOLSET%_" == "_vc141_" call :Config_VC141 +if "_%B2_TOOLSET%_" == "_vc142_" call :Config_VC142 +if "_%B2_TOOLSET%_" == "_vc143_" call :Config_VC143 +if "_%B2_TOOLSET%_" == "_borland_" call :Config_BORLAND +if "_%B2_TOOLSET%_" == "_como_" call :Config_COMO +if "_%B2_TOOLSET%_" == "_gcc_" call :Config_GCC +if "_%B2_TOOLSET%_" == "_clang_" call :Config_CLANG +if "_%B2_TOOLSET%_" == "_gcc-nocygwin_" call :Config_GCC_NOCYGWIN +if "_%B2_TOOLSET%_" == "_intel-win32_" call :Config_INTEL_WIN32 +if "_%B2_TOOLSET%_" == "_mingw_" call :Config_MINGW +exit /b %errorlevel% + +:Call_If_Exists +ECHO Call_If_Exists %* +if EXIST %1 call %* +goto :eof + +:Config_MSVC +if not defined CXX ( set "CXX=cl" ) +if NOT "_%MSVCDir%_" == "__" ( + set "B2_TOOLSET_ROOT=%MSVCDir%\" + ) +call :Call_If_Exists "%B2_TOOLSET_ROOT%bin\VCVARS32.BAT" +if not "_%B2_TOOLSET_ROOT%_" == "__" ( + set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%" + ) +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VC11 +if not defined CXX ( set "CXX=cl" ) +if NOT "_%VS110COMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\" + ) +if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS% +if NOT "_%B2_TOOLSET_ROOT%_" == "__" ( + if "_%VCINSTALLDIR%_" == "__" ( + set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%" + ) ) +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VC12 +if not defined CXX ( set "CXX=cl" ) +if NOT "_%VS120COMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\" + ) + +if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE% +set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH% + +if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS% +if NOT "_%B2_TOOLSET_ROOT%_" == "__" ( + if "_%VCINSTALLDIR%_" == "__" ( + set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%" + ) ) +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VC14 +if not defined CXX ( set "CXX=cl" ) +if "_%B2_TOOLSET_ROOT%_" == "__" ( + if NOT "_%VS140COMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\" + )) + +if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE% +set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH% + +if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%VCVARSALL.BAT" %B2_BUILD_ARGS% +if NOT "_%B2_TOOLSET_ROOT%_" == "__" ( + if "_%VCINSTALLDIR%_" == "__" ( + set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%" + ) ) +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VC141 +if not defined CXX ( set "CXX=cl" ) +call vswhere_usability_wrapper.cmd +REM Reset ERRORLEVEL since from now on it's all based on ENV vars +ver > nul 2> nul +if "_%B2_TOOLSET_ROOT%_" == "__" ( + if NOT "_%VS150COMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VS150COMNTOOLS%..\..\VC\" + )) + +if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE% +set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH% + +REM return to current directory as vsdevcmd_end.bat 
switches to %USERPROFILE%\Source if it exists. +pushd %CD% +if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS% +popd +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VC142 +if not defined CXX ( set "CXX=cl" ) +call vswhere_usability_wrapper.cmd +REM Reset ERRORLEVEL since from now on it's all based on ENV vars +ver > nul 2> nul +if "_%B2_TOOLSET_ROOT%_" == "__" ( + if NOT "_%VS160COMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VS160COMNTOOLS%..\..\VC\" + )) + +if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE% +set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH% + +REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists. +pushd %CD% +if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS% +popd +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VC143 +if not defined CXX ( set "CXX=cl" ) +call vswhere_usability_wrapper.cmd +REM Reset ERRORLEVEL since from now on it's all based on ENV vars +ver > nul 2> nul +if "_%B2_TOOLSET_ROOT%_" == "__" ( + if NOT "_%VS170COMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VS170COMNTOOLS%..\..\VC\" + )) + +if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE% +set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH% + +REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists. +pushd %CD% +if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS% +popd +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_VCUNK +if NOT "_%B2_TOOLSET%_" == "_vcunk_" goto Skip_VCUNK +call vswhere_usability_wrapper.cmd +REM Reset ERRORLEVEL since from now on it's all based on ENV vars +ver > nul 2> nul +if "_%B2_TOOLSET_ROOT%_" == "__" ( + if NOT "_%VSUNKCOMNTOOLS%_" == "__" ( + set "B2_TOOLSET_ROOT=%VSUNKCOMNTOOLS%..\..\VC\" + )) + +if "_%B2_ARCH%_" == "__" set B2_ARCH=%PROCESSOR_ARCHITECTURE% +set B2_BUILD_ARGS=%B2_BUILD_ARGS% %B2_ARCH% + +REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists. 
+pushd %CD% +if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%B2_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %B2_BUILD_ARGS% +popd +set "B2_CXX="%CXX%" /nologo /MP /MT /TP /Feb2 /wd4996 /O2 /GL /EHsc" +set "B2_CXX_LINK=/link kernel32.lib advapi32.lib user32.lib" +set "_known_=1" +goto :eof + +:Config_BORLAND +if not defined CXX ( set "CXX=bcc32c" ) +if "_%B2_TOOLSET_ROOT%_" == "__" ( + call guess_toolset.bat test_path bcc32c.exe ) +if "_%B2_TOOLSET_ROOT%_" == "__" ( + if not errorlevel 1 ( + set "B2_TOOLSET_ROOT=%FOUND_PATH%..\" + ) ) +if not "_%B2_TOOLSET_ROOT%_" == "__" ( + set "PATH=%B2_TOOLSET_ROOT%Bin;%PATH%" + ) +set "B2_CXX="%CXX%" -tC -P -O2 -w- -I"%B2_TOOLSET_ROOT%Include" -L"%B2_TOOLSET_ROOT%Lib" -eb2" +set "_known_=1" +goto :eof + +:Config_COMO +if not defined CXX ( set "CXX=como" ) +set "B2_CXX="%CXX%" --inlining -o b2.exe" +set "_known_=1" +goto :eof + +:Config_GCC +if not defined CXX ( set "CXX=g++" ) +set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -o b2.exe -D_GNU_SOURCE" +set "_known_=1" +goto :eof + +:Config_CLANG +if not defined CXX ( set "CXX=clang++" ) +set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -o b2.exe" +set "_known_=1" +goto :eof + +:Config_GCC_NOCYGWIN +if not defined CXX ( set "CXX=g++" ) +set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -mno-cygwin -o b2.exe" +set "_known_=1" +goto :eof + +:Config_INTEL_WIN32 +if not defined CXX ( set "CXX=icl" ) +set "B2_CXX="%CXX%" /nologo /MT /O2 /Ob2 /Gy /GF /GA /GB /Feb2" +set "_known_=1" +goto :eof + +:Config_MINGW +if not defined CXX ( set "CXX=g++" ) +if not "_%B2_TOOLSET_ROOT%_" == "__" ( + set "PATH=%B2_TOOLSET_ROOT%bin;%PATH%" + ) +for /F "delims=" %%I in ("%CXX%") do set "PATH=%PATH%;%%~dpI" +set "B2_CXX="%CXX%" -x c++ -std=c++11 -s -O3 -o b2.exe" +set "_known_=1" +goto :eof diff --git a/src/boost/tools/build/src/engine/constants.cpp b/src/boost/tools/build/src/engine/constants.cpp new file mode 100644 index 000000000..c64ea3b63 --- /dev/null +++ b/src/boost/tools/build/src/engine/constants.cpp @@ -0,0 +1,199 @@ +/* + * Copyright 2011 Steven Watanabe + * Copyright 2020 René Ferdinand Rivera Morell + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * constants.c - constant objects + * + * External functions: + * + * constants_init() - initialize constants + * constants_done() - free constants + * + */ + +#include "constants.h" + + +void constants_init( void ) +{ + constant_empty = object_new( "" ); + constant_dot = object_new( "." ); + constant_plus = object_new( "+" ); + constant_star = object_new( "*" ); + constant_question_mark = object_new( "?" 
); + constant_ok = object_new( "ok" ); + constant_true = object_new( "true" ); + constant_name = object_new( "__name__" ); + constant_bases = object_new( "__bases__" ); + constant_class = object_new( "__class__" ); + constant_typecheck = object_new( ".typecheck" ); + constant_builtin = object_new( "(builtin)" ); + constant_HCACHEFILE = object_new( "HCACHEFILE" ); + constant_HCACHEMAXAGE = object_new( "HCACHEMAXAGE" ); + constant_HDRSCAN = object_new( "HDRSCAN" ); + constant_HDRRULE = object_new( "HDRRULE" ); + constant_BINDRULE = object_new( "BINDRULE" ); + constant_LOCATE = object_new( "LOCATE" ); + constant_SEARCH = object_new( "SEARCH" ); + constant_JAM_SEMAPHORE = object_new( "JAM_SEMAPHORE" ); + constant_TIMING_RULE = object_new( "__TIMING_RULE__" ); + constant_ACTION_RULE = object_new( "__ACTION_RULE__" ); + constant_JAMSHELL = object_new( "JAMSHELL" ); + constant_TMPDIR = object_new( "TMPDIR" ); + constant_TMPNAME = object_new( "TMPNAME" ); + constant_TMPFILE = object_new( "TMPFILE" ); + constant_STDOUT = object_new( "STDOUT" ); + constant_STDERR = object_new( "STDERR" ); + constant_JAMDATE = object_new( "JAMDATE" ); + constant_JAM_TIMESTAMP_RESOLUTION = object_new( "JAM_TIMESTAMP_RESOLUTION" ); + constant_JAM_VERSION = object_new( "JAM_VERSION" ); + constant_JAMUNAME = object_new( "JAMUNAME" ); + constant_ENVIRON = object_new( ".ENVIRON" ); + constant_ARGV = object_new( "ARGV" ); + constant_all = object_new( "all" ); + constant_PARALLELISM = object_new( "PARALLELISM" ); + constant_KEEP_GOING = object_new( "KEEP_GOING" ); + constant_other = object_new( "[OTHER]" ); + constant_total = object_new( "[TOTAL]" ); + constant_FILE_DIRSCAN = object_new( "FILE_DIRSCAN" ); + constant_MAIN = object_new( "MAIN" ); + constant_MAIN_MAKE = object_new( "MAIN_MAKE" ); + constant_MAKE_MAKE0 = object_new( "MAKE_MAKE0" ); + constant_MAKE_MAKE1 = object_new( "MAKE_MAKE1" ); + constant_MAKE_MAKE0SORT = object_new( "MAKE_MAKE0SORT" ); + constant_BINDMODULE = object_new( "BINDMODULE" ); + constant_IMPORT_MODULE = object_new( "IMPORT_MODULE" ); + constant_BUILTIN_GLOB_BACK = object_new( "BUILTIN_GLOB_BACK" ); + constant_timestamp = object_new( "timestamp" ); + constant_python = object_new("__python__"); + constant_python_interface = object_new( "python_interface" ); + constant_extra_pythonpath = object_new( "EXTRA_PYTHONPATH" ); + constant_MAIN_PYTHON = object_new( "MAIN_PYTHON" ); + constant_BUILTIN_GLOB_ARCHIVE_BACK= object_new( "BUILTIN_GLOB_ARCHIVE_BACK" ); + constant_FILE_ARCHIVESCAN = object_new( "FILE_ARCHIVESCAN" ); + + constant_RESPONSE_FILE_SUB = object_new( "RESPONSE_FILE_SUB" ); +} + +void constants_done( void ) +{ + object_free( constant_empty ); + object_free( constant_dot ); + object_free( constant_plus ); + object_free( constant_star ); + object_free( constant_question_mark ); + object_free( constant_ok ); + object_free( constant_true ); + object_free( constant_name ); + object_free( constant_bases ); + object_free( constant_class ); + object_free( constant_typecheck ); + object_free( constant_builtin ); + object_free( constant_HCACHEFILE ); + object_free( constant_HCACHEMAXAGE ); + object_free( constant_HDRSCAN ); + object_free( constant_HDRRULE ); + object_free( constant_BINDRULE ); + object_free( constant_LOCATE ); + object_free( constant_SEARCH ); + object_free( constant_JAM_SEMAPHORE ); + object_free( constant_TIMING_RULE ); + object_free( constant_ACTION_RULE ); + object_free( constant_JAMSHELL ); + object_free( constant_TMPDIR ); + object_free( constant_TMPNAME ); + object_free( 
constant_TMPFILE ); + object_free( constant_STDOUT ); + object_free( constant_STDERR ); + object_free( constant_JAMDATE ); + object_free( constant_JAM_TIMESTAMP_RESOLUTION ); + object_free( constant_JAM_VERSION ); + object_free( constant_JAMUNAME ); + object_free( constant_ENVIRON ); + object_free( constant_ARGV ); + object_free( constant_all ); + object_free( constant_PARALLELISM ); + object_free( constant_KEEP_GOING ); + object_free( constant_other ); + object_free( constant_total ); + object_free( constant_FILE_DIRSCAN ); + object_free( constant_MAIN ); + object_free( constant_MAIN_MAKE ); + object_free( constant_MAKE_MAKE0 ); + object_free( constant_MAKE_MAKE1 ); + object_free( constant_MAKE_MAKE0SORT ); + object_free( constant_BINDMODULE ); + object_free( constant_IMPORT_MODULE ); + object_free( constant_BUILTIN_GLOB_BACK ); + object_free( constant_timestamp ); + object_free( constant_python ); + object_free( constant_python_interface ); + object_free( constant_extra_pythonpath ); + object_free( constant_MAIN_PYTHON ); + object_free( constant_FILE_ARCHIVESCAN ); + object_free( constant_BUILTIN_GLOB_ARCHIVE_BACK ); + + object_free( constant_RESPONSE_FILE_SUB ); +} + +OBJECT * constant_empty; +OBJECT * constant_dot; +OBJECT * constant_plus; +OBJECT * constant_star; +OBJECT * constant_question_mark; +OBJECT * constant_ok; +OBJECT * constant_true; +OBJECT * constant_name; +OBJECT * constant_bases; +OBJECT * constant_class; +OBJECT * constant_typecheck; +OBJECT * constant_builtin; +OBJECT * constant_HCACHEFILE; +OBJECT * constant_HCACHEMAXAGE; +OBJECT * constant_HDRSCAN; +OBJECT * constant_HDRRULE; +OBJECT * constant_BINDRULE; +OBJECT * constant_LOCATE; +OBJECT * constant_SEARCH; +OBJECT * constant_JAM_SEMAPHORE; +OBJECT * constant_TIMING_RULE; +OBJECT * constant_ACTION_RULE; +OBJECT * constant_JAMSHELL; +OBJECT * constant_TMPDIR; +OBJECT * constant_TMPNAME; +OBJECT * constant_TMPFILE; +OBJECT * constant_STDOUT; +OBJECT * constant_STDERR; +OBJECT * constant_JAMDATE; +OBJECT * constant_JAM_VERSION; +OBJECT * constant_JAMUNAME; +OBJECT * constant_ENVIRON; +OBJECT * constant_ARGV; +OBJECT * constant_all; +OBJECT * constant_PARALLELISM; +OBJECT * constant_KEEP_GOING; +OBJECT * constant_other; +OBJECT * constant_total; +OBJECT * constant_FILE_DIRSCAN; +OBJECT * constant_MAIN; +OBJECT * constant_MAIN_MAKE; +OBJECT * constant_MAKE_MAKE0; +OBJECT * constant_MAKE_MAKE1; +OBJECT * constant_MAKE_MAKE0SORT; +OBJECT * constant_BINDMODULE; +OBJECT * constant_IMPORT_MODULE; +OBJECT * constant_BUILTIN_GLOB_BACK; +OBJECT * constant_timestamp; +OBJECT * constant_JAM_TIMESTAMP_RESOLUTION; +OBJECT * constant_python; +OBJECT * constant_python_interface; +OBJECT * constant_extra_pythonpath; +OBJECT * constant_MAIN_PYTHON; +OBJECT * constant_FILE_ARCHIVESCAN; +OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK; + +OBJECT * constant_RESPONSE_FILE_SUB; diff --git a/src/boost/tools/build/src/engine/constants.h b/src/boost/tools/build/src/engine/constants.h new file mode 100644 index 000000000..675b66579 --- /dev/null +++ b/src/boost/tools/build/src/engine/constants.h @@ -0,0 +1,78 @@ +/* + * Copyright 2011 Steven Watanabe + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * constants.h - constant objects + */ + +#ifndef BOOST_JAM_CONSTANTS_H +#define BOOST_JAM_CONSTANTS_H + +#include "config.h" +#include "object.h" + +void constants_init( void ); +void constants_done( void ); + +extern OBJECT * constant_empty; /* "" */ +extern OBJECT * constant_dot; /* "." 
*/ +extern OBJECT * constant_plus; /* "+" */ +extern OBJECT * constant_star; /* "*" */ +extern OBJECT * constant_question_mark; /* "?" */ +extern OBJECT * constant_ok; /* "ok" */ +extern OBJECT * constant_true; /* "true" */ +extern OBJECT * constant_name; /* "__name__" */ +extern OBJECT * constant_bases; /* "__bases__" */ +extern OBJECT * constant_class; /* "__class__" */ +extern OBJECT * constant_typecheck; /* ".typecheck" */ +extern OBJECT * constant_builtin; /* "(builtin)" */ +extern OBJECT * constant_HCACHEFILE; /* "HCACHEFILE" */ +extern OBJECT * constant_HCACHEMAXAGE; /* "HCACHEMAXAGE" */ +extern OBJECT * constant_HDRSCAN; /* "HDRSCAN" */ +extern OBJECT * constant_HDRRULE; /* "HDRRULE" */ +extern OBJECT * constant_BINDRULE; /* "BINDRULE" */ +extern OBJECT * constant_LOCATE; /* "LOCATE" */ +extern OBJECT * constant_SEARCH; /* "SEARCH" */ +extern OBJECT * constant_JAM_SEMAPHORE; /* "JAM_SEMAPHORE" */ +extern OBJECT * constant_TIMING_RULE; /* "__TIMING_RULE__" */ +extern OBJECT * constant_ACTION_RULE; /* "__ACTION_RULE__" */ +extern OBJECT * constant_JAMSHELL; /* "JAMSHELL" */ +extern OBJECT * constant_TMPDIR; /* "TMPDIR" */ +extern OBJECT * constant_TMPNAME; /* "TMPNAME" */ +extern OBJECT * constant_TMPFILE; /* "TMPFILE" */ +extern OBJECT * constant_STDOUT; /* "STDOUT" */ +extern OBJECT * constant_STDERR; /* "STDERR" */ +extern OBJECT * constant_JAMDATE; /* "JAMDATE" */ +extern OBJECT * constant_JAM_TIMESTAMP_RESOLUTION; /* "JAM_TIMESTAMP_RESOLUTION" */ +extern OBJECT * constant_JAM_VERSION; /* "JAM_VERSION" */ +extern OBJECT * constant_JAMUNAME; /* "JAMUNAME" */ +extern OBJECT * constant_ENVIRON; /* ".ENVIRON" */ +extern OBJECT * constant_ARGV; /* "ARGV" */ +extern OBJECT * constant_all; /* "all" */ +extern OBJECT * constant_PARALLELISM; /* "PARALLELISM" */ +extern OBJECT * constant_KEEP_GOING; /* "KEEP_GOING" */ +extern OBJECT * constant_other; /* "[OTHER]" */ +extern OBJECT * constant_total; /* "[TOTAL]" */ +extern OBJECT * constant_FILE_DIRSCAN; /* "FILE_DIRSCAN" */ +extern OBJECT * constant_MAIN; /* "MAIN" */ +extern OBJECT * constant_MAIN_MAKE; /* "MAIN_MAKE" */ +extern OBJECT * constant_MAKE_MAKE0; /* "MAKE_MAKE0" */ +extern OBJECT * constant_MAKE_MAKE1; /* "MAKE_MAKE1" */ +extern OBJECT * constant_MAKE_MAKE0SORT; /* "MAKE_MAKE0SORT" */ +extern OBJECT * constant_BINDMODULE; /* "BINDMODULE" */ +extern OBJECT * constant_IMPORT_MODULE; /* "IMPORT_MODULE" */ +extern OBJECT * constant_BUILTIN_GLOB_BACK; /* "BUILTIN_GLOB_BACK" */ +extern OBJECT * constant_timestamp; /* "timestamp" */ +extern OBJECT * constant_python; /* "__python__" */ +extern OBJECT * constant_python_interface; /* "python_interface" */ +extern OBJECT * constant_extra_pythonpath; /* "EXTRA_PYTHONPATH" */ +extern OBJECT * constant_MAIN_PYTHON; /* "MAIN_PYTHON" */ +extern OBJECT * constant_FILE_ARCHIVESCAN; /* "FILE_ARCHIVESCAN" */ +extern OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK; /* "BUILTIN_GLOB_ARCHIVE_BACK" */ + +extern OBJECT * constant_RESPONSE_FILE_SUB; // "RESPONSE_FILE_SUB" + +#endif diff --git a/src/boost/tools/build/src/engine/cwd.cpp b/src/boost/tools/build/src/engine/cwd.cpp new file mode 100644 index 000000000..f216e7151 --- /dev/null +++ b/src/boost/tools/build/src/engine/cwd.cpp @@ -0,0 +1,101 @@ +/* + * Copyright 2002. Vladimir Prus + * Copyright 2005. Rene Rivera + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "cwd.h" + +#include "jam.h" +#include "mem.h" +#include "output.h" +#include "pathsys.h" +#include "startup.h" + +#include +#include +#include + +/* MinGW on Windows declares PATH_MAX in limits.h */ +#if defined( NT ) && !defined( __GNUC__ ) +# include +# define PATH_MAX _MAX_PATH +#else +# include +# if defined( __COMO__ ) +# include +# endif +#endif + +#ifndef PATH_MAX +# define PATH_MAX 1024 +#endif + + +static OBJECT * cwd_; +namespace +{ + std::string cwd_s; +} + + +void cwd_init( void ) +{ + int buffer_size = PATH_MAX; + char * cwd_buffer = 0; + int error; + + assert( !cwd_ ); + + do + { + char * const buffer = (char *)BJAM_MALLOC_RAW( buffer_size ); +#ifdef OS_VMS + /* cwd in POSIX-format */ + cwd_buffer = getcwd( buffer, buffer_size, 0 ); +#else + cwd_buffer = getcwd( buffer, buffer_size ); +#endif + error = errno; + if ( cwd_buffer ) + { + /* We store the path using its canonical/long/key format. */ + OBJECT * const cwd = object_new( cwd_buffer ); + cwd_ = path_as_key( cwd ); + object_free( cwd ); + cwd_s = cwd_buffer; + } + buffer_size *= 2; + BJAM_FREE_RAW( buffer ); + } + while ( !cwd_ && error == ERANGE ); + + if ( !cwd_ ) + { + errno_puts( "can not get current working directory" ); + b2::clean_exit( EXITBAD ); + } +} + + +OBJECT * cwd( void ) +{ + assert( cwd_ ); + return cwd_; +} + + +void cwd_done( void ) +{ + assert( cwd_ ); + object_free( cwd_ ); + cwd_ = NULL; +} + + +const std::string & b2::cwd_str() +{ + return cwd_s; +} \ No newline at end of file diff --git a/src/boost/tools/build/src/engine/cwd.h b/src/boost/tools/build/src/engine/cwd.h new file mode 100644 index 000000000..2910c2f82 --- /dev/null +++ b/src/boost/tools/build/src/engine/cwd.h @@ -0,0 +1,42 @@ +/* + * Copyright 2002. Vladimir Prus + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * cwd.h - manages the current working folder information + */ + +#ifndef CWD_H +#define CWD_H + +#include "config.h" +#include "object.h" + +#include + + +/* cwd() - returns the current working folder */ +OBJECT * cwd( void ); +namespace b2 +{ + const std::string & cwd_str(); +} + +/* cwd_init() - initialize the cwd module functionality + * + * The current working folder can not change in Boost Jam so this function + * gets the current working folder information from the OS and stores it + * internally. + * + * Expected to be called at program startup before the program's current + * working folder has been changed + */ +void cwd_init( void ); + +/* cwd_done() - cleans up the cwd module functionality */ +void cwd_done( void ); + +#endif diff --git a/src/boost/tools/build/src/engine/debian/changelog b/src/boost/tools/build/src/engine/debian/changelog new file mode 100644 index 000000000..29084289c --- /dev/null +++ b/src/boost/tools/build/src/engine/debian/changelog @@ -0,0 +1,72 @@ +bjam (3.1.12-1) unstable; urgency=low + + * New upstream release. + + -- Rene Rivera Sat, 01 Oct 2005 00:00:00 +0000 + +bjam (3.1.11-1) unstable; urgency=low + + * New upstream release. + + -- Rene Rivera Sat, 30 Apr 2005 00:00:00 +0000 + +bjam (3.1.10-1) unstable; urgency=low + + * New upstream release. + + -- Rene Rivera Tue, 1 Jun 2004 05:42:35 +0000 + +bjam (3.1.9-2) unstable; urgency=low + + * Use default value of BOOST_BUILD_PATH is not is set in environment. 
+ + -- Vladimir Prus Wed, 17 Dec 2003 16:44:35 +0300 + +bjam (3.1.9-1) unstable; urgency=low + + * Implement NATIVE_FILE builtin and several native rules. + + -- Vladimir Prus Thu, 11 Dec 2003 13:15:26 +0300 + +bjam (3.1.8-1) unstable; urgency=low + + * New upstream release. + + -- Vladimir Prus Tue, 4 Nov 2003 20:50:43 +0300 + +bjam (3.1.7-1) unstable; urgency=low + + * New upstream release. + + -- Vladimir Prus Thu, 11 Sep 2003 10:45:44 +0400 + +bjam (3.1.6-1) unstable; urgency=low + + * New upstream release. + + -- Vladimir Prus Tue, 1 Jul 2003 09:12:18 +0400 + +bjam (3.1.5-1) unstable; urgency=low + + * New upstream release. + + -- Vladimir Prus Mon, 19 May 2003 14:05:13 +0400 + +bjam (3.1.3-2) unstable; urgency=low + + * Changed Debian package to be similar to Jam's package. + + -- Vladimir Prus Thu, 10 Oct 2002 18:43:26 +0400 + +bjam (3.1.3-1) unstable; urgency=low + + * New upstream release. + + -- Vladimir Prus Fri, 4 Oct 2002 18:16:54 +0400 + +bjam (3.1.2-1) unstable; urgency=low + + * Initial Release. + + -- Vladimir Prus Wed, 14 Aug 2002 14:08:00 +0400 + diff --git a/src/boost/tools/build/src/engine/debian/control b/src/boost/tools/build/src/engine/debian/control new file mode 100644 index 000000000..46747d838 --- /dev/null +++ b/src/boost/tools/build/src/engine/debian/control @@ -0,0 +1,16 @@ +Source: bjam +Section: devel +Priority: optional +Maintainer: Vladimir Prus +Build-Depends: debhelper (>> 3.0.0), docbook-to-man, bison +Standards-Version: 3.5.2 + +Package: bjam +Architecture: any +Depends: ${shlibs:Depends} +Description: Build tool + Boost.Jam is a portable build tool with its own interpreted language, which + allows to implement rather complex logic in a readable way and without + resorting to external programs. It is a descendant of Jam/MR tool modified to + suit the needs of B2. In particular, modules and rule parameters + were added, as well as several new builtins. diff --git a/src/boost/tools/build/src/engine/debian/copyright b/src/boost/tools/build/src/engine/debian/copyright new file mode 100644 index 000000000..f72e4e3a9 --- /dev/null +++ b/src/boost/tools/build/src/engine/debian/copyright @@ -0,0 +1,25 @@ +This package was debianized by Vladimir Prus on +Wed, 17 July 2002, 19:27:00 +0400. + +Copyright: + + /+\ + +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + \+/ + + This is Release 2.4 of Jam/MR, a make-like program. + + License is hereby granted to use this software and distribute it + freely, as long as this copyright notice is retained and modifications + are clearly marked. + + ALL WARRANTIES ARE HEREBY DISCLAIMED. + +Some portions are also: + + Copyright 2001-2006 David Abrahams. + Copyright 2002-2006 Rene Rivera. + Copyright 2003-2006 Vladimir Prus. + + Distributed under the Boost Software License, Version 1.0. + (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) diff --git a/src/boost/tools/build/src/engine/debian/jam.man.sgml b/src/boost/tools/build/src/engine/debian/jam.man.sgml new file mode 100644 index 000000000..1fabfb64c --- /dev/null +++ b/src/boost/tools/build/src/engine/debian/jam.man.sgml @@ -0,0 +1,236 @@ + manpage.1'. You may view + the manual page with: `docbook-to-man manpage.sgml | nroff -man | + less'. A typical entry in a Makefile or Makefile.am is: + +manpage.1: manpage.sgml + docbook-to-man $< > $@ + --> + + Yann"> + Dirson"> + + mai 23, 2001"> + dirson@debian.org"> + + + + Debian GNU/Linux"> + GNU"> +]> + + + +
+ &dhemail; +
+ + &dhfirstname; + &dhsurname; + + + 2001 + &dhusername; + + &dhdate; +
+ + + JAM + 1 + + + + Jam/MR + Make(1) Redux + + + + + jam + + + + + + + + + + + + + + + + + + DESCRIPTION + + Jam is a program construction tool, like make(1). + + Jam recursively builds target files from source files, using + dependency information and updating actions expressed in the + Jambase file, which is written in jam's own interpreted language. + The default Jambase is compiled into jam and provides a + boilerplate for common use, relying on a user-provide file + "Jamfile" to enumerate actual targets and sources. + + + + OPTIONS + + + + + + + + + Enable cumulative debugging levels from 1 to + + + + + + Show + dependency analysis, and target/source + timestamps/paths + + + + + + Show + directory/header file/archive + scans + + + + + + + + + + + + + + + Enable debugging level + + + + + + + + + + Read + + + + + + + Run up to + + + + + + + + + + Write the updating actions to the specified file + instead of running them (or outputting them, as on the + Mac). + + + + + + + Set the variable + + + + + + + Rebuild + + + + + + + + + + + SEE ALSO + + Jam is documented fully in HTML pages available on Debian + systems from + /usr/share/doc/jam/Jam.html. + + + + AUTHOR + + This manual page was created by &dhusername; &dhemail; from + the + +
+ + diff --git a/src/boost/tools/build/src/engine/debian/rules b/src/boost/tools/build/src/engine/debian/rules new file mode 100755 index 000000000..8538b3572 --- /dev/null +++ b/src/boost/tools/build/src/engine/debian/rules @@ -0,0 +1,73 @@ +#!/usr/bin/make -f +# Sample debian/rules that uses debhelper. +# GNU copyright 1997 to 1999 by Joey Hess. +# GNU copyright 2001 by Yann Dirson. + +# This is the debian/rules file for packages jam and ftjam +# It should be usable with both packages without any change + +# Uncomment this to turn on verbose mode. +#export DH_VERBOSE=1 + +# This is the debhelper compatibility version to use. +export DH_COMPAT=3 + +topdir=$(shell pwd) + +jam=bjam +binname=bjam + +build: build-stamp +build-stamp: debian/jam.1 + dh_testdir + + ./build.sh + + touch build-stamp + +%.1: %.man.sgml + /usr/bin/docbook-to-man $< > $@ + +clean: + dh_testdir + dh_testroot + rm -f build-stamp + rm -rf bin.* + rm -f jam0 debian/jam.1 + dh_clean + +install: build + dh_testdir + dh_testroot + dh_clean -k + dh_installdirs + + install -d ${topdir}/debian/${jam}/usr/bin + install -m755 bin.linuxx86/bjam ${topdir}/debian/${jam}/usr/bin/ + install -d ${topdir}/debian/${jam}/usr/share/man/man1/ + install -m644 debian/jam.1 ${topdir}/debian/${jam}/usr/share/man/man1/${binname}.1 + + +# Build architecture-independent files here. +binary-indep: build install +# We have nothing to do by default. + +# Build architecture-dependent files here. +binary-arch: build install + dh_testdir + dh_testroot + dh_installdocs README RELNOTES Jambase *.html +# dh_installemacsen +# dh_undocumented + dh_installchangelogs + dh_strip + dh_compress + dh_fixperms + dh_installdeb + dh_shlibdeps + dh_gencontrol + dh_md5sums + dh_builddeb + +binary: binary-indep binary-arch +.PHONY: build clean binary-indep binary-arch binary install configure diff --git a/src/boost/tools/build/src/engine/debug.cpp b/src/boost/tools/build/src/engine/debug.cpp new file mode 100644 index 000000000..f802b8a00 --- /dev/null +++ b/src/boost/tools/build/src/engine/debug.cpp @@ -0,0 +1,158 @@ +/* + * Copyright 2005, 2016. Rene Rivera + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "jam.h" +#include "debug.h" +#include "output.h" +#include "hash.h" +#include + + +static profile_frame * profile_stack = 0; +static struct hash * profile_hash = 0; +static profile_info profile_other = { 0 }; +static profile_info profile_total = { 0 }; + + +profile_frame * profile_init( OBJECT * rulename, profile_frame * frame ) +{ + if ( DEBUG_PROFILE ) profile_enter( rulename, frame ); + return frame; +} + + +void profile_enter( OBJECT * rulename, profile_frame * frame ) +{ + if ( DEBUG_PROFILE ) + { + double start = profile_clock(); + profile_info * p; + + if ( !profile_hash && rulename ) + profile_hash = hashinit( sizeof( profile_info ), "profile" ); + + if ( rulename ) + { + int found; + p = (profile_info *)hash_insert( profile_hash, rulename, &found ); + if ( !found ) + { + p->name = rulename; + p->cumulative = 0; + p->net = 0; + p->num_entries = 0; + p->stack_count = 0; + p->memory = 0; + } + } + else + { + p = &profile_other; + } + + p->num_entries += 1; + p->stack_count += 1; + + frame->info = p; + + frame->caller = profile_stack; + profile_stack = frame; + + frame->entry_time = profile_clock(); + frame->overhead = 0; + frame->subrules = 0; + + /* caller pays for the time it takes to play with the hash table */ + if ( frame->caller ) + frame->caller->overhead += frame->entry_time - start; + } +} + + +void profile_memory( size_t mem ) +{ + if ( DEBUG_PROFILE ) + if ( profile_stack && profile_stack->info ) + profile_stack->info->memory += ((double)mem) / 1024; +} + + +void profile_exit( profile_frame * frame ) +{ + if ( DEBUG_PROFILE ) + { + /* Cumulative time for this call. */ + double t = profile_clock() - frame->entry_time - frame->overhead; + /* If this rule is already present on the stack, do not add the time for + * this instance. + */ + if ( frame->info->stack_count == 1 ) + frame->info->cumulative += t; + /* Net time does not depend on presence of the same rule in call stack. + */ + frame->info->net += t - frame->subrules; + + if ( frame->caller ) + { + /* Caller's cumulative time must account for this overhead. */ + frame->caller->overhead += frame->overhead; + frame->caller->subrules += t; + } + /* Pop this stack frame. */ + --frame->info->stack_count; + profile_stack = frame->caller; + } +} + + +static void dump_profile_entry( void * p_, void * ignored ) +{ + profile_info * p = (profile_info *)p_; + double mem_each = ( p->memory / ( p->num_entries ? 
p->num_entries : 1 + ) ); + double q = p->net; + if (p->num_entries) q /= p->num_entries; + if ( !ignored ) + { + profile_total.cumulative += p->net; + profile_total.memory += p->memory; + } + out_printf( "%10ld %12.6f %12.6f %12.8f %10.2f %10.2f %s\n", p->num_entries, + p->cumulative, p->net, q, p->memory, mem_each, object_str( p->name ) ); +} + + +void profile_dump() +{ + if ( profile_hash ) + { + out_printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--", + "--net--", "--each--", "--mem--", "--each--", "--name--" ); + hashenumerate( profile_hash, dump_profile_entry, 0 ); + profile_other.name = constant_other; + dump_profile_entry( &profile_other, 0 ); + profile_total.name = constant_total; + dump_profile_entry( &profile_total, (void *)1 ); + } +} + +double profile_clock() +{ + return ((double) clock()) / CLOCKS_PER_SEC; +} + +OBJECT * profile_make_local( char const * scope ) +{ + if ( DEBUG_PROFILE ) + { + return object_new( scope ); + } + else + { + return 0; + } +} diff --git a/src/boost/tools/build/src/engine/debug.h b/src/boost/tools/build/src/engine/debug.h new file mode 100644 index 000000000..fad73f9f5 --- /dev/null +++ b/src/boost/tools/build/src/engine/debug.h @@ -0,0 +1,63 @@ +/* + * Copyright 2005, 2016. Rene Rivera + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef BJAM_DEBUG_H +#define BJAM_DEBUG_H + +#include "config.h" +#include "constants.h" +#include "object.h" + + +typedef struct profile_info +{ + /* name of rule being called */ + OBJECT * name; + /* cumulative time spent in rule, in seconds */ + double cumulative; + /* time spent in rule proper, in seconds */ + double net; + /* number of time rule was entered */ + unsigned long num_entries; + /* number of the times this function is present in stack */ + unsigned long stack_count; + /* memory allocated by the call, in KiB */ + double memory; +} profile_info; + +typedef struct profile_frame +{ + /* permanent storage where data accumulates */ + profile_info * info; + /* overhead for profiling in this call */ + double overhead; + /* time of last entry to rule */ + double entry_time; + /* stack frame of caller */ + struct profile_frame * caller; + /* time spent in subrules */ + double subrules; +} profile_frame; + +profile_frame * profile_init( OBJECT * rulename, profile_frame * ); +void profile_enter( OBJECT * rulename, profile_frame * ); +void profile_memory( size_t mem ); +void profile_exit( profile_frame * ); +void profile_dump(); +double profile_clock(); + +#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( constant_ ## scope, &PROF_ ## scope ) +#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p ) + +OBJECT * profile_make_local( char const * ); +#define PROFILE_ENTER_LOCAL( scope ) \ + static OBJECT * constant_LOCAL_##scope = 0; \ + if (DEBUG_PROFILE && !constant_LOCAL_##scope) constant_LOCAL_##scope = profile_make_local( #scope ); \ + PROFILE_ENTER( LOCAL_##scope ) +#define PROFILE_EXIT_LOCAL( scope ) PROFILE_EXIT( LOCAL_##scope ) + +#endif diff --git a/src/boost/tools/build/src/engine/debugger.cpp b/src/boost/tools/build/src/engine/debugger.cpp new file mode 100644 index 000000000..2b9755995 --- /dev/null +++ b/src/boost/tools/build/src/engine/debugger.cpp @@ -0,0 +1,2737 @@ +/* + * Copyright 2015 Steven Watanabe + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "debugger.h" +#include "constants.h" +#include "jam_strings.h" +#include "pathsys.h" +#include "cwd.h" +#include "function.h" +#include "mem.h" +#include "startup.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef NT +#include +#include +#include +#else +#include +#include +#include +#endif + +#include +#include + +#undef debug_on_enter_function +#undef debug_on_exit_function + +struct breakpoint +{ + OBJECT * file; + OBJECT * bound_file; + int line; + int status; +}; + +#define BREAKPOINT_ENABLED 1 +#define BREAKPOINT_DISABLED 2 +#define BREAKPOINT_DELETED 3 + +static struct breakpoint * breakpoints; +static int num_breakpoints; +static int breakpoints_capacity; + +#define DEBUG_NO_CHILD 0 +#define DEBUG_RUN 1 +#define DEBUG_STEP 2 +#define DEBUG_NEXT 3 +#define DEBUG_FINISH 4 +#define DEBUG_STOPPED 5 + +#define DEBUG_MSG_BREAKPOINT 1 +#define DEBUG_MSG_END_STEPPING 2 +#define DEBUG_MSG_SETUP 3 +#define DEBUG_MSG_DONE 32 + +static int debug_state; +static int debug_depth; +static OBJECT * debug_file; +static int debug_line; +static FRAME * debug_frame; +LIST * debug_print_result; +static int current_token; +static int debug_selected_frame_number; + +/* Commands are read from this stream. */ +static FILE * command_input; +/* Where to send output from commands. */ +static FILE * command_output; +/* Only valid in the parent. Reads command output from the child. */ +static FILE * command_child; + +struct command_elem +{ + const char * key; + void (*command)( int, const char * * ); +}; + +static struct command_elem * command_array; + +static void debug_listen( void ); +static int read_command( void ); +static int is_same_file( OBJECT * file1, OBJECT * file2 ); +static void debug_mi_format_token( void ); +static OBJECT * make_absolute_path( OBJECT * filename ); + +static void debug_string_write( FILE * out, const char * data ) +{ + fprintf( out, "%s", data ); + fputc( '\0', out ); +} + +static std::string debug_string_read( FILE * in ) +{ + string buf[ 1 ]; + int ch; + std::string result; + string_new( buf ); + while( ( ch = fgetc( in ) ) > 0 ) + { + string_push_back( buf, (char)ch ); + } + result = buf->value; + string_free( buf ); + return result; +} + +static void debug_object_write( FILE * out, OBJECT * data ) +{ + debug_string_write( out, object_str( data ) ); +} + +static OBJECT * debug_object_read( FILE * in ) +{ + string buf[ 1 ]; + int ch; + OBJECT * result; + string_new( buf ); + while( ( ch = fgetc( in ) ) > 0 ) + { + string_push_back( buf, (char)ch ); + } + result = object_new( buf->value ); + string_free( buf ); + return result; +} + +static void debug_int_write( FILE * out, int i ) +{ + fprintf( out, "%d", i ); + fputc( '\0', out ); +} + +static int debug_int_read( FILE * in ) +{ + OBJECT * str = debug_object_read( in ); + int result = atoi( object_str( str ) ); + object_free( str ); + return result; +} + +static void debug_list_write( FILE * out, LIST * l ) +{ + LISTITER iter = list_begin( l ), end = list_end( l ); + fprintf( out, "%d\n", list_length( l ) ); + for ( ; iter != end; iter = list_next( iter ) ) + { + debug_object_write( out, list_item( iter ) ); + } +} + +static LIST * debug_list_read( FILE * in ) +{ + int len; + int i; + LIST * result = L0; + int ret = fscanf( in, "%d", &len ); + if (ret == 1) + { + int ch = fgetc( in ); + if (ch > 0) assert( ch == '\n' ); + for ( i = 0; i < len; ++i ) + { + result = list_push_back( 
result, debug_object_read( in ) ); + } + } + return result; +} + +static void debug_lol_write( FILE * out, LOL * lol ) +{ + int i; + debug_int_write( out, lol->count ); + for ( i = 0; i < lol->count; ++i ) + { + debug_list_write( out, lol_get( lol, i ) ); + } +} + +static void debug_lol_read( FILE * in, LOL * lol ) +{ + int count, i; + lol_init( lol ); + count = debug_int_read( in ); + for ( i = 0; i < count; ++i ) + { + lol_add( lol, debug_list_read( in ) ); + } +} + +static void debug_format_rulename ( string * out, FRAME * frame ) +{ + const char * pos = strchr( frame->rulename, '.' ); + if ( frame->module->class_module && pos ) + { + string_copy( out, object_str( frame->module->name ) ); + string_push_back( out, '.' ); + string_append( out, pos + 1 ); + } + else + { + string_copy( out, frame->rulename ); + } +} + +static void debug_frame_write( FILE * out, FRAME * frame ) +{ + string rulename_buffer [ 1 ]; + OBJECT * fullname = constant_builtin; + OBJECT * file = frame->file; + if ( file == NULL ) file = constant_builtin; + else fullname = make_absolute_path( frame->file ); + debug_format_rulename( rulename_buffer, frame ); + debug_object_write( out, file ); + debug_int_write( out, frame->line ); + debug_object_write( out, fullname ); + debug_lol_write( out, frame->args ); + debug_string_write( out, rulename_buffer->value ); + object_free( fullname ); + string_free( rulename_buffer ); +} + +/* + * The information passed to the debugger for + * a frame is slightly different from the FRAME + * struct. + */ +typedef struct _frame_info +{ + OBJECT * file = nullptr; + int line = 0; + OBJECT * fullname = nullptr; + LOL args[ 1 ]; + std::string rulename; + + _frame_info() + { + lol_init( args ); + } + + ~_frame_info() + { + if ( file ) object_free( file ); + if ( fullname ) object_free( fullname ); + lol_free( args ); + } +} FRAME_INFO; + +static void debug_frame_read( FILE * in, FRAME_INFO * frame ) +{ + frame->file = debug_object_read( in ); + frame->line = debug_int_read( in ); + frame->fullname = debug_object_read( in ); + debug_lol_read( in, frame->args ); + frame->rulename = debug_string_read( in ); +} + +static int add_breakpoint( struct breakpoint elem ) +{ + if ( num_breakpoints == breakpoints_capacity ) + { + int new_capacity = breakpoints_capacity * 2; + if ( new_capacity == 0 ) new_capacity = 1; + breakpoints = ( struct breakpoint * )realloc( breakpoints, new_capacity * sizeof( struct breakpoint ) ); + breakpoints_capacity = new_capacity; + } + breakpoints[ num_breakpoints++ ] = elem; + return num_breakpoints; +} + +static int add_line_breakpoint( OBJECT * file, int line ) +{ + struct breakpoint elem; + elem.file = file; + elem.bound_file = NULL; + elem.line = line; + elem.status = BREAKPOINT_ENABLED; + return add_breakpoint( elem ); +} + +static int add_function_breakpoint( OBJECT * name ) +{ + struct breakpoint elem; + elem.file = name; + elem.bound_file = object_copy( name ); + elem.line = -1; + elem.status = BREAKPOINT_ENABLED; + return add_breakpoint( elem ); +} + +/* + * Checks whether there is an active breakpoint at the + * specified location. Returns the breakpoint id + * or -1 if none is found. 
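 * (As implemented below, the "no match" return value is 0; a positive value
 * is the 1-based breakpoint id.)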
+ */ +static int handle_line_breakpoint( OBJECT * file, int line ) +{ + int i; + if ( file == NULL ) return 0; + for ( i = 0; i < num_breakpoints; ++i ) + { + if ( breakpoints[ i ].bound_file == NULL && is_same_file( breakpoints[ i ].file, file ) ) + { + breakpoints[ i ].bound_file = object_copy( file ); + } + if ( breakpoints[ i ].status == BREAKPOINT_ENABLED && + breakpoints[ i ].bound_file != NULL && + object_equal( breakpoints[ i ].bound_file, file ) && + breakpoints[ i ].line == line ) + { + return i + 1; + } + } + return 0; +} + +static int handle_function_breakpoint( OBJECT * name ) +{ + return handle_line_breakpoint( name, -1 ); +} + +static OBJECT * make_absolute_path( OBJECT * filename ) +{ + PATHNAME path1[ 1 ]; + string buf[ 1 ]; + OBJECT * result; + const char * root = object_str( cwd() ); + path_parse( object_str( filename ), path1 ); + path1->f_root.ptr = root; + path1->f_root.len = int32_t(strlen( root )); + string_new( buf ); + path_build( path1, buf ); + result = object_new( buf->value ); + string_free( buf ); + return result; +} + +static OBJECT * get_filename( OBJECT * path ) +{ + PATHNAME path1[ 1 ]; + string buf[ 1 ]; + OBJECT * result; + path_parse( object_str( path ), path1 ); + path1->f_dir.ptr = NULL; + path1->f_dir.len = 0; + string_new( buf ); + path_build( path1, buf ); + result = object_new( buf->value ); + string_free( buf ); + return result; +} + +static int is_same_file( OBJECT * file1, OBJECT * file2 ) +{ + OBJECT * absolute1 = make_absolute_path( file1 ); + OBJECT * absolute2 = make_absolute_path( file2 ); + OBJECT * norm1 = path_as_key( absolute1 ); + OBJECT * norm2 = path_as_key( absolute2 ); + OBJECT * base1 = get_filename( file1 ); + OBJECT * base2 = get_filename( file2 ); + OBJECT * normbase1 = path_as_key( base1 ); + OBJECT * normbase2 = path_as_key( base2 ); + int result = object_equal( norm1, norm2 ) || + ( object_equal( base1, file1 ) && object_equal( normbase1, normbase2 ) ); + object_free( absolute1 ); + object_free( absolute2 ); + object_free( norm1 ); + object_free( norm2 ); + object_free( base1 ); + object_free( base2 ); + object_free( normbase1 ); + object_free( normbase2 ); + return result; +} + +static void debug_print_source( OBJECT * filename, int line ) +{ + FILE * file; + + if ( filename == NULL || object_equal( filename, constant_builtin ) ) + return; + + file = fopen( object_str( filename ), "r" ); + if ( file ) + { + int ch; + int printing = 0; + int current_line = 1; + if ( line == 1 ) + { + printing = 1; + printf( "%d\t", current_line ); + } + while ( ( ch = fgetc( file ) ) != EOF ) + { + if ( printing ) + fputc( ch, stdout ); + + if ( ch == '\n' ) + { + if ( printing ) + break; + + ++current_line; + if ( current_line == line ) + { + printing = 1; + printf( "%d\t", current_line ); + } + } + } + fclose( file ); + } +} + +static void debug_print_frame_info( FRAME_INFO & frame ) +{ + OBJECT * file = frame.file; + if ( file == NULL ) file = constant_builtin; + printf( "%s ", frame.rulename.c_str() ); + if ( frame.rulename != "module scope" ) + { + printf( "( " ); + if ( frame.args->count ) + { + lol_print( frame.args ); + printf( " " ); + } + printf( ") " ); + } + printf( "at %s:%d", object_str( file ), frame.line ); +} + +static void debug_mi_print_frame_info( FRAME_INFO * frame ) +{ + printf( "frame={func=\"%s\",args=[],file=\"%s\",fullname=\"%s\",line=\"%d\"}", + frame->rulename.c_str(), + object_str( frame->file ), + object_str( frame->fullname ), + frame->line ); +} + +static void debug_on_breakpoint( int id ) +{ + fputc( 
DEBUG_MSG_BREAKPOINT, command_output ); + debug_int_write( command_output, id ); + fflush( command_output ); + debug_listen(); +} + +static void debug_end_stepping( void ) +{ + fputc( DEBUG_MSG_END_STEPPING, command_output ); + fflush( command_output ); + debug_listen(); +} + +void debug_on_instruction( FRAME * frame, OBJECT * file, int line ) +{ + int breakpoint_id; + assert( debug_is_debugging() ); + if ( debug_state == DEBUG_NEXT && + ( debug_depth < 0 || ( debug_depth == 0 && debug_line != line ) ) ) + { + debug_file = file; + debug_line = line; + debug_frame = frame; + debug_end_stepping(); + } + else if ( debug_state == DEBUG_STEP && debug_line != line ) + { + debug_file = file; + debug_line = line; + debug_frame = frame; + debug_end_stepping(); + } + else if ( debug_state == DEBUG_FINISH && debug_depth < 0 ) + { + debug_file = file; + debug_line = line; + debug_frame = frame; + debug_end_stepping(); + } + else if ( ( debug_file == NULL || ! object_equal( file, debug_file ) || + line != debug_line || debug_depth != 0 ) && + ( breakpoint_id = handle_line_breakpoint( file, line ) ) ) + { + debug_file = file; + debug_line = line; + debug_frame = frame; + debug_on_breakpoint( breakpoint_id ); + } + else if ( ( debug_state == DEBUG_RUN || debug_state == DEBUG_FINISH ) && + ( debug_depth < 0 || ( debug_depth == 0 && debug_line != line ) ) ) + { + debug_file = NULL; + debug_line = 0; + } +} + +void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int line ) +{ + int breakpoint_id; + assert( debug_is_debugging() ); + ++debug_depth; + if ( debug_state == DEBUG_STEP && file ) + { + debug_file = file; + debug_line = line; + debug_frame = frame; + debug_end_stepping(); + } + else if ( ( breakpoint_id = handle_function_breakpoint( name ) ) || + ( breakpoint_id = handle_line_breakpoint( file, line ) ) ) + { + debug_file = file; + debug_line = line; + debug_frame = frame; + debug_on_breakpoint( breakpoint_id ); + } +} + +void debug_on_exit_function( OBJECT * name ) +{ + assert( debug_is_debugging() ); + --debug_depth; + if ( debug_depth < 0 ) + { + /* The current location is no longer valid + after we return from the containing function. 
*/ + debug_file = NULL; + debug_line = 0; + } +} + +#if NT +static HANDLE child_handle; +static DWORD child_pid; +#else +static int child_pid; +#endif + +static void debug_child_continue( int argc, const char * * argv ) +{ + debug_state = DEBUG_RUN; + debug_depth = 0; +} + +static void debug_child_step( int argc, const char * * argv ) +{ + debug_state = DEBUG_STEP; + debug_depth = 0; +} + +static void debug_child_next( int argc, const char * * argv ) +{ + debug_state = DEBUG_NEXT; + debug_depth = 0; +} + +static void debug_child_finish( int argc, const char * * argv ) +{ + debug_state = DEBUG_FINISH; + debug_depth = 0; +} + +static void debug_child_kill( int argc, const char * * argv ) +{ + b2::clean_exit( 0 ); +} + +static int debug_add_breakpoint( const char * name ) +{ + const char * file_ptr = name; + const char * ptr = strrchr( file_ptr, ':' ); + if ( ptr ) + { + char * end; + long line = strtoul( ptr + 1, &end, 10 ); + if ( line > 0 && line <= INT_MAX && end != ptr + 1 && *end == 0 ) + { + OBJECT * file = object_new_range( file_ptr, int32_t(ptr - file_ptr) ); + return add_line_breakpoint( file, line ); + } + else + { + OBJECT * name = object_new( file_ptr ); + return add_function_breakpoint( name ); + } + } + else + { + OBJECT * name = object_new( file_ptr ); + return add_function_breakpoint( name ); + } +} + +static void debug_child_break( int argc, const char * * argv ) +{ + if ( argc == 2 ) + { + debug_add_breakpoint( argv[ 1 ] ); + } +} + +static int get_breakpoint_by_name( const char * name ) +{ + int result; + const char * file_ptr = name; + const char * ptr = strrchr( file_ptr, ':' ); + if ( ptr ) + { + char * end; + long line = strtoul( ptr + 1, &end, 10 ); + if ( line > 0 && line <= INT_MAX && end != ptr + 1 && *end == 0 ) + { + OBJECT * file = object_new_range( file_ptr, int32_t(ptr - file_ptr) ); + result = handle_line_breakpoint( file, line ); + object_free( file ); + } + else + { + OBJECT * name = object_new( file_ptr ); + result = handle_function_breakpoint( name ); + object_free( name ); + } + } + else + { + OBJECT * name = object_new( file_ptr ); + result = handle_function_breakpoint( name ); + object_free( name ); + } + return result; +} + +static void debug_child_disable( int argc, const char * * argv ) +{ + if ( argc == 2 ) + { + int id = atoi( argv[ 1 ] ); + if ( id < 1 || id > num_breakpoints ) + return; + --id; + if ( breakpoints[ id ].status == BREAKPOINT_DELETED ) + return; + breakpoints[ id ].status = BREAKPOINT_DISABLED; + } +} + +static void debug_child_enable( int argc, const char * * argv ) +{ + if ( argc == 2 ) + { + int id = atoi( argv[ 1 ] ); + if ( id < 1 || id > num_breakpoints ) + return; + --id; + if ( breakpoints[ id ].status == BREAKPOINT_DELETED ) + return; + breakpoints[ id ].status = BREAKPOINT_ENABLED; + } +} + +static void debug_child_delete( int argc, const char * * argv ) +{ + if ( argc == 2 ) + { + int id = atoi( argv[ 1 ] ); + if ( id < 1 || id > num_breakpoints ) + return; + --id; + breakpoints[ id ].status = BREAKPOINT_DELETED; + } +} + +static void debug_child_print( int argc, const char * * argv ) +{ + FRAME * saved_frame; + OBJECT * saved_file; + int saved_line; + string buf[ 1 ]; + const char * lines[ 2 ]; + int i; + FRAME new_frame = *debug_frame; + /* Save the current file/line, since running parse_string + * will likely change it. 
+ */ + saved_frame = debug_frame; + saved_file = debug_file; + saved_line = debug_line; + string_new( buf ); + string_append( buf, "__DEBUG_PRINT_HELPER__" ); + for ( i = 1; i < argc; ++i ) + { + string_push_back( buf, ' ' ); + string_append( buf, argv[ i ] ); + } + string_append( buf, " ;\n" ); + lines[ 0 ] = buf->value; + lines[ 1 ] = NULL; + parse_string( constant_builtin, lines, &new_frame ); + string_free( buf ); + debug_list_write( command_output, debug_print_result ); + fflush( command_output ); + debug_frame = saved_frame; + debug_file = saved_file; + debug_line = saved_line; +} + +static void debug_child_frame( int argc, const char * * argv ) +{ + if ( argc == 2 ) + { + debug_selected_frame_number = atoi( argv[ 1 ] ); + } + else + { + assert( !"Wrong number of arguments to frame." ); + } +} + +static void debug_child_info( int argc, const char * * argv ) +{ + if ( strcmp( argv[ 1 ], "locals" ) == 0 ) + { + LIST * locals = L0; + if ( debug_frame->function ) + { + locals = function_get_variables( (FUNCTION*)debug_frame->function ); + } + debug_list_write( command_output, locals ); + fflush( command_output ); + list_free( locals ); + } + else if ( strcmp( argv[ 1 ], "frame" ) == 0 ) + { + int frame_number = debug_selected_frame_number; + int i; + FRAME base = *debug_frame; + FRAME * frame = &base; + base.file = debug_file; + base.line = debug_line; + if ( argc == 3 ) frame_number = atoi( argv[ 2 ] ); + + for ( i = 0; i < frame_number; ++i ) frame = frame->prev; + + debug_frame_write( command_output, frame ); + } + else if ( strcmp( argv[ 1 ], "depth" ) == 0 ) + { + int result = 0; + FRAME * frame = debug_frame; + while ( frame ) + { + frame = frame->prev; + ++result; + } + fprintf( command_output, "%d", result ); + fputc( '\0', command_output ); + fflush( command_output ); + } +} + +/* Commands for the parent. 
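 *
 * Sketch of the protocol, as inferred from the surrounding code: the parent
 * sends plain text command lines (e.g. "info frame\n", "continue\n") to the
 * child over one pipe, while the child answers with a DEBUG_MSG_* byte
 * followed by NUL-terminated strings and integers (the debug_*_write helpers
 * above) on the other.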
*/ + +#ifdef NT + +static int get_module_filename( string * out ) +{ + DWORD result; + string_reserve( out, 256 + 1 ); + string_truncate( out, 256 ); + while( ( result = GetModuleFileNameA( NULL, out->value, DWORD(out->size) ) ) == DWORD(out->size) ) + { + string_reserve( out, out->size * 2 + 1); + string_truncate( out, out->size * 2 ); + } + if ( result != 0 ) + { + string_truncate( out, result ); + return 1; + } + else + { + return 0; + } +} + +#endif + +static struct command_elem child_commands[] = +{ + { "continue", &debug_child_continue }, + { "kill", &debug_child_kill }, + { "step", &debug_child_step }, + { "next", &debug_child_next }, + { "finish", &debug_child_finish }, + { "break", &debug_child_break }, + { "disable", &debug_child_disable }, + { "enable", &debug_child_enable }, + { "delete", &debug_child_delete }, + { "print", &debug_child_print }, + { "frame", &debug_child_frame }, + { "info", &debug_child_info }, + { NULL, NULL } +}; + +static void debug_mi_error( const char * message ) +{ + debug_mi_format_token(); + printf( "^error,msg=\"%s\"\n(gdb) \n", message ); +} + +static void debug_error_( const char * message ) +{ + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + printf( "%s\n", message ); + } + else if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_error( message ); + } +} + +static const char * debug_format_message( const char * format, va_list vargs ) +{ + char * buf; + int result; + int sz = 80; + for ( ; ; ) + { + va_list args; + buf = (char *)malloc( sz ); + if ( !buf ) + return 0; + #ifndef va_copy + args = vargs; + #else + va_copy( args, vargs ); + #endif + #if defined(_MSC_VER) && (_MSC_VER <= 1310) + result = _vsnprintf( buf, sz, format, args ); + #else + result = vsnprintf( buf, sz, format, args ); + #endif + va_end( args ); + if ( 0 <= result && result < sz ) + return buf; + free( buf ); + if ( result < 0 ) + return 0; + sz = result + 1; + } +} + +static void debug_error( const char * format, ... ) +{ + va_list args; + const char * msg; + va_start( args, format ); + msg = debug_format_message( format, args ); + va_end( args ); + if ( !msg ) + { + debug_error_( "Failed formatting error message." ); + return; + } + debug_error_( msg ); + free( ( void * )msg ); +} + +static void debug_parent_child_exited( int pid, int exit_code ) +{ + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + printf( "Child %d exited with status %d\n", (int)child_pid, (int)exit_code ); + } + else if ( debug_interface == DEBUG_INTERFACE_MI ) + { + if ( exit_code == 0 ) + printf( "*stopped,reason=\"exited-normally\"\n(gdb) \n" ); + else + printf( "*stopped,reason=\"exited\",exit-code=\"%d\"\n(gdb) \n", exit_code ); + } + else + { + assert( !"Wrong value of debug_interface." ); + } +} + +#if !NT + +static void debug_parent_child_signalled( int pid, int id ) +{ + + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + printf( "Child %d exited on signal %d\n", child_pid, id ); + } + else if ( debug_interface == DEBUG_INTERFACE_MI ) + { + const char * name = "unknown"; + const char * meaning = "unknown"; + switch( id ) + { + case SIGINT: name = "SIGINT"; meaning = "Interrupt"; break; + } + printf("*stopped,reason=\"exited-signalled\",signal-name=\"%s\",signal-meaning=\"%s\"\n(gdb) \n", name, meaning); + } + else + { + assert( !"Wrong value of debug_interface." 
); + } +} + +#endif + +static void debug_parent_on_breakpoint( void ) +{ + FRAME_INFO base; + int id; + id = debug_int_read( command_child ); + fprintf( command_output, "info frame\n" ); + fflush( command_output ); + debug_frame_read( command_child, &base ); + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + printf( "Breakpoint %d, ", id ); + debug_print_frame_info( base ); + printf( "\n" ); + debug_print_source( base.file, base.line ); + } + else if ( debug_interface == DEBUG_INTERFACE_MI ) + { + printf( "*stopped,reason=\"breakpoint-hit\",bkptno=\"%d\",disp=\"keep\",", id ); + debug_mi_print_frame_info( &base ); + printf( ",thread-id=\"1\",stopped-threads=\"all\"" ); + printf( "\n(gdb) \n" ); + } + else + { + assert( !"Wrong value if debug_interface" ); + } + fflush( stdout ); +} + +static void debug_parent_on_end_stepping( void ) +{ + FRAME_INFO base; + fprintf( command_output, "info frame\n" ); + fflush( command_output ); + debug_frame_read( command_child, &base ); + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + debug_print_source( base.file, base.line ); + } + else + { + printf( "*stopped,reason=\"end-stepping-range\"," ); + debug_mi_print_frame_info( &base ); + printf( ",thread-id=\"1\"" ); + printf( "\n(gdb) \n" ); + } + fflush( stdout ); +} + +/* Waits for events from the child. */ +static void debug_parent_wait( int print_message ) +{ + int ch = fgetc( command_child ); + if ( ch == DEBUG_MSG_BREAKPOINT ) + { + debug_parent_on_breakpoint(); + } + else if ( ch == DEBUG_MSG_END_STEPPING ) + { + debug_parent_on_end_stepping(); + } + else if ( ch == DEBUG_MSG_SETUP ) + { + /* FIXME: This is handled in the caller, but it would make + more sense to handle it here. */ + return; + } + else if ( ch == EOF ) + { +#if NT + WaitForSingleObject( child_handle, INFINITE ); + if ( print_message ) + { + DWORD exit_code; + GetExitCodeProcess( child_handle, &exit_code ); + debug_parent_child_exited( (int)child_pid, (int)exit_code ); + } + CloseHandle( child_handle ); +#else + int status; + int pid; + while ( ( pid = waitpid( child_pid, &status, 0 ) ) == -1 ) + if ( errno != EINTR ) + break; + if ( print_message ) + { + if ( WIFEXITED( status ) ) + debug_parent_child_exited( child_pid, WEXITSTATUS( status ) ); + else if ( WIFSIGNALED( status ) ) + debug_parent_child_signalled( child_pid, WTERMSIG( status ) ); + } +#endif + fclose( command_child ); + fclose( command_output ); + debug_state = DEBUG_NO_CHILD; + } +} + +/* Prints the message for starting the child. 
*/ +static void debug_parent_run_print( int argc, const char * * argv ) +{ + int i; + extern char const * saved_argv0; + char * name = executable_path( saved_argv0 ); + printf( "Starting program: %s", name ); + free( name ); + for ( i = 1; i < argc; ++i ) + { + printf( " %s", argv[ i ] ); + } + printf( "\n" ); + fflush( stdout ); +} + +#if NT + +void debug_init_handles( const char * in, const char * out ) +{ + HANDLE read_handle; + int read_fd; + HANDLE write_handle; + int write_fd; + + sscanf( in, "%p", &read_handle ); + read_fd = _open_osfhandle( (intptr_t)read_handle, _O_RDONLY ); + command_input = _fdopen( read_fd, "r" ); + + sscanf( out, "%p", &write_handle ); + write_fd = _open_osfhandle( (intptr_t)write_handle, _O_WRONLY ); + command_output = _fdopen( write_fd, "w" ); + + command_array = child_commands; + + /* Handle the initial setup */ + /* wake up the parent */ + fputc( DEBUG_MSG_SETUP, command_output ); + debug_listen(); +} + +static void init_parent_handles( HANDLE out, HANDLE in ) +{ + command_child = _fdopen( _open_osfhandle( (intptr_t)in, _O_RDONLY ), "r" ); + command_output = _fdopen( _open_osfhandle( (intptr_t)out, _O_WRONLY ), "w" ); +} + +static void debug_parent_copy_breakpoints( void ) +{ + int i; + for ( i = 0; i < num_breakpoints; ++i ) + { + fprintf( command_output, "break %s", object_str( breakpoints[ i ].file ) ); + if ( breakpoints[ i ].line != -1 ) + { + fprintf( command_output, ":%d", breakpoints[ i ].line ); + } + fprintf( command_output, "\n" ); + + switch ( breakpoints[ i ].status ) + { + case BREAKPOINT_ENABLED: + break; + case BREAKPOINT_DISABLED: + fprintf( command_output, "disable %d\n", i + 1 ); + break; + case BREAKPOINT_DELETED: + fprintf( command_output, "delete %d\n", i + 1 ); + break; + default: + assert( !"Wrong breakpoint status." ); + } + } + fflush( command_output ); +} + +#endif + +static void debug_start_child( int argc, const char * * argv ) +{ +#if NT + char buf[ 80 ]; + HANDLE pipe1[ 2 ]; + HANDLE pipe2[ 2 ]; + string self[ 1 ]; + string command_line[ 1 ]; + SECURITY_ATTRIBUTES sa = { sizeof( SECURITY_ATTRIBUTES ), NULL, TRUE }; + PROCESS_INFORMATION pi = { NULL, NULL, 0, 0 }; + STARTUPINFOA si = { sizeof( STARTUPINFOA ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 }; + assert( debug_state == DEBUG_NO_CHILD ); + if ( ! CreatePipe( &pipe1[ 0 ], &pipe1[ 1 ], &sa, 0 ) ) + { + printf("internal error: CreatePipe:1: 0x%08lx\n", GetLastError()); + return; + } + if ( ! CreatePipe( &pipe2[ 0 ], &pipe2[ 1 ], &sa, 0 ) ) + { + printf("internal error: CreatePipe:2: 0x%08lx\n", GetLastError()); + CloseHandle( pipe1[ 0 ] ); + CloseHandle( pipe1[ 1 ] ); + return; + } + string_new( self ); + if ( ! get_module_filename( self ) ) + { + printf("internal error\n"); + CloseHandle( pipe1[ 0 ] ); + CloseHandle( pipe1[ 1 ] ); + CloseHandle( pipe2[ 0 ] ); + CloseHandle( pipe2[ 1 ] ); + return; + } + string_copy( command_line, "b2 " ); + /* Pass the handles as the first and second arguments. */ + string_append( command_line, debugger_opt ); + sprintf( buf, "%p", pipe1[ 0 ] ); + string_append( command_line, buf ); + string_push_back( command_line, ' ' ); + string_append( command_line, debugger_opt ); + sprintf( buf, "%p", pipe2[ 1 ] ); + string_append( command_line, buf ); + /* Pass the rest of the command line. 
*/ + { + int i; + for ( i = 1; i < argc; ++i ) + { + string_push_back( command_line, ' ' ); + string_append( command_line, argv[ i ] ); + } + } + SetHandleInformation( pipe1[ 1 ], HANDLE_FLAG_INHERIT, 0 ); + SetHandleInformation( pipe2[ 0 ], HANDLE_FLAG_INHERIT, 0 ); + if ( ! CreateProcessA( + self->value, + command_line->value, + NULL, + NULL, + TRUE, + 0, + NULL, + NULL, + &si, + &pi + ) ) + { + printf("internal error\n"); + CloseHandle( pipe1[ 0 ] ); + CloseHandle( pipe1[ 1 ] ); + CloseHandle( pipe2[ 0 ] ); + CloseHandle( pipe2[ 1 ] ); + string_free( self ); + string_free( command_line ); + return; + } + child_pid = pi.dwProcessId; + child_handle = pi.hProcess; + CloseHandle( pi.hThread ); + CloseHandle( pipe1[ 0 ] ); + CloseHandle( pipe2[ 1 ] ); + string_free( self ); + string_free( command_line ); + + debug_state = DEBUG_RUN; + + init_parent_handles( pipe1[ 1 ], pipe2[ 0 ] ); + debug_parent_wait( 1 ); + debug_parent_copy_breakpoints(); + fprintf( command_output, "continue\n" ); + fflush( command_output ); +#else + int pipe1[2]; + int pipe2[2]; + int write_fd; + int read_fd; + int pid; + assert( debug_state == DEBUG_NO_CHILD ); + if (pipe(pipe1) == -1) + { + printf("internal error: pipe:1: %s\n", strerror(errno)); + return; + } + if (pipe(pipe2) == -1) + { + close( pipe1[ 0 ] ); + close( pipe1[ 1 ] ); + printf("internal error: pipe:2: %s\n", strerror(errno)); + return; + } + + pid = fork(); + if ( pid == -1 ) + { + /* error */ + close( pipe1[ 0 ] ); + close( pipe1[ 1 ] ); + close( pipe2[ 0 ] ); + close( pipe2[ 1 ] ); + printf("internal error: fork: %s\n", strerror(errno)); + return; + } + else if ( pid == 0 ) + { + /* child */ + extern const char * saved_argv0; + read_fd = pipe1[ 0 ]; + write_fd = pipe2[ 1 ]; + close( pipe2[ 0 ] ); + close( pipe1[ 1 ] ); + command_array = child_commands; + argv[ 0 ] = executable_path( saved_argv0 ); + debug_child_data.argc = argc; + debug_child_data.argv = argv; + command_input = fdopen( read_fd, "r" ); + command_output = fdopen( write_fd, "w" ); + longjmp( debug_child_data.jmp, 1 ); + } + else + { + /* parent */ + read_fd = pipe2[ 0 ]; + write_fd = pipe1[ 1 ]; + close( pipe1[ 0 ] ); + close( pipe2[ 1 ] ); + command_output = fdopen( write_fd, "w" ); + command_child = fdopen( read_fd, "r" ); + child_pid = pid; + } + debug_state = DEBUG_RUN; +#endif +} + +static void debug_parent_run( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_RUN ) + { + fprintf( command_output, "kill\n" ); + fflush( command_output ); + debug_parent_wait( 1 ); + } + debug_parent_run_print( argc, argv ); + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + printf( "=thread-created,id=\"1\",group-id=\"i1\"\n" ); + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + } + debug_start_child( argc, argv ); + debug_parent_wait( 1 ); +} + +static int debug_parent_forward_nowait( int argc, const char * * argv, int print_message, int require_child ) +{ + int i; + if ( debug_state == DEBUG_NO_CHILD ) + { + if ( require_child ) + printf( "The program is not being run.\n" ); + return 1; + } + fputs( argv[ 0 ], command_output ); + for( i = 1; i < argc; ++i ) + { + fputc( ' ', command_output ); + fputs( argv[ i ], command_output ); + } + fputc( '\n', command_output ); + fflush( command_output ); + return 0; +} + +/* FIXME: This function should be eliminated when I finish all stdout to the parent. 
*/ +static void debug_parent_forward( int argc, const char * * argv, int print_message, int require_child ) +{ + if ( debug_parent_forward_nowait( argc, argv, print_message, require_child ) != 0 ) + { + return; + } + debug_parent_wait( print_message ); +} + +static void debug_parent_continue( int argc, const char * * argv ) +{ + if ( argc > 1 ) + { + debug_error( "Too many arguments to continue." ); + return; + } + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + } + debug_parent_forward( 1, argv, 1, 1 ); +} + +static void debug_parent_kill( int argc, const char * * argv ) +{ + if ( argc > 1 ) + { + debug_error( "Too many arguments to kill." ); + return; + } + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + fflush( stdout ); + } + debug_parent_forward( 1, argv, 0, 1 ); +} + +static void debug_parent_step( int argc, const char * * argv ) +{ + if ( argc > 1 ) + { + debug_error( "Too many arguments to step." ); + return; + } + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + } + debug_parent_forward( 1, argv, 1, 1 ); +} + +static void debug_parent_next( int argc, const char * * argv ) +{ + if ( argc > 1 ) + { + debug_error( "Too many arguments to next." ); + return; + } + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + } + debug_parent_forward( 1, argv, 1, 1 ); +} + +static void debug_parent_finish( int argc, const char * * argv ) +{ + if ( argc > 1 ) + { + debug_error( "Too many arguments to finish." ); + return; + } + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + } + debug_parent_forward( 1, argv, 1, 1 ); +} + +static void debug_parent_break( int argc, const char * * argv ) +{ + int id; + if ( argc < 2 ) + { + debug_error( "Missing argument to break." ); + return; + } + else if ( argc > 2 ) + { + debug_error( "Too many arguments to break." ); + return; + } + id = debug_add_breakpoint( argv[ 1 ] ); + debug_parent_forward_nowait( argc, argv, 1, 0 ); + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + printf( "Breakpoint %d set at %s\n", id, argv[ 1 ] ); + } + else if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + } + else + { + assert( !"wrong value of debug_interface." ); + } +} + +int check_breakpoint_fn_args( int argc, const char * * argv ) +{ + if ( argc < 2 ) + { + debug_error( "Missing argument to %s.", argv[ 0 ] ); + return 0; + } + else if ( argc > 2 ) + { + debug_error( "Too many arguments to %s.", argv[ 0 ] ); + return 0; + } + else + { + char * end; + long x = strtol( argv[ 1 ], &end, 10 ); + if ( *end ) + { + debug_error( "Invalid breakpoint number %s.", argv[ 1 ] ); + return 0; + } + if ( x < 1 || x > num_breakpoints || breakpoints[ x - 1 ].status == BREAKPOINT_DELETED ) + { + debug_error( "Unknown breakpoint %s.", argv[ 1 ] ); + return 0; + } + } + return 1; +} + +static void debug_parent_disable( int argc, const char * * argv ) +{ + if ( ! 
check_breakpoint_fn_args( argc, argv ) ) + { + return; + } + debug_child_disable( argc, argv ); + debug_parent_forward_nowait( 2, argv, 1, 0 ); + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + } +} + +static void debug_parent_enable( int argc, const char * * argv ) +{ + if ( ! check_breakpoint_fn_args( argc, argv ) ) + { + return; + } + debug_child_enable( argc, argv ); + debug_parent_forward_nowait( 2, argv, 1, 0 ); + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + } +} + +static void debug_parent_delete( int argc, const char * * argv ) +{ + if ( ! check_breakpoint_fn_args( argc, argv ) ) + { + return; + } + debug_child_delete( argc, argv ); + debug_parent_forward_nowait( 2, argv, 1, 0 ); + if ( debug_interface == DEBUG_INTERFACE_MI ) + { + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + } +} + +static void debug_parent_clear( int argc, const char * * argv ) +{ + char buf[ 16 ]; + const char * new_args[ 2 ]; + int id; + if ( argc < 2 ) + { + debug_error( "Missing argument to clear." ); + return; + } + else if ( argc > 2 ) + { + debug_error( "Too many arguments to clear." ); + return; + } + id = get_breakpoint_by_name( argv[ 1 ] ); + if ( id == 0 ) + { + debug_error( "No breakpoint at %s.", argv[ 1 ] ); + return; + } + + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + printf( "Deleted breakpoint %d\n", id ); + } + + sprintf( buf, "%d", id ); + new_args[ 0 ] = "delete"; + new_args[ 1 ] = buf; + debug_parent_delete( 2, new_args ); +} + +static void debug_parent_print( int argc, const char * * argv ) +{ + LIST * result; + if ( debug_parent_forward_nowait( argc, argv, 1, 1 ) != 0 ) + { + return; + } + result = debug_list_read( command_child ); + + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + { + list_print( result ); + printf( "\n" ); + } + else if ( debug_interface == DEBUG_INTERFACE_MI ) + { + printf( "~\"$1 = " ); + list_print( result ); + printf( "\"\n~\"\\n\"\n" ); + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + } + + list_free( result ); +} + +static void debug_parent_backtrace( int argc, const char * * argv ) +{ + const char * new_args[ 3 ]; + OBJECT * depth_str; + int depth; + int i; + FRAME_INFO frame; + + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_error( "The program is not being run." ); + return; + } + + new_args[ 0 ] = "info"; + new_args[ 1 ] = "frame"; + + fprintf( command_output, "info depth\n" ); + fflush( command_output ); + depth_str = debug_object_read( command_child ); + depth = atoi( object_str( depth_str ) ); + object_free( depth_str ); + + for ( i = 0; i < depth; ++i ) + { + char buf[ 16 ]; + sprintf( buf, "%d", i ); + new_args[ 2 ] = buf; + debug_parent_forward_nowait( 3, new_args, 0, 0 ); + debug_frame_read( command_child, &frame ); + printf( "#%d in ", i ); + debug_print_frame_info( frame ); + printf( "\n" ); + } + fflush( stdout ); +} + +static void debug_parent_quit( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_RUN ) + { + fprintf( command_output, "kill\n" ); + fflush( command_output ); + debug_parent_wait( 0 ); + } + b2::clean_exit( 0 ); +} + +static const char * const help_text[][2] = +{ + { + "run", + "run \n" + "Creates a new b2 child process passing on the command line." 
+ " Terminates\nthe current child (if any).\n" + }, + { + "continue", + "continue\nContinue debugging\n" + }, + { + "step", + "step\nContinue to the next statement\n" + }, + { + "next", + "next\nContinue to the next line in the current frame\n" + }, + { + "finish", + "finish\nContinue to the end of the current frame\n" + }, + { + "break", + "break \n" + "Sets a breakpoint at . can be either a the name of a\nfunction or :\n" + }, + { + "disable", + "disable \nDisable a breakpoint\n" + }, + { + "enable", + "enable \nEnable a breakpoint\n" + }, + { + "delete", + "delete \nDelete a breakpoint\n" + }, + { + "clear", + "clear \nDelete the breakpoint at \n" + }, + { + "print", + "print \nDisplay the value of \n" + }, + { + "backtrace", + "backtrace\nDisplay the call stack\n" + }, + { + "kill", + "kill\nTerminate the child\n" + }, + { + "quit", + "quit\nExit the debugger\n" + }, + { + "help", + "help\nhelp \nShow help for debugger commands.\n" + }, + { 0, 0 } +}; + +static void debug_parent_help( int argc, const char * * argv ) +{ + if ( argc == 1 ) + { + printf( + "run - Start debugging\n" + "continue - Continue debugging\n" + "step - Continue to the next statement\n" + "next - Continue to the next line in the current frame\n" + "finish - Continue to the end of the current frame\n" + "break - Set a breakpoint\n" + "disable - Disable a breakpoint\n" + "enable - Enable a breakpoint\n" + "delete - Delete a breakpoint\n" + "clear - Delete a breakpoint by location\n" + ); + printf( + "print - Display an expression\n" + "backtrace - Display the call stack\n" + "kill - Terminate the child\n" + "quit - Exit the debugger\n" + "help - Debugger help\n" + ); + } + else if ( argc == 2 ) + { + int i; + for ( i = 0; help_text[ i ][ 0 ]; ++i ) + { + if ( strcmp( argv[ 1 ], help_text[ i ][ 0 ] ) == 0 ) + { + printf( "%s", help_text[ i ][ 1 ] ); + return; + } + } + printf( "No command named %s\n", argv[ 1 ] ); + } +} + +static void debug_mi_break_insert( int argc, const char * * argv ); +static void debug_mi_break_delete( int argc, const char * * argv ); +static void debug_mi_break_disable( int argc, const char * * argv ); +static void debug_mi_break_enable( int argc, const char * * argv ); +static void debug_mi_break_info( int argc, const char * * argv ); +static void debug_mi_break_list( int argc, const char * * argv ); +static void debug_mi_inferior_tty_set( int argc, const char * * argv ); +static void debug_mi_gdb_exit( int argc, const char * * argv ); +static void debug_mi_gdb_set( int argc, const char * * argv ); +static void debug_mi_gdb_show( int argc, const char * * argv ); +static void debug_mi_not_implemented( int argc, const char * * argv ); +static void debug_mi_file_list_exec_source_files( int argc, const char * * argv ); +static void debug_mi_file_list_exec_source_file( int argc, const char * * argv ); +static void debug_mi_thread_info( int argc, const char * * argv ); +static void debug_mi_thread_select( int argc, const char * * argv ); +static void debug_mi_stack_info_frame( int argc, const char * * argv ); +static void debug_mi_stack_select_frame( int argc, const char * * argv ); +static void debug_mi_stack_list_variables( int argc, const char * * argv ); +static void debug_mi_stack_list_locals( int argc, const char * * argv ); +static void debug_mi_stack_list_frames( int argc, const char * * argv ); +static void debug_mi_list_target_features( int argc, const char * * argv ); +static void debug_mi_exec_run( int argc, const char * * argv ); +static void debug_mi_exec_continue( int argc, const 
char * * argv ); +static void debug_mi_exec_step( int argc, const char * * argv ); +static void debug_mi_exec_next( int argc, const char * * argv ); +static void debug_mi_exec_finish( int argc, const char * * argv ); +static void debug_mi_data_list_register_names( int argc, const char * * argv ); +static void debug_mi_data_evaluate_expression( int argc, const char * * argv ); +static void debug_mi_interpreter_exec( int argc, const char * * argv ); + +static struct command_elem parent_commands[] = +{ + { "run", &debug_parent_run }, + { "continue", &debug_parent_continue }, + { "kill", &debug_parent_kill }, + { "step", &debug_parent_step }, + { "next", &debug_parent_next }, + { "finish", &debug_parent_finish }, + { "break", &debug_parent_break }, + { "disable", &debug_parent_disable }, + { "enable", &debug_parent_enable }, + { "delete", &debug_parent_delete }, + { "clear", &debug_parent_clear }, + { "print", &debug_parent_print }, + { "backtrace", &debug_parent_backtrace }, + { "quit", &debug_parent_quit }, + { "help", &debug_parent_help }, + { "-break-insert", &debug_mi_break_insert }, + { "-break-delete", &debug_mi_break_delete }, + { "-break-disable", &debug_mi_break_disable }, + { "-break-enable", &debug_mi_break_enable }, + { "-break-info", &debug_mi_break_info }, + { "-break-list", &debug_mi_break_list }, + { "-inferior-tty-set", &debug_mi_inferior_tty_set }, + { "-gdb-exit", &debug_mi_gdb_exit }, + { "-gdb-set", &debug_mi_gdb_set }, + { "-gdb-show", &debug_mi_gdb_show }, + { "-enable-pretty-printing", &debug_mi_not_implemented }, + { "-file-list-exec-source-files", &debug_mi_file_list_exec_source_files }, + { "-file-list-exec-source-file", &debug_mi_file_list_exec_source_file }, + { "-thread-info", &debug_mi_thread_info }, + { "-thread-select", &debug_mi_thread_select }, + { "-stack-info-frame", &debug_mi_stack_info_frame }, + { "-stack-select-frame", &debug_mi_stack_select_frame }, + { "-stack-list-variables", &debug_mi_stack_list_variables }, + { "-stack-list-locals", &debug_mi_stack_list_locals }, + { "-stack-list-frames", &debug_mi_stack_list_frames }, + { "-list-target-features", &debug_mi_list_target_features }, + { "-exec-run", &debug_mi_exec_run }, + { "-exec-continue", &debug_mi_exec_continue }, + { "-exec-step", &debug_mi_exec_step }, + { "-exec-next", &debug_mi_exec_next }, + { "-exec-finish", &debug_mi_exec_finish }, + { "-data-list-register-names", &debug_mi_data_list_register_names }, + { "-data-evaluate-expression", &debug_mi_data_evaluate_expression }, + { "-interpreter-exec", &debug_mi_interpreter_exec }, + { NULL, NULL } +}; + +static void debug_mi_format_token( void ) +{ + if ( current_token != 0 ) + { + printf( "%d", current_token ); + } +} + +static void debug_mi_format_breakpoint( int id ) +{ + struct breakpoint * ptr = &breakpoints[ id - 1 ]; + printf( "bkpt={" ); + printf( "number=\"%d\"", id ); + printf( ",type=\"breakpoint\"" ); + printf( ",disp=\"keep\"" ); /* FIXME: support temporary breakpoints. */ + printf( ",enabled=\"%s\"", ptr->status == BREAKPOINT_ENABLED ? 
"y" : "n" ); + /* addr */ + if ( ptr->line == -1 ) + { + printf( ",func=\"%s\"", object_str( ptr->file ) ); + } + else + { + printf( ",file=\"%s\"", object_str( ptr->file ) ); + printf( ",line=\"%d\"", ptr->line ); + printf( ",fullname=\"%s\"", object_str( ptr->file ) ); + } + /* fullname */ + /* times */ + // printf( "" ); + printf( "}" ); +} + +static int breakpoint_id_parse( const char * name ) +{ + int id = atoi( name ); + if ( id > num_breakpoints || id < 1 || breakpoints[ id ].status == BREAKPOINT_DELETED ) + return -1; + return id; +} + +static void debug_mi_break_insert( int argc, const char * * argv ) +{ + const char * inner_argv[ 2 ]; + // int temporary = 0; /* FIXME: not supported yet */ + // int hardware = 0; /* unsupported */ + // int force = 1; /* We don't have global debug information... */ + int disabled = 0; + // int tracepoint = 0; /* unsupported */ + // int thread_id = 0; + // int ignore_count = 0; + // const char * condition; /* FIXME: not supported yet */ + const char * location; + int id; + for ( --argc, ++argv; argc; --argc, ++argv ) + { + if ( strcmp( *argv, "-t" ) == 0 ) + { + // temporary = 1; + } + else if ( strcmp( *argv, "-h" ) == 0 ) + { + // hardware = 1; + } + else if ( strcmp( *argv, "-f" ) == 0 ) + { + // force = 1; + } + else if ( strcmp( *argv, "-d" ) == 0 ) + { + disabled = 1; + } + else if ( strcmp( *argv, "-a" ) == 0 ) + { + // tracepoint = 1; + } + else if ( strcmp( *argv, "-c" ) == 0 ) + { + if ( argc < 2 ) + { + debug_mi_error( "Missing argument for -c." ); + return; + } + + // condition = argv[ 1 ]; + --argc; + ++argv; + } + else if ( strcmp( *argv, "-i" ) == 0 ) + { + if ( argc < 2 ) + { + debug_mi_error( "Missing argument for -i." ); + return; + } + + // ignore_count = atoi( argv[ 1 ] ); + --argc; + ++argv; + } + else if ( strcmp( *argv, "-p" ) == 0 ) + { + if ( argc < 2 ) + { + debug_mi_error( "Missing argument for -p." ); + return; + } + + // thread_id = atoi( argv[ 1 ] ); + --argc; + ++argv; + } + else if ( strcmp( *argv, "--" ) == 0 ) + { + --argc; + ++argv; + break; + } + else if ( **argv != '-' ) + { + break; + } + else + { + debug_mi_error( "Unknown argument." ); + return; + } + } + if ( argc > 1 ) + { + debug_mi_error( "Too many arguments for -break-insert." ); + return; + } + + if ( argc == 1 ) + { + location = *argv; + } + else + { + debug_mi_error( "Not implemented: -break-insert with no location." 
); + return; + } + inner_argv[ 0 ] = "break"; + inner_argv[ 1 ] = location; + + id = debug_add_breakpoint( location ); + debug_parent_forward_nowait( 2, inner_argv, 1, 0 ); + + if ( disabled ) + { + char buf[ 80 ]; + sprintf( buf, "%d", num_breakpoints ); + inner_argv[ 0 ] = "disable"; + inner_argv[ 1 ] = buf; + debug_child_disable( 2, inner_argv ); + debug_parent_forward_nowait( 2, inner_argv, 1, 0 ); + } + + debug_mi_format_token(); + printf( "^done," ); + debug_mi_format_breakpoint( id ); + printf( "\n(gdb) \n" ); +} + +static void debug_mi_break_delete( int argc, const char * * argv ) +{ + if ( argc < 2 ) + { + debug_mi_error( "Not enough arguments for -break-delete" ); + return; + } + for ( --argc, ++argv; argc; --argc, ++argv ) + { + const char * inner_argv[ 2 ]; + int id = breakpoint_id_parse( *argv ); + if ( id == -1 ) + { + debug_mi_error( "Not a valid breakpoint" ); + return; + } + inner_argv[ 0 ] = "delete"; + inner_argv[ 1 ] = *argv; + debug_parent_delete( 2, inner_argv ); + } +} + +static void debug_mi_break_enable( int argc, const char * * argv ) +{ + if ( argc < 2 ) + { + debug_mi_error( "Not enough arguments for -break-enable" ); + return; + } + for ( --argc, ++argv; argc; --argc, ++argv ) + { + const char * inner_argv[ 2 ]; + int id = breakpoint_id_parse( *argv ); + if ( id == -1 ) + { + debug_mi_error( "Not a valid breakpoint" ); + return; + } + inner_argv[ 0 ] = "enable"; + inner_argv[ 1 ] = *argv; + debug_parent_enable( 2, inner_argv ); + } +} + +static void debug_mi_break_disable( int argc, const char * * argv ) +{ + if ( argc < 2 ) + { + debug_mi_error( "Not enough arguments for -break-disable" ); + return; + } + for ( --argc, ++argv; argc; --argc, ++argv ) + { + const char * inner_argv[ 2 ]; + int id = breakpoint_id_parse( *argv ); + if ( id == -1 ) + { + debug_mi_error( "Not a valid breakpoint" ); + return; + } + inner_argv[ 0 ] = "disable"; + inner_argv[ 1 ] = *argv; + debug_parent_disable( 2, inner_argv ); + } +} + +static void debug_mi_format_breakpoint_header_col( int width, int alignment, const char * col_name, const char * colhdr ) +{ + printf( "{width=\"%d\",alignment=\"%d\",col_name=\"%s\",colhdr=\"%s\"}", width, alignment, col_name, colhdr ); +} + +static void debug_mi_format_breakpoint_hdr( void ) +{ + printf( "hdr=[" ); + debug_mi_format_breakpoint_header_col( 7, -1, "number", "Num" ); + printf( "," ); + debug_mi_format_breakpoint_header_col( 14, -1, "type", "Type" ); + printf( "," ); + debug_mi_format_breakpoint_header_col( 4, -1, "disp", "Disp" ); + printf( "," ); + debug_mi_format_breakpoint_header_col( 3, -1, "enabled", "Enb" ); + printf( "," ); + debug_mi_format_breakpoint_header_col( 10, -1, "addr", "Address" ); + printf( "," ); + debug_mi_format_breakpoint_header_col( 40, 2, "what", "What" ); + printf( "]" ); +} + +static void debug_mi_break_info( int argc, const char * * argv ) +{ + int id; + --argc; + ++argv; + if ( strcmp( *argv, "--" ) == 0 ) + { + --argc; + ++argv; + } + if ( argc < 1 ) + { + debug_mi_error( "Not enough arguments for -break-info" ); + return; + } + if ( argc > 1 ) + { + debug_mi_error( "Too many arguments for -break-info" ); + } + + id = breakpoint_id_parse( *argv ); + if ( id == -1 ) + { + debug_mi_error( "No such breakpoint." 
); + return; + } + + printf( "^done,BreakpointTable={" + "nr_rows=\"%d\",nr_cols=\"6\",", 1 ); + debug_mi_format_breakpoint_hdr(); + printf( ",body=[" ); + debug_mi_format_breakpoint( id ); + printf( "]}" ); + printf("\n(gdb) \n"); +} + +static void debug_mi_break_list( int argc, const char * * argv ) +{ + int number; + int i; + int first; + if ( argc > 2 || ( argc == 2 && strcmp( argv[ 1 ], "--" ) ) ) + { + debug_mi_error( "Too many arguments for -break-list" ); + return; + } + + number = 0; + for ( i = 0; i < num_breakpoints; ++i ) + if ( breakpoints[ i ].status != BREAKPOINT_DELETED ) + ++number; + debug_mi_format_token(); + printf( "^done,BreakpointTable={" + "nr_rows=\"%d\",nr_cols=\"6\",", number ); + debug_mi_format_breakpoint_hdr(); + printf( ",body=[" ); + first = 1; + for ( i = 0; i < num_breakpoints; ++i ) + if ( breakpoints[ i ].status != BREAKPOINT_DELETED ) + { + if ( first ) first = 0; + else printf( "," ); + debug_mi_format_breakpoint( i + 1 ); + } + printf( "]}" ); + printf("\n(gdb) \n"); +} + +static void debug_mi_inferior_tty_set( int argc, const char * * argv ) +{ + /* FIXME: implement this for real */ + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); +} + +static void debug_mi_gdb_exit( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_RUN ) + { + fprintf( command_output, "kill\n" ); + fflush( command_output ); + debug_parent_wait( 0 ); + } + debug_mi_format_token(); + printf( "^exit\n" ); + b2::clean_exit( EXIT_SUCCESS ); +} + +static void debug_mi_gdb_set( int argc, const char * * argv ) +{ + /* FIXME: implement this for real */ + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); +} + +static void debug_mi_gdb_show( int argc, const char * * argv ) +{ + const char * value = ""; + /* FIXME: implement this for real */ + debug_mi_format_token(); + value = "(gdb) "; + printf( "^done,value=\"%s\"\n(gdb) \n", value ); +} + +static void debug_mi_not_implemented( int argc, const char * * argv ) +{ + /* FIXME: implement this for real */ + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); +} + +void debug_mi_file_list_exec_source_files( int argc, const char * * argv ) +{ + /* FIXME: implement this for real */ + debug_mi_format_token(); + printf( "^done,files=[]\n(gdb) \n" ); +} + +static void debug_mi_file_list_exec_source_file( int argc, const char * * argv ) +{ + /* FIXME: implement this for real */ + debug_mi_format_token(); + printf( "^error,msg=\"Don't know how to handle this yet\"\n(gdb) \n" ); +} + +static void debug_mi_thread_info( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_mi_format_token(); + printf( "^done,threads=[]\n(gdb) \n" ); + } + else + { + const char * new_args[] = { "info", "frame" }; + FRAME_INFO info; + debug_parent_forward_nowait( 2, new_args, 0, 0 ); + debug_frame_read( command_child, &info ); + + debug_mi_format_token(); + printf( "^done,threads=[{id=\"1\"," ); + debug_mi_print_frame_info( &info ); + printf( "}],current-thread-id=\"1\"\n(gdb) \n" ); + } +} + +static void debug_mi_thread_select( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + /* FIXME: better error handling*/ + debug_mi_format_token(); + printf( "^error,msg=\"Thread ID 1 not known\"\n(gdb) \n" ); + } + else + { + const char * new_args[] = { "info", "frame" }; + FRAME_INFO info; + debug_parent_forward_nowait( 2, new_args, 0, 0 ); + debug_frame_read( command_child, &info ); + + debug_mi_format_token(); + printf( "^done,new-thread-id=\"1\"," ); + debug_mi_print_frame_info( &info ); + 
printf( "\n(gdb) \n" ); + } +} + +static void debug_mi_stack_select_frame( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_mi_format_token(); + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + const char * new_args[ 2 ]; + new_args[ 0 ] = "frame"; + new_args[ 1 ] = argv[ 1 ]; + debug_parent_forward_nowait( 2, new_args, 0, 0 ); + debug_mi_format_token(); + printf( "^done\n(gdb) \n" ); + } +} + +static void debug_mi_stack_info_frame( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_mi_format_token(); + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + FRAME_INFO info; + fprintf( command_output, "info frame\n" ); + fflush( command_output ); + debug_frame_read( command_child, &info ); + debug_mi_format_token(); + printf( "^done," ); + debug_mi_print_frame_info( &info ); + printf( "\n(gdb) \n" ); + } +} + +static void debug_mi_stack_list_variables( int argc, const char * * argv ) +{ +#define DEBUG_PRINT_VARIABLES_NO_VALUES 1 +#define DEBUG_PRINT_VARIABLES_ALL_VALUES 2 +#define DEBUG_PRINT_VARIABLES_SIMPLE_VALUES 3 + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_mi_format_token(); + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + return; + } + --argc; + ++argv; + for ( ; argc; --argc, ++argv ) + { + if ( strcmp( *argv, "--thread" ) == 0 ) + { + /* Only one thread. */ + --argc; + ++argv; + } + else if ( strcmp( *argv, "--no-values" ) == 0 ) + { + // print_values = DEBUG_PRINT_VARIABLES_NO_VALUES; + } + else if ( strcmp( *argv, "--all-values" ) == 0 ) + { + // print_values = DEBUG_PRINT_VARIABLES_ALL_VALUES; + } + else if ( strcmp( *argv, "--simple-values" ) == 0 ) + { + // print_values = DEBUG_PRINT_VARIABLES_SIMPLE_VALUES; + } + else if ( strcmp( *argv, "--" ) == 0 ) + { + --argc; + ++argv; + break; + } + else if ( argv[ 0 ][ 0 ] == '-' ) + { + debug_mi_format_token(); + printf( "^error,msg=\"Unknown argument %s\"\n(gdb) \n", *argv ); + return; + } + else + { + break; + } + } + if ( argc != 0 ) + { + debug_mi_format_token(); + printf( "^error,msg=\"Too many arguments for -stack-list-variables\"\n(gdb) \n" ); + return; + } + + { + LIST * vars; + LISTITER iter, end; + int first = 1; + fprintf( command_output, "info locals\n" ); + fflush( command_output ); + vars = debug_list_read( command_child ); + debug_parent_wait( 0 ); + debug_mi_format_token(); + printf( "^done,variables=[" ); + for ( iter = list_begin( vars ), end = list_end( vars ); iter != end; iter = list_next( iter ) ) + { + OBJECT * varname = list_item( iter ); + string varbuf[1]; + const char * new_args[2]; + if ( first ) + { + first = 0; + } + else + { + printf( "," ); + } + printf( "{name=\"%s\",value=\"", object_str( varname ) ); + fflush( stdout ); + string_new( varbuf ); + string_append( varbuf, "$(" ); + string_append( varbuf, object_str( varname ) ); + string_append( varbuf, ")" ); + new_args[ 0 ] = "print"; + new_args[ 1 ] = varbuf->value; + debug_parent_forward( 2, new_args, 0, 0 ); + string_free( varbuf ); + printf( "\"}" ); + } + printf( "]\n(gdb) \n" ); + fflush( stdout ); + list_free( vars ); + } +} + +static void debug_mi_stack_list_locals( int argc, const char * * argv ) +{ +#define DEBUG_PRINT_VARIABLES_NO_VALUES 1 +#define DEBUG_PRINT_VARIABLES_ALL_VALUES 2 +#define DEBUG_PRINT_VARIABLES_SIMPLE_VALUES 3 + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_mi_format_token(); + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + return; + } + --argc; + ++argv; + for ( ; argc; --argc, ++argv ) + { + if ( strcmp( *argv, 
"--thread" ) == 0 ) + { + /* Only one thread. */ + --argc; + ++argv; + if ( argc == 0 ) + { + debug_mi_format_token(); + printf( "^error,msg=\"Argument required for --thread.\"" ); + return; + } + } + else if ( strcmp( *argv, "--no-values" ) == 0 ) + { + // print_values = DEBUG_PRINT_VARIABLES_NO_VALUES; + } + else if ( strcmp( *argv, "--all-values" ) == 0 ) + { + // print_values = DEBUG_PRINT_VARIABLES_ALL_VALUES; + } + else if ( strcmp( *argv, "--simple-values" ) == 0 ) + { + // print_values = DEBUG_PRINT_VARIABLES_SIMPLE_VALUES; + } + else if ( strcmp( *argv, "--" ) == 0 ) + { + --argc; + ++argv; + break; + } + else if ( argv[ 0 ][ 0 ] == '-' ) + { + debug_mi_format_token(); + printf( "^error,msg=\"Unknown argument %s\"\n(gdb) \n", *argv ); + return; + } + else + { + break; + } + } + if ( argc != 0 ) + { + debug_mi_format_token(); + printf( "^error,msg=\"Too many arguments for -stack-list-variables\"\n(gdb) \n" ); + return; + } + + { + LIST * vars; + LISTITER iter, end; + int first = 1; + fprintf( command_output, "info locals\n" ); + fflush( command_output ); + vars = debug_list_read( command_child ); + debug_parent_wait( 0 ); + debug_mi_format_token(); + printf( "^done,locals=[" ); + for ( iter = list_begin( vars ), end = list_end( vars ); iter != end; iter = list_next( iter ) ) + { + OBJECT * varname = list_item( iter ); + string varbuf[1]; + const char * new_args[2]; + if ( first ) + { + first = 0; + } + else + { + printf( "," ); + } + printf( "{name=\"%s\",type=\"list\",value=\"", object_str( varname ) ); + fflush( stdout ); + string_new( varbuf ); + string_append( varbuf, "$(" ); + string_append( varbuf, object_str( varname ) ); + string_append( varbuf, ")" ); + new_args[ 0 ] = "print"; + new_args[ 1 ] = varbuf->value; + debug_parent_forward( 2, new_args, 0, 0 ); + string_free( varbuf ); + printf( "\"}" ); + } + printf( "]\n(gdb) \n" ); + fflush( stdout ); + list_free( vars ); + } +} + +static void debug_mi_stack_list_frames( int argc, const char * * argv ) +{ + int depth; + int i; + + if ( debug_state == DEBUG_NO_CHILD ) + { + debug_mi_format_token(); + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + return; + } + + fprintf( command_output, "info depth\n" ); + fflush( command_output ); + depth = debug_int_read( command_child ); + + debug_mi_format_token(); + printf( "^done,stack=[" ); + for ( i = 0; i < depth; ++i ) + { + FRAME_INFO frame; + fprintf( command_output, "info frame %d\n", i ); + fflush( command_output ); + if ( i != 0 ) + { + printf( "," ); + } + debug_frame_read( command_child, &frame ); + debug_mi_print_frame_info( &frame ); + } + printf( "]\n(gdb) \n" ); + fflush( stdout ); +} + +static void debug_mi_list_target_features( int argc, const char * * argv ) +{ + /* FIXME: implement this for real */ + debug_mi_format_token(); + printf( "^done,features=[\"async\"]\n(gdb) \n" ); +} + +static void debug_mi_exec_run( int argc, const char * * argv ) +{ + printf( "=thread-created,id=\"1\",group-id=\"i1\"\n" ); + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + debug_start_child( argc, argv ); + debug_parent_wait( 1 ); +} + +static void debug_mi_exec_continue( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + const char * new_args[] = { "continue" }; + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + debug_parent_forward( 1, new_args, 1, 0 ); + } +} + +static void debug_mi_exec_step( int argc, const char * * argv ) +{ + 
if ( debug_state == DEBUG_NO_CHILD ) + { + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + const char * new_args[] = { "step" }; + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + debug_parent_forward( 1, new_args, 1, 0 ); + } +} + +static void debug_mi_exec_next( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + const char * new_args[] = { "next" }; + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + debug_parent_forward( 1, new_args, 1, 0 ); + } +} + +static void debug_mi_exec_finish( int argc, const char * * argv ) +{ + if ( debug_state == DEBUG_NO_CHILD ) + { + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + const char * new_args[] = { "finish" }; + debug_mi_format_token(); + printf( "^running\n(gdb) \n" ); + fflush( stdout ); + debug_parent_forward( 1, new_args, 1, 0 ); + } +} + +static void debug_mi_data_list_register_names( int argc, const char * * argv ) +{ + debug_mi_format_token(); + printf( "^done,register-names=[]\n(gdb) \n" ); +} + +static void debug_mi_data_evaluate_expression( int argc, const char * * argv ) +{ + if ( argc < 2 ) + { + printf( "^error,msg=\"Not enough arguments for -data-evaluate-expression\"\n(gdb) \n" ); + } + if ( debug_state == DEBUG_NO_CHILD ) + { + printf( "^error,msg=\"No child\"\n(gdb) \n" ); + } + else + { + const char * new_args[ 2 ]; + debug_mi_format_token(); + printf( "^done,value=\"" ); + fflush( stdout ); + new_args[ 0 ] = "print"; + new_args[ 1 ] = argv[ 1 ]; + debug_parent_forward( 2, new_args, 1, 0 ); + printf( "\"\n(gdb) \n" ); + } +} + +static int process_command( char * command ); + +static void debug_mi_interpreter_exec( int argc, const char * * argv ) +{ + if ( argc < 3 ) + { + debug_mi_error( "Not enough arguments for -interpreter-exec" ); + } + process_command( (char *)argv[ 2 ] ); +} + +/* The debugger's main loop. */ +int debugger( void ) +{ + command_array = parent_commands; + command_input = stdin; + if ( debug_interface == DEBUG_INTERFACE_MI ) + printf( "=thread-group-added,id=\"i1\"\n(gdb) \n" ); + while ( 1 ) + { + if ( debug_interface == DEBUG_INTERFACE_CONSOLE ) + printf("(b2db) "); + fflush( stdout ); + read_command(); + } + return 0; +} + + +/* Runs the matching command in the current command_array. */ +static int run_command( int argc, const char * * argv ) +{ + struct command_elem * command; + const char * command_name; + if ( argc == 0 ) + { + return 1; + } + command_name = argv[ 0 ]; + /* Skip the GDB/MI token when choosing the command to run. */ + while( isdigit( *command_name ) ) ++command_name; + current_token = atoi( argv[ 0 ] ); + for( command = command_array; command->key; ++command ) + { + if ( strcmp( command->key, command_name ) == 0 ) + { + ( *command->command )( argc, argv ); + return 1; + } + } + debug_error( "Unknown command: %s", command_name ); + return 0; +} + +/* Parses a single command into whitespace separated tokens, and runs it. */ +static int process_command( char * line ) +{ + int result; + size_t capacity = 8; + std::vector tokens; + tokens.reserve(capacity); + char * iter = line; + char * saved = iter; + for ( ; ; ) + { + /* skip spaces */ + while ( *iter && isspace( *iter ) ) + { + ++iter; + } + if ( ! 
*iter ) + { + break; + } + /* Find the next token */ + saved = iter; + if ( *iter == '\"' ) + { + saved = ++iter; + /* FIXME: handle escaping */ + while ( *iter && *iter != '\"' ) + { + ++iter; + } + } + else + { + while ( *iter && ! isspace( *iter ) ) + { + ++iter; + } + } + /* append the token to the buffer */ + tokens.push_back(saved); + /* null terminate the token */ + if ( *iter ) + { + *iter++ = '\0'; + } + } + result = run_command( (int) tokens.size(), const_cast( &tokens[0] ) ); + return result; +} + +static int read_command( void ) +{ + int result; + int ch; + string line[ 1 ]; + auto line_delete = b2::jam::make_unique_bare_jptr( line, string_new, string_free ); + /* HACK: force line to be on the heap. */ + string_reserve( line, 64 ); + while( ( ch = fgetc( command_input ) ) != EOF ) + { + if ( ch == '\n' ) + { + break; + } + else + { + string_push_back( line, (char)ch ); + } + } + result = process_command( line->value ); + return result; +} + +static void debug_listen( void ) +{ + debug_state = DEBUG_STOPPED; + while ( debug_state == DEBUG_STOPPED ) + { + if ( feof( command_input ) ) + b2::clean_exit( 1 ); + fflush(stdout); + fflush( command_output ); + read_command(); + } + debug_selected_frame_number = 0; +} + +struct debug_child_data_t debug_child_data; +const char debugger_opt[] = "--b2db-internal-debug-handle="; +int debug_interface; diff --git a/src/boost/tools/build/src/engine/debugger.h b/src/boost/tools/build/src/engine/debugger.h new file mode 100644 index 000000000..d0bd1c689 --- /dev/null +++ b/src/boost/tools/build/src/engine/debugger.h @@ -0,0 +1,64 @@ +/* + * Copyright 2015 Steven Watanabe + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef DEBUGGER_SW20150314_H +#define DEBUGGER_SW20150314_H + +#include "config.h" +#include +#include "object.h" +#include "frames.h" + +#ifdef JAM_DEBUGGER + +void debug_on_instruction( FRAME * frame, OBJECT * file, int line ); +void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int line ); +void debug_on_exit_function( OBJECT * name ); +int debugger( void ); + +struct debug_child_data_t +{ + int argc; + const char * * argv; + jmp_buf jmp; +}; + +extern struct debug_child_data_t debug_child_data; +extern LIST * debug_print_result; +extern const char debugger_opt[]; +extern int debug_interface; + +#define DEBUG_INTERFACE_CONSOLE 1 +#define DEBUG_INTERFACE_MI 2 +#define DEBUG_INTERFACE_CHILD 3 + +#define debug_is_debugging() ( debug_interface != 0 ) +#define debug_on_enter_function( frame, name, file, line ) \ + ( debug_is_debugging()? \ + debug_on_enter_function( frame, name, file, line ) : \ + (void)0 ) +#define debug_on_exit_function( name ) \ + ( debug_is_debugging()? \ + debug_on_exit_function( name ) : \ + (void)0 ) + +#if NT + +void debug_init_handles( const char * in, const char * out ); + +#endif + +#else + +#define debug_on_instruction( frame, file, line ) ( ( void )0 ) +#define debug_on_enter_function( frame, name, file, line ) ( ( void )0 ) +#define debug_on_exit_function( name ) ( ( void )0 ) +#define debug_is_debugging() ( 0 ) + +#endif + +#endif diff --git a/src/boost/tools/build/src/engine/execcmd.cpp b/src/boost/tools/build/src/engine/execcmd.cpp new file mode 100644 index 000000000..dfbd5a7f8 --- /dev/null +++ b/src/boost/tools/build/src/engine/execcmd.cpp @@ -0,0 +1,122 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * Copyright 2007 Noel Belcourt. 
+ * + * Utility functions shared between different exec*.c platform specific + * implementation modules. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +#include "jam.h" +#include "execcmd.h" +#include "output.h" + +#include +#include +#include + + +/* Internal interrupt counter. */ +static int intr; + + +/* Constructs a list of command-line elements using the format specified by the + * given shell list. + * + * Given argv array should have at least MAXARGC + 1 elements. + * Slot numbers may be between 0 and 998 (inclusive). + * + * Constructed argv list will be zero terminated. Character arrays referenced by + * the argv structure elements will be either elements from the give shell list, + * internal static buffers or the given command string and should thus not + * considered owned by or released via the argv structure and should be + * considered invalidated by the next argv_from_shell() call. + * + * Shell list elements: + * - Starting with '%' - represent the command string. + * - Starting with '!' - represent the slot number (increased by one). + * - Anything else - used as a literal. + * - If no '%' element is found, the command string is appended as an extra. + */ + +void argv_from_shell( char const * * argv, LIST * shell, char const * command, + int32_t const slot ) +{ + static char jobno[ 12 ]; + + int i; + int gotpercent = 0; + LISTITER iter = list_begin( shell ); + LISTITER end = list_end( shell ); + + assert( 0 <= slot ); + assert( slot < 999 ); + sprintf( jobno, "%d", slot + 1 ); + + for ( i = 0; iter != end && i < MAXARGC; ++i, iter = list_next( iter ) ) + { + switch ( object_str( list_item( iter ) )[ 0 ] ) + { + case '%': argv[ i ] = command; ++gotpercent; break; + case '!': argv[ i ] = jobno; break; + default : argv[ i ] = object_str( list_item( iter ) ); + } + } + + if ( !gotpercent ) + argv[ i++ ] = command; + + argv[ i ] = NULL; +} + + +/* Returns whether the given command string contains lines longer than the given + * maximum. + */ +int check_cmd_for_too_long_lines( char const * command, int32_t max, + int32_t * const error_length, int32_t * const error_max_length ) +{ + while ( *command ) + { + int32_t const l = int32_t(strcspn( command, "\n" )); + if ( l > max ) + { + *error_length = l; + *error_max_length = max; + return EXEC_CHECK_LINE_TOO_LONG; + } + command += l; + if ( *command ) + ++command; + } + return EXEC_CHECK_OK; +} + + +/* Checks whether the given shell list is actually a request to execute raw + * commands without an external shell. + */ +int is_raw_command_request( LIST * shell ) +{ + return !list_empty( shell ) && + !strcmp( object_str( list_front( shell ) ), "%" ) && + list_next( list_begin( shell ) ) == list_end( shell ); +} + + +/* Returns whether an interrupt has been detected so far. */ + +int interrupted( void ) +{ + return intr != 0; +} + + +/* Internal interrupt handler. */ + +void onintr( int disp ) +{ + ++intr; + out_printf( "...interrupted\n" ); +} diff --git a/src/boost/tools/build/src/engine/execcmd.h b/src/boost/tools/build/src/engine/execcmd.h new file mode 100644 index 000000000..293822b30 --- /dev/null +++ b/src/boost/tools/build/src/engine/execcmd.h @@ -0,0 +1,119 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * execcmd.h - execute a shell script. 
+ * + * Defines the interface to be implemented in platform specific implementation + * modules as well as different shared utility functions prepared in the + * execcmd.c module. + */ + +#ifndef EXECCMD_H +#define EXECCMD_H + +#include "config.h" +#include "lists.h" +#include "jam_strings.h" +#include "timestamp.h" + + +typedef struct timing_info +{ + double system; + double user; + timestamp start; + timestamp end; +} timing_info; + +typedef void (* ExecCmdCallback) +( + void * const closure, + int const status, + timing_info const * const, + char const * const cmd_stdout, + char const * const cmd_stderr, + int const cmd_exit_reason +); + +/* Global initialization. Must be called after setting + * globs.jobs. May be called multiple times. */ +void exec_init( void ); +/* Global cleanup */ +void exec_done( void ); + +/* Status codes passed to ExecCmdCallback routines. */ +#define EXEC_CMD_OK 0 +#define EXEC_CMD_FAIL 1 +#define EXEC_CMD_INTR 2 + +int exec_check +( + string const * command, + LIST * * pShell, + int32_t * error_length, + int32_t * error_max_length +); + +/* exec_check() return codes. */ +#define EXEC_CHECK_OK 101 +#define EXEC_CHECK_NOOP 102 +#define EXEC_CHECK_LINE_TOO_LONG 103 +#define EXEC_CHECK_TOO_LONG 104 + +/* Prevents action output from being written + * immediately to stdout/stderr. + */ +#define EXEC_CMD_QUIET 1 + +void exec_cmd +( + string const * command, + int flags, + ExecCmdCallback func, + void * closure, + LIST * shell +); + +void exec_wait(); + + +/****************************************************************************** + * * + * Utility functions defined in the execcmd.c module. * + * * + ******************************************************************************/ + +/* Constructs a list of command-line elements using the format specified by the + * given shell list. + */ +void argv_from_shell( char const * * argv, LIST * shell, char const * command, + int32_t const slot ); + +/* Interrupt routine bumping the internal interrupt counter. Needs to be + * registered by platform specific exec*.c modules. + */ +void onintr( int disp ); + +/* Returns whether an interrupt has been detected so far. */ +int interrupted( void ); + +/* Checks whether the given shell list is actually a request to execute raw + * commands without an external shell. + */ +int is_raw_command_request( LIST * shell ); + +/* Utility worker for exec_check() checking whether all the given command lines + * are under the specified length limit. + */ +int check_cmd_for_too_long_lines( char const * command, int32_t max, + int32_t * const error_length, int32_t * const error_max_length ); + +/* Maximum shell command line length. + */ +int32_t shell_maxline(); + +#endif diff --git a/src/boost/tools/build/src/engine/execnt.cpp b/src/boost/tools/build/src/engine/execnt.cpp new file mode 100644 index 000000000..1cf627f2f --- /dev/null +++ b/src/boost/tools/build/src/engine/execnt.cpp @@ -0,0 +1,1389 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2007 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * execnt.c - execute a shell command on Windows NT + * + * If $(JAMSHELL) is defined, uses that to formulate the actual command. 
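+ * (As a purely illustrative example, a user-supplied value could be the
+ * three separate elements "cmd.exe", "/Q/C" and "%", which the placeholder
+ * rules described below turn into a complete command line.)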
The + * default is: cmd.exe /Q/C + * + * In $(JAMSHELL), % expands to the command string and ! expands to the slot + * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does + * not include a %, it is tacked on as the last argument. + * + * Each $(JAMSHELL) placeholder must be specified as a separate individual + * element in a jam variable value. + * + * Do not just set JAMSHELL to cmd.exe - it will not work! + * + * External routines: + * exec_check() - preprocess and validate the command + * exec_cmd() - launch an async command execution + * exec_wait() - wait for any of the async command processes to terminate + * + * Internal routines: + * filetime_to_seconds() - Windows FILETIME --> number of seconds conversion + */ + +#include "jam.h" + +#ifdef USE_EXECNT + +#include "output.h" +#include "execcmd.h" + +#include "lists.h" +#include "output.h" +#include "pathsys.h" +#include "startup.h" +#include "string.h" + +#include +#include +#include +#include + +#define WIN32_LEAN_AND_MEAN +#include +#include +#include + +#if defined(__GNUC__) || defined(__clang__) +#else +#pragma warning( push ) +#pragma warning(disable: 4800) // 'BOOL' forced to 'true' or 'false' +#endif +#include +#if defined(__GNUC__) || defined(__clang__) +#else +#pragma warning( pop ) +#endif + + +/* get the maximum shell command line length according to the OS */ +static int32_t maxline(); +/* valid raw command string length */ +static int32_t raw_command_length( char const * command ); +/* add two 64-bit unsigned numbers, h1l1 and h2l2 */ +static FILETIME add_64( + unsigned long h1, unsigned long l1, + unsigned long h2, unsigned long l2 ); +/* */ +static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 ); +/* */ +static FILETIME negate_FILETIME( FILETIME t ); +/* record the timing info for the process */ +static void record_times( HANDLE const, timing_info * const ); +/* calc the current running time of an *active* process */ +static double running_time( HANDLE const ); +/* terminate the given process, after terminating all its children first */ +static void kill_process_tree( DWORD const procesdId, HANDLE const ); +/* waits for a command to complete or time out */ +static int32_t try_wait( int32_t const timeoutMillis ); +/* reads any pending output for running commands */ +static void read_output(); +/* checks if a command ran out of time, and kills it */ +static int32_t try_kill_one(); +/* is the first process a parent (direct or indirect) to the second one */ +static int32_t is_parent_child( DWORD const parent, DWORD const child ); +/* */ +static void close_alert( PROCESS_INFORMATION const * const ); +/* close any alerts hanging around */ +static void close_alerts(); +/* prepare a command file to be executed using an external shell */ +static char const * prepare_command_file( string const * command, int32_t slot ); +/* invoke the actual external process using the given command line */ +static void invoke_cmd( char const * const command, int32_t const slot ); +/* find a free slot in the running commands table */ +static int32_t get_free_cmdtab_slot(); +/* put together the final command string we are to run */ +static void string_new_from_argv( string * result, char const * const * argv ); +/* frees and renews the given string */ +static void string_renew( string * const ); +/* reports the last failed Windows API related error message */ +static void reportWindowsError( char const * const apiName, int32_t slot ); +/* closes a Windows HANDLE and resets its variable to 0. 
*/ +static void closeWinHandle( HANDLE * const handle ); +/* Adds the job index to the list of currently active jobs. */ +static void register_wait( int32_t job_id ); + +/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ + +/* CreateProcessA() Windows API places a limit of 32768 characters (bytes) on + * the allowed command-line length, including a trailing Unicode (2-byte) + * nul-terminator character. + */ +#define MAX_RAW_COMMAND_LENGTH 32766 + + /* Communication buffers size */ +#define IO_BUFFER_SIZE ( 64 * 1024 ) + +/* We hold handles for pipes used to communicate with child processes in two + * element arrays indexed as follows. + */ +#define EXECCMD_PIPE_READ 0 +#define EXECCMD_PIPE_WRITE 1 + +static int32_t intr_installed; + + +/* The list of commands we run. */ +static struct _cmdtab_t +{ + /* Temporary command file used to execute the action when needed. */ + string command_file[ 1 ]; + + /* Pipes for communicating with the child process. Parent reads from (0), + * child writes to (1). + */ + HANDLE pipe_out[ 2 ]; + HANDLE pipe_err[ 2 ]; + + string buffer_out[ 1 ]; /* buffer to hold stdout, if any */ + string buffer_err[ 1 ]; /* buffer to hold stderr, if any */ + + PROCESS_INFORMATION pi; /* running process information */ + + HANDLE wait_handle; + + int32_t flags; + + /* Function called when the command completes. */ + ExecCmdCallback func; + + /* Opaque data passed back to the 'func' callback. */ + void * closure; +} * cmdtab = NULL; +static int32_t cmdtab_size = 0; + +/* A thread-safe single element queue. Used by the worker threads + * to signal the main thread that a process is completed. + */ +struct +{ + int32_t job_index; + HANDLE read_okay; + HANDLE write_okay; +} process_queue; + +/* + * Execution unit tests. + */ + +void execnt_unit_test() +{ +#if !defined( NDEBUG ) + /* vc6 preprocessor is broken, so assert with these strings gets confused. + * Use a table instead. 
+ */ + { + typedef struct test { const char * command; int32_t result; } test; + test tests[] = { + { "", 0 }, + { " ", 0 }, + { "x", 1 }, + { "\nx", 1 }, + { "x\n", 1 }, + { "\nx\n", 1 }, + { "\nx \n", 2 }, + { "\nx \n ", 2 }, + { " \n\t\t\v\r\r\n \t x \v \t\t\r\n\n\n \n\n\v\t", 8 }, + { "x\ny", -1 }, + { "x\n\n y", -1 }, + { "echo x > foo.bar", -1 }, + { "echo x < foo.bar", -1 }, + { "echo x | foo.bar", -1 }, + { "echo x \">\" foo.bar", 18 }, + { "echo x '<' foo.bar", 18 }, + { "echo x \"|\" foo.bar", 18 }, + { "echo x \\\">\\\" foo.bar", -1 }, + { "echo x \\\"<\\\" foo.bar", -1 }, + { "echo x \\\"|\\\" foo.bar", -1 }, + { "\"echo x > foo.bar\"", 18 }, + { "echo x \"'\"<' foo.bar", -1 }, + { "echo x \\\\\"<\\\\\" foo.bar", 22 }, + { "echo x \\x\\\"<\\\\\" foo.bar", -1 }, + { 0 } }; + test const * t; + for ( t = tests; t->command; ++t ) + assert( raw_command_length( t->command ) == t->result ); + } + + { + int32_t const length = maxline() + 9; + char * const cmd = (char *)BJAM_MALLOC_ATOMIC( size_t(length) + 1 ); + memset( cmd, 'x', size_t(length) ); + cmd[ length ] = 0; + assert( raw_command_length( cmd ) == length ); + BJAM_FREE( cmd ); + } +#endif +} + +/* + * exec_init() - global initialization + */ +void exec_init( void ) +{ + if ( globs.jobs > cmdtab_size ) + { + cmdtab = (_cmdtab_t*)BJAM_REALLOC( cmdtab, globs.jobs * sizeof( *cmdtab ) ); + memset( cmdtab + cmdtab_size, 0, ( globs.jobs - cmdtab_size ) * sizeof( *cmdtab ) ); + cmdtab_size = globs.jobs; + } + if ( globs.jobs > MAXIMUM_WAIT_OBJECTS && !process_queue.read_okay ) + { + process_queue.read_okay = CreateEvent( NULL, FALSE, FALSE, NULL ); + process_queue.write_okay = CreateEvent( NULL, FALSE, TRUE, NULL ); + } +} + +/* + * exec_done - free resources. + */ +void exec_done( void ) +{ + if ( process_queue.read_okay ) + { + CloseHandle( process_queue.read_okay ); + } + if ( process_queue.write_okay ) + { + CloseHandle( process_queue.write_okay ); + } + BJAM_FREE( cmdtab ); +} + +/* + * exec_check() - preprocess and validate the command + */ + +int32_t exec_check +( + string const * command, + LIST * * pShell, + int32_t * error_length, + int32_t * error_max_length +) +{ + /* Default shell does nothing when triggered with an empty or a + * whitespace-only command so we simply skip running it in that case. We + * still pass them on to non-default shells as we do not really know what + * they are going to do with such commands. + */ + if ( list_empty( *pShell ) ) + { + char const * s = command->value; + while ( isspace( *s ) ) ++s; + if ( !*s ) + return EXEC_CHECK_NOOP; + } + + /* Check prerequisites for executing raw commands. */ + if ( is_raw_command_request( *pShell ) ) + { + int32_t const raw_cmd_length = raw_command_length( command->value ); + if ( raw_cmd_length < 0 ) + { + /* Invalid characters detected - fallback to default shell. */ + list_free( *pShell ); + *pShell = L0; + } + else if ( raw_cmd_length > MAX_RAW_COMMAND_LENGTH ) + { + *error_length = raw_cmd_length; + *error_max_length = MAX_RAW_COMMAND_LENGTH; + return EXEC_CHECK_TOO_LONG; + } + else + return raw_cmd_length ? EXEC_CHECK_OK : EXEC_CHECK_NOOP; + } + + /* Now we know we are using an external shell. Note that there is no need to + * check for too long command strings when using an external shell since we + * use a command file and assume no one is going to set up a JAMSHELL format + * string longer than a few hundred bytes at most which should be well under + * the total command string limit. 
Should someone actually construct such a + * JAMSHELL value it will get reported as an 'invalid parameter' + * CreateProcessA() Windows API failure which seems like a good enough + * result for such intentional mischief. + */ + + /* Check for too long command lines. */ + return check_cmd_for_too_long_lines( command->value, shell_maxline(), + error_length, error_max_length ); +} + + +/* + * exec_cmd() - launch an async command execution + * + * We assume exec_check() already verified that the given command can have its + * command string constructed as requested. + */ + +void exec_cmd +( + string const * cmd_orig, + int32_t flags, + ExecCmdCallback func, + void * closure, + LIST * shell +) +{ + int32_t const slot = get_free_cmdtab_slot(); + int32_t const is_raw_cmd = is_raw_command_request( shell ); + string cmd_local[ 1 ]; + + /* Initialize default shell - anything more than /Q/C is non-portable. */ + static LIST * default_shell; + if ( !default_shell ) + default_shell = list_new( object_new( "cmd.exe /Q/C" ) ); + + /* Specifying no shell means requesting the default shell. */ + if ( list_empty( shell ) ) + shell = default_shell; + + if ( DEBUG_EXECCMD ) + { + if ( is_raw_cmd ) + out_printf( "Executing raw command directly\n" ); + else + { + out_printf( "Executing using a command file and the shell: " ); + list_print( shell ); + out_printf( "\n" ); + } + } + + /* If we are running a raw command directly - trim its leading whitespaces + * as well as any trailing all-whitespace lines but keep any trailing + * whitespace in the final/only line containing something other than + * whitespace). + */ + if ( is_raw_cmd ) + { + char const * start = cmd_orig->value; + char const * p = cmd_orig->value + cmd_orig->size; + char const * end = p; + while ( isspace( *start ) ) ++start; + while ( p > start && isspace( p[ -1 ] ) ) + if ( *--p == '\n' ) + end = p; + string_new( cmd_local ); + string_append_range( cmd_local, start, end ); + assert( int32_t(cmd_local->size) == raw_command_length( cmd_orig->value ) ); + } + /* If we are not running a raw command directly, prepare a command file to + * be executed using an external shell and the actual command string using + * that command file. + */ + else + { + char const * const cmd_file = prepare_command_file( cmd_orig, slot ); + char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */ + argv_from_shell( argv, shell, cmd_file, slot ); + string_new_from_argv( cmd_local, argv ); + } + + /* Catch interrupts whenever commands are running. */ + if ( !intr_installed ) + { + intr_installed = 1; + signal( SIGINT, onintr ); + } + + cmdtab[ slot ].flags = flags; + + /* Save input data into the selected running commands table slot. */ + cmdtab[ slot ].func = func; + cmdtab[ slot ].closure = closure; + + /* Invoke the actual external process using the constructed command line. */ + invoke_cmd( cmd_local->value, slot ); + + /* Free our local command string copy. */ + string_free( cmd_local ); +} + + +/* + * exec_wait() - wait for any of the async command processes to terminate + * + * Wait and drive at most one execution completion, while processing the I/O for + * all ongoing commands. + */ + +void exec_wait() +{ + int32_t i = -1; + int32_t exit_reason; /* reason why a command completed */ + + /* Wait for a command to complete, while snarfing up any output. */ + while ( 1 ) + { + /* Check for a complete command, briefly. */ + i = try_wait( 500 ); + /* Read in the output of all running commands. */ + read_output(); + /* Close out pending debug style dialogs. 
*/ + close_alerts(); + /* Process the completed command we found. */ + if ( i >= 0 ) { exit_reason = EXIT_OK; break; } + /* Check if a command ran out of time. */ + i = try_kill_one(); + if ( i >= 0 ) { exit_reason = EXIT_TIMEOUT; break; } + } + + /* We have a command... process it. */ + { + DWORD exit_code; + timing_info time; + int32_t rstat; + + /* The time data for the command. */ + record_times( cmdtab[ i ].pi.hProcess, &time ); + + /* Removed the used temporary command file. */ + if ( cmdtab[ i ].command_file->size ) + unlink( cmdtab[ i ].command_file->value ); + + /* Find out the process exit code. */ + GetExitCodeProcess( cmdtab[ i ].pi.hProcess, &exit_code ); + + /* The dispossition of the command. */ + if ( interrupted() ) + rstat = EXEC_CMD_INTR; + else if ( exit_code ) + rstat = EXEC_CMD_FAIL; + else + rstat = EXEC_CMD_OK; + + /* Call the callback, may call back to jam rule land. */ + (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time, + cmdtab[ i ].buffer_out->value, cmdtab[ i ].buffer_err->value, + exit_reason ); + + /* Clean up our child process tracking data. No need to clear the + * temporary command file name as it gets reused. + */ + closeWinHandle( &cmdtab[ i ].pi.hProcess ); + closeWinHandle( &cmdtab[ i ].pi.hThread ); + closeWinHandle( &cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ] ); + closeWinHandle( &cmdtab[ i ].pipe_out[ EXECCMD_PIPE_WRITE ] ); + closeWinHandle( &cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ] ); + closeWinHandle( &cmdtab[ i ].pipe_err[ EXECCMD_PIPE_WRITE ] ); + string_renew( cmdtab[ i ].buffer_out ); + string_renew( cmdtab[ i ].buffer_err ); + } +} + + +/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ + +/* + * Invoke the actual external process using the given command line. Track the + * process in our running commands table. + */ + +static void invoke_cmd( char const * const command, int32_t const slot ) +{ + SECURITY_ATTRIBUTES sa = { sizeof( SECURITY_ATTRIBUTES ), 0, 0 }; + SECURITY_DESCRIPTOR sd; + STARTUPINFOA si = { sizeof( STARTUPINFOA ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 }; + + /* Init the security data. */ + InitializeSecurityDescriptor( &sd, SECURITY_DESCRIPTOR_REVISION ); + SetSecurityDescriptorDacl( &sd, TRUE, NULL, FALSE ); + sa.lpSecurityDescriptor = &sd; + sa.bInheritHandle = TRUE; + + /* Create output buffers. */ + string_new( cmdtab[ slot ].buffer_out ); + string_new( cmdtab[ slot ].buffer_err ); + + /* Create pipes for communicating with the child process. */ + if ( !CreatePipe( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ], + &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ], &sa, IO_BUFFER_SIZE ) ) + { + reportWindowsError( "CreatePipe", slot ); + return; + } + if ( globs.pipe_action && !CreatePipe( &cmdtab[ slot ].pipe_err[ + EXECCMD_PIPE_READ ], &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ], + &sa, IO_BUFFER_SIZE ) ) + { + reportWindowsError( "CreatePipe", slot ); + return; + } + + /* Set handle inheritance off for the pipe ends the parent reads from. */ + SetHandleInformation( cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ], + HANDLE_FLAG_INHERIT, 0 ); + if ( globs.pipe_action ) + SetHandleInformation( cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_READ ], + HANDLE_FLAG_INHERIT, 0 ); + + /* Hide the child window, if any. */ + si.dwFlags |= STARTF_USESHOWWINDOW; + si.wShowWindow = SW_HIDE; + + /* Redirect the child's output streams to our pipes. */ + si.dwFlags |= STARTF_USESTDHANDLES; + si.hStdOutput = cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ]; + si.hStdError = globs.pipe_action + ? 
cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ] + : cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ]; + + /* Let the child inherit stdin, as some commands assume it is available. */ + si.hStdInput = GetStdHandle( STD_INPUT_HANDLE ); + + if ( DEBUG_EXECCMD ) + out_printf( "Command string for CreateProcessA(): '%s'\n", command ); + + /* Run the command by creating a sub-process for it. */ + if ( !CreateProcessA( + NULL , /* application name */ + (char *)command , /* command line */ + NULL , /* process attributes */ + NULL , /* thread attributes */ + TRUE , /* inherit handles */ + CREATE_NEW_PROCESS_GROUP, /* create flags */ + NULL , /* env vars, null inherits env */ + NULL , /* current dir, null is our current dir */ + &si , /* startup info */ + &cmdtab[ slot ].pi ) ) /* child process info, if created */ + { + reportWindowsError( "CreateProcessA", slot ); + return; + } + + register_wait( slot ); +} + + +/* + * For more details on Windows cmd.exe shell command-line length limitations see + * the following MSDN article: + * http://support.microsoft.com/default.aspx?scid=kb;en-us;830473 + */ + +static int32_t raw_maxline() +{ + if ( IsWindowsVersionOrGreater(5,0,0) == TRUE ) return 8191; /* XP */ + if ( IsWindowsVersionOrGreater(4,0,0) == TRUE ) return 2047; /* NT 4.x */ + return 996; /* NT 3.5.1 */ +} + +static int32_t maxline() +{ + static int32_t result; + if ( !result ) result = raw_maxline(); + return result; +} + + +/* + * Closes a Windows HANDLE and resets its variable to 0. + */ + +static void closeWinHandle( HANDLE * const handle ) +{ + if ( *handle ) + { + CloseHandle( *handle ); + *handle = 0; + } +} + + +/* + * Frees and renews the given string. + */ + +static void string_renew( string * const s ) +{ + string_free( s ); + string_new( s ); +} + + +/* + * raw_command_length() - valid raw command string length + * + * Checks whether the given command may be executed as a raw command. If yes, + * returns the corresponding command string length. If not, returns -1. + * + * Rules for constructing raw command strings: + * - Command may not contain unquoted shell I/O redirection characters. + * - May have at most one command line with non-whitespace content. + * - Leading whitespace trimmed. + * - Trailing all-whitespace lines trimmed. + * - Trailing whitespace on the sole command line kept (may theoretically + * affect the executed command). + */ + +static int32_t raw_command_length( char const * command ) +{ + char const * p; + char const * escape = 0; + char inquote = 0; + char const * newline = 0; + + /* Skip leading whitespace. */ + while ( isspace( *command ) ) + ++command; + + p = command; + + /* Look for newlines and unquoted I/O redirection. */ + do + { + p += strcspn( p, "\n\"'<>|\\" ); + switch ( *p ) + { + case '\n': + /* If our command contains non-whitespace content split over + * multiple lines we can not execute it directly. + */ + newline = p; + while ( isspace( *++p ) ); + if ( *p ) return -1; + break; + + case '\\': + escape = escape && escape == p - 1 ? 0 : p; + ++p; + break; + + case '"': + case '\'': + if ( escape && escape == p - 1 ) + escape = 0; + else if ( inquote == *p ) + inquote = 0; + else if ( !inquote ) + inquote = *p; + ++p; + break; + + case '<': + case '>': + case '|': + if ( !inquote ) + return -1; + ++p; + break; + } + } + while ( *p ); + + /* Return the number of characters the command will occupy. */ + return int32_t(( newline ? newline : p ) - command); +} + + +/* 64-bit arithmetic helpers. 
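+ * A FILETIME holds a 64-bit value split across two 32-bit fields (dwLowDateTime / dwHighDateTime), so additions have to propagate the carry from the low half into the high half by hand.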
*/ + +/* Compute the carry bit from the addition of two 32-bit unsigned numbers. */ +#define add_carry_bit( a, b ) ((((a) | (b)) >> 31) & (~((a) + (b)) >> 31) & 0x1) + +/* Compute the high 32 bits of the addition of two 64-bit unsigned numbers, h1l1 + * and h2l2. + */ +#define add_64_hi( h1, l1, h2, l2 ) ((h1) + (h2) + add_carry_bit(l1, l2)) + + +/* + * Add two 64-bit unsigned numbers, h1l1 and h2l2. + */ + +static FILETIME add_64 +( + unsigned long h1, unsigned long l1, + unsigned long h2, unsigned long l2 +) +{ + FILETIME result; + result.dwLowDateTime = l1 + l2; + result.dwHighDateTime = add_64_hi( h1, l1, h2, l2 ); + return result; +} + + +static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 ) +{ + return add_64( t1.dwHighDateTime, t1.dwLowDateTime, t2.dwHighDateTime, + t2.dwLowDateTime ); +} + + +static FILETIME negate_FILETIME( FILETIME t ) +{ + /* 2s complement negation */ + return add_64( ~t.dwHighDateTime, ~t.dwLowDateTime, 0, 1 ); +} + + +/* + * filetime_to_seconds() - Windows FILETIME --> number of seconds conversion + */ + +static double filetime_to_seconds( FILETIME const ft ) +{ + return ft.dwHighDateTime * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) + + ft.dwLowDateTime * 1.0e-7; +} + + +static void record_times( HANDLE const process, timing_info * const time ) +{ + FILETIME creation; + FILETIME exit; + FILETIME kernel; + FILETIME user; + if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) ) + { + time->system = filetime_to_seconds( kernel ); + time->user = filetime_to_seconds( user ); + timestamp_from_filetime( &time->start, &creation ); + timestamp_from_filetime( &time->end, &exit ); + } +} + + +static char ioBuffer[ IO_BUFFER_SIZE + 1 ]; + +#define FORWARD_PIPE_NONE 0 +#define FORWARD_PIPE_STDOUT 1 +#define FORWARD_PIPE_STDERR 2 + +static void read_pipe +( + HANDLE in, /* the pipe to read from */ + string * out, + int32_t forwarding_mode +) +{ + DWORD bytesInBuffer = 0; + DWORD bytesAvailable = 0; + DWORD i; + + for (;;) + { + /* check if we have any data to read */ + if ( !PeekNamedPipe( in, NULL, IO_BUFFER_SIZE, NULL, + &bytesAvailable, NULL ) || bytesAvailable == 0 ) + return; + + /* we only read in the available bytes, to avoid blocking */ + if ( !ReadFile( in, ioBuffer, bytesAvailable <= IO_BUFFER_SIZE ? + bytesAvailable : IO_BUFFER_SIZE, &bytesInBuffer, NULL ) || bytesInBuffer == 0 ) + return; + + /* Clean up some illegal chars. */ + for ( i = 0; i < bytesInBuffer; ++i ) + { + if ( ( (unsigned char)ioBuffer[ i ] < 1 ) ) + ioBuffer[ i ] = '?'; + } + /* Null, terminate. */ + ioBuffer[ bytesInBuffer ] = '\0'; + /* Append to the output. */ + string_append( out, ioBuffer ); + /* Copy it to our output if appropriate */ + if ( forwarding_mode == FORWARD_PIPE_STDOUT ) + out_data( ioBuffer ); + else if ( forwarding_mode == FORWARD_PIPE_STDERR ) + err_data( ioBuffer ); + } +} + +#define EARLY_OUTPUT( cmd ) \ + ( ! ( cmd.flags & EXEC_CMD_QUIET ) ) + +#define FORWARD_STDOUT( c ) \ + ( ( EARLY_OUTPUT( c ) && ( globs.pipe_action != 2 ) ) ? \ + FORWARD_PIPE_STDOUT : FORWARD_PIPE_NONE ) +#define FORWARD_STDERR( c ) \ + ( ( EARLY_OUTPUT( c ) && ( globs.pipe_action & 2 ) ) ? \ + FORWARD_PIPE_STDERR : FORWARD_PIPE_NONE ) + +static void read_output() +{ + int32_t i; + for ( i = 0; i < globs.jobs; ++i ) + if ( cmdtab[ i ].pi.hProcess ) + { + /* Read stdout data. */ + if ( cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ] ) + read_pipe( cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ], + cmdtab[ i ].buffer_out, FORWARD_STDOUT( cmdtab[ i ] ) ); + /* Read stderr data. 
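+             * (The stderr pipe only exists when globs.pipe_action asked for a separate stderr stream; otherwise the child's stderr was redirected into the stdout pipe.)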
*/ + if ( cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ] ) + read_pipe( cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ], + cmdtab[ i ].buffer_err, FORWARD_STDERR( cmdtab[ i ] ) ); + } +} + +static void CALLBACK try_wait_callback( void * data, BOOLEAN is_timeout ) +{ + struct _cmdtab_t * slot = ( struct _cmdtab_t * )data; + WaitForSingleObject( process_queue.write_okay, INFINITE ); + process_queue.job_index = int32_t(slot - cmdtab); + assert( !is_timeout ); + SetEvent( process_queue.read_okay ); + /* Okay. Non-blocking. */ + UnregisterWait( slot->wait_handle ); +} + +static int32_t try_wait_impl( DWORD timeout ) +{ + int32_t job_index; + int32_t res = WaitForSingleObject( process_queue.read_okay, timeout ); + if ( res != WAIT_OBJECT_0 ) + return -1; + job_index = process_queue.job_index; + SetEvent( process_queue.write_okay ); + return job_index; +} + +static void register_wait( int32_t job_id ) +{ + if ( globs.jobs > MAXIMUM_WAIT_OBJECTS ) + { + RegisterWaitForSingleObject( &cmdtab[ job_id ].wait_handle, + cmdtab[ job_id ].pi.hProcess, + &try_wait_callback, &cmdtab[ job_id ], INFINITE, + WT_EXECUTEDEFAULT | WT_EXECUTEONLYONCE ); + } +} + +/* + * Waits for a single child process command to complete, or the timeout, + * whichever comes first. Returns the index of the completed command in the + * cmdtab array, or -1. + */ + +static int32_t try_wait( int32_t const timeoutMillis ) +{ + if ( globs.jobs <= MAXIMUM_WAIT_OBJECTS ) + { + int32_t i; + HANDLE active_handles[ MAXIMUM_WAIT_OBJECTS ]; + int32_t job_ids[ MAXIMUM_WAIT_OBJECTS ]; + DWORD num_handles = 0; + DWORD wait_api_result; + for ( i = 0; i < globs.jobs; ++i ) + { + if( cmdtab[ i ].pi.hProcess ) + { + job_ids[ num_handles ] = i; + active_handles[ num_handles ] = cmdtab[ i ].pi.hProcess; + ++num_handles; + } + } + wait_api_result = WaitForMultipleObjects( num_handles, active_handles, FALSE, timeoutMillis ); + if ( WAIT_OBJECT_0 <= wait_api_result && wait_api_result < WAIT_OBJECT_0 + globs.jobs ) + { + return job_ids[ wait_api_result - WAIT_OBJECT_0 ]; + } + else + { + return -1; + } + } + else + { + return try_wait_impl( timeoutMillis ); + } + +} + + +static int32_t try_kill_one() +{ + /* Only need to check if a timeout was specified with the -l option. */ + if ( globs.timeout > 0 ) + { + int32_t i; + for ( i = 0; i < globs.jobs; ++i ) + if ( cmdtab[ i ].pi.hProcess ) + { + double const t = running_time( cmdtab[ i ].pi.hProcess ); + if ( t > (double)globs.timeout ) + { + /* The job may have left an alert dialog around, try and get + * rid of it before killing the job itself. + */ + close_alert( &cmdtab[ i ].pi ); + /* We have a "runaway" job, kill it. */ + kill_process_tree( cmdtab[ i ].pi.dwProcessId, + cmdtab[ i ].pi.hProcess ); + /* And return its running commands table slot. */ + return i; + } + } + } + return -1; +} + + +static void close_alerts() +{ + /* We only attempt this every 5 seconds or so, because it is not a cheap + * operation, and we will catch the alerts eventually. This check uses + * floats as some compilers define CLOCKS_PER_SEC as a float or double. + */ + if ( ( (float)clock() / (float)( CLOCKS_PER_SEC * 5 ) ) < ( 1.0 / 5.0 ) ) + { + int32_t i; + for ( i = 0; i < globs.jobs; ++i ) + if ( cmdtab[ i ].pi.hProcess ) + close_alert( &cmdtab[ i ].pi ); + } +} + + +/* + * Calc the current running time of an *active* process. 
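+ * The result is wall-clock time elapsed since the process was created, i.e. the current system time minus the process creation time, in seconds.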
+ */ + +static double running_time( HANDLE const process ) +{ + FILETIME creation; + FILETIME exit; + FILETIME kernel; + FILETIME user; + if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) ) + { + /* Compute the elapsed time. */ + FILETIME current; + GetSystemTimeAsFileTime( ¤t ); + return filetime_to_seconds( add_FILETIME( current, + negate_FILETIME( creation ) ) ); + } + return 0.0; +} + + +/* + * Not really optimal, or efficient, but it is easier this way, and it is not + * like we are going to be killing thousands, or even tens of processes. + */ + +static void kill_process_tree( DWORD const pid, HANDLE const process ) +{ + HANDLE const process_snapshot_h = CreateToolhelp32Snapshot( + TH32CS_SNAPPROCESS, 0 ); + if ( INVALID_HANDLE_VALUE != process_snapshot_h ) + { + BOOL ok = TRUE; + PROCESSENTRY32 pinfo; + pinfo.dwSize = sizeof( PROCESSENTRY32 ); + for ( + ok = Process32First( process_snapshot_h, &pinfo ); + ok == TRUE; + ok = Process32Next( process_snapshot_h, &pinfo ) ) + { + if ( pinfo.th32ParentProcessID == pid ) + { + /* Found a child, recurse to kill it and anything else below it. + */ + HANDLE const ph = OpenProcess( PROCESS_ALL_ACCESS, FALSE, + pinfo.th32ProcessID ); + if ( ph ) + { + kill_process_tree( pinfo.th32ProcessID, ph ); + CloseHandle( ph ); + } + } + } + CloseHandle( process_snapshot_h ); + } + /* Now that the children are all dead, kill the root. */ + TerminateProcess( process, -2 ); +} + + +static double creation_time( HANDLE const process ) +{ + FILETIME creation; + FILETIME exit; + FILETIME kernel; + FILETIME user; + return GetProcessTimes( process, &creation, &exit, &kernel, &user ) + ? filetime_to_seconds( creation ) + : 0.0; +} + + +/* + * Recursive check if first process is parent (directly or indirectly) of the + * second one. Both processes are passed as process ids, not handles. Special + * return value 2 means that the second process is smss.exe and its parent + * process is System (first argument is ignored). + */ + +static int32_t is_parent_child( DWORD const parent, DWORD const child ) +{ + HANDLE process_snapshot_h = INVALID_HANDLE_VALUE; + + if ( !child ) + return 0; + if ( parent == child ) + return 1; + + process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 ); + if ( INVALID_HANDLE_VALUE != process_snapshot_h ) + { + BOOL ok = TRUE; + PROCESSENTRY32 pinfo; + pinfo.dwSize = sizeof( PROCESSENTRY32 ); + for ( + ok = Process32First( process_snapshot_h, &pinfo ); + ok == TRUE; + ok = Process32Next( process_snapshot_h, &pinfo ) ) + { + if ( pinfo.th32ProcessID == child ) + { + /* Unfortunately, process ids are not really unique. There might + * be spurious "parent and child" relationship match between two + * non-related processes if real parent process of a given + * process has exited (while child process kept running as an + * "orphan") and the process id of such parent process has been + * reused by internals of the operating system when creating + * another process. + * + * Thus an additional check is needed - process creation time. + * This check may fail (i.e. return 0) for system processes due + * to insufficient privileges, and that is OK. + */ + double tchild = 0.0; + double tparent = 0.0; + HANDLE const hchild = OpenProcess( PROCESS_QUERY_INFORMATION, + FALSE, pinfo.th32ProcessID ); + CloseHandle( process_snapshot_h ); + + /* csrss.exe may display message box like following: + * xyz.exe - Unable To Locate Component + * This application has failed to start because + * boost_foo-bar.dll was not found. 
Re-installing the + * application may fix the problem + * This actually happens when starting a test process that + * depends on a dynamic library which failed to build. We want + * to automatically close these message boxes even though + * csrss.exe is not our child process. We may depend on the fact + * that (in all current versions of Windows) csrss.exe is a + * direct child of the smss.exe process, which in turn is a + * direct child of the System process, which always has process + * id == 4. This check must be performed before comparing + * process creation times. + */ + +#ifdef UNICODE // no PROCESSENTRY32A + if ( !wcsicmp( pinfo.szExeFile, L"csrss.exe" ) && +#else + if ( !stricmp( pinfo.szExeFile, "csrss.exe" ) && +#endif + is_parent_child( parent, pinfo.th32ParentProcessID ) == 2 ) + return 1; + +#ifdef UNICODE // no PROCESSENTRY32A + if ( !wcsicmp( pinfo.szExeFile, L"smss.exe" ) && +#else + if ( !stricmp( pinfo.szExeFile, "smss.exe" ) && +#endif + ( pinfo.th32ParentProcessID == 4 ) ) + return 2; + + if ( hchild ) + { + HANDLE hparent = OpenProcess( PROCESS_QUERY_INFORMATION, + FALSE, pinfo.th32ParentProcessID ); + if ( hparent ) + { + tchild = creation_time( hchild ); + tparent = creation_time( hparent ); + CloseHandle( hparent ); + } + CloseHandle( hchild ); + } + + /* Return 0 if one of the following is true: + * 1. we failed to read process creation time + * 2. child was created before alleged parent + */ + if ( ( tchild == 0.0 ) || ( tparent == 0.0 ) || + ( tchild < tparent ) ) + return 0; + + return is_parent_child( parent, pinfo.th32ParentProcessID ) & 1; + } + } + + CloseHandle( process_snapshot_h ); + } + + return 0; +} + + +/* + * Called by the OS for each topmost window. + */ + +BOOL CALLBACK close_alert_window_enum( HWND hwnd, LPARAM lParam ) +{ + char buf[ 7 ] = { 0 }; + PROCESS_INFORMATION const * const pi = (PROCESS_INFORMATION *)lParam; + DWORD pid; + DWORD tid; + + /* We want to find and close any window that: + * 1. is visible and + * 2. is a dialog and + * 3. is displayed by any of our child processes + */ + if ( + /* We assume hidden windows do not require user interaction. */ + !IsWindowVisible( hwnd ) + /* Failed to read class name; presume it is not a dialog. */ + || !GetClassNameA( hwnd, buf, sizeof( buf ) ) + /* All Windows system dialogs use the same Window class name. */ + || strcmp( buf, "#32770" ) ) + return TRUE; + + /* GetWindowThreadProcessId() returns 0 on error, otherwise thread id of + * the window's message pump thread. + */ + tid = GetWindowThreadProcessId( hwnd, &pid ); + if ( !tid || !is_parent_child( pi->dwProcessId, pid ) ) + return TRUE; + + /* Ask real nice. */ + PostMessageA( hwnd, WM_CLOSE, 0, 0 ); + + /* Wait and see if it worked. If not, insist. */ + if ( WaitForSingleObject( pi->hProcess, 200 ) == WAIT_TIMEOUT ) + { + PostThreadMessageA( tid, WM_QUIT, 0, 0 ); + WaitForSingleObject( pi->hProcess, 300 ); + } + + /* Done, we do not want to check any other windows now. */ + return FALSE; +} + + +static void close_alert( PROCESS_INFORMATION const * const pi ) +{ + EnumWindows( &close_alert_window_enum, (LPARAM)pi ); +} + + +/* + * Open a command file to store the command into for executing using an external + * shell. Returns a pointer to a FILE open for writing or 0 in case such a file + * could not be opened. The file name used is stored back in the corresponding + * running commands table slot. 
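+ * The generated name uses the "jam%lu-%02d-##.bat" pattern (process id, slot number) inside the temp directory; the '##' placeholder gets rewritten below when retrying with alternative file names.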
+ * + * Expects the running commands table slot's command_file attribute to contain + * either a zeroed out string object or one prepared previously by this same + * function. + */ + +static FILE * open_command_file( int32_t const slot ) +{ + string * const command_file = cmdtab[ slot ].command_file; + + /* If the temporary command file name has not already been prepared for this + * slot number, prepare a new one containing a '##' place holder that will + * be changed later and needs to be located at a fixed distance from the + * end. + */ + if ( !command_file->value ) + { + DWORD const procID = GetCurrentProcessId(); + string const * const tmpdir = path_tmpdir(); + string_new( command_file ); + string_reserve( command_file, tmpdir->size + 64 ); + command_file->size = sprintf( command_file->value, + "%s\\jam%lu-%02d-##.bat", tmpdir->value, procID, slot ); + } + + /* For some reason opening a command file can fail intermittently. But doing + * some retries works. Most likely this is due to a previously existing file + * of the same name that happens to still be opened by an active virus + * scanner. Originally pointed out and fixed by Bronek Kozicki. + * + * We first try to open several differently named files to avoid having to + * wait idly if not absolutely necessary. Our temporary command file names + * contain a fixed position place holder we use for generating different + * file names. + */ + { + char * const index1 = command_file->value + command_file->size - 6; + char * const index2 = index1 + 1; + int32_t waits_remaining; + assert( command_file->value < index1 ); + assert( index2 + 1 < command_file->value + command_file->size ); + assert( index2[ 1 ] == '.' ); + for ( waits_remaining = 3; ; --waits_remaining ) + { + int32_t index; + for ( index = 0; index != 20; ++index ) + { + FILE * f; + *index1 = '0' + index / 10; + *index2 = '0' + index % 10; + f = fopen( command_file->value, "w" ); + if ( f ) return f; + } + if ( !waits_remaining ) break; + Sleep( 250 ); + } + } + + return 0; +} + + +/* + * Prepare a command file to be executed using an external shell. + */ + +static char const * prepare_command_file( string const * command, int32_t slot ) +{ + FILE * const f = open_command_file( slot ); + if ( !f ) + { + err_printf( "failed to write command file!\n" ); + b2::clean_exit( EXITBAD ); + } + fputs( command->value, f ); + fclose( f ); + return cmdtab[ slot ].command_file->value; +} + + +/* + * Find a free slot in the running commands table. + */ + +static int32_t get_free_cmdtab_slot() +{ + int32_t slot; + for ( slot = 0; slot < globs.jobs; ++slot ) + if ( !cmdtab[ slot ].pi.hProcess ) + return slot; + err_printf( "no slots for child!\n" ); + b2::clean_exit( EXITBAD ); + return -1; +} + + +/* + * Put together the final command string we are to run. + */ + +static void string_new_from_argv( string * result, char const * const * argv ) +{ + assert( argv ); + assert( argv[ 0 ] ); + string_copy( result, *(argv++) ); + while ( *argv ) + { + string_push_back( result, ' ' ); + string_push_back( result, '"' ); + string_append( result, *(argv++) ); + string_push_back( result, '"' ); + } +} + + +/* + * Reports the last failed Windows API related error message. 
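+ * The formatted message is appended to the command's captured output (the stderr buffer when pipe_action is set, the stdout buffer otherwise) and the command is then completed through its callback with EXEC_CMD_FAIL.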
+ */ + +static void reportWindowsError( char const * const apiName, int32_t slot ) +{ + char * errorMessage; + char buf[24]; + string * err_buf; + timing_info time; + DWORD const errorCode = GetLastError(); + DWORD apiResult = FormatMessageA( + FORMAT_MESSAGE_ALLOCATE_BUFFER | /* __in DWORD dwFlags */ + FORMAT_MESSAGE_FROM_SYSTEM | + FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, /* __in_opt LPCVOID lpSource */ + errorCode, /* __in DWORD dwMessageId */ + 0, /* __in DWORD dwLanguageId */ + (LPSTR)&errorMessage, /* __out LPTSTR lpBuffer */ + 0, /* __in DWORD nSize */ + 0 ); /* __in_opt va_list * Arguments */ + + /* Build a message as if the process had written to stderr. */ + if ( globs.pipe_action ) + err_buf = cmdtab[ slot ].buffer_err; + else + err_buf = cmdtab[ slot ].buffer_out; + string_append( err_buf, apiName ); + string_append( err_buf, "() Windows API failed: " ); + sprintf( buf, "%lu", errorCode ); + string_append( err_buf, buf ); + + if ( !apiResult ) + string_append( err_buf, ".\n" ); + else + { + string_append( err_buf, " - " ); + string_append( err_buf, errorMessage ); + /* Make sure that the buffer is terminated with a newline */ + if( err_buf->value[ err_buf->size - 1 ] != '\n' ) + string_push_back( err_buf, '\n' ); + LocalFree( errorMessage ); + } + + /* Since the process didn't actually start, use a blank timing_info. */ + time.system = 0; + time.user = 0; + timestamp_current( &time.start ); + timestamp_current( &time.end ); + + /* Invoke the callback with a failure status. */ + (*cmdtab[ slot ].func)( cmdtab[ slot ].closure, EXEC_CMD_FAIL, &time, + cmdtab[ slot ].buffer_out->value, cmdtab[ slot ].buffer_err->value, + EXIT_OK ); + + /* Clean up any handles that were opened. */ + closeWinHandle( &cmdtab[ slot ].pi.hProcess ); + closeWinHandle( &cmdtab[ slot ].pi.hThread ); + closeWinHandle( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ] ); + closeWinHandle( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ] ); + closeWinHandle( &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_READ ] ); + closeWinHandle( &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ] ); + string_renew( cmdtab[ slot ].buffer_out ); + string_renew( cmdtab[ slot ].buffer_err ); +} + +int32_t shell_maxline() +{ + return maxline(); +} + + +#endif /* USE_EXECNT */ diff --git a/src/boost/tools/build/src/engine/execunix.cpp b/src/boost/tools/build/src/engine/execunix.cpp new file mode 100644 index 000000000..7825fffe7 --- /dev/null +++ b/src/boost/tools/build/src/engine/execunix.cpp @@ -0,0 +1,614 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * Copyright 2007 Noel Belcourt. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +#include "jam.h" + +#ifdef USE_EXECUNIX + +#include "execcmd.h" + +#include "lists.h" +#include "output.h" +#include "jam_strings.h" +#include "startup.h" + +#include +#include +#include +#include +#include /* vfork(), _exit(), STDOUT_FILENO and such */ +#include +#include +#include +#include + +#if defined(sun) || defined(__sun) + #include +#endif + +#include + +#if defined(__APPLE__) + #define NO_VFORK +#endif + +#ifdef NO_VFORK + #define vfork() fork() +#endif + + +/* + * execunix.c - execute a shell script on UNIX/OS2/AmigaOS + * + * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp(). The + * default is: /bin/sh -c + * + * In $(JAMSHELL), % expands to the command string and ! expands to the slot + * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does + * not include a %, it is tacked on as the last argument. 
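+ * For example, the built-in default corresponds roughly to: JAMSHELL = /bin/sh -c % ;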
+ * + * Each word must be an individual element in a jam variable value. + * + * Do not just set JAMSHELL to /bin/sh - it will not work! + * + * External routines: + * exec_check() - preprocess and validate the command. + * exec_cmd() - launch an async command execution. + * exec_wait() - wait for any of the async command processes to terminate. + */ + +/* find a free slot in the running commands table */ +static int get_free_cmdtab_slot(); + +/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ + +static clock_t tps; + +/* We hold stdout & stderr child process information in two element arrays + * indexed as follows. + */ +#define OUT 0 +#define ERR 1 + +static struct cmdtab_t +{ + int pid; /* on win32, a real process handle */ + int fd[ 2 ]; /* file descriptors for stdout and stderr */ + FILE * stream[ 2 ]; /* child's stdout and stderr file streams */ + clock_t start_time; /* start time of child process */ + int exit_reason; /* termination status */ + char * buffer[ 2 ]; /* buffers to hold stdout and stderr, if any */ + int buf_size[ 2 ]; /* buffer sizes in bytes */ + timestamp start_dt; /* start of command timestamp */ + + int flags; + + /* Function called when the command completes. */ + ExecCmdCallback func; + + /* Opaque data passed back to the 'func' callback. */ + void * closure; +} * cmdtab = NULL; +static int cmdtab_size = 0; + +/* Contains both stdin and stdout of all processes. + * The length is either globs.jobs or globs.jobs * 2 + * depending on globs.pipe_action. + */ +struct pollfd * wait_fds = NULL; +#define WAIT_FDS_SIZE ( globs.jobs * ( globs.pipe_action ? 2 : 1 ) ) +#define GET_WAIT_FD( job_idx ) ( wait_fds + ( ( job_idx * ( globs.pipe_action ? 2 : 1 ) ) ) ) + +/* + * exec_init() - global initialization + */ +void exec_init( void ) +{ + int i; + if ( globs.jobs > cmdtab_size ) + { + cmdtab = (cmdtab_t*)BJAM_REALLOC( cmdtab, globs.jobs * sizeof( *cmdtab ) ); + memset( cmdtab + cmdtab_size, 0, ( globs.jobs - cmdtab_size ) * sizeof( *cmdtab ) ); + wait_fds = (pollfd*)BJAM_REALLOC( wait_fds, WAIT_FDS_SIZE * sizeof ( *wait_fds ) ); + for ( i = cmdtab_size; i < globs.jobs; ++i ) + { + GET_WAIT_FD( i )[ OUT ].fd = -1; + GET_WAIT_FD( i )[ OUT ].events = POLLIN; + if ( globs.pipe_action ) + { + GET_WAIT_FD( i )[ ERR ].fd = -1; + GET_WAIT_FD( i )[ ERR ].events = POLLIN; + } + } + cmdtab_size = globs.jobs; + } +} + +void exec_done( void ) +{ + BJAM_FREE( cmdtab ); + BJAM_FREE( wait_fds ); +} + +/* + * exec_check() - preprocess and validate the command. + */ + +int exec_check +( + string const * command, + LIST * * pShell, + int32_t * error_length, + int32_t * error_max_length +) +{ + int const is_raw_cmd = is_raw_command_request( *pShell ); + + /* We allow empty commands for non-default shells since we do not really + * know what they are going to do with such commands. + */ + if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) ) + return EXEC_CHECK_NOOP; + + return is_raw_cmd + ? EXEC_CHECK_OK + : check_cmd_for_too_long_lines( command->value, shell_maxline(), error_length, + error_max_length ); +} + + +/* + * exec_cmd() - launch an async command execution. + */ + +/* We hold file descriptors for pipes used to communicate with child processes + * in two element arrays indexed as follows. 
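+ * The indices mirror the order in which pipe() returns its descriptors: element 0 is the read end and element 1 the write end.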
+ */ +#define EXECCMD_PIPE_READ 0 +#define EXECCMD_PIPE_WRITE 1 + +void exec_cmd +( + string const * command, + int flags, + ExecCmdCallback func, + void * closure, + LIST * shell +) +{ + struct sigaction ignore, saveintr, savequit; + sigset_t chldmask, savemask; + + int const slot = get_free_cmdtab_slot(); + int out[ 2 ]; + int err[ 2 ]; + char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */ + + /* Initialize default shell. */ + static LIST * default_shell; + if ( !default_shell ) + default_shell = list_push_back( list_new( + object_new( "/bin/sh" ) ), + object_new( "-c" ) ); + + if ( list_empty( shell ) ) + shell = default_shell; + + /* Forumulate argv. If shell was defined, be prepared for % and ! subs. + * Otherwise, use stock /bin/sh. + */ + argv_from_shell( argv, shell, command->value, slot ); + + if ( DEBUG_EXECCMD ) + { + int i; + out_printf( "Using shell: " ); + list_print( shell ); + out_printf( "\n" ); + for ( i = 0; argv[ i ]; ++i ) + out_printf( " argv[%d] = '%s'\n", i, argv[ i ] ); + } + + /* Create pipes for collecting child output. */ + if ( pipe( out ) < 0 || ( globs.pipe_action && pipe( err ) < 0 ) ) + { + errno_puts( "pipe" ); + b2::clean_exit( EXITBAD ); + } + + /* Start the command */ + + timestamp_current( &cmdtab[ slot ].start_dt ); + + if ( 0 < globs.timeout ) + { + /* Handle hung processes by manually tracking elapsed time and signal + * process when time limit expires. + */ + struct tms buf; + cmdtab[ slot ].start_time = times( &buf ); + + /* Make a global, only do this once. */ + if ( !tps ) tps = sysconf( _SC_CLK_TCK ); + } + + /* Child does not need the read pipe ends used by the parent. */ + fcntl( out[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC ); + if ( globs.pipe_action ) + fcntl( err[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC ); + + /* ignore SIGINT and SIGQUIT */ + ignore.sa_handler = SIG_IGN; + sigemptyset(&ignore.sa_mask); + ignore.sa_flags = 0; + if (sigaction(SIGINT, &ignore, &saveintr) < 0) + return; + if (sigaction(SIGQUIT, &ignore, &savequit) < 0) + return; + + /* block SIGCHLD */ + sigemptyset(&chldmask); + sigaddset(&chldmask, SIGCHLD); + if (sigprocmask(SIG_BLOCK, &chldmask, &savemask) < 0) + return; + + if ( ( cmdtab[ slot ].pid = vfork() ) == -1 ) + { + errno_puts( "vfork" ); + b2::clean_exit( EXITBAD ); + } + + if ( cmdtab[ slot ].pid == 0 ) + { + /*****************/ + /* Child process */ + /*****************/ + int const pid = getpid(); + + /* restore previous signals */ + sigaction(SIGINT, &saveintr, NULL); + sigaction(SIGQUIT, &savequit, NULL); + sigprocmask(SIG_SETMASK, &savemask, NULL); + + /* Redirect stdout and stderr to pipes inherited from the parent. */ + dup2( out[ EXECCMD_PIPE_WRITE ], STDOUT_FILENO ); + dup2( globs.pipe_action ? err[ EXECCMD_PIPE_WRITE ] : + out[ EXECCMD_PIPE_WRITE ], STDERR_FILENO ); + close( out[ EXECCMD_PIPE_WRITE ] ); + if ( globs.pipe_action ) + close( err[ EXECCMD_PIPE_WRITE ] ); + + /* Make this process a process group leader so that when we kill it, all + * child processes of this process are terminated as well. We use + * killpg( pid, SIGKILL ) to kill the process group leader and all its + * children. 
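+         * The parent repeats the setpgid() call after the fork as well, presumably so the process group exists no matter which of the two processes gets scheduled first.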
+ */ + if ( 0 < globs.timeout ) + { + struct rlimit r_limit; + r_limit.rlim_cur = globs.timeout; + r_limit.rlim_max = globs.timeout; + setrlimit( RLIMIT_CPU, &r_limit ); + } + if (0 != setpgid( pid, pid )) { + errno_puts("setpgid(child)"); + /* b2::clean_exit( EXITBAD ); */ + } + execvp( argv[ 0 ], (char * *)argv ); + errno_puts( "execvp" ); + _exit( 127 ); + } + + /******************/ + /* Parent process */ + /******************/ + + /* redundant call, ignore return value */ + setpgid(cmdtab[ slot ].pid, cmdtab[ slot ].pid); + + /* Parent not need the write pipe ends used by the child. */ + close( out[ EXECCMD_PIPE_WRITE ] ); + if ( globs.pipe_action ) + close( err[ EXECCMD_PIPE_WRITE ] ); + + /* Set both pipe read file descriptors to non-blocking. */ + fcntl( out[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK ); + if ( globs.pipe_action ) + fcntl( err[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK ); + + /* Parent reads from out[ EXECCMD_PIPE_READ ]. */ + cmdtab[ slot ].fd[ OUT ] = out[ EXECCMD_PIPE_READ ]; + cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" ); + if ( !cmdtab[ slot ].stream[ OUT ] ) + { + errno_puts( "fdopen" ); + b2::clean_exit( EXITBAD ); + } + + /* Parent reads from err[ EXECCMD_PIPE_READ ]. */ + if ( globs.pipe_action ) + { + cmdtab[ slot ].fd[ ERR ] = err[ EXECCMD_PIPE_READ ]; + cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" ); + if ( !cmdtab[ slot ].stream[ ERR ] ) + { + errno_puts( "fdopen" ); + b2::clean_exit( EXITBAD ); + } + } + + GET_WAIT_FD( slot )[ OUT ].fd = out[ EXECCMD_PIPE_READ ]; + if ( globs.pipe_action ) + GET_WAIT_FD( slot )[ ERR ].fd = err[ EXECCMD_PIPE_READ ]; + + cmdtab[ slot ].flags = flags; + + /* Save input data into the selected running commands table slot. */ + cmdtab[ slot ].func = func; + cmdtab[ slot ].closure = closure; + + /* restore previous signals */ + sigaction(SIGINT, &saveintr, NULL); + sigaction(SIGQUIT, &savequit, NULL); + sigprocmask(SIG_SETMASK, &savemask, NULL); +} + +#undef EXECCMD_PIPE_READ +#undef EXECCMD_PIPE_WRITE + + +/* Returns 1 if file descriptor is closed, or 0 if it is still alive. + * + * i is index into cmdtab + * + * s (stream) indexes: + * - cmdtab[ i ].stream[ s ] + * - cmdtab[ i ].buffer[ s ] + * - cmdtab[ i ].fd [ s ] + */ + +static int read_descriptor( int i, int s ) +{ + int ret; + char buffer[ BUFSIZ ]; + + while ( 0 < ( ret = fread( buffer, sizeof( char ), BUFSIZ - 1, + cmdtab[ i ].stream[ s ] ) ) ) + { + buffer[ ret ] = 0; + + /* Copy it to our output if appropriate */ + if ( ! ( cmdtab[ i ].flags & EXEC_CMD_QUIET ) ) + { + if ( s == OUT && ( globs.pipe_action != 2 ) ) + out_data( buffer ); + else if ( s == ERR && ( globs.pipe_action & 2 ) ) + err_data( buffer ); + } + + if ( !cmdtab[ i ].buffer[ s ] ) + { + /* Never been allocated. */ + if ( globs.max_buf && ret > globs.max_buf ) + { + ret = globs.max_buf; + buffer[ ret ] = 0; + } + cmdtab[ i ].buf_size[ s ] = ret + 1; + cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 ); + memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 ); + } + else + { + /* Previously allocated. 
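+             * Grow the buffer and append the new chunk, unless the configured output limit (globs.max_buf) has already been reached.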
*/ + if ( cmdtab[ i ].buf_size[ s ] < globs.max_buf || !globs.max_buf ) + { + char * tmp = cmdtab[ i ].buffer[ s ]; + int const old_len = cmdtab[ i ].buf_size[ s ] - 1; + int const new_len = old_len + ret + 1; + cmdtab[ i ].buf_size[ s ] = new_len; + cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( new_len ); + memcpy( cmdtab[ i ].buffer[ s ], tmp, old_len ); + memcpy( cmdtab[ i ].buffer[ s ] + old_len, buffer, ret + 1 ); + BJAM_FREE( tmp ); + } + } + } + + /* If buffer full, ensure last buffer char is newline so that jam log + * contains the command status at beginning of it own line instead of + * appended to end of the previous output. + */ + if ( globs.max_buf && globs.max_buf <= cmdtab[ i ].buf_size[ s ] ) + cmdtab[ i ].buffer[ s ][ cmdtab[ i ].buf_size[ s ] - 2 ] = '\n'; + + return feof( cmdtab[ i ].stream[ s ] ); +} + + +/* + * close_streams() - Close the stream and pipe descriptor. + */ + +static void close_streams( int const i, int const s ) +{ + fclose( cmdtab[ i ].stream[ s ] ); + cmdtab[ i ].stream[ s ] = 0; + + close( cmdtab[ i ].fd[ s ] ); + cmdtab[ i ].fd[ s ] = 0; + + GET_WAIT_FD( i )[ s ].fd = -1; +} + + +/* + * exec_wait() - wait for any of the async command processes to terminate. + * + * May register more than one terminated child process but will exit as soon as + * at least one has been registered. + */ + +void exec_wait() +{ + int finished = 0; + + /* Process children that signaled. */ + while ( !finished ) + { + int i; + int select_timeout = globs.timeout; + + /* Check for timeouts: + * - kill children that already timed out + * - decide how long until the next one times out + */ + if ( globs.timeout > 0 ) + { + struct tms buf; + clock_t const current = times( &buf ); + for ( i = 0; i < globs.jobs; ++i ) + if ( cmdtab[ i ].pid ) + { + clock_t const consumed = + ( current - cmdtab[ i ].start_time ) / tps; + if ( consumed >= globs.timeout ) + { + killpg( cmdtab[ i ].pid, SIGKILL ); + cmdtab[ i ].exit_reason = EXIT_TIMEOUT; + } + else if ( globs.timeout - consumed < select_timeout ) + select_timeout = globs.timeout - consumed; + } + } + + /* select() will wait for I/O on a descriptor, a signal, or timeout. */ + { + /* disable child termination signals while in select */ + int ret; + int timeout; + sigset_t sigmask; + sigemptyset(&sigmask); + sigaddset(&sigmask, SIGCHLD); + sigprocmask(SIG_BLOCK, &sigmask, NULL); + + /* If no timeout is specified, pass -1 (which means no timeout, + * wait indefinitely) to poll, to prevent busy-looping. + */ + timeout = select_timeout? select_timeout * 1000 : -1; + while ( ( ret = poll( wait_fds, WAIT_FDS_SIZE, timeout ) ) == -1 ) + if ( errno != EINTR ) + break; + /* restore original signal mask by unblocking sigchld */ + sigprocmask(SIG_UNBLOCK, &sigmask, NULL); + if ( ret <= 0 ) + continue; + } + + for ( i = 0; i < globs.jobs; ++i ) + { + int out_done = 0; + int err_done = 0; + if ( GET_WAIT_FD( i )[ OUT ].revents ) + out_done = read_descriptor( i, OUT ); + + if ( globs.pipe_action && ( GET_WAIT_FD( i )[ ERR ].revents ) ) + err_done = read_descriptor( i, ERR ); + + /* If feof on either descriptor, we are done. */ + if ( out_done || err_done ) + { + int pid; + int status; + int rstat; + timing_info time_info; + struct rusage cmd_usage; + + /* We found a terminated child process - our search is done. */ + finished = 1; + + /* Close the stream and pipe descriptors. */ + close_streams( i, OUT ); + if ( globs.pipe_action ) + close_streams( i, ERR ); + + /* Reap the child and release resources. 
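+                 * wait4() also hands back the child's resource usage, which is what feeds the user/system times reported through the callback below.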
*/ + while ( ( pid = wait4( cmdtab[ i ].pid, &status, 0, &cmd_usage ) ) == -1 ) + if ( errno != EINTR ) + break; + if ( pid != cmdtab[ i ].pid ) + { + err_printf( "unknown pid %d with errno = %d\n", pid, errno ); + b2::clean_exit( EXITBAD ); + } + + /* Set reason for exit if not timed out. */ + if ( WIFEXITED( status ) ) + cmdtab[ i ].exit_reason = WEXITSTATUS( status ) + ? EXIT_FAIL + : EXIT_OK; + + { + time_info.system = ((double)(cmd_usage.ru_stime.tv_sec)*1000000.0+(double)(cmd_usage.ru_stime.tv_usec))/1000000.0; + time_info.user = ((double)(cmd_usage.ru_utime.tv_sec)*1000000.0+(double)(cmd_usage.ru_utime.tv_usec))/1000000.0; + timestamp_copy( &time_info.start, &cmdtab[ i ].start_dt ); + timestamp_current( &time_info.end ); + } + + /* Drive the completion. */ + if ( interrupted() ) + rstat = EXEC_CMD_INTR; + else if ( status ) + rstat = EXEC_CMD_FAIL; + else + rstat = EXEC_CMD_OK; + + /* Call the callback, may call back to jam rule land. */ + (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time_info, + cmdtab[ i ].buffer[ OUT ], cmdtab[ i ].buffer[ ERR ], + cmdtab[ i ].exit_reason ); + + /* Clean up the command's running commands table slot. */ + BJAM_FREE( cmdtab[ i ].buffer[ OUT ] ); + cmdtab[ i ].buffer[ OUT ] = 0; + cmdtab[ i ].buf_size[ OUT ] = 0; + + BJAM_FREE( cmdtab[ i ].buffer[ ERR ] ); + cmdtab[ i ].buffer[ ERR ] = 0; + cmdtab[ i ].buf_size[ ERR ] = 0; + + cmdtab[ i ].pid = 0; + cmdtab[ i ].func = 0; + cmdtab[ i ].closure = 0; + cmdtab[ i ].start_time = 0; + } + } + } +} + + +/* + * Find a free slot in the running commands table. + */ + +static int get_free_cmdtab_slot() +{ + int slot; + for ( slot = 0; slot < globs.jobs; ++slot ) + if ( !cmdtab[ slot ].pid ) + return slot; + err_printf( "no slots for child!\n" ); + b2::clean_exit( EXITBAD ); + return -1; +} + +int32_t shell_maxline() +{ + return MAXLINE; +} + +# endif /* USE_EXECUNIX */ diff --git a/src/boost/tools/build/src/engine/execvms.cpp b/src/boost/tools/build/src/engine/execvms.cpp new file mode 100644 index 000000000..05b6c7a20 --- /dev/null +++ b/src/boost/tools/build/src/engine/execvms.cpp @@ -0,0 +1,426 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2015 Artur Shepilko. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + + +/* + * execvms.c - execute a shell script, ala VMS. + * + * The approach is this: + * + * If the command is a single line, and shorter than WRTLEN (what we believe to + * be the maximum line length), we just system() it. + * + * If the command is multi-line, or longer than WRTLEN, we write the command + * block to a temp file, splitting long lines (using "-" at the end of the line + * to indicate contiuation), and then source that temp file. We use special + * logic to make sure we do not continue in the middle of a quoted string. + * + * 05/04/94 (seiwald) - async multiprocess interface; noop on VMS + * 12/20/96 (seiwald) - rewritten to handle multi-line commands well + * 01/14/96 (seiwald) - do not put -'s between "'s + * 01/19/15 (shepilko)- adapt for jam-3.1.19 + */ + +#include "jam.h" +#include "lists.h" +#include "execcmd.h" +#include "output.h" +#include "startup.h" + +#ifdef OS_VMS + +#include +#include +#include +#include +#include +#include +#include + + +#define WRTLEN 240 + +#define MIN( a, b ) ((a) < (b) ? 
(a) : (b)) + +#define CHAR_DQUOTE '"' + +#define VMS_PATH_MAX 1024 +#define VMS_COMMAND_MAX 1024 + +#define VMS_WARNING 0 +#define VMS_SUCCESS 1 +#define VMS_ERROR 2 +#define VMS_FATAL 4 + +char commandbuf[ VMS_COMMAND_MAX ] = { 0 }; + + +static int get_status(int vms_status); +static clock_t get_cpu_time(); + +/* + * exec_check() - preprocess and validate the command. + */ + +int exec_check +( + string const * command, + LIST * * pShell, + int32_t * error_length, + int32_t * error_max_length +) +{ + int const is_raw_cmd = 1; + + /* We allow empty commands for non-default shells since we do not really + * know what they are going to do with such commands. + */ + if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) ) + return EXEC_CHECK_NOOP; + + return is_raw_cmd + ? EXEC_CHECK_OK + : check_cmd_for_too_long_lines( command->value, shell_maxline(), error_length, + error_max_length ); +} + + +/* + * exec_cmd() - execute system command. + */ + +void exec_cmd +( + string const * command, + int flags, + ExecCmdCallback func, + void * closure, + LIST * shell +) +{ + char * s; + char * e; + char * p; + int vms_status; + int status; + int rstat = EXEC_CMD_OK; + int exit_reason = EXIT_OK; + timing_info time_info; + timestamp start_dt; + struct tms start_time; + struct tms end_time; + char * cmd_string = command->value; + + + /* Start the command */ + + timestamp_current( &time_info.start ); + times( &start_time ); + + /* See if command is more than one line discounting leading/trailing white + * space. + */ + for ( s = cmd_string; *s && isspace( *s ); ++s ); + + e = p = strchr( s, '\n' ); + + while ( p && isspace( *p ) ) + ++p; + + /* If multi line or long, write to com file. Otherwise, exec directly. */ + if ( ( p && *p ) || ( e - s > WRTLEN ) ) + { + FILE * f; + + /* Create temp file invocation. */ + + if ( !*commandbuf ) + { + OBJECT * tmp_filename = 0; + + tmp_filename = path_tmpfile(); + + + /* Get tmp file name is VMS-format. */ + { + string os_filename[ 1 ]; + string_new( os_filename ); + path_translate_to_os( object_str( tmp_filename ), os_filename ); + object_free( tmp_filename ); + tmp_filename = object_new( os_filename->value ); + string_free( os_filename ); + } + + commandbuf[0] = '@'; + strncat( commandbuf + 1, object_str( tmp_filename ), + VMS_COMMAND_MAX - 2); + } + + + /* Open tempfile. */ + if ( !( f = fopen( commandbuf + 1, "w" ) ) ) + { + err_printf( "[errno %d] failed to wite cmd_string file '%s': %s", + errno, commandbuf + 1, strerror(errno) ); + rstat = EXEC_CMD_FAIL; + exit_reason = EXIT_FAIL; + + times( &end_time ); + + timestamp_current( &time_info.end ); + time_info.system = (double)( end_time.tms_cstime - + start_time.tms_cstime ) / 100.; + time_info.user = (double)( end_time.tms_cutime - + start_time.tms_cutime ) / 100.; + + (*func)( closure, rstat, &time_info, "" , "", exit_reason ); + return; + } + + + /* Running from TMP, so explicitly set default to CWD. */ + { + char * cwd = NULL; + int cwd_buf_size = VMS_PATH_MAX; + + while ( !(cwd = getcwd( NULL, cwd_buf_size ) ) /* alloc internally */ + && errno == ERANGE ) + { + cwd_buf_size += VMS_PATH_MAX; + } + + if ( !cwd ) + { + errno_puts( "can not get current working directory" ); + b2::clean_exit( EXITBAD ); + } + + fprintf( f, "$ SET DEFAULT %s\n", cwd); + + free( cwd ); + } + + + /* For each line of the command. */ + while ( *cmd_string ) + { + char * s = strchr( cmd_string,'\n' ); + int len = s ? 
s + 1 - cmd_string : strlen( cmd_string ); + + fputc( '$', f ); + + /* For each chunk of a line that needs to be split. */ + while ( len > 0 ) + { + char * q = cmd_string; + char * qe = cmd_string + MIN( len, WRTLEN ); + char * qq = q; + int quote = 0; + + /* Look for matching "s -- expected in the same line. */ + for ( ; q < qe; ++q ) + if ( ( *q == CHAR_DQUOTE ) && ( quote = !quote ) ) + qq = q; + + /* When needs splitting and is inside an open quote, + * back up to opening quote and split off at it. + * When the quoted string spans over a chunk, + * pass string as a whole. + * If no matching quote found, dump the rest of command. + */ + if ( len > WRTLEN && quote ) + { + q = qq; + + if ( q == cmd_string ) + { + for ( q = qe; q < ( cmd_string + len ) + && *q != CHAR_DQUOTE ; ++q) {} + q = ( *q == CHAR_DQUOTE) ? ( q + 1 ) : ( cmd_string + len ); + } + } + + fwrite( cmd_string, ( q - cmd_string ), 1, f ); + + len -= ( q - cmd_string ); + cmd_string = q; + + if ( len ) + { + fputc( '-', f ); + fputc( '\n', f ); + } + } + } + + fclose( f ); + + if ( DEBUG_EXECCMD ) + { + FILE * f; + char buf[ WRTLEN + 1 ] = { 0 }; + + if ( (f = fopen( commandbuf + 1, "r" ) ) ) + { + int nbytes; + printf( "Command file: %s\n", commandbuf + 1 ); + + do + { + nbytes = fread( buf, sizeof( buf[0] ), sizeof( buf ) - 1, f ); + + if ( nbytes ) fwrite(buf, sizeof( buf[0] ), nbytes, stdout); + } + while ( !feof(f) ); + + fclose(f); + } + } + + /* Execute command file */ + vms_status = system( commandbuf ); + status = get_status( vms_status ); + + unlink( commandbuf + 1 ); + } + else + { + /* Execute single line command. Strip trailing newline before execing. + * TODO:Call via popen() with capture of the output may be better here. + */ + if ( e ) *e = 0; + + status = VMS_SUCCESS; /* success on empty command */ + if ( *s ) + { + vms_status = system( s ); + status = get_status( vms_status ); + } + } + + + times( &end_time ); + + timestamp_current( &time_info.end ); + time_info.system = (double)( end_time.tms_cstime - + start_time.tms_cstime ) / 100.; + time_info.user = (double)( end_time.tms_cutime - + start_time.tms_cutime ) / 100.; + + + /* Fail for error or fatal error. OK on OK, warning or info exit. */ + if ( ( status == VMS_ERROR ) || ( status == VMS_FATAL ) ) + { + rstat = EXEC_CMD_FAIL; + exit_reason = EXIT_FAIL; + } + + (*func)( closure, rstat, &time_info, "" , "", exit_reason ); +} + + +void exec_wait() +{ + return; +} + + +/* get_status() - returns status of the VMS command execution. + - Map VMS status to its severity (lower 3-bits) + - W-DCL-IVVERB is returned on unrecognized command -- map to general ERROR +*/ +int get_status( int vms_status ) +{ +#define VMS_STATUS_DCL_IVVERB 0x00038090 + + int status; + + switch (vms_status) + { + case VMS_STATUS_DCL_IVVERB: + status = VMS_ERROR; + break; + + default: + status = vms_status & 0x07; /* $SEVERITY bits */ + } + + return status; +} + + +#define __NEW_STARLET 1 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +/* + * get_cpu_time() - returns CPU time in CLOCKS_PER_SEC since process start. + * on error returns (clock_t)-1. + * + * Intended to emulate (system + user) result of *NIX times(), if CRTL times() + * is not available. +* However, this accounts only for the current process. To account for child +* processes, these need to be directly spawned/forked via exec(). +* Moreover, child processes should be running a C main program or a program +* that calls VAXC$CRTL_INIT or DECC$CRTL_INIT. 
+*/ + +clock_t get_cpu_time() +{ + clock_t result = (clock_t) 0; + + IOSB iosb; + int status; + long cputime = 0; + + + ILE3 jpi_items[] = { + { sizeof( cputime ), JPI$_CPUTIM, &cputime, NULL }, /* longword int, 10ms */ + { 0 }, + }; + + status = sys$getjpiw (EFN$C_ENF, 0, 0, jpi_items, &iosb, 0, 0); + + if ( !$VMS_STATUS_SUCCESS( status ) ) + { + lib$signal( status ); + + result = (clock_t) -1; + return result; + } + + + result = ( cputime / 100 ) * CLOCKS_PER_SEC; + + return result; +} + +int32_t shell_maxline() +{ + return MAXLINE; +} + + +# endif /* VMS */ + diff --git a/src/boost/tools/build/src/engine/filent.cpp b/src/boost/tools/build/src/engine/filent.cpp new file mode 100644 index 000000000..e32879424 --- /dev/null +++ b/src/boost/tools/build/src/engine/filent.cpp @@ -0,0 +1,517 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * filent.c - scan directories and archives on NT + * + * External routines: + * file_archscan() - scan an archive for files + * file_mkdir() - create a directory + * file_supported_fmt_resolution() - file modification timestamp resolution + * + * External routines called only via routines in filesys.c: + * file_collect_dir_content_() - collects directory content information + * file_dirscan_() - OS specific file_dirscan() implementation + * file_query_() - query information about a path from the OS + * file_collect_archive_content_() - collects information about archive members + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +#include "jam.h" +#ifdef OS_NT +#include "filesys.h" + +#include "object.h" +#include "pathsys.h" +#include "jam_strings.h" +#include "output.h" + +#ifdef __BORLANDC__ +# undef FILENAME /* cpp namespace collision */ +#endif + +#define WIN32_LEAN_AND_MEAN +#include + +#include +#include +#include +#include + + +int file_collect_archive_content_( file_archive_info_t * const archive ); + +/* + * file_collect_dir_content_() - collects directory content information + */ + +int file_collect_dir_content_( file_info_t * const d ) +{ + PATHNAME f; + string pathspec[ 1 ]; + string pathname[ 1 ]; + LIST * files = L0; + int32_t d_length; + + assert( d ); + assert( d->is_dir ); + assert( list_empty( d->files ) ); + + d_length = int32_t(strlen( object_str( d->name ) )); + + memset( (char *)&f, '\0', sizeof( f ) ); + f.f_dir.ptr = object_str( d->name ); + f.f_dir.len = d_length; + + /* Prepare file search specification for the FindXXX() Windows API. */ + if ( !d_length ) + string_copy( pathspec, ".\\*" ); + else + { + /* We can not simply assume the given folder name will never include its + * trailing path separator or otherwise we would not support the Windows + * root folder specified without its drive letter, i.e. '\'. 
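+         * Hence the path separator is appended below only when the name does not already end with one.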
+ */ + char const trailingChar = object_str( d->name )[ d_length - 1 ] ; + string_copy( pathspec, object_str( d->name ) ); + if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) ) + string_append( pathspec, "\\" ); + string_append( pathspec, "*" ); + } + + /* The following code for collecting information about all files in a folder + * needs to be kept synchronized with how the file_query() operation is + * implemented (collects information about a single file). + */ + { + /* FIXME: Avoid duplicate FindXXX Windows API calls here and in the code + * determining a normalized path. + */ + WIN32_FIND_DATAA finfo; + HANDLE const findHandle = FindFirstFileA( pathspec->value, &finfo ); + if ( findHandle == INVALID_HANDLE_VALUE ) + { + string_free( pathspec ); + return -1; + } + + string_new( pathname ); + do + { + OBJECT * pathname_obj; + + f.f_base.ptr = finfo.cFileName; + f.f_base.len = int32_t(strlen( finfo.cFileName )); + + string_truncate( pathname, 0 ); + path_build( &f, pathname ); + + pathname_obj = object_new( pathname->value ); + path_register_key( pathname_obj ); + files = list_push_back( files, pathname_obj ); + { + int found; + file_info_t * const ff = file_info( pathname_obj, &found ); + ff->is_dir = finfo.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY; + ff->is_file = !ff->is_dir; + ff->exists = 1; + timestamp_from_filetime( &ff->time, &finfo.ftLastWriteTime ); + // Use the timestamp of the link target, not the link itself + // (i.e. stat instead of lstat) + if ( finfo.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT ) + { + HANDLE hLink = CreateFileA( pathname->value, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL ); + BY_HANDLE_FILE_INFORMATION target_finfo[ 1 ]; + if ( hLink != INVALID_HANDLE_VALUE && GetFileInformationByHandle( hLink, target_finfo ) ) + { + ff->is_file = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 0 : 1; + ff->is_dir = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 1 : 0; + timestamp_from_filetime( &ff->time, &target_finfo->ftLastWriteTime ); + } + } + } + } + while ( FindNextFileA( findHandle, &finfo ) ); + + FindClose( findHandle ); + } + + string_free( pathname ); + string_free( pathspec ); + + d->files = files; + return 0; +} + + +/* + * file_dirscan_() - OS specific file_dirscan() implementation + */ + +void file_dirscan_( file_info_t * const d, scanback func, void * closure ) +{ + assert( d ); + assert( d->is_dir ); + + /* Special case \ or d:\ : enter it */ + { + char const * const name = object_str( d->name ); + if ( name[ 0 ] == '\\' && !name[ 1 ] ) + { + (*func)( closure, d->name, 1 /* stat()'ed */, &d->time ); + } + else if ( name[ 0 ] && name[ 1 ] == ':' && name[ 2 ] && !name[ 3 ] ) + { + /* We have just entered a 3-letter drive name spelling (with a + * trailing slash), into the hash table. Now enter its two-letter + * variant, without the trailing slash, so that if we try to check + * whether "c:" exists, we hit it. + * + * Jam core has workarounds for that. Given: + * x = c:\whatever\foo ; + * p = $(x:D) ; + * p2 = $(p:D) ; + * There will be no trailing slash in $(p), but there will be one in + * $(p2). But, that seems rather fragile. 
+ */ + OBJECT * const dir_no_slash = object_new_range( name, 2 ); + (*func)( closure, d->name, 1 /* stat()'ed */, &d->time ); + (*func)( closure, dir_no_slash, 1 /* stat()'ed */, &d->time ); + object_free( dir_no_slash ); + } + } +} + + +/* + * file_mkdir() - create a directory + */ + +int file_mkdir( char const * const path ) +{ + return _mkdir( path ); +} + + +/* + * file_query_() - query information about a path from the OS + * + * The following code for collecting information about a single file needs to be + * kept synchronized with how the file_collect_dir_content_() operation is + * implemented (collects information about all files in a folder). + */ + +int try_file_query_root( file_info_t * const info ) +{ + WIN32_FILE_ATTRIBUTE_DATA fileData; + char buf[ 4 ]; + char const * const pathstr = object_str( info->name ); + if ( !pathstr[ 0 ] ) + { + buf[ 0 ] = '.'; + buf[ 1 ] = 0; + } + else if ( pathstr[ 0 ] == '\\' && ! pathstr[ 1 ] ) + { + buf[ 0 ] = '\\'; + buf[ 1 ] = '\0'; + } + else if ( pathstr[ 1 ] == ':' ) + { + if ( !pathstr[ 2 ] || ( pathstr[ 2 ] == '\\' && !pathstr[ 3 ] ) ) + { + buf[ 0 ] = pathstr[ 0 ]; + buf[ 1 ] = ':'; + buf[ 2 ] = '\\'; + buf[ 3 ] = '\0'; + } + else + { + return 0; + } + } + else + { + return 0; + } + + /* We have a root path */ + if ( !GetFileAttributesExA( buf, GetFileExInfoStandard, &fileData ) ) + { + info->is_dir = 0; + info->is_file = 0; + info->exists = 0; + timestamp_clear( &info->time ); + } + else + { + info->is_dir = fileData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY; + info->is_file = !info->is_dir; + info->exists = 1; + timestamp_from_filetime( &info->time, &fileData.ftLastWriteTime ); + } + return 1; +} + +void file_query_( file_info_t * const info ) +{ + char const * const pathstr = object_str( info->name ); + const char * dir; + OBJECT * parent; + file_info_t * parent_info; + + if ( try_file_query_root( info ) ) + return; + + if ( ( dir = strrchr( pathstr, '\\' ) ) ) + { + parent = object_new_range( pathstr, int32_t(dir - pathstr) ); + } + else + { + parent = object_copy( constant_empty ); + } + parent_info = file_query( parent ); + object_free( parent ); + if ( !parent_info || !parent_info->is_dir ) + { + info->is_dir = 0; + info->is_file = 0; + info->exists = 0; + timestamp_clear( &info->time ); + } + else + { + info->is_dir = 0; + info->is_file = 0; + info->exists = 0; + timestamp_clear( &info->time ); + if ( list_empty( parent_info->files ) ) + file_collect_dir_content_( parent_info ); + } +} + + +/* + * file_supported_fmt_resolution() - file modification timestamp resolution + * + * Returns the minimum file modification timestamp resolution supported by this + * Boost Jam implementation. File modification timestamp changes of less than + * the returned value might not be recognized. + * + * Does not take into consideration any OS or file system related restrictions. + * + * Return value 0 indicates that any value supported by the OS is also supported + * here. + */ + +void file_supported_fmt_resolution( timestamp * const t ) +{ + /* On Windows we support nano-second file modification timestamp resolution, + * just the same as the Windows OS itself. 
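+ * The timestamp_init( t, 0, 0 ) call below is the zero value described
+ * above: this module imposes no additional restriction, so whatever
+ * resolution the OS reports is accepted as-is.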
+ */ + timestamp_init( t, 0, 0 ); +} + + +/* + * file_archscan() - scan an archive for files + */ + +/* Straight from SunOS */ + +#define ARMAG "!\n" +#define SARMAG 8 + +#define ARFMAG "`\n" + +struct ar_hdr +{ + char ar_name[ 16 ]; + char ar_date[ 12 ]; + char ar_uid[ 6 ]; + char ar_gid[ 6 ]; + char ar_mode[ 8 ]; + char ar_size[ 10 ]; + char ar_fmag[ 2 ]; +}; + +#define SARFMAG 2 +#define SARHDR sizeof( struct ar_hdr ) + +void file_archscan( char const * arch, scanback func, void * closure ) +{ + OBJECT * path = object_new( arch ); + file_archive_info_t * archive = file_archive_query( path ); + + object_free( path ); + + if ( filelist_empty( archive->members ) ) + { + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* Report the collected archive content. */ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, 1 /* time valid */, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ) +{ +} + + +/* + * file_collect_archive_content_() - collects information about archive members + */ + +int file_collect_archive_content_( file_archive_info_t * const archive ) +{ + struct ar_hdr ar_hdr; + char * string_table = 0; + char buf[ MAXJPATH ]; + long offset; + const char * path = object_str( archive->file->name ); + int const fd = open( path , O_RDONLY | O_BINARY, 0 ); + + if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members ); + + if ( fd < 0 ) + return -1; + + if ( read( fd, buf, SARMAG ) != SARMAG || strncmp( ARMAG, buf, SARMAG ) ) + { + close( fd ); + return -1; + } + + offset = SARMAG; + + if ( DEBUG_BINDSCAN ) + out_printf( "scan archive %s\n", path ); + + while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) && + !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) ) + { + long lar_date; + long lar_size; + char * name = 0; + char * endname; + + sscanf( ar_hdr.ar_date, "%ld", &lar_date ); + sscanf( ar_hdr.ar_size, "%ld", &lar_size ); + + lar_size = ( lar_size + 1 ) & ~1; + + if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] == '/' ) + { + /* This is the "string table" entry of the symbol table, holding + * filename strings longer than 15 characters, i.e. those that do + * not fit into ar_name. + */ + string_table = (char*)BJAM_MALLOC_ATOMIC( lar_size + 1 ); + if ( read( fd, string_table, lar_size ) != lar_size ) + out_printf( "error reading string table\n" ); + string_table[ lar_size ] = '\0'; + offset += SARHDR + lar_size; + continue; + } + else if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] != ' ' ) + { + /* Long filenames are recognized by "/nnnn" where nnnn is the + * string's offset in the string table represented in ASCII + * decimals. 
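+ *
+ * Purely illustrative example (hypothetical names, and real tables may
+ * differ in how entries are terminated): if the "//" string table held
+ * the bytes
+ *     "first_member_object.obj\nsecond.obj\n"
+ * then a header named "/0" would resolve to "first_member_object.obj" and
+ * one named "/24" to "second.obj"; the '\n' scan below terminates each name.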
+ */ + name = string_table + atoi( ar_hdr.ar_name + 1 ); + for ( endname = name; *endname && *endname != '\n'; ++endname ); + } + else + { + /* normal name */ + name = ar_hdr.ar_name; + endname = name + sizeof( ar_hdr.ar_name ); + } + + /* strip trailing white-space, slashes, and backslashes */ + + while ( endname-- > name ) + if ( !isspace( *endname ) && ( *endname != '\\' ) && ( *endname != + '/' ) ) + break; + *++endname = 0; + + /* strip leading directory names, an NT specialty */ + { + char * c; + if ( (c = strrchr( name, '/' )) != nullptr ) + name = c + 1; + if ( (c = strrchr( name, '\\' )) != nullptr ) + name = c + 1; + } + + sprintf( buf, "%.*s", int(endname - name), name ); + + if ( strcmp( buf, "") != 0 ) + { + file_info_t * member = 0; + + /* NT static libraries appear to store the objects in a sequence + * reverse to the order in which they were inserted. + * Here we reverse the stored sequence by pushing members to front of + * member file list to get the intended members order. + */ + archive->members = filelist_push_front( archive->members, object_new( buf ) ); + member = filelist_front( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); + } + + offset += SARHDR + lar_size; + lseek( fd, offset, 0 ); + } + + close( fd ); + + return 0; +} + +#endif /* OS_NT */ diff --git a/src/boost/tools/build/src/engine/filesys.cpp b/src/boost/tools/build/src/engine/filesys.cpp new file mode 100644 index 000000000..b23f3791d --- /dev/null +++ b/src/boost/tools/build/src/engine/filesys.cpp @@ -0,0 +1,708 @@ +/* + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * filesys.c - OS independent file system manipulation support + * + * External routines: + * file_build1() - construct a path string based on PATHNAME information + * file_dirscan() - scan a directory for files + * file_done() - module cleanup called on shutdown + * file_info() - return cached information about a path + * file_is_file() - return whether a path identifies an existing file + * file_query() - get cached information about a path, query the OS if + * needed + * file_remove_atexit() - schedule a path to be removed on program exit + * file_time() - get a file timestamp + * + * External routines - utilities for OS specific module implementations: + * file_query_posix_() - query information about a path using POSIX stat() + * + * Internal routines: + * file_dirscan_impl() - no-profiling worker for file_dirscan() + */ + + +#include "jam.h" +#include "filesys.h" + +#include "lists.h" +#include "object.h" +#include "pathsys.h" +#include "jam_strings.h" +#include "output.h" + +#include +#include + + +/* Internal OS specific implementation details - have names ending with an + * underscore and are expected to be implemented in an OS specific fileXXX.c + * module. 
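+ * In this tree those implementations are provided by filent.cpp (OS_NT),
+ * fileunix.cpp (USE_FILEUNIX) and filevms.cpp (OS_VMS).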
+ */ +void file_dirscan_( file_info_t * const dir, scanback func, void * closure ); +int file_collect_dir_content_( file_info_t * const dir ); +void file_query_( file_info_t * const ); + +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ); +int file_collect_archive_content_( file_archive_info_t * const archive ); +void file_archive_query_( file_archive_info_t * const ); + +static void file_archivescan_impl( OBJECT * path, archive_scanback func, + void * closure ); +static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure ); +static void free_file_archive_info( void * xarchive, void * data ); +static void free_file_info( void * xfile, void * data ); + +static void remove_files_atexit( void ); + + +static struct hash * filecache_hash; +static struct hash * archivecache_hash; + + +/* + * file_archive_info() - return cached information about an archive + * + * Returns a default initialized structure containing only queried file's info + * in case this is the first time this file system entity has been + * referenced. + */ + +file_archive_info_t * file_archive_info( OBJECT * const path, int * found ) +{ + OBJECT * const path_key = path_as_key( path ); + file_archive_info_t * archive; + + if ( !archivecache_hash ) + archivecache_hash = hashinit( sizeof( file_archive_info_t ), + "file_archive_info" ); + + archive = (file_archive_info_t *)hash_insert( archivecache_hash, path_key, + found ); + + if ( !*found ) + { + archive->name = path_key; + archive->file = 0; + archive->members = FL0; + } + else + object_free( path_key ); + + return archive; +} + + +/* + * file_archive_query() - get cached information about a archive file path + * + * Returns 0 in case querying the OS about the given path fails, e.g. because + * the path does not reference an existing file system object. + */ + +file_archive_info_t * file_archive_query( OBJECT * const path ) +{ + int found; + file_archive_info_t * const archive = file_archive_info( path, &found ); + file_info_t * file = file_query( path ); + + if ( !( file && file->is_file ) ) + { + return 0; + } + + archive->file = file; + + + return archive; +} + + + +/* + * file_archivescan() - scan an archive for members + */ + +void file_archivescan( OBJECT * path, archive_scanback func, void * closure ) +{ + PROFILE_ENTER( FILE_ARCHIVESCAN ); + file_archivescan_impl( path, func, closure ); + PROFILE_EXIT( FILE_ARCHIVESCAN ); +} + + +/* + * file_build1() - construct a path string based on PATHNAME information + */ + +void file_build1( PATHNAME * const f, string * file ) +{ + if ( DEBUG_SEARCH ) + { + out_printf( "build file: " ); + if ( f->f_root.len ) + out_printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr ); + if ( f->f_dir.len ) + out_printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr ); + if ( f->f_base.len ) + out_printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr ); + out_printf( "\n" ); + } + + /* Start with the grist. If the current grist is not surrounded by <>'s, add + * them. 
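+ *
+ * For example, a grist value of "foo" is emitted as "<foo>", while a value
+ * already written as "<foo>" is copied through unchanged.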
+ */ + if ( f->f_grist.len ) + { + if ( f->f_grist.ptr[ 0 ] != '<' ) + string_push_back( file, '<' ); + string_append_range( + file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len ); + if ( file->value[ file->size - 1 ] != '>' ) + string_push_back( file, '>' ); + } +} + + +/* + * file_dirscan() - scan a directory for files + */ + +void file_dirscan( OBJECT * dir, scanback func, void * closure ) +{ + PROFILE_ENTER( FILE_DIRSCAN ); + file_dirscan_impl( dir, func, closure ); + PROFILE_EXIT( FILE_DIRSCAN ); +} + + +/* + * file_done() - module cleanup called on shutdown + */ + +void file_done() +{ + remove_files_atexit(); + if ( filecache_hash ) + { + hashenumerate( filecache_hash, free_file_info, (void *)0 ); + hashdone( filecache_hash ); + } + + if ( archivecache_hash ) + { + hashenumerate( archivecache_hash, free_file_archive_info, (void *)0 ); + hashdone( archivecache_hash ); + } +} + + +/* + * file_info() - return cached information about a path + * + * Returns a default initialized structure containing only the path's normalized + * name in case this is the first time this file system entity has been + * referenced. + */ + +file_info_t * file_info( OBJECT * const path, int * found ) +{ + OBJECT * const path_key = path_as_key( path ); + file_info_t * finfo; + + if ( !filecache_hash ) + filecache_hash = hashinit( sizeof( file_info_t ), "file_info" ); + + finfo = (file_info_t *)hash_insert( filecache_hash, path_key, found ); + if ( !*found ) + { + finfo->name = path_key; + finfo->files = L0; + } + else + object_free( path_key ); + + return finfo; +} + + +/* + * file_is_file() - return whether a path identifies an existing file + */ + +int file_is_file( OBJECT * const path ) +{ + file_info_t const * const ff = file_query( path ); + return ff ? ff->is_file : -1; +} + + +/* + * file_time() - get a file timestamp + */ + +int file_time( OBJECT * const path, timestamp * const time ) +{ + file_info_t const * const ff = file_query( path ); + if ( !ff ) return -1; + timestamp_copy( time, &ff->time ); + return 0; +} + + +/* + * file_query() - get cached information about a path, query the OS if needed + * + * Returns 0 in case querying the OS about the given path fails, e.g. because + * the path does not reference an existing file system object. + */ + +file_info_t * file_query( OBJECT * const path ) +{ + /* FIXME: Add tracking for disappearing files (i.e. those that can not be + * detected by stat() even though they had been detected successfully + * before) and see how they should be handled in the rest of Boost Jam code. + * Possibly allow Jamfiles to specify some files as 'volatile' which would + * make Boost Jam avoid caching information about those files and instead + * ask the OS about them every time. + */ + int found; + file_info_t * const ff = file_info( path, &found ); + if ( !found ) + { + file_query_( ff ); + if ( ff->exists ) + { + /* Set the path's timestamp to 1 in case it is 0 or undetected to avoid + * confusion with non-existing paths. + */ + if ( timestamp_empty( &ff->time ) ) + timestamp_init( &ff->time, 1, 0 ); + } + } + if ( !ff->exists ) + { + return 0; + } + return ff; +} + +#ifndef OS_NT + +/* + * file_query_posix_() - query information about a path using POSIX stat() + * + * Fallback file_query_() implementation for OS specific modules. 
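+ * In this tree both fileunix.cpp and filevms.cpp simply forward their
+ * file_query_() to this routine.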
+ * + * Note that the Windows POSIX stat() function implementation suffers from + * several issues: + * * Does not support file timestamps with resolution finer than 1 second, + * meaning it can not be used to detect file timestamp changes of less than + * 1 second. One possible consequence is that some fast-paced touch commands + * (such as those done by Boost Build's internal testing system if it does + * not do some extra waiting) will not be detected correctly by the build + * system. + * * Returns file modification times automatically adjusted for daylight + * savings time even though daylight savings time should have nothing to do + * with internal time representation. + */ + +void file_query_posix_( file_info_t * const info ) +{ + struct stat statbuf; + char const * const pathstr = object_str( info->name ); + char const * const pathspec = *pathstr ? pathstr : "."; + + if ( stat( pathspec, &statbuf ) < 0 ) + { + info->is_file = 0; + info->is_dir = 0; + info->exists = 0; + timestamp_clear( &info->time ); + } + else + { + info->is_file = statbuf.st_mode & S_IFREG ? 1 : 0; + info->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0; + info->exists = 1; +#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809 +#if defined(OS_MACOSX) + timestamp_init( &info->time, statbuf.st_mtimespec.tv_sec, statbuf.st_mtimespec.tv_nsec ); +#else + timestamp_init( &info->time, statbuf.st_mtim.tv_sec, statbuf.st_mtim.tv_nsec ); +#endif +#else + timestamp_init( &info->time, statbuf.st_mtime, 0 ); +#endif + } +} + +/* + * file_supported_fmt_resolution() - file modification timestamp resolution + * + * Returns the minimum file modification timestamp resolution supported by this + * Boost Jam implementation. File modification timestamp changes of less than + * the returned value might not be recognized. + * + * Does not take into consideration any OS or file system related restrictions. + * + * Return value 0 indicates that any value supported by the OS is also supported + * here. + */ + +void file_supported_fmt_resolution( timestamp * const t ) +{ +#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809 + timestamp_init( t, 0, 1 ); +#else + /* The current implementation does not support file modification timestamp + * resolution of less than one second. + */ + timestamp_init( t, 1, 0 ); +#endif +} + +#endif + + +/* + * file_remove_atexit() - schedule a path to be removed on program exit + */ + +static LIST * files_to_remove = L0; + +void file_remove_atexit( OBJECT * const path ) +{ + files_to_remove = list_push_back( files_to_remove, object_copy( path ) ); +} + + +/* + * file_archivescan_impl() - no-profiling worker for file_archivescan() + */ + +static void file_archivescan_impl( OBJECT * path, archive_scanback func, void * closure ) +{ + file_archive_info_t * const archive = file_archive_query( path ); + if ( !archive || !archive->file->is_file ) + return; + + /* Lazy collect the archive content information. */ + if ( filelist_empty( archive->members ) ) + { + if ( DEBUG_BINDSCAN ) + printf( "scan archive %s\n", object_str( archive->file->name ) ); + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* OS specific part of the file_archivescan operation. */ + file_archivescan_( archive, func, closure ); + + /* Report the collected archive content. 
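+ * Each member is handed to the callback as "archive-path(member-name)",
+ * e.g. a member "bar.o" of "libfoo.a" is reported as "libfoo.a(bar.o)"
+ * (illustrative names only).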
*/ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + LIST * symbols = member_file->files; + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, symbols, 1, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_dirscan_impl() - no-profiling worker for file_dirscan() + */ + +static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure ) +{ + file_info_t * const d = file_query( dir ); + if ( !d || !d->is_dir ) + return; + + /* Lazy collect the directory content information. */ + if ( list_empty( d->files ) ) + { + if ( DEBUG_BINDSCAN ) + out_printf( "scan directory %s\n", object_str( d->name ) ); + if ( file_collect_dir_content_( d ) < 0 ) + return; + } + + /* OS specific part of the file_dirscan operation. */ + file_dirscan_( d, func, closure ); + + /* Report the collected directory content. */ + { + LISTITER iter = list_begin( d->files ); + LISTITER const end = list_end( d->files ); + for ( ; iter != end; iter = list_next( iter ) ) + { + OBJECT * const path = list_item( iter ); + file_info_t const * const ffq = file_query( path ); + /* Using a file name read from a file_info_t structure allows OS + * specific implementations to store some kind of a normalized file + * name there. Using such a normalized file name then allows us to + * correctly recognize different file paths actually identifying the + * same file. For instance, an implementation may: + * - convert all file names internally to lower case on a case + * insensitive file system + * - convert the NTFS paths to their long path variants as that + * file system each file system entity may have a long and a + * short path variant thus allowing for many different path + * strings identifying the same file. 
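+ * For instance (illustrative paths only), "C:\PROGRA~1\x.txt" and
+ * "C:\Program Files\x.txt" may name the same file on NTFS; normalizing
+ * to the long form lets the cache treat them as a single entry.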
+ */ + (*func)( closure, ffq->name, 1 /* stat()'ed */, &ffq->time ); + } + } +} + + +static void free_file_archive_info( void * xarchive, void * data ) +{ + file_archive_info_t * const archive = (file_archive_info_t *)xarchive; + + if ( archive ) filelist_free( archive->members ); +} + + +static void free_file_info( void * xfile, void * data ) +{ + file_info_t * const file = (file_info_t *)xfile; + object_free( file->name ); + list_free( file->files ); +} + + +static void remove_files_atexit( void ) +{ + LISTITER iter = list_begin( files_to_remove ); + LISTITER const end = list_end( files_to_remove ); + for ( ; iter != end; iter = list_next( iter ) ) + remove( object_str( list_item( iter ) ) ); + list_free( files_to_remove ); + files_to_remove = L0; +} + + +/* + * FILELIST linked-list implementation + */ + +FILELIST * filelist_new( OBJECT * path ) +{ + FILELIST * list = b2::jam::make_ptr(); + + memset( list, 0, sizeof( *list ) ); + list->size = 0; + list->head = 0; + list->tail = 0; + + return filelist_push_back( list, path ); +} + +FILELIST * filelist_push_back( FILELIST * list, OBJECT * path ) +{ + FILEITEM * item; + file_info_t * file; + + /* Lazy initialization + */ + if ( filelist_empty( list ) ) + { + list = filelist_new( path ); + return list; + } + + + item = b2::jam::make_ptr(); + item->value = b2::jam::make_ptr(); + + file = item->value; + file->name = path; + file->files = L0; + + if ( list->tail ) + { + list->tail->next = item; + } + else + { + list->head = item; + } + list->tail = item; + list->size++; + + return list; +} + +FILELIST * filelist_push_front( FILELIST * list, OBJECT * path ) +{ + FILEITEM * item; + file_info_t * file; + + /* Lazy initialization + */ + if ( filelist_empty( list ) ) + { + list = filelist_new( path ); + return list; + } + + + item = b2::jam::make_ptr(); + memset( item, 0, sizeof( *item ) ); + item->value = b2::jam::make_ptr(); + + file = item->value; + memset( file, 0, sizeof( *file ) ); + + file->name = path; + file->files = L0; + + if ( list->head ) + { + item->next = list->head; + } + else + { + list->tail = item; + } + list->head = item; + list->size++; + + return list; +} + + +FILELIST * filelist_pop_front( FILELIST * list ) +{ + FILEITEM * item; + + if ( filelist_empty( list ) ) return list; + + item = list->head; + + if ( item ) + { + if ( item->value ) + { + free_file_info( item->value, 0 ); + b2::jam::free_ptr( item->value ); + } + + list->head = item->next; + list->size--; + if ( !list->size ) list->tail = list->head; + + b2::jam::free_ptr( item ); + } + + return list; +} + +int filelist_length( FILELIST * list ) +{ + int result = 0; + if ( !filelist_empty( list ) ) result = list->size; + + return result; +} + +void filelist_free( FILELIST * list ) +{ + if ( filelist_empty( list ) ) return; + + while ( filelist_length( list ) ) filelist_pop_front( list ); + + b2::jam::free_ptr( list ); +} + +int filelist_empty( FILELIST * list ) +{ + return ( list == FL0 ); +} + + +FILELISTITER filelist_begin( FILELIST * list ) +{ + if ( filelist_empty( list ) + || list->head == 0 ) return (FILELISTITER)0; + + return &list->head->value; +} + + +FILELISTITER filelist_end( FILELIST * list ) +{ + return (FILELISTITER)0; +} + + +FILELISTITER filelist_next( FILELISTITER iter ) +{ + if ( iter ) + { + /* Given FILEITEM.value is defined as first member of FILEITEM structure + * and FILELISTITER = &FILEITEM.value, + * FILEITEM = *(FILEITEM **)FILELISTITER + */ + FILEITEM * item = (FILEITEM *)iter; + iter = ( item->next ? 
&item->next->value : (FILELISTITER)0 ); + } + + return iter; +} + + +file_info_t * filelist_item( FILELISTITER it ) +{ + file_info_t * result = (file_info_t *)0; + + if ( it ) + { + result = (file_info_t *)*it; + } + + return result; +} + + +file_info_t * filelist_front( FILELIST * list ) +{ + if ( filelist_empty( list ) + || list->head == 0 ) return (file_info_t *)0; + + return list->head->value; +} + + +file_info_t * filelist_back( FILELIST * list ) +{ + if ( filelist_empty( list ) + || list->tail == 0 ) return (file_info_t *)0; + + return list->tail->value; +} diff --git a/src/boost/tools/build/src/engine/filesys.h b/src/boost/tools/build/src/engine/filesys.h new file mode 100644 index 000000000..ee49f28c6 --- /dev/null +++ b/src/boost/tools/build/src/engine/filesys.h @@ -0,0 +1,125 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * filesys.h - OS specific file routines + */ + +#ifndef FILESYS_DWA20011025_H +#define FILESYS_DWA20011025_H + +#include "config.h" +#include "hash.h" +#include "lists.h" +#include "object.h" +#include "pathsys.h" +#include "timestamp.h" + +#include + + +typedef struct file_info_t +{ + OBJECT * name; + char is_file; + char is_dir; + char exists; + timestamp time; + LIST * files; +} file_info_t; + +typedef struct file_item FILEITEM; +struct file_item +{ + file_info_t * value; /* expected to be equivalent with &FILEITEM */ + FILEITEM * next; +}; + +typedef struct file_list +{ + FILEITEM * head; + FILEITEM * tail; + int size; +} FILELIST; + +typedef file_info_t * * FILELISTITER; /* also &FILEITEM equivalent */ + + +typedef struct file_archive_info_t +{ + OBJECT * name; + file_info_t * file; + FILELIST * members; +} file_archive_info_t; + + +typedef void (*archive_scanback)( void * closure, OBJECT * path, LIST * symbols, + int found, timestamp const * const ); +typedef void (*scanback)( void * closure, OBJECT * path, int found, + timestamp const * const ); + + +void file_archscan( char const * arch, scanback func, void * closure ); +void file_archivescan( OBJECT * path, archive_scanback func, void * closure ); +void file_build1( PATHNAME * const f, string * file ) ; +void file_dirscan( OBJECT * dir, scanback func, void * closure ); +file_info_t * file_info( OBJECT * const path, int * found ); +int file_is_file( OBJECT * const path ); +int file_mkdir( char const * const path ); +file_info_t * file_query( OBJECT * const path ); +void file_remove_atexit( OBJECT * const path ); +void file_supported_fmt_resolution( timestamp * const ); +int file_time( OBJECT * const path, timestamp * const ); + +namespace b2 { namespace filesys { + + inline bool is_file(const std::string &path) + { + OBJECT * path_o = object_new(path.c_str()); + bool result = file_is_file(path_o) == 1; + object_free(path_o); + return result; + } + +}} + + +/* Archive/library file support */ +file_archive_info_t * file_archive_info( OBJECT * const path, int * found ); +file_archive_info_t * file_archive_query( OBJECT * const path ); + +/* FILELIST linked-list */ +FILELIST * filelist_new( OBJECT * path ); +FILELIST * filelist_push_back( FILELIST * list, OBJECT * path ); +FILELIST * filelist_push_front( FILELIST * list, OBJECT * path ); +FILELIST * filelist_pop_front( FILELIST * 
list ); +int filelist_length( FILELIST * list ); +void filelist_free( FILELIST * list ); + +FILELISTITER filelist_begin( FILELIST * list ); +FILELISTITER filelist_end( FILELIST * list ); +FILELISTITER filelist_next( FILELISTITER it ); +file_info_t * filelist_item( FILELISTITER it ); +file_info_t * filelist_front( FILELIST * list ); +file_info_t * filelist_back( FILELIST * list ); + +int filelist_empty( FILELIST * list ); + +#define FL0 ((FILELIST *)0) + + +/* Internal utility worker functions. */ +void file_query_posix_( file_info_t * const ); + +void file_done(); + +#endif diff --git a/src/boost/tools/build/src/engine/fileunix.cpp b/src/boost/tools/build/src/engine/fileunix.cpp new file mode 100644 index 000000000..4982c9bb8 --- /dev/null +++ b/src/boost/tools/build/src/engine/fileunix.cpp @@ -0,0 +1,533 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS + * + * External routines: + * file_archscan() - scan an archive for files + * file_mkdir() - create a directory + * file_supported_fmt_resolution() - file modification timestamp resolution + * + * External routines called only via routines in filesys.c: + * file_collect_dir_content_() - collects directory content information + * file_dirscan_() - OS specific file_dirscan() implementation + * file_query_() - query information about a path from the OS + * file_collect_archive_content_() - collects information about archive members + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +#include "jam.h" +#ifdef USE_FILEUNIX +#include "filesys.h" + +#include "object.h" +#include "pathsys.h" +#include "jam_strings.h" +#include "output.h" + +#include +#include +#include +#include /* needed for mkdir() */ + +#if defined( sun ) || defined( __sun ) || defined( linux ) +# include /* needed for read and close prototype */ +#endif + +#if defined( OS_SEQUENT ) || \ + defined( OS_DGUX ) || \ + defined( OS_SCO ) || \ + defined( OS_ISC ) +# define PORTAR 1 +#endif + +#if defined( OS_RHAPSODY ) || defined( OS_MACOSX ) || defined( OS_NEXT ) +# include +# include /* need unistd for rhapsody's proper lseek */ +# define STRUCT_DIRENT struct direct +#else +# include +# define STRUCT_DIRENT struct dirent +#endif + +#ifdef OS_COHERENT +# include +# define HAVE_AR +#endif + +#if defined( OS_MVS ) || defined( OS_INTERIX ) +#define ARMAG "!\n" +#define SARMAG 8 +#define ARFMAG "`\n" +#define HAVE_AR + +struct ar_hdr /* archive file member header - printable ascii */ +{ + char ar_name[ 16 ]; /* file member name - `/' terminated */ + char ar_date[ 12 ]; /* file member date - decimal */ + char ar_uid[ 6 ]; /* file member user id - decimal */ + char ar_gid[ 6 ]; /* file member group id - decimal */ + char ar_mode[ 8 ]; /* file member mode - octal */ + char ar_size[ 10 ]; /* file member size - decimal */ + char ar_fmag[ 2 ]; /* ARFMAG - string to end header */ +}; +#endif + +#if defined( OS_QNX ) || \ + defined( OS_BEOS ) || \ + defined( OS_HAIKU ) || \ + defined( OS_MPEIX ) +# define NO_AR +# define HAVE_AR +#endif + +#ifndef HAVE_AR +# ifdef OS_AIX +/* Define these for AIX to get the definitions for both small and big 
archive + * file format variants. + */ +# define __AR_SMALL__ +# define __AR_BIG__ +# endif +# include +#endif + + +/* + * file_collect_dir_content_() - collects directory content information + */ + +int file_collect_dir_content_( file_info_t * const d ) +{ + LIST * files = L0; + PATHNAME f; + int n; + STRUCT_DIRENT ** namelist; + STRUCT_DIRENT * dirent; + string path[ 1 ]; + char const * dirstr; + + assert( d ); + assert( d->is_dir ); + assert( list_empty( d->files ) ); + + dirstr = object_str( d->name ); + + memset( (char *)&f, '\0', sizeof( f ) ); + f.f_dir.ptr = dirstr; + f.f_dir.len = strlen( dirstr ); + + if ( !*dirstr ) dirstr = "."; + + if ( -1 == ( n = scandir( dirstr, &namelist, NULL, alphasort ) ) ) + { + if (n != ENOENT && n != ENOTDIR) + err_printf( "[errno %d] scandir '%s' failed: %s\n", + errno, dirstr, strerror(errno) ); + return -1; + } + + string_new( path ); + while ( n-- ) + { + OBJECT * name; + dirent = namelist[ n ]; + f.f_base.ptr = dirent->d_name + #ifdef old_sinix + - 2 /* Broken structure definition on sinix. */ + #endif + ; + f.f_base.len = strlen( f.f_base.ptr ); + + string_truncate( path, 0 ); + path_build( &f, path ); + name = object_new( path->value ); + /* Immediately stat the file to preserve invariants. */ + if ( file_query( name ) ) + files = list_push_back( files, name ); + else + object_free( name ); + free( dirent ); + } + string_free( path ); + + free( namelist ); + + d->files = files; + return 0; +} + + +/* + * file_dirscan_() - OS specific file_dirscan() implementation + */ + +void file_dirscan_( file_info_t * const d, scanback func, void * closure ) +{ + assert( d ); + assert( d->is_dir ); + + /* Special case / : enter it */ + if ( !strcmp( object_str( d->name ), "/" ) ) + (*func)( closure, d->name, 1 /* stat()'ed */, &d->time ); +} + + +/* + * file_mkdir() - create a directory + */ + +int file_mkdir( char const * const path ) +{ + /* Explicit cast to remove const modifiers and avoid related compiler + * warnings displayed when using the intel compiler. + */ + return mkdir( (char *)path, 0777 ); +} + + +/* + * file_query_() - query information about a path from the OS + */ + +void file_query_( file_info_t * const info ) +{ + file_query_posix_( info ); +} + + +int file_collect_archive_content_( file_archive_info_t * const archive ); + +/* + * file_archscan() - scan an archive for files + */ +void file_archscan( char const * arch, scanback func, void * closure ) +{ + OBJECT * path = object_new( arch ); + file_archive_info_t * archive = file_archive_query( path ); + + object_free( path ); + + if ( filelist_empty( archive->members ) ) + { + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* Report the collected archive content. 
*/ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, 1 /* time valid */, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ) +{ +} + + +/* + * file_collect_archive_content_() - collects information about archive members + */ + +#ifndef AIAMAG /* God-fearing UNIX */ + +#define SARFMAG 2 +#define SARHDR sizeof( struct ar_hdr ) + +int file_collect_archive_content_( file_archive_info_t * const archive ) +{ +#ifndef NO_AR + struct ar_hdr ar_hdr; + char * string_table = 0; + char buf[ MAXJPATH ]; + long offset; + int fd; + const char * path = object_str( archive->file->name ); + + if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members ); + + if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 ) + return -1; + + if ( read( fd, buf, SARMAG ) != SARMAG || + strncmp( ARMAG, buf, SARMAG ) ) + { + close( fd ); + return -1; + } + + offset = SARMAG; + + if ( DEBUG_BINDSCAN ) + out_printf( "scan archive %s\n", path ); + + while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) && + !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) +#ifdef ARFZMAG + /* OSF also has a compressed format */ + && memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG ) +#endif + ) ) + { + char lar_name_[ 257 ]; + char * lar_name = lar_name_ + 1; + long lar_date; + long lar_size; + long lar_offset; + char * c; + char * src; + char * dest; + + int32_t ar_hdr_name_size = sizeof( ar_hdr.ar_name ); // Workaround for sizeof strncpy warning. + strncpy( lar_name, ar_hdr.ar_name, ar_hdr_name_size ); + + sscanf( ar_hdr.ar_date, "%ld", &lar_date ); + sscanf( ar_hdr.ar_size, "%ld", &lar_size ); + + if ( ar_hdr.ar_name[ 0 ] == '/' ) + { + if ( ar_hdr.ar_name[ 1 ] == '/' ) + { + /* This is the "string table" entry of the symbol table, holding + * filename strings longer than 15 characters, i.e. those that + * do not fit into ar_name. + */ + string_table = (char *)BJAM_MALLOC_ATOMIC( lar_size ); + lseek( fd, offset + SARHDR, 0 ); + if ( read( fd, string_table, lar_size ) != lar_size ) + out_printf("error reading string table\n"); + } + else if ( string_table && ar_hdr.ar_name[ 1 ] != ' ' ) + { + /* Long filenames are recognized by "/nnnn" where nnnn is the + * offset of the string in the string table represented in ASCII + * decimals. 
+ */ + dest = lar_name; + lar_offset = atoi( lar_name + 1 ); + src = &string_table[ lar_offset ]; + while ( *src != '/' ) + *dest++ = *src++; + *dest = '/'; + } + } + + c = lar_name - 1; + while ( ( *++c != ' ' ) && ( *c != '/' ) ); + *c = '\0'; + + if ( DEBUG_BINDSCAN ) + out_printf( "archive name %s found\n", lar_name ); + + sprintf( buf, "%s", lar_name ); + + if ( strcmp( buf, "") != 0 ) + { + file_info_t * member = 0; + + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); + } + + offset += SARHDR + ( ( lar_size + 1 ) & ~1 ); + lseek( fd, offset, 0 ); + } + + if ( string_table ) + BJAM_FREE( string_table ); + + close( fd ); +#endif /* NO_AR */ + + return 0; +} + +#else /* AIAMAG - RS6000 AIX */ + +static void collect_archive_content_small( int fd, file_archive_info_t * const archive ) +{ + struct fl_hdr fl_hdr; + + struct { + struct ar_hdr hdr; + char pad[ 256 ]; + } ar_hdr ; + + char buf[ MAXJPATH ]; + long offset; + const char * path = object_str( archive->file->name ); + + if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ ) + return; + + sscanf( fl_hdr.fl_fstmoff, "%ld", &offset ); + + if ( DEBUG_BINDSCAN ) + out_printf( "scan archive %s\n", path ); + + while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 && + read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= (int)sizeof( ar_hdr.hdr ) ) + { + long lar_date; + int lar_namlen; + + sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen ); + sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date ); + sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset ); + + if ( !lar_namlen ) + continue; + + ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0'; + + sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name ); + + if ( strcmp( buf, "") != 0 ) + { + file_info_t * member = 0; + + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); + } + } +} + +/* Check for OS versions supporting the big variant. 
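+ * The reader below assumes that whenever AR_HSZ_BIG is defined the system
+ * archive header also provides fl_hdr_big, ar_hdr_big and AIAMAGBIG; when
+ * it is not defined, only the small-format reader above gets compiled.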
*/ +#ifdef AR_HSZ_BIG + +static void collect_archive_content_big( int fd, file_archive_info_t * const archive ) +{ + struct fl_hdr_big fl_hdr; + + struct { + struct ar_hdr_big hdr; + char pad[ 256 ]; + } ar_hdr ; + + char buf[ MAXJPATH ]; + long long offset; + const char * path = object_str( archive->file->name ); + + if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG ) != FL_HSZ_BIG ) + return; + + sscanf( fl_hdr.fl_fstmoff, "%lld", &offset ); + + if ( DEBUG_BINDSCAN ) + out_printf( "scan archive %s\n", path ); + + while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 && + read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) + { + long lar_date; + int lar_namlen; + + sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen ); + sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date ); + sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset ); + + if ( !lar_namlen ) + continue; + + ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0'; + + sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name ); + + if ( strcmp( buf, "") != 0 ) + { + file_info_t * member = 0; + + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); + } + } +} + +#endif /* AR_HSZ_BIG */ + +int file_collect_archive_content_( file_archive_info_t * const archive ) +{ + int fd; + char fl_magic[ SAIAMAG ]; + const char * path = object_str( archive->file->name ); + + if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members ); + + if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 ) + return -1; + + if ( read( fd, fl_magic, SAIAMAG ) != SAIAMAG || + lseek( fd, 0, SEEK_SET ) == -1 ) + { + close( fd ); + return -1; + } + + if ( !strncmp( AIAMAG, fl_magic, SAIAMAG ) ) + { + /* read small variant */ + collect_archive_content_small( fd, archive ); + } +#ifdef AR_HSZ_BIG + else if ( !strncmp( AIAMAGBIG, fl_magic, SAIAMAG ) ) + { + /* read big variant */ + collect_archive_content_big( fd, archive ); + } +#endif + + close( fd ); + + return 0; +} + +#endif /* AIAMAG - RS6000 AIX */ + +#endif /* USE_FILEUNIX */ diff --git a/src/boost/tools/build/src/engine/filevms.cpp b/src/boost/tools/build/src/engine/filevms.cpp new file mode 100644 index 000000000..6dc84ae63 --- /dev/null +++ b/src/boost/tools/build/src/engine/filevms.cpp @@ -0,0 +1,440 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Copyright 2015 Artur Shepilko. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + + +#include "jam.h" +#include "filesys.h" + +#include "object.h" +#include "pathsys.h" +#include "jam_strings.h" + + +#ifdef OS_VMS + +/* + * filevms.c - manipulate file names and scan directories on VMS. + * + * This implementation is based on POSIX-style path manipulation. + * + * VMS CTRL directly supports both POSIX- and native VMS-style path expressions, + * with the POSIX-to-VMS path translation performed internally by the same + * set of functions. For the most part such processing is transparent, with + * few differences mainly related to file versions (in POSIX mode only the recent + * version is visible). 
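+ * For illustration (hypothetical names), a POSIX-style reference such as
+ * "/disk1/dir/file.ext" corresponds to the native VMS spelling
+ * "DISK1:[DIR]FILE.EXT"; the C run-time accepts either form here.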
+ * + * This should allow us to re-use fileunix.c implementation, + * excluding archive/library member processing. + * + * Thus in jam-files the path references can also remain POSIX/UNIX-style on all + * levels EXCEPT in actions scope, where these must be translated to the native + * VMS-style. This approach is somewhat similar to jam CYGWIN handling. + * + * + * External routines: + * file_archscan() - scan an archive for files + * file_mkdir() - create a directory + * file_supported_fmt_resolution() - file modification timestamp resolution + * + * External routines called only via routines in filesys.c: + * file_collect_dir_content_() - collects directory content information + * file_dirscan_() - OS specific file_dirscan() implementation + * file_query_() - query information about a path from the OS + * file_collect_archive_content_() - collects information about archive members + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +#include +#include + +#include /* needed for mkdir() */ +#include /* needed for read and close prototype */ + +#include +#define STRUCT_DIRENT struct dirent + + +void path_translate_to_os_( char const * f, string * file ); + +/* + * file_collect_dir_content_() - collects directory content information + */ + +int file_collect_dir_content_( file_info_t * const d ) +{ + LIST * files = L0; + PATHNAME f; + DIR * dd; + STRUCT_DIRENT * dirent; + string path[ 1 ]; + char const * dirstr; + + assert( d ); + assert( d->is_dir ); + assert( list_empty( d->files ) ); + + dirstr = object_str( d->name ); + + memset( (char *)&f, '\0', sizeof( f ) ); + f.f_dir.ptr = dirstr; + f.f_dir.len = strlen( dirstr ); + + if ( !*dirstr ) dirstr = "."; + + if ( !( dd = opendir( dirstr ) ) ) + return -1; + + string_new( path ); + while ( ( dirent = readdir( dd ) ) ) + { + OBJECT * name; + f.f_base.ptr = dirent->d_name + #ifdef old_sinix + - 2 /* Broken structure definition on sinix. */ + #endif + ; + f.f_base.len = strlen( f.f_base.ptr ); + + string_truncate( path, 0 ); + path_build( &f, path ); + name = object_new( path->value ); + /* Immediately stat the file to preserve invariants. */ + if ( file_query( name ) ) + files = list_push_back( files, name ); + else + object_free( name ); + } + string_free( path ); + + closedir( dd ); + + d->files = files; + return 0; +} + + +/* + * file_dirscan_() - OS specific file_dirscan() implementation + */ + +void file_dirscan_( file_info_t * const d, scanback func, void * closure ) +{ + assert( d ); + assert( d->is_dir ); + + /* Special case / : enter it */ + if ( !strcmp( object_str( d->name ), "/" ) ) + (*func)( closure, d->name, 1 /* stat()'ed */, &d->time ); +} + + +/* + * file_mkdir() - create a directory + */ + +int file_mkdir( char const * const path ) +{ + /* Explicit cast to remove const modifiers and avoid related compiler + * warnings displayed when using the intel compiler. 
+ */ + return mkdir( (char *)path, 0777 ); +} + + +/* + * file_query_() - query information about a path from the OS + */ + +void file_query_( file_info_t * const info ) +{ + file_query_posix_( info ); +} + + +/*------------------------------------------------------------------------------ +* VMS-specific processing: +* +*/ + +#include +#include +#include +#include +#include +#include +#include + +/* Supply missing prototypes for lbr$-routines*/ + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + +int lbr$set_module( + void **, + unsigned long *, + struct dsc$descriptor_s *, + unsigned short *, + void * ); + +int lbr$open( void **, + struct dsc$descriptor_s *, + void *, + void *, + void *, + void *, + void * ); + +int lbr$ini_control( + void **, + unsigned long *, + unsigned long *, + void * ); + +int lbr$get_index( + void **, + unsigned long * const, + int (*func)( struct dsc$descriptor_s *, unsigned long *), + void * ); + +int lbr$search( + void **, + unsigned long * const, + unsigned short *, + int (*func)( struct dsc$descriptor_s *, unsigned long *), + unsigned long *); + +int lbr$close( + void ** ); + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + + + +static void +file_cvttime( + unsigned int *curtime, + time_t *unixtime ) +{ + static const int32_t divisor = 10000000; + static unsigned int bastim[2] = { 0x4BEB4000, 0x007C9567 }; /* 1/1/1970 */ + int delta[2], remainder; + + lib$subx( curtime, bastim, delta ); + lib$ediv( &divisor, delta, unixtime, &remainder ); +} + + +static void downcase_inplace( char * p ) +{ + for ( ; *p; ++p ) + *p = tolower( *p ); +} + + +static file_archive_info_t * m_archive = NULL; +static file_info_t * m_member_found = NULL; +static void * m_lbr_context = NULL; +static unsigned short * m_rfa_found = NULL; +static const unsigned long LBR_MODINDEX_NUM = 1, + LBR_SYMINDEX_NUM = 2; /* GST:global symbol table */ + + +static unsigned int set_archive_symbol( struct dsc$descriptor_s *symbol, + unsigned long *rfa ) +{ + file_info_t * member = m_member_found; + char buf[ MAXJPATH ] = { 0 }; + + strncpy(buf, symbol->dsc$a_pointer, symbol->dsc$w_length); + buf[ symbol->dsc$w_length ] = 0; + + member->files = list_push_back( member->files, object_new( buf ) ); + + return ( 1 ); /* continue */ +} + + +static unsigned int set_archive_member( struct dsc$descriptor_s *module, + unsigned long *rfa ) +{ + file_archive_info_t * archive = m_archive; + + static struct dsc$descriptor_s bufdsc = + {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL}; + + struct mhddef *mhd; + char filename[128] = { 0 }; + char buf[ MAXJPATH ] = { 0 }; + + int status; + time_t library_date; + + register int i; + register char *p; + + bufdsc.dsc$a_pointer = filename; + bufdsc.dsc$w_length = sizeof( filename ); + status = lbr$set_module( &m_lbr_context, rfa, &bufdsc, + &bufdsc.dsc$w_length, NULL ); + + if ( !(status & 1) ) + return ( 1 ); /* continue */ + + mhd = (struct mhddef *)filename; + + file_cvttime( &mhd->mhd$l_datim, &library_date ); + + /* strncpy( filename, module->dsc$a_pointer, module->dsc$w_length ); + */ + for ( i = 0, p = module->dsc$a_pointer; i < module->dsc$w_length; ++i, ++p ) + filename[ i ] = *p; + + filename[ i ] = '\0'; + + if ( strcmp( filename, "" ) != 0 ) + { + file_info_t * member = 0; + + /* Construct member's filename as lowercase "module.obj" */ + sprintf( buf, "%s.obj", filename ); + downcase_inplace( buf ); + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + + member = filelist_back( archive->members ); + member->is_file = 1; + 
member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)library_date, 0 ); + + m_member_found = member; + m_rfa_found = rfa; + status = lbr$search(&m_lbr_context, &LBR_SYMINDEX_NUM, m_rfa_found, set_archive_symbol, NULL); + } + + return ( 1 ); /* continue */ +} + + + +void file_archscan( char const * arch, scanback func, void * closure ) +{ + OBJECT * path = object_new( arch ); + file_archive_info_t * archive = file_archive_query( path ); + + object_free( path ); + + if ( filelist_empty( archive->members ) ) + { + if ( DEBUG_BINDSCAN ) + out_printf( "scan archive %s\n", object_str( archive->file->name ) ); + + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* Report the collected archive content. */ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + LIST * symbols = member_file->files; + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, 1 /* time valid */, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_archivescan_() - OS specific file_archivescan() implementation + */ +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ) +{ +} + + +/* + * file_collect_archive_content_() - collects information about archive members + */ + +int file_collect_archive_content_( file_archive_info_t * const archive ) +{ + unsigned short rfa[3]; + + static struct dsc$descriptor_s library = + {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL}; + + unsigned long lfunc = LBR$C_READ; + unsigned long typ = LBR$C_TYP_UNK; + + register int status; + string buf[ 1 ]; + char vmspath[ MAXJPATH ] = { 0 }; + + m_archive = archive; + + if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members ); + + /* Translate path to VMS + */ + string_new( buf ); + path_translate_to_os_( object_str( archive->file->name ), buf ); + strcpy( vmspath, buf->value ); + string_free( buf ); + + + status = lbr$ini_control( &m_lbr_context, &lfunc, &typ, NULL ); + if ( !( status & 1 ) ) + return -1; + + library.dsc$a_pointer = vmspath; + library.dsc$w_length = strlen( vmspath ); + + status = lbr$open( &m_lbr_context, &library, NULL, NULL, NULL, NULL, NULL ); + if ( !( status & 1 ) ) + return -1; + + /* Scan main index for modules. + * For each module search symbol-index to collect module's symbols. + */ + status = lbr$get_index( &m_lbr_context, &LBR_MODINDEX_NUM, set_archive_member, NULL ); + + if ( !( status & 1 ) ) + return -1; + + + (void) lbr$close( &m_lbr_context ); + + return 0; +} + +#endif /* OS_VMS */ + diff --git a/src/boost/tools/build/src/engine/frames.cpp b/src/boost/tools/build/src/engine/frames.cpp new file mode 100644 index 000000000..b64d54fa7 --- /dev/null +++ b/src/boost/tools/build/src/engine/frames.cpp @@ -0,0 +1,29 @@ +/* + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "jam.h" +#include "frames.h" + + +FRAME * frame_before_python_call; + + +void frame_init( FRAME * frame ) +{ + frame->prev = 0; + frame->prev_user = 0; + lol_init( frame->args ); + frame->module = root_module(); + frame->rulename = "module scope"; + frame->file = 0; + frame->line = -1; +} + + +void frame_free( FRAME * frame ) +{ + lol_free( frame->args ); +} diff --git a/src/boost/tools/build/src/engine/frames.h b/src/boost/tools/build/src/engine/frames.h new file mode 100644 index 000000000..2a3a22837 --- /dev/null +++ b/src/boost/tools/build/src/engine/frames.h @@ -0,0 +1,46 @@ +/* + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef FRAMES_DWA20011021_H +#define FRAMES_DWA20011021_H + +#include "config.h" +#include "lists.h" +#include "modules.h" +#include "object.h" + + +typedef struct frame FRAME; + +struct frame +{ + FRAME * prev; + FRAME * prev_user; /* The nearest enclosing frame for which + module->user_module is true. */ + LOL args[ 1 ]; + module_t * module; + OBJECT * file; + int line; + char const * rulename; +#ifdef JAM_DEBUGGER + void * function; +#endif +}; + + +/* When a call into Python is in progress, this variable points to the bjam + * frame that was current at the moment of the call. When the call completes, + * the variable is not defined. Furthermore, if Jam calls Python which calls Jam + * and so on, this variable only keeps the most recent Jam frame. + */ +extern FRAME * frame_before_python_call; + + +void frame_init( FRAME * ); +void frame_free( FRAME * ); + +#endif diff --git a/src/boost/tools/build/src/engine/function.cpp b/src/boost/tools/build/src/engine/function.cpp new file mode 100644 index 000000000..2db31cfae --- /dev/null +++ b/src/boost/tools/build/src/engine/function.cpp @@ -0,0 +1,5560 @@ +/* + * Copyright 2011 Steven Watanabe + * Copyright 2016 Rene Rivera + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "jam.h" +#include "function.h" + +#include "class.h" +#include "compile.h" +#include "constants.h" +#include "debugger.h" +#include "execcmd.h" +#include "filesys.h" +#include "frames.h" +#include "lists.h" +#include "mem.h" +#include "pathsys.h" +#include "rules.h" +#include "search.h" +#include "variable.h" +#include "output.h" +#include "startup.h" + +#include +#include +#include +#include +#include + +#include + +/* +#define FUNCTION_DEBUG_PROFILE +*/ + +#ifndef FUNCTION_DEBUG_PROFILE +#undef PROFILE_ENTER_LOCAL +#define PROFILE_ENTER_LOCAL(x) while (false) +#undef PROFILE_EXIT_LOCAL +#define PROFILE_EXIT_LOCAL(x) +#endif + +int32_t glob( char const * s, char const * c ); +void backtrace( FRAME * ); +void backtrace_line( FRAME * ); + +#define INSTR_PUSH_EMPTY 0 +#define INSTR_PUSH_CONSTANT 1 +#define INSTR_PUSH_ARG 2 +#define INSTR_PUSH_VAR 3 +#define INSTR_PUSH_VAR_FIXED 57 +#define INSTR_PUSH_GROUP 4 +#define INSTR_PUSH_RESULT 5 +#define INSTR_PUSH_APPEND 6 +#define INSTR_SWAP 7 + +#define INSTR_JUMP_EMPTY 8 +#define INSTR_JUMP_NOT_EMPTY 9 + +#define INSTR_JUMP 10 +#define INSTR_JUMP_LT 11 +#define INSTR_JUMP_LE 12 +#define INSTR_JUMP_GT 13 +#define INSTR_JUMP_GE 14 +#define INSTR_JUMP_EQ 15 +#define INSTR_JUMP_NE 16 +#define INSTR_JUMP_IN 17 +#define INSTR_JUMP_NOT_IN 18 + +#define INSTR_JUMP_NOT_GLOB 19 + +#define INSTR_FOR_INIT 56 +#define INSTR_FOR_LOOP 20 + +#define INSTR_SET_RESULT 21 +#define INSTR_RETURN 22 +#define INSTR_POP 23 + +#define INSTR_PUSH_LOCAL 24 +#define INSTR_POP_LOCAL 25 +#define INSTR_SET 26 +#define INSTR_APPEND 27 +#define INSTR_DEFAULT 28 + +#define INSTR_PUSH_LOCAL_FIXED 58 +#define INSTR_POP_LOCAL_FIXED 59 +#define INSTR_SET_FIXED 60 +#define INSTR_APPEND_FIXED 61 +#define INSTR_DEFAULT_FIXED 62 + +#define INSTR_PUSH_LOCAL_GROUP 29 +#define INSTR_POP_LOCAL_GROUP 30 +#define INSTR_SET_GROUP 31 +#define INSTR_APPEND_GROUP 32 +#define INSTR_DEFAULT_GROUP 33 + +#define INSTR_PUSH_ON 34 +#define INSTR_POP_ON 35 +#define INSTR_SET_ON 36 +#define INSTR_APPEND_ON 37 +#define INSTR_DEFAULT_ON 38 +#define INSTR_GET_ON 65 + +#define INSTR_CALL_RULE 39 +#define INSTR_CALL_MEMBER_RULE 66 + +#define INSTR_APPLY_MODIFIERS 40 +#define INSTR_APPLY_INDEX 41 +#define INSTR_APPLY_INDEX_MODIFIERS 42 +#define INSTR_APPLY_MODIFIERS_GROUP 43 +#define INSTR_APPLY_INDEX_GROUP 44 +#define INSTR_APPLY_INDEX_MODIFIERS_GROUP 45 +#define INSTR_COMBINE_STRINGS 46 +#define INSTR_GET_GRIST 64 + +#define INSTR_INCLUDE 47 +#define INSTR_RULE 48 +#define INSTR_ACTIONS 49 +#define INSTR_PUSH_MODULE 50 +#define INSTR_POP_MODULE 51 +#define INSTR_CLASS 52 +#define INSTR_BIND_MODULE_VARIABLES 63 + +#define INSTR_APPEND_STRINGS 53 +#define INSTR_WRITE_FILE 54 +#define INSTR_OUTPUT_STRINGS 55 + +#define INSTR_DEBUG_LINE 67 +#define INSTR_FOR_POP 70 + +typedef struct instruction +{ + uint32_t op_code; + int32_t arg; +} instruction; + +typedef struct _subfunction +{ + OBJECT * name; + FUNCTION * code; + int32_t local; +} SUBFUNCTION; + +typedef struct _subaction +{ + OBJECT * name; + FUNCTION * command; + int32_t flags; +} SUBACTION; + +#define FUNCTION_BUILTIN 0 +#define FUNCTION_JAM 1 + +struct argument +{ + int32_t flags; +#define ARG_ONE 0 +#define ARG_OPTIONAL 1 +#define ARG_PLUS 2 +#define ARG_STAR 3 +#define ARG_VARIADIC 4 + OBJECT * type_name; + OBJECT * arg_name; + int32_t index; +}; + +struct arg_list +{ + int32_t size; + struct argument * args; +}; + +struct _function +{ + 
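+    /* Common header embedded at the start of every concrete function
+     * representation (BUILTIN_FUNCTION, JAM_FUNCTION and, when HAVE_PYTHON
+     * is defined, PYTHON_FUNCTION below); type holds the matching
+     * FUNCTION_* constant.
+     */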
int32_t type; + int32_t reference_count; + OBJECT * rulename; + struct arg_list * formal_arguments; + int32_t num_formal_arguments; +}; + +typedef struct _builtin_function +{ + FUNCTION base; + LIST * ( * func )( FRAME *, int32_t flags ); + int32_t flags; +} BUILTIN_FUNCTION; + +typedef struct _jam_function +{ + FUNCTION base; + int32_t code_size; + instruction * code; + int32_t num_constants; + OBJECT * * constants; + int32_t num_subfunctions; + SUBFUNCTION * functions; + int32_t num_subactions; + SUBACTION * actions; + FUNCTION * generic; + OBJECT * file; + int32_t line; +} JAM_FUNCTION; + + +#ifdef HAVE_PYTHON + +#define FUNCTION_PYTHON 2 + +typedef struct _python_function +{ + FUNCTION base; + PyObject * python_function; +} PYTHON_FUNCTION; + +static LIST * call_python_function( PYTHON_FUNCTION *, FRAME * ); + +#endif + + +struct _stack +{ + void * data; +}; + +static void * stack; + +STACK * stack_global() +{ + static STACK result; + if ( !stack ) + { + int32_t const size = 1 << 21; + stack = BJAM_MALLOC( size ); + result.data = (char *)stack + size; + } + return &result; +} + +struct list_alignment_helper +{ + char ch; + LIST * l; +}; + +#define LISTPTR_ALIGN_BASE ( sizeof( struct list_alignment_helper ) - sizeof( LIST * ) ) +#define LISTPTR_ALIGN ( ( LISTPTR_ALIGN_BASE > sizeof( LIST * ) ) ? sizeof( LIST * ) : LISTPTR_ALIGN_BASE ) + +static void check_alignment( STACK * s ) +{ + assert( (size_t)s->data % LISTPTR_ALIGN == 0 ); +} + +void * stack_allocate( STACK * s, int32_t size ) +{ + check_alignment( s ); + s->data = (char *)s->data - size; + check_alignment( s ); + return s->data; +} + +void stack_deallocate( STACK * s, int32_t size ) +{ + check_alignment( s ); + s->data = (char *)s->data + size; + check_alignment( s ); +} + +void stack_push( STACK * s, LIST * l ) +{ + *(LIST * *)stack_allocate( s, sizeof( LIST * ) ) = l; +} + +LIST * stack_pop( STACK * s ) +{ + LIST * const result = *(LIST * *)s->data; + stack_deallocate( s, sizeof( LIST * ) ); + return result; +} + +LIST * stack_top( STACK * s ) +{ + check_alignment( s ); + return *(LIST * *)s->data; +} + +LIST * stack_at( STACK * s, int32_t n ) +{ + check_alignment( s ); + return *( (LIST * *)s->data + n ); +} + +void stack_set( STACK * s, int32_t n, LIST * value ) +{ + check_alignment( s ); + *((LIST * *)s->data + n) = value; +} + +void * stack_get( STACK * s ) +{ + check_alignment( s ); + return s->data; +} + +LIST * frame_get_local( FRAME * frame, int32_t idx ) +{ + /* The only local variables are the arguments. 
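The STACK above is a single block that grows downward: stack_allocate() moves the data pointer toward lower addresses, stack_deallocate() moves it back, and stack_push()/stack_pop() layer LIST * slots on top of that. A minimal, self-contained sketch of the same downward-growing discipline (illustrative only; it is not the engine's STACK, which is one global 2 MB allocation shared by all frames):

#include <cassert>
#include <cstddef>
#include <cstdlib>

struct DownStack
{
    char * base;   /* start of the allocation */
    char * top;    /* current top; starts at base + size and moves down */

    explicit DownStack( std::size_t size )
        : base( static_cast< char * >( std::malloc( size ) ) )
        , top( base + size )
    {}
    ~DownStack() { std::free( base ); }

    void * allocate( std::size_t n ) { top -= n; return top; }    /* push n bytes */
    void deallocate( std::size_t n ) { top += n; }                 /* pop n bytes */

    template< class T > void push( T const & v )
    { *static_cast< T * >( allocate( sizeof( T ) ) ) = v; }

    template< class T > T pop()
    { T v = *reinterpret_cast< T * >( top ); deallocate( sizeof( T ) ); return v; }
};

int main()
{
    DownStack s( 1024 );
    s.push( 7 );
    s.push( 42 );
    assert( s.pop< int >() == 42 );   /* last in, first out */
    assert( s.pop< int >() == 7 );
}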
*/ + return list_copy( lol_get( frame->args, idx ) ); +} + +static OBJECT * function_get_constant( JAM_FUNCTION * function, int32_t idx ) +{ + return function->constants[ idx ]; +} + +static LIST * function_get_variable( JAM_FUNCTION * function, FRAME * frame, + int32_t idx ) +{ + return list_copy( var_get( frame->module, function->constants[ idx ] ) ); +} + +static void function_set_variable( JAM_FUNCTION * function, FRAME * frame, + int32_t idx, LIST * value ) +{ + var_set( frame->module, function->constants[ idx ], value, VAR_SET ); +} + +static LIST * function_swap_variable( JAM_FUNCTION * function, FRAME * frame, + int32_t idx, LIST * value ) +{ + return var_swap( frame->module, function->constants[ idx ], value ); +} + +static void function_append_variable( JAM_FUNCTION * function, FRAME * frame, + int32_t idx, LIST * value ) +{ + var_set( frame->module, function->constants[ idx ], value, VAR_APPEND ); +} + +static void function_default_variable( JAM_FUNCTION * function, FRAME * frame, + int32_t idx, LIST * value ) +{ + var_set( frame->module, function->constants[ idx ], value, VAR_DEFAULT ); +} + +static void function_set_rule( JAM_FUNCTION * function, FRAME * frame, + STACK * s, int32_t idx ) +{ + SUBFUNCTION * sub = function->functions + idx; + new_rule_body( frame->module, sub->name, sub->code, !sub->local ); +} + +static void function_set_actions( JAM_FUNCTION * function, FRAME * frame, + STACK * s, int32_t idx ) +{ + SUBACTION * sub = function->actions + idx; + LIST * bindlist = stack_pop( s ); + new_rule_actions( frame->module, sub->name, sub->command, bindlist, + sub->flags ); +} + + +/* + * Returns the index if name is "<", ">", "1", "2", ... or "19" otherwise + * returns -1. + */ + +static int32_t get_argument_index( char const * s ) +{ + if ( s[ 0 ] != '\0') + { + if ( s[ 1 ] == '\0' ) + { + switch ( s[ 0 ] ) + { + case '<': return 0; + case '>': return 1; + + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + return s[ 0 ] - '1'; + } + } + else if ( s[ 0 ] == '1' && s[ 2 ] == '\0' ) + { + switch( s[ 1 ] ) + { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + return s[ 1 ] - '0' + 10 - 1; + } + } + } + return -1; +} + +static LIST * function_get_named_variable( JAM_FUNCTION * function, + FRAME * frame, OBJECT * name ) +{ + int32_t const idx = get_argument_index( object_str( name ) ); + return idx == -1 + ? 
list_copy( var_get( frame->module, name ) ) + : list_copy( lol_get( frame->args, idx ) ); +} + +static void function_set_named_variable( JAM_FUNCTION * function, FRAME * frame, + OBJECT * name, LIST * value) +{ + var_set( frame->module, name, value, VAR_SET ); +} + +static LIST * function_swap_named_variable( JAM_FUNCTION * function, + FRAME * frame, OBJECT * name, LIST * value ) +{ + return var_swap( frame->module, name, value ); +} + +static void function_append_named_variable( JAM_FUNCTION * function, + FRAME * frame, OBJECT * name, LIST * value) +{ + var_set( frame->module, name, value, VAR_APPEND ); +} + +static void function_default_named_variable( JAM_FUNCTION * function, + FRAME * frame, OBJECT * name, LIST * value ) +{ + var_set( frame->module, name, value, VAR_DEFAULT ); +} + +static LIST * function_call_rule( JAM_FUNCTION * function, FRAME * frame, + STACK * s, int32_t n_args, char const * unexpanded, OBJECT * file, int32_t line ) +{ + FRAME inner[ 1 ]; + int32_t i; + LIST * first = stack_pop( s ); + LIST * result = L0; + OBJECT * rulename; + LIST * trailing; + + frame->file = file; + frame->line = line; + + if ( list_empty( first ) ) + { + backtrace_line( frame ); + out_printf( "warning: rulename %s expands to empty string\n", unexpanded ); + backtrace( frame ); + list_free( first ); + for ( i = 0; i < n_args; ++i ) + list_free( stack_pop( s ) ); + return result; + } + + rulename = object_copy( list_front( first ) ); + + frame_init( inner ); + inner->prev = frame; + inner->prev_user = frame->module->user_module ? frame : frame->prev_user; + inner->module = frame->module; /* This gets fixed up in evaluate_rule(). */ + + if ( n_args > LOL_MAX ) + { + out_printf( "ERROR: rules are limited to %d arguments\n", LOL_MAX ); + backtrace( inner ); + b2::clean_exit( EXITBAD ); + } + + for ( i = 0; i < n_args; ++i ) + lol_add( inner->args, stack_at( s, n_args - i - 1 ) ); + + for ( i = 0; i < n_args; ++i ) + stack_pop( s ); + + trailing = list_pop_front( first ); + if ( trailing ) + { + if ( inner->args->count == 0 ) + lol_add( inner->args, trailing ); + else + { + LIST * * const l = &inner->args->list[ 0 ]; + *l = list_append( trailing, *l ); + } + } + + result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner ); + frame_free( inner ); + object_free( rulename ); + return result; +} + +static LIST * function_call_member_rule( JAM_FUNCTION * function, FRAME * frame, STACK * s, int32_t n_args, OBJECT * rulename, OBJECT * file, int32_t line ) +{ + FRAME inner[ 1 ]; + int32_t i; + LIST * first = stack_pop( s ); + LIST * result = L0; + RULE * rule; + module_t * module; + OBJECT * real_rulename = 0; + + frame->file = file; + frame->line = line; + + if ( list_empty( first ) ) + { + backtrace_line( frame ); + out_printf( "warning: object is empty\n" ); + backtrace( frame ); + + list_free( first ); + + for( i = 0; i < n_args; ++i ) + { + list_free( stack_pop( s ) ); + } + + return result; + } + + /* FIXME: handle generic case */ + assert( list_length( first ) == 1 ); + + module = bindmodule( list_front( first ) ); + if ( module->class_module ) + { + rule = bindrule( rulename, module ); + if ( rule->procedure ) + { + real_rulename = object_copy( function_rulename( rule->procedure ) ); + } + else + { + string buf[ 1 ]; + string_new( buf ); + string_append( buf, object_str( module->name ) ); + string_push_back( buf, '.' 
); + string_append( buf, object_str( rulename ) ); + real_rulename = object_new( buf->value ); + string_free( buf ); + } + } + else + { + string buf[ 1 ]; + string_new( buf ); + string_append( buf, object_str( list_front( first ) ) ); + string_push_back( buf, '.' ); + string_append( buf, object_str( rulename ) ); + real_rulename = object_new( buf->value ); + string_free( buf ); + rule = bindrule( real_rulename, frame->module ); + } + + frame_init( inner ); + + inner->prev = frame; + inner->prev_user = frame->module->user_module ? frame : frame->prev_user; + inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */ + + if ( n_args > LOL_MAX ) + { + out_printf( "ERROR: member rules are limited to %d arguments\n", LOL_MAX ); + backtrace( inner ); + b2::clean_exit( EXITBAD ); + } + + for( i = 0; i < n_args; ++i ) + { + lol_add( inner->args, stack_at( s, n_args - i - 1 ) ); + } + + for( i = 0; i < n_args; ++i ) + { + stack_pop( s ); + } + + if ( list_length( first ) > 1 ) + { + string buf[ 1 ]; + LIST * trailing = L0; + LISTITER iter = list_begin( first ), end = list_end( first ); + iter = list_next( iter ); + string_new( buf ); + for ( ; iter != end; iter = list_next( iter ) ) + { + string_append( buf, object_str( list_item( iter ) ) ); + string_push_back( buf, '.' ); + string_append( buf, object_str( rulename ) ); + trailing = list_push_back( trailing, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + string_free( buf ); + if ( inner->args->count == 0 ) + lol_add( inner->args, trailing ); + else + { + LIST * * const l = &inner->args->list[ 0 ]; + *l = list_append( trailing, *l ); + } + } + + list_free( first ); + result = evaluate_rule( rule, real_rulename, inner ); + frame_free( inner ); + object_free( real_rulename ); + return result; +} + + +/* Variable expansion */ + +typedef struct +{ + int32_t sub1; + int32_t sub2; +} subscript_t; + +typedef struct +{ + PATHNAME f; /* :GDBSMR -- pieces */ + PATHPART empty; /* :E -- default for empties */ + PATHPART join; /* :J -- join list with char */ + PATHPART prefix; /* :< */ + PATHPART postfix; /* :> */ + bool parent:1; /* :P -- go to parent directory */ + bool filemods:1; /* one of the above applied */ + bool downshift:1; /* :L -- downshift result */ + bool upshift:1; /* :U -- upshift result */ + bool to_slashes:1; /* :T -- convert "\" to "/" */ + bool to_windows:1; /* :W -- convert cygwin to native paths */ + bool opt_file:1; /* :O=F -- replace @() with the file part */ + bool opt_content:1; /* :O=C -- repalce @() with the content (E) part */ +} VAR_EDITS; + +struct VAR_EXPANDED +{ + LIST * value = L0; + LIST * inner = L0; + bool opt_file:1; + bool opt_content:1; +}; + +static VAR_EXPANDED apply_modifiers_impl( LIST * result, string * buf, + VAR_EDITS * edits, int32_t n, LISTITER iter, LISTITER end ); +static void get_iters( subscript_t const subscript, LISTITER * const first, + LISTITER * const last, int32_t const length ); + + +/* + * var_edit_parse() - parse : modifiers into PATHNAME structure + * + * The : modifiers in a $(varname:modifier) currently support replacing or + * omitting elements of a filename, and so they are parsed into a PATHNAME + * structure (which contains pointers into the original string). + * + * Modifiers of the form "X=value" replace the component X with the given value. + * Modifiers without the "=value" cause everything but the component X to be + * omitted. 
X is one of: + * + * G + * D directory name + * B base name + * S .suffix + * M (member) + * R root directory - prepended to whole path + * + * This routine sets: + * + * f->f_xxx.ptr = 0 + * f->f_xxx.len = 0 + * -> leave the original component xxx + * + * f->f_xxx.ptr = string + * f->f_xxx.len = strlen( string ) + * -> replace component xxx with string + * + * f->f_xxx.ptr = "" + * f->f_xxx.len = 0 + * -> omit component xxx + * + * var_edit_file() below and path_build() obligingly follow this convention. + */ + +static int32_t var_edit_parse( char const * mods, VAR_EDITS * edits, int32_t havezeroed + ) +{ + while ( *mods ) + { + PATHPART * fp; + bool opt = false; + + switch ( *mods++ ) + { + case 'L': edits->downshift = 1; continue; + case 'U': edits->upshift = 1; continue; + case 'P': edits->parent = edits->filemods = 1; continue; + case 'E': fp = &edits->empty; goto strval; + case 'J': fp = &edits->join; goto strval; + case 'G': fp = &edits->f.f_grist; goto fileval; + case 'R': fp = &edits->f.f_root; goto fileval; + case 'D': fp = &edits->f.f_dir; goto fileval; + case 'B': fp = &edits->f.f_base; goto fileval; + case 'S': fp = &edits->f.f_suffix; goto fileval; + case 'M': fp = &edits->f.f_member; goto fileval; + case 'T': edits->to_slashes = 1; continue; + case 'W': edits->to_windows = 1; continue; + case '<': fp = &edits->prefix; goto strval; + case '>': fp = &edits->postfix; goto strval; + case 'O': opt = true; goto strval; + default: + continue; /* Should complain, but so what... */ + } + + fileval: + /* Handle :CHARS, where each char (without a following =) selects a + * particular file path element. On the first such char, we deselect all + * others (by setting ptr = "", len = 0) and for each char we select + * that element (by setting ptr = 0). + */ + edits->filemods = 1; + + if ( *mods != '=' ) + { + if ( !havezeroed++ ) + { + int32_t i; + for ( i = 0; i < 6; ++i ) + { + edits->f.part[ i ].len = 0; + edits->f.part[ i ].ptr = ""; + } + } + + fp->ptr = 0; + continue; + } + + strval: + /* Handle :O=??? */ + if ( opt ) + { + if ( *mods == '=' ) + { + for (++mods; *mods; ++mods) + { + switch ( *mods ) + { + case 'F': edits->opt_file = true; break; + case 'C': edits->opt_content = true; break; + } + } + } + } + else + { + /* Handle :X=value, or :X */ + if ( *mods != '=' ) + { + fp->ptr = ""; + fp->len = 0; + } + else + { + fp->ptr = ++mods; + fp->len = int32_t(strlen( mods )); + mods += fp->len; + } + } + } + + return havezeroed; +} + + +/* + * var_edit_file() - copy input target name to output, modifying filename. + */ + +static void var_edit_file( char const * in, string * out, VAR_EDITS * edits ) +{ + if ( edits->filemods ) + { + PATHNAME pathname; + + /* Parse apart original filename, putting parts into "pathname". */ + path_parse( in, &pathname ); + + /* Replace any pathname with edits->f */ + if ( edits->f.f_grist .ptr ) pathname.f_grist = edits->f.f_grist; + if ( edits->f.f_root .ptr ) pathname.f_root = edits->f.f_root; + if ( edits->f.f_dir .ptr ) pathname.f_dir = edits->f.f_dir; + if ( edits->f.f_base .ptr ) pathname.f_base = edits->f.f_base; + if ( edits->f.f_suffix.ptr ) pathname.f_suffix = edits->f.f_suffix; + if ( edits->f.f_member.ptr ) pathname.f_member = edits->f.f_member; + + /* If requested, modify pathname to point to parent. */ + if ( edits->parent ) + path_parent( &pathname ); + + /* Put filename back together. 
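As a concrete illustration of the :D, :B and :S pieces documented above (grist, member and root are left out for brevity), here is a stand-alone approximation of the split that path_parse()/var_edit_file() perform for a plain forward-slash path; this is a sketch, not the engine's path code:

#include <cassert>
#include <string>

struct Parts { std::string dir, base, suffix; };

static Parts split( std::string const & path )
{
    Parts p;
    std::string::size_type slash = path.rfind( '/' );
    std::string name = slash == std::string::npos ? path : path.substr( slash + 1 );
    p.dir = slash == std::string::npos ? "" : path.substr( 0, slash );
    std::string::size_type dot = name.rfind( '.' );
    p.base = dot == std::string::npos ? name : name.substr( 0, dot );
    p.suffix = dot == std::string::npos ? "" : name.substr( dot );   /* keeps the '.' */
    return p;
}

int main()
{
    Parts p = split( "src/engine/function.cpp" );
    assert( p.dir == "src/engine" );     /* what $(f:D) selects */
    assert( p.base == "function" );      /* what $(f:B) selects */
    assert( p.suffix == ".cpp" );        /* what $(f:S) selects */
}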
*/ + path_build( &pathname, out ); + } + else + string_append( out, in ); +} + + +#if defined( OS_CYGWIN ) || defined( OS_VMS ) + +/* + * var_edit_translate_path() - translate path to os native format. + */ + +static void var_edit_translate_path( string * out, int32_t pos, VAR_EDITS * edits ) +{ + if ( edits->to_windows ) + { + string result[ 1 ]; + int32_t translated; + + /* Translate path to os native format. */ + translated = path_translate_to_os( out->value + pos, result ); + if ( translated ) + { + string_truncate( out, pos ); + string_append( out, result->value ); + edits->to_slashes = 0; + } + + string_free( result ); + } +} + +#endif + + +/* + * var_edit_shift() - do upshift/downshift & other mods. + */ + +static void var_edit_shift( string * out, int32_t pos, VAR_EDITS * edits ) +{ +#if defined( OS_CYGWIN ) || defined( OS_VMS ) + var_edit_translate_path( out, pos, edits ); +#endif + + if ( edits->upshift || edits->downshift || edits->to_slashes ) + { + /* Handle upshifting, downshifting and slash translation now. */ + char * p; + for ( p = out->value + pos; *p; ++p ) + { + if ( edits->upshift ) + *p = toupper( *p ); + else if ( edits->downshift ) + *p = tolower( *p ); + if ( edits->to_slashes && ( *p == '\\' ) ) + *p = '/'; + } + } +} + + +/* + * Reads n LISTs from the top of the STACK and combines them to form VAR_EDITS. + * Returns the number of VAR_EDITS pushed onto the STACK. + */ + +static int32_t expand_modifiers( STACK * s, int32_t n ) +{ + int32_t i; + int32_t total = 1; + LIST * * args = (LIST**)stack_get( s ); + for ( i = 0; i < n; ++i ) + total *= list_length( args[ i ] ); + + if ( total != 0 ) + { + VAR_EDITS * out = (VAR_EDITS*)stack_allocate( s, total * sizeof( VAR_EDITS ) ); + LISTITER * iter = (LISTITER*)stack_allocate( s, n * sizeof( LIST * ) ); + for ( i = 0; i < n; ++i ) + iter[ i ] = list_begin( args[ i ] ); + i = 0; + { + int32_t havezeroed; + loop: + memset( out, 0, sizeof( *out ) ); + havezeroed = 0; + for ( i = 0; i < n; ++i ) + havezeroed = var_edit_parse( object_str( list_item( iter[ i ] ) + ), out, havezeroed ); + ++out; + while ( --i >= 0 ) + { + if ( list_next( iter[ i ] ) != list_end( args[ i ] ) ) + { + iter[ i ] = list_next( iter[ i ] ); + goto loop; + } + iter[ i ] = list_begin( args[ i ] ); + } + } + stack_deallocate( s, n * sizeof( LIST * ) ); + } + return total; +} + +static VAR_EXPANDED apply_modifiers( STACK * s, int32_t n ) +{ + LIST * value = stack_top( s ); + VAR_EXPANDED result; + VAR_EDITS * const edits = (VAR_EDITS *)( (LIST * *)stack_get( s ) + 1 ); + string buf[ 1 ]; + string_new( buf ); + result = apply_modifiers_impl( L0, buf, edits, n, list_begin( value ), + list_end( value ) ); + string_free( buf ); + return result; +} + +// STACK: LIST * modifiers[modifier_count] +static VAR_EXPANDED eval_modifiers( STACK * s, LIST * value, int32_t modifier_count ) +{ + // Convert modifiers to value edits. + int32_t edits = expand_modifiers( s, modifier_count ); + // Edit the value on the stack. + stack_push( s, value ); + VAR_EXPANDED result = apply_modifiers( s, edits ); + list_free( stack_pop( s ) ); + // Clean up the value edits on the stack. + stack_deallocate( s, edits * sizeof( VAR_EDITS ) ); + // Clean up the filename modifiers. + for ( int32_t i = 0; i < modifier_count; ++i ) + list_free( stack_pop( s ) ); + // Done. + return result; +} + + +/* + * Parse a string of the form "1-2", "-2--1", "2-" and return the two + * subscripts. 
+ */ + +subscript_t parse_subscript( char const * s ) +{ + subscript_t result; + result.sub1 = 0; + result.sub2 = 0; + do /* so we can use "break" */ + { + /* Allow negative subscripts. */ + if ( !isdigit( *s ) && ( *s != '-' ) ) + { + result.sub2 = 0; + break; + } + result.sub1 = atoi( s ); + + /* Skip over the first symbol, which is either a digit or dash. */ + ++s; + while ( isdigit( *s ) ) ++s; + + if ( *s == '\0' ) + { + result.sub2 = result.sub1; + break; + } + + if ( *s != '-' ) + { + result.sub2 = 0; + break; + } + + ++s; + + if ( *s == '\0' ) + { + result.sub2 = -1; + break; + } + + if ( !isdigit( *s ) && ( *s != '-' ) ) + { + result.sub2 = 0; + break; + } + + /* First, compute the index of the last element. */ + result.sub2 = atoi( s ); + while ( isdigit( *++s ) ); + + if ( *s != '\0' ) + result.sub2 = 0; + + } while ( 0 ); + return result; +} + +static LIST * apply_subscript( STACK * s ) +{ + LIST * value = stack_top( s ); + LIST * indices = stack_at( s, 1 ); + LIST * result = L0; + int32_t length = list_length( value ); + string buf[ 1 ]; + LISTITER indices_iter = list_begin( indices ); + LISTITER const indices_end = list_end( indices ); + string_new( buf ); + for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter + ) ) + { + LISTITER iter = list_begin( value ); + LISTITER end = list_end( value ); + subscript_t const subscript = parse_subscript( object_str( list_item( + indices_iter ) ) ); + get_iters( subscript, &iter, &end, length ); + for ( ; iter != end; iter = list_next( iter ) ) + result = list_push_back( result, object_copy( list_item( iter ) ) ); + } + string_free( buf ); + return result; +} + + +/* + * Reads the LIST from first and applies subscript to it. The results are + * written to *first and *last. + */ + +static void get_iters( subscript_t const subscript, LISTITER * const first, + LISTITER * const last, int32_t const length ) +{ + int32_t start; + int32_t size; + LISTITER iter; + LISTITER end; + { + + if ( subscript.sub1 < 0 ) + start = length + subscript.sub1; + else if ( subscript.sub1 > length ) + start = length; + else + start = subscript.sub1 - 1; + + size = subscript.sub2 < 0 + ? length + 1 + subscript.sub2 - start + : subscript.sub2 - start; + + /* + * HACK: When the first subscript is before the start of the list, it + * magically becomes the beginning of the list. This is inconsistent, + * but needed for backwards compatibility. + */ + if ( start < 0 ) + start = 0; + + /* The "sub2 < 0" test handles the semantic error of sub2 < sub1. 
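Putting parse_subscript() and get_iters() together: a subscript such as [2], [2-], [2-3] or [-2--1] selects a 1-based, inclusive window of the list, with negative values counting from the end; an open-ended "2-" is encoded by parse_subscript() as sub2 == -1. A small sketch of the resulting (start, size) window for a four-element list, mirroring the clamping rules above (illustrative only):

#include <cassert>

struct Window { int start; int size; };   /* 0-based start plus element count */

static Window window( int sub1, int sub2, int length )
{
    int start = sub1 < 0 ? length + sub1 : sub1 > length ? length : sub1 - 1;
    int size = sub2 < 0 ? length + 1 + sub2 - start : sub2 - start;
    if ( start < 0 ) start = 0;                        /* clamp to the front */
    if ( size < 0 ) size = 0;                          /* sub2 < sub1 selects nothing */
    if ( start + size > length ) size = length - start;
    return { start, size };
}

int main()
{
    /* list = a b c d (length 4) */
    assert( window( 2, 2, 4 ).start == 1 && window( 2, 2, 4 ).size == 1 );     /* $(l[2])     -> b     */
    assert( window( 2, -1, 4 ).size == 3 );                                    /* $(l[2-])    -> b c d */
    assert( window( -2, -1, 4 ).start == 2 && window( -2, -1, 4 ).size == 2 ); /* $(l[-2--1]) -> c d   */
}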
*/ + if ( size < 0 ) + size = 0; + + if ( start + size > length ) + size = length - start; + } + + iter = *first; + while ( start-- > 0 ) + iter = list_next( iter ); + + end = iter; + while ( size-- > 0 ) + end = list_next( end ); + + *first = iter; + *last = end; +} + +static LIST * apply_modifiers_prepost( LIST * result, string * buf, + VAR_EDITS * edits, int32_t n, LISTITER begin, LISTITER end ) +{ + for ( LISTITER iter = begin; iter != end; iter = list_next( iter ) ) + { + for ( int32_t i = 0; i < n; ++i ) + { + if ( edits[ i ].prefix.ptr ) + { + string_append( buf, edits[ i ].prefix.ptr ); + } + } + string_append( buf, object_str( list_item( iter ) ) ); + for ( int32_t i = 0; i < n; ++i ) + { + if ( edits[ i ].postfix.ptr ) + { + string_append( buf, edits[ i ].postfix.ptr ); + } + } + result = list_push_back( result, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + return result; +} + +static LIST * apply_modifiers_empty( LIST * result, string * buf, + VAR_EDITS * edits, int32_t n ) +{ + int32_t i; + for ( i = 0; i < n; ++i ) + { + if ( edits[ i ].empty.ptr ) + { + /** FIXME: is empty.ptr always null-terminated? */ + var_edit_file( edits[ i ].empty.ptr, buf, edits + i ); + var_edit_shift( buf, 0, edits + i ); + result = list_push_back( result, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + } + return result; +} + +static LIST * apply_modifiers_non_empty( LIST * result, string * buf, + VAR_EDITS * edits, int32_t n, LISTITER begin, LISTITER end ) +{ + int32_t i; + LISTITER iter; + for ( i = 0; i < n; ++i ) + { + if ( edits[ i ].join.ptr ) + { + var_edit_file( object_str( list_item( begin ) ), buf, edits + i ); + var_edit_shift( buf, 0, edits + i ); + for ( iter = list_next( begin ); iter != end; iter = list_next( iter + ) ) + { + int32_t size; + string_append( buf, edits[ i ].join.ptr ); + size = buf->size; + var_edit_file( object_str( list_item( iter ) ), buf, edits + i + ); + var_edit_shift( buf, size, edits + i ); + } + result = list_push_back( result, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + else + { + for ( iter = begin; iter != end; iter = list_next( iter ) ) + { + var_edit_file( object_str( list_item( iter ) ), buf, edits + i ); + var_edit_shift( buf, 0, edits + i ); + result = list_push_back( result, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + } + } + return result; +} + +static VAR_EXPANDED apply_modifiers_impl( LIST * result, string * buf, + VAR_EDITS * edits, int32_t n, LISTITER iter, LISTITER end ) +{ + LIST * modified = iter == end + ? 
apply_modifiers_empty( result, buf, edits, n ) + : apply_modifiers_non_empty( result, buf, edits, n, iter, end ); + VAR_EXPANDED expanded; + expanded.value = apply_modifiers_prepost( + L0, buf, edits, n, list_begin( modified ), list_end( modified ) ); + expanded.inner = modified; + expanded.opt_file = false; + expanded.opt_content = false; + for ( int32_t i = 0; i < n; ++i ) + { + expanded.opt_file |= edits[i].opt_file; + expanded.opt_content |= edits[i].opt_content; + } + return expanded; +} + +static LIST * apply_subscript_and_modifiers( STACK * s, int32_t n ) +{ + LIST * const value = stack_top( s ); + LIST * const indices = stack_at( s, 1 ); + LIST * result = L0; + VAR_EDITS * const edits = (VAR_EDITS *)((LIST * *)stack_get( s ) + 2); + int32_t const length = list_length( value ); + string buf[ 1 ]; + LISTITER indices_iter = list_begin( indices ); + LISTITER const indices_end = list_end( indices ); + string_new( buf ); + for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter + ) ) + { + LISTITER iter = list_begin( value ); + LISTITER end = list_end( value ); + subscript_t const sub = parse_subscript( object_str( list_item( + indices_iter ) ) ); + get_iters( sub, &iter, &end, length ); + VAR_EXPANDED modified + = apply_modifiers_impl( result, buf, edits, n, iter, end ); + result = modified.value; + list_free( modified.inner ); + } + string_free( buf ); + return result; +} + + +/* + * expand() - expands a list of concatenated strings and variable references + * + * Takes a list of expansion items - each representing one element to be + * concatenated and each containing a list of its values. Returns a list of all + * possible values constructed by selecting a single value from each of the + * elements and concatenating them together. + * + * For example, in the following code: + * + * local a = one two three four ; + * local b = foo bar ; + * ECHO /$(a)/$(b)/$(a)/ ; + * + * When constructing the result of /$(a)/$(b)/ this function would get called + * with the following 7 expansion items: + * 1. / + * 2. one two three four + * 3. / + * 4. foo bar + * 5. / + * 6. one two three four + * 7. / + * + * And would result in a list containing 32 values: + * 1. /one/foo/one/ + * 2. /one/foo/two/ + * 3. /one/foo/three/ + * 4. /one/foo/four/ + * 5. /one/bar/one/ + * ... + * + */ + +typedef struct expansion_item +{ + /* Item's value list initialized prior to calling expand(). */ + LIST * values; + + /* Internal data initialized and used inside expand(). */ + LISTITER current; /* Currently used value. */ + int32_t size; /* Concatenated string length prior to concatenating the + * item's current value. + */ +} expansion_item; + +static LIST * expand( expansion_item * items, int32_t const length ) +{ + LIST * result = L0; + string buf[ 1 ]; + int32_t size = 0; + int32_t i; + + assert( length > 0 ); + for ( i = 0; i < length; ++i ) + { + LISTITER iter = list_begin( items[ i ].values ); + LISTITER const end = list_end( items[ i ].values ); + + /* If any of the items has no values - the result is an empty list. */ + if ( iter == end ) return L0; + + /* Set each item's 'current' to its first listed value. This indicates + * each item's next value to be used when constructing the list of all + * possible concatenated values. + */ + items[ i ].current = iter; + + /* Calculate the longest concatenated string length - to know how much + * memory we need to allocate as a buffer for holding the concatenated + * strings. 
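The comment above describes expand() as a cross product: one value is chosen from every expansion item, the choices are concatenated, and the last item varies fastest. The same behaviour, reformulated with std::vector as a stand-alone sketch (the engine version additionally reuses one string buffer sized to the longest possible result):

#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

static std::vector< std::string > product(
    std::vector< std::vector< std::string > > const & items )
{
    assert( !items.empty() );
    std::vector< std::string > result;
    for ( auto const & item : items )
        if ( item.empty() ) return {};   /* any empty item -> empty result */

    std::vector< std::size_t > pos( items.size(), 0 );   /* odometer of value indices */
    for ( ; ; )
    {
        std::string s;
        for ( std::size_t i = 0; i < items.size(); ++i )
            s += items[ i ][ pos[ i ] ];
        result.push_back( s );

        std::size_t i = items.size();
        while ( i-- > 0 )                /* advance the last item first */
        {
            if ( ++pos[ i ] < items[ i ].size() ) break;
            pos[ i ] = 0;                /* wrapped: carry into the item before it */
            if ( i == 0 ) return result;
        }
    }
}

int main()
{
    /* local a = one two ;  local b = foo bar ;  ECHO /$(a)/$(b)/ ; */
    std::vector< std::vector< std::string > > items =
        { { "/" }, { "one", "two" }, { "/" }, { "foo", "bar" }, { "/" } };
    std::vector< std::string > r = product( items );
    assert( r.size() == 4 );
    assert( r[ 0 ] == "/one/foo/" && r[ 3 ] == "/two/bar/" );
}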
+ */ + { + int32_t max = 0; + for ( ; iter != end; iter = list_next( iter ) ) + { + int32_t const len = int32_t(strlen( object_str( list_item( iter ) ) )); + if ( len > max ) max = len; + } + size += max; + } + } + + string_new( buf ); + string_reserve( buf, size ); + + i = 0; + while ( i >= 0 ) + { + for ( ; i < length; ++i ) + { + items[ i ].size = buf->size; + string_append( buf, object_str( list_item( items[ i ].current ) ) ); + } + result = list_push_back( result, object_new( buf->value ) ); + while ( --i >= 0 ) + { + if ( list_next( items[ i ].current ) != list_end( items[ i ].values + ) ) + { + items[ i ].current = list_next( items[ i ].current ); + string_truncate( buf, items[ i ].size ); + break; + } + else + items[ i ].current = list_begin( items[ i ].values ); + } + } + + string_free( buf ); + return result; +} + +static void combine_strings( STACK * s, int32_t n, string * out ) +{ + int32_t i; + for ( i = 0; i < n; ++i ) + { + LIST * const values = stack_pop( s ); + LISTITER iter = list_begin( values ); + LISTITER const end = list_end( values ); + if ( iter != end ) + { + string_append( out, object_str( list_item( iter ) ) ); + for ( iter = list_next( iter ); iter != end; iter = list_next( iter + ) ) + { + string_push_back( out, ' ' ); + string_append( out, object_str( list_item( iter ) ) ); + } + list_free( values ); + } + } +} + +struct dynamic_array +{ + int32_t size; + int32_t capacity; + int32_t unit_size; + void * data; +}; + +static void dynamic_array_init( struct dynamic_array * array ) +{ + array->size = 0; + array->capacity = 0; + array->unit_size = 0; + array->data = 0; +} + +static void dynamic_array_free( struct dynamic_array * array ) +{ + BJAM_FREE( array->data ); +} + +static void dynamic_array_push_impl( struct dynamic_array * const array, + void const * const value, int32_t const unit_size ) +{ + if ( array->unit_size == 0 ) + { + array->unit_size = unit_size; + } + else + { + assert( array->unit_size == unit_size ); + } + if ( array->capacity == 0 ) + { + array->capacity = 2; + array->data = BJAM_MALLOC( array->capacity * unit_size ); + } + else if ( array->capacity == array->size ) + { + void * new_data; + array->capacity *= 2; + new_data = BJAM_MALLOC( array->capacity * unit_size ); + memcpy( new_data, array->data, array->size * unit_size ); + BJAM_FREE( array->data ); + array->data = new_data; + } + memcpy( (char *)array->data + array->size * unit_size, value, unit_size ); + ++array->size; +} + +#define dynamic_array_push( array, value ) (dynamic_array_push_impl(array, &value, sizeof(value))) +#define dynamic_array_at( type, array, idx ) ( (assert( array->unit_size == sizeof(type) )) , (((type *)(array)->data)[idx]) ) +#define dynamic_array_pop( array ) (--(array)->size) + +/* + * struct compiler + */ + +struct label_info +{ + int32_t absolute_position; + struct dynamic_array uses[ 1 ]; +}; + +#define LOOP_INFO_BREAK 0 +#define LOOP_INFO_CONTINUE 1 + +struct loop_info +{ + int32_t type; + int32_t label; + int32_t cleanup_depth; +}; + +struct stored_rule +{ + OBJECT * name; + PARSE * parse; + int32_t num_arguments; + struct arg_list * arguments; + int32_t local; +}; + +typedef struct compiler +{ + struct dynamic_array code[ 1 ]; + struct dynamic_array constants[ 1 ]; + struct dynamic_array labels[ 1 ]; + struct dynamic_array rules[ 1 ]; + struct dynamic_array actions[ 1 ]; + struct dynamic_array cleanups[ 1 ]; + struct dynamic_array loop_scopes[ 1 ]; +} compiler; + +static void compiler_init( compiler * c ) +{ + dynamic_array_init( c->code ); + 
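dynamic_array above backs every bookkeeping array in the compiler struct: its capacity starts at 2 on the first push and doubles whenever it fills, so emitting n instructions costs only O(log n) reallocations. A tiny self-contained check of that growth schedule (illustrative only, simulating dynamic_array_push_impl() without the copies):

#include <cassert>
#include <cstdint>

int main()
{
    int32_t size = 0, capacity = 0, reallocations = 0;
    for ( int32_t push = 0; push < 1000; ++push )   /* simulate 1000 pushes */
    {
        if ( capacity == 0 ) { capacity = 2; ++reallocations; }
        else if ( capacity == size ) { capacity *= 2; ++reallocations; }
        ++size;
    }
    assert( capacity == 1024 );      /* 2, 4, 8, ..., 1024 */
    assert( reallocations == 10 );   /* one initial allocation plus nine doublings */
}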
dynamic_array_init( c->constants ); + dynamic_array_init( c->labels ); + dynamic_array_init( c->rules ); + dynamic_array_init( c->actions ); + dynamic_array_init( c->cleanups ); + dynamic_array_init( c->loop_scopes ); +} + +static void compiler_free( compiler * c ) +{ + int32_t i; + dynamic_array_free( c->actions ); + dynamic_array_free( c->rules ); + for ( i = 0; i < c->labels->size; ++i ) + dynamic_array_free( dynamic_array_at( struct label_info, c->labels, i + ).uses ); + dynamic_array_free( c->labels ); + dynamic_array_free( c->constants ); + dynamic_array_free( c->code ); + dynamic_array_free( c->cleanups ); + dynamic_array_free( c->loop_scopes ); +} + +static void compile_emit_instruction( compiler * c, instruction instr ) +{ + dynamic_array_push( c->code, instr ); +} + +static int32_t compile_new_label( compiler * c ) +{ + int32_t result = c->labels->size; + struct label_info info; + info.absolute_position = -1; + dynamic_array_init( info.uses ); + dynamic_array_push( c->labels, info ); + return result; +} + +static void compile_set_label( compiler * c, int32_t label ) +{ + struct label_info * const l = &dynamic_array_at( struct label_info, + c->labels, label ); + int32_t const pos = c->code->size; + int32_t i; + assert( l->absolute_position == -1 ); + l->absolute_position = pos; + for ( i = 0; i < l->uses->size; ++i ) + { + int32_t id = dynamic_array_at( int32_t, l->uses, i ); + int32_t offset = (int32_t)( pos - id - 1 ); + dynamic_array_at( instruction, c->code, id ).arg = offset; + } +} + +static void compile_emit( compiler * c, uint32_t op_code, int32_t arg ) +{ + instruction instr; + instr.op_code = op_code; + instr.arg = arg; + compile_emit_instruction( c, instr ); +} + +static void compile_emit_branch( compiler * c, uint32_t op_code, int32_t label ) +{ + struct label_info * const l = &dynamic_array_at( struct label_info, + c->labels, label ); + int32_t const pos = c->code->size; + instruction instr; + instr.op_code = op_code; + if ( l->absolute_position == -1 ) + { + instr.arg = 0; + dynamic_array_push( l->uses, pos ); + } + else + instr.arg = (int32_t)( l->absolute_position - pos - 1 ); + compile_emit_instruction( c, instr ); +} + +static int32_t compile_emit_constant( compiler * c, OBJECT * value ) +{ + OBJECT * copy = object_copy( value ); + dynamic_array_push( c->constants, copy ); + return c->constants->size - 1; +} + +static void compile_push_cleanup( compiler * c, uint32_t op_code, int32_t arg ) +{ + instruction instr; + instr.op_code = op_code; + instr.arg = arg; + dynamic_array_push( c->cleanups, instr ); +} + +static void compile_pop_cleanup( compiler * c ) +{ + dynamic_array_pop( c->cleanups ); +} + +static void compile_emit_cleanups( compiler * c, int32_t end ) +{ + int32_t i; + for ( i = c->cleanups->size; --i >= end; ) + { + compile_emit_instruction( c, dynamic_array_at( instruction, c->cleanups, i ) ); + } +} + +static void compile_emit_loop_jump( compiler * c, int32_t type ) +{ + struct loop_info * info = NULL; + int32_t i; + for ( i = c->loop_scopes->size; --i >= 0; ) + { + struct loop_info * elem = &dynamic_array_at( struct loop_info, c->loop_scopes, i ); + if ( elem->type == type ) + { + info = elem; + break; + } + } + if ( info == NULL ) + { + printf( "warning: ignoring break statement used outside of loop\n" ); + return; + } + compile_emit_cleanups( c, info->cleanup_depth ); + compile_emit_branch( c, INSTR_JUMP, info->label ); +} + +static void compile_push_break_scope( compiler * c, int32_t label ) +{ + struct loop_info info; + info.type = 
LOOP_INFO_BREAK; + info.label = label; + info.cleanup_depth = c->cleanups->size; + dynamic_array_push( c->loop_scopes, info ); +} + +static void compile_push_continue_scope( compiler * c, int32_t label ) +{ + struct loop_info info; + info.type = LOOP_INFO_CONTINUE; + info.label = label; + info.cleanup_depth = c->cleanups->size; + dynamic_array_push( c->loop_scopes, info ); +} + +static void compile_pop_break_scope( compiler * c ) +{ + assert( c->loop_scopes->size > 0 ); + assert( dynamic_array_at( struct loop_info, c->loop_scopes, c->loop_scopes->size - 1 ).type == LOOP_INFO_BREAK ); + dynamic_array_pop( c->loop_scopes ); +} + +static void compile_pop_continue_scope( compiler * c ) +{ + assert( c->loop_scopes->size > 0 ); + assert( dynamic_array_at( struct loop_info, c->loop_scopes, c->loop_scopes->size - 1 ).type == LOOP_INFO_CONTINUE ); + dynamic_array_pop( c->loop_scopes ); +} + +static int32_t compile_emit_rule( compiler * c, OBJECT * name, PARSE * parse, + int32_t num_arguments, struct arg_list * arguments, int32_t local ) +{ + struct stored_rule rule; + rule.name = object_copy( name ); + rule.parse = parse; + rule.num_arguments = num_arguments; + rule.arguments = arguments; + rule.local = local; + dynamic_array_push( c->rules, rule ); + return (int32_t)( c->rules->size - 1 ); +} + +static int32_t compile_emit_actions( compiler * c, PARSE * parse ) +{ + SUBACTION a; + a.name = object_copy( parse->string ); + a.command = function_compile_actions( object_str( parse->string1 ), + parse->file, parse->line ); + a.flags = parse->num; + dynamic_array_push( c->actions, a ); + return (int32_t)( c->actions->size - 1 ); +} + +static JAM_FUNCTION * compile_to_function( compiler * c ) +{ + JAM_FUNCTION * const result = (JAM_FUNCTION*)BJAM_MALLOC( sizeof( JAM_FUNCTION ) ); + int32_t i; + result->base.type = FUNCTION_JAM; + result->base.reference_count = 1; + result->base.formal_arguments = 0; + result->base.num_formal_arguments = 0; + + result->base.rulename = 0; + + result->code_size = c->code->size; + result->code = (instruction*)BJAM_MALLOC( c->code->size * sizeof( instruction ) ); + memcpy( result->code, c->code->data, c->code->size * sizeof( instruction ) ); + + result->constants = (OBJECT**)BJAM_MALLOC( c->constants->size * sizeof( OBJECT * ) ); + if ( c->constants->size != 0 ) + memcpy( result->constants, c->constants->data, + c->constants->size * sizeof( OBJECT * ) ); + result->num_constants = c->constants->size; + + result->num_subfunctions = c->rules->size; + result->functions = (SUBFUNCTION*)BJAM_MALLOC( c->rules->size * sizeof( SUBFUNCTION ) ); + for ( i = 0; i < c->rules->size; ++i ) + { + struct stored_rule * const rule = &dynamic_array_at( struct stored_rule, + c->rules, i ); + result->functions[ i ].name = rule->name; + result->functions[ i ].code = function_compile( rule->parse ); + result->functions[ i ].code->num_formal_arguments = rule->num_arguments; + result->functions[ i ].code->formal_arguments = rule->arguments; + result->functions[ i ].local = rule->local; + } + + result->actions = (SUBACTION*)BJAM_MALLOC( c->actions->size * sizeof( SUBACTION ) ); + if ( c->actions->size != 0 ) + memcpy( result->actions, c->actions->data, + c->actions->size * sizeof( SUBACTION ) ); + result->num_subactions = c->actions->size; + + result->generic = 0; + + result->file = 0; + result->line = -1; + + return result; +} + + +/* + * Parsing of variable expansions + */ + +typedef struct VAR_PARSE_GROUP +{ + struct dynamic_array elems[ 1 ]; +} VAR_PARSE_GROUP; + +typedef struct VAR_PARSE_ACTIONS +{ 
+ struct dynamic_array elems[ 1 ]; +} VAR_PARSE_ACTIONS; + +#define VAR_PARSE_TYPE_VAR 0 +#define VAR_PARSE_TYPE_STRING 1 +#define VAR_PARSE_TYPE_FILE 2 + +typedef struct _var_parse +{ + int32_t type; /* string, variable or file */ +} VAR_PARSE; + +typedef struct +{ + VAR_PARSE base; + VAR_PARSE_GROUP * name; + VAR_PARSE_GROUP * subscript; + struct dynamic_array modifiers[ 1 ]; +} VAR_PARSE_VAR; + +typedef struct +{ + VAR_PARSE base; + OBJECT * s; +} VAR_PARSE_STRING; + +static void var_parse_free( VAR_PARSE * ); + +static std::string var_parse_to_string( VAR_PARSE_STRING * string, bool debug = false ); +static std::string var_parse_to_string( VAR_PARSE_GROUP * group, bool debug = false ); +static std::string var_parse_to_string( VAR_PARSE_VAR const * parse, bool debug = false ); + +static std::string var_parse_to_string( VAR_PARSE_STRING * string, bool debug ) +{ + std::string result; + if ( debug ) result += "'"; + result += object_str( string->s ) ? object_str( string->s ) : ""; + if ( debug ) result += "'"; + return result; +} +static std::string var_parse_to_string( VAR_PARSE_GROUP * group, bool debug ) +{ + std::string result; + if ( debug ) result += "["; + for ( int32_t i = 0; i < group->elems->size; ++i ) + { + switch ( dynamic_array_at( VAR_PARSE *, group->elems, i )->type ) + { + case VAR_PARSE_TYPE_VAR: + result += var_parse_to_string( dynamic_array_at( VAR_PARSE_VAR *, group->elems, i ), debug ); + break; + + case VAR_PARSE_TYPE_STRING: + result += var_parse_to_string( dynamic_array_at( VAR_PARSE_STRING *, group->elems, i ), debug ); + break; + } + } + if ( debug ) result += "["; + return result; +} +static std::string var_parse_to_string( VAR_PARSE_VAR const * parse, bool debug ) +{ + std::string result = "$("; + result += var_parse_to_string( parse->name, debug ); + if ( parse->subscript ) + { + result += "[" + var_parse_to_string( parse->subscript, debug ) + "]"; + } + for ( int32_t i = 0; i < parse->modifiers->size; ++i ) + { + result += ":" + var_parse_to_string( dynamic_array_at( VAR_PARSE_GROUP *, parse->modifiers, i ), debug ); + } + return result + ")"; +} + + +/* + * VAR_PARSE_GROUP + */ + +static VAR_PARSE_GROUP * var_parse_group_new() +{ + VAR_PARSE_GROUP * const result = (VAR_PARSE_GROUP*)BJAM_MALLOC( sizeof( VAR_PARSE_GROUP ) ); + dynamic_array_init( result->elems ); + return result; +} + +static void var_parse_group_free( VAR_PARSE_GROUP * group ) +{ + int32_t i; + for ( i = 0; i < group->elems->size; ++i ) + var_parse_free( dynamic_array_at( VAR_PARSE *, group->elems, i ) ); + dynamic_array_free( group->elems ); + BJAM_FREE( group ); +} + +static void var_parse_group_add( VAR_PARSE_GROUP * group, VAR_PARSE * elem ) +{ + dynamic_array_push( group->elems, elem ); +} + +static void var_parse_group_maybe_add_constant( VAR_PARSE_GROUP * group, + char const * start, char const * end ) +{ + if ( start != end ) + { + string buf[ 1 ]; + VAR_PARSE_STRING * const value = (VAR_PARSE_STRING *)BJAM_MALLOC( + sizeof(VAR_PARSE_STRING) ); + value->base.type = VAR_PARSE_TYPE_STRING; + string_new( buf ); + string_append_range( buf, start, end ); + value->s = object_new( buf->value ); + string_free( buf ); + var_parse_group_add( group, (VAR_PARSE *)value ); + } +} + +VAR_PARSE_STRING * var_parse_group_as_literal( VAR_PARSE_GROUP * group ) +{ + if ( group->elems->size == 1 ) + { + VAR_PARSE * result = dynamic_array_at( VAR_PARSE *, group->elems, 0 ); + if ( result->type == VAR_PARSE_TYPE_STRING ) + return (VAR_PARSE_STRING *)result; + } + return 0; +} + + +/* + * VAR_PARSE_ACTIONS 
+ */ + +static VAR_PARSE_ACTIONS * var_parse_actions_new() +{ + VAR_PARSE_ACTIONS * const result = (VAR_PARSE_ACTIONS *)BJAM_MALLOC( + sizeof(VAR_PARSE_ACTIONS) ); + dynamic_array_init( result->elems ); + return result; +} + +static void var_parse_actions_free( VAR_PARSE_ACTIONS * actions ) +{ + int32_t i; + for ( i = 0; i < actions->elems->size; ++i ) + var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *, + actions->elems, i ) ); + dynamic_array_free( actions->elems ); + BJAM_FREE( actions ); +} + + +/* + * VAR_PARSE_VAR + */ + +static VAR_PARSE_VAR * var_parse_var_new() +{ + VAR_PARSE_VAR * result = (VAR_PARSE_VAR*)BJAM_MALLOC( sizeof( VAR_PARSE_VAR ) ); + result->base.type = VAR_PARSE_TYPE_VAR; + result->name = var_parse_group_new(); + result->subscript = 0; + dynamic_array_init( result->modifiers ); + return result; +} + +static void var_parse_var_free( VAR_PARSE_VAR * var ) +{ + int32_t i; + var_parse_group_free( var->name ); + if ( var->subscript ) + var_parse_group_free( var->subscript ); + for ( i = 0; i < var->modifiers->size; ++i ) + var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *, + var->modifiers, i ) ); + dynamic_array_free( var->modifiers ); + BJAM_FREE( var ); +} + +static VAR_PARSE_GROUP * var_parse_var_new_modifier( VAR_PARSE_VAR * var ) +{ + VAR_PARSE_GROUP * result = var_parse_group_new(); + dynamic_array_push( var->modifiers, result ); + return result; +} + +static int32_t var_parse_var_mod_index( VAR_PARSE_VAR const * var , char m) +{ + for ( int32_t i = 0; i < var->modifiers->size; ++i ) + { + VAR_PARSE_GROUP * mod = dynamic_array_at( VAR_PARSE_GROUP *, var->modifiers, i ); + VAR_PARSE_STRING * mod_val = dynamic_array_at( VAR_PARSE_STRING *, mod->elems, 0 ); + const char * mod_str = object_str(mod_val->s); + if (mod_str && mod_str[0] == m) + { + return i; + } + } + return -1; +} + + +/* + * VAR_PARSE_STRING + */ + +static void var_parse_string_free( VAR_PARSE_STRING * string ) +{ + object_free( string->s ); + BJAM_FREE( string ); +} + + +/* + * VAR_PARSE + */ + +static void var_parse_free( VAR_PARSE * parse ) +{ + switch ( parse->type ) + { + case VAR_PARSE_TYPE_VAR: + case VAR_PARSE_TYPE_FILE: + var_parse_var_free( (VAR_PARSE_VAR *)parse ); + break; + + case VAR_PARSE_TYPE_STRING: + var_parse_string_free( (VAR_PARSE_STRING *)parse ); + break; + + default: + assert( !"Invalid type" ); + } +} + + +/* + * Compile VAR_PARSE + */ + +static void var_parse_group_compile( VAR_PARSE_GROUP const * parse, + compiler * c ); + +static void var_parse_var_compile( VAR_PARSE_VAR const * parse, compiler * c ) +{ + int32_t expand_name = 0; + int32_t is_get_grist = 0; + int32_t has_modifiers = 0; + /* Special case common modifiers */ + if ( parse->modifiers->size == 1 ) + { + VAR_PARSE_GROUP * mod = dynamic_array_at( VAR_PARSE_GROUP *, parse->modifiers, 0 ); + if ( mod->elems->size == 1 ) + { + VAR_PARSE * mod1 = dynamic_array_at( VAR_PARSE *, mod->elems, 0 ); + if ( mod1->type == VAR_PARSE_TYPE_STRING ) + { + OBJECT * s = ( (VAR_PARSE_STRING *)mod1 )->s; + if ( ! strcmp ( object_str( s ), "G" ) ) + { + is_get_grist = 1; + } + } + } + } + /* If there are modifiers, emit them in reverse order. */ + if ( parse->modifiers->size > 0 && !is_get_grist ) + { + int32_t i; + has_modifiers = 1; + for ( i = 0; i < parse->modifiers->size; ++i ) + var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *, + parse->modifiers, parse->modifiers->size - i - 1 ), c ); + } + + /* If there is a subscript, emit it. 
*/ + if ( parse->subscript ) + var_parse_group_compile( parse->subscript, c ); + + /* If the variable name is empty, look it up. */ + if ( parse->name->elems->size == 0 ) + compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c, + constant_empty ) ); + /* If the variable name does not need to be expanded, look it up. */ + else if ( parse->name->elems->size == 1 && dynamic_array_at( VAR_PARSE *, + parse->name->elems, 0 )->type == VAR_PARSE_TYPE_STRING ) + { + OBJECT * const name = ( (VAR_PARSE_STRING *)dynamic_array_at( + VAR_PARSE *, parse->name->elems, 0 ) )->s; + int32_t const idx = get_argument_index( object_str( name ) ); + if ( idx != -1 ) + compile_emit( c, INSTR_PUSH_ARG, idx ); + else + compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c, name ) ); + } + /* Otherwise, push the var names and use the group instruction. */ + else + { + var_parse_group_compile( parse->name, c ); + expand_name = 1; + } + + /** Select the instruction for expanding the variable. */ + if ( !has_modifiers && !parse->subscript && !expand_name ) + ; + else if ( !has_modifiers && !parse->subscript && expand_name ) + compile_emit( c, INSTR_PUSH_GROUP, 0 ); + else if ( !has_modifiers && parse->subscript && !expand_name ) + compile_emit( c, INSTR_APPLY_INDEX, 0 ); + else if ( !has_modifiers && parse->subscript && expand_name ) + compile_emit( c, INSTR_APPLY_INDEX_GROUP, 0 ); + else if ( has_modifiers && !parse->subscript && !expand_name ) + compile_emit( c, INSTR_APPLY_MODIFIERS, parse->modifiers->size ); + else if ( has_modifiers && !parse->subscript && expand_name ) + compile_emit( c, INSTR_APPLY_MODIFIERS_GROUP, parse->modifiers->size ); + else if ( has_modifiers && parse->subscript && !expand_name ) + compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS, parse->modifiers->size ); + else if ( has_modifiers && parse->subscript && expand_name ) + compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS_GROUP, + parse->modifiers->size ); + + /* Now apply any special modifiers */ + if ( is_get_grist ) + { + compile_emit( c, INSTR_GET_GRIST, 0 ); + } +} + +static void var_parse_string_compile( VAR_PARSE_STRING const * parse, + compiler * c ) +{ + compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, parse->s ) + ); +} + +static void parse_var_string( char const * first, char const * last, + struct dynamic_array * out ); + +static void var_parse_file_compile( VAR_PARSE_VAR const * parse, compiler * c ) +{ + std::string var = var_parse_to_string( parse, true ); + int32_t empty_mod_index = var_parse_var_mod_index( parse, 'E' ); + int32_t grist_mod_index = var_parse_var_mod_index( parse, 'G' ); + int32_t modifier_count = 0; + // Push the contents, aka the edit modifier value. + { + assert( empty_mod_index >= 0 ); + // We reparse the edit modifier as we do teh expansion differently than + // regular var expansion. 
+ std::string contents_val = var_parse_to_string( + dynamic_array_at( + VAR_PARSE_GROUP *, parse->modifiers, empty_mod_index ), false ); + dynamic_array contents_dyn_array; + dynamic_array_init( &contents_dyn_array ); + parse_var_string( + contents_val.c_str() + 2, contents_val.c_str() + contents_val.size(), + &contents_dyn_array ); + for ( int32_t i = contents_dyn_array.size - 1; i >= 0; --i ) + { + auto group = dynamic_array_at( + VAR_PARSE_GROUP *, ( &contents_dyn_array ), i ); + var_parse_group_compile( group, c ); + var_parse_group_free( group ); + } + dynamic_array_free( &contents_dyn_array ); + compile_emit( c, INSTR_APPEND_STRINGS, contents_dyn_array.size ); + } + // If there are modifiers, emit them in reverse order. + if ( parse->modifiers->size > 0 ) + { + for ( int32_t i = parse->modifiers->size - 1; i >= 0; --i ) + { + // Skip special modifiers. + if ( i == empty_mod_index || i == grist_mod_index ) continue; + modifier_count += 1; + var_parse_group_compile( + dynamic_array_at( VAR_PARSE_GROUP *, parse->modifiers, i ), c ); + } + } + // Push the filename, aka var name. + var_parse_group_compile( parse->name, c ); + // This instruction applies the modifiers and writes out the file and fills + // in the file name. + compile_emit( c, INSTR_WRITE_FILE, modifier_count ); +} + +static void var_parse_compile( VAR_PARSE const * parse, compiler * c ) +{ + switch ( parse->type ) + { + case VAR_PARSE_TYPE_VAR: + var_parse_var_compile( (VAR_PARSE_VAR const *)parse, c ); + break; + + case VAR_PARSE_TYPE_STRING: + var_parse_string_compile( (VAR_PARSE_STRING const *)parse, c ); + break; + + case VAR_PARSE_TYPE_FILE: + var_parse_file_compile( (VAR_PARSE_VAR const *)parse, c ); + break; + + default: + assert( !"Unknown var parse type." ); + } +} + +static void var_parse_group_compile( VAR_PARSE_GROUP const * parse, compiler * c + ) +{ + /* Emit the elements in reverse order. */ + int32_t i; + for ( i = 0; i < parse->elems->size; ++i ) + var_parse_compile( dynamic_array_at( VAR_PARSE *, parse->elems, + parse->elems->size - i - 1 ), c ); + /* If there are no elements, emit an empty string. */ + if ( parse->elems->size == 0 ) + compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, + constant_empty ) ); + /* If there is more than one element, combine them. */ + if ( parse->elems->size > 1 ) + compile_emit( c, INSTR_COMBINE_STRINGS, parse->elems->size ); +} + +static void var_parse_actions_compile( VAR_PARSE_ACTIONS const * actions, + compiler * c ) +{ + int32_t i; + for ( i = 0; i < actions->elems->size; ++i ) + var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *, + actions->elems, actions->elems->size - i - 1 ), c ); + compile_emit( c, INSTR_OUTPUT_STRINGS, actions->elems->size ); +} + + +/* + * Parse VAR_PARSE_VAR + */ + +static VAR_PARSE * parse_variable( char const * * string ); +static int32_t try_parse_variable( char const * * s_, char const * * string, + VAR_PARSE_GROUP * out ); +static void balance_parentheses( char const * * s_, char const * * string, + VAR_PARSE_GROUP * out ); +static void parse_var_string( char const * first, char const * last, + struct dynamic_array * out ); + + +/* + * Parses a string that can contain variables to expand. 
+ */ + +static VAR_PARSE_GROUP * parse_expansion( char const * * string ) +{ + VAR_PARSE_GROUP * result = var_parse_group_new(); + char const * s = *string; + for ( ; ; ) + { + if ( try_parse_variable( &s, string, result ) ) {} + else if ( s[ 0 ] == '\0' ) + { + var_parse_group_maybe_add_constant( result, *string, s ); + return result; + } + else + ++s; + } +} + +static VAR_PARSE_ACTIONS * parse_actions( char const * string ) +{ + VAR_PARSE_ACTIONS * const result = var_parse_actions_new(); + parse_var_string( string, string + strlen( string ), result->elems ); + return result; +} + +/* + * Checks whether the string a *s_ starts with a variable expansion "$(". + * *string should point to the first unemitted character before *s. If *s_ + * starts with variable expansion, appends elements to out up to the closing + * ")", and adjusts *s_ and *string to point to next character. Returns 1 if s_ + * starts with a variable, 0 otherwise. + */ + +static int32_t try_parse_variable( char const * * s_, char const * * string, + VAR_PARSE_GROUP * out ) +{ + char const * s = *s_; + if ( s[ 0 ] == '$' && s[ 1 ] == '(' ) + { + var_parse_group_maybe_add_constant( out, *string, s ); + s += 2; + var_parse_group_add( out, parse_variable( &s ) ); + *string = s; + *s_ = s; + return 1; + } + if ( s[ 0 ] == '@' && s[ 1 ] == '(' ) + { + var_parse_group_maybe_add_constant( out, *string, s ); + s += 2; + VAR_PARSE_VAR *vp = (VAR_PARSE_VAR*)parse_variable( &s ); + /* We at least need the empty (:E) modifier. */ + if (var_parse_var_mod_index(vp, 'E') >= 0) + { + vp->base.type = VAR_PARSE_TYPE_FILE; + var_parse_group_add( out, (VAR_PARSE*)vp ); + *string = s; + *s_ = s; + return 1; + } + else + { + var_parse_var_free( vp ); + } + } + return 0; +} + + +static char const * current_file = ""; +static int32_t current_line; + +static void parse_error( char const * message ) +{ + out_printf( "%s:%d: %s\n", current_file, current_line, message ); +} + + +/* + * Parses a single variable up to the closing ")" and adjusts *string to point + * to the next character. *string should point to the character immediately + * after the initial "$(". 
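parse_expansion() and try_parse_variable() above scan a string left to right, turning literal runs into VAR_PARSE_STRING nodes and each $( ... ) into a VAR_PARSE_VAR, recursively, since names, subscripts and modifiers may themselves contain variables. A deliberately cut-down sketch of that outer scan, handling only flat $(name) references with literal names (illustrative only; no nesting, subscripts or modifiers):

#include <cassert>
#include <string>
#include <vector>

struct Piece { bool is_var; std::string text; };

static std::vector< Piece > scan( std::string const & s )
{
    std::vector< Piece > out;
    std::string::size_type pos = 0;
    for ( ; ; )
    {
        std::string::size_type dollar = s.find( "$(", pos );
        if ( dollar == std::string::npos ) break;
        std::string::size_type close = s.find( ')', dollar );
        if ( close == std::string::npos ) break;           /* unbalanced: stop scanning */
        if ( dollar > pos )
            out.push_back( { false, s.substr( pos, dollar - pos ) } );          /* literal run */
        out.push_back( { true, s.substr( dollar + 2, close - dollar - 2 ) } );  /* variable name */
        pos = close + 1;
    }
    if ( pos < s.size() )
        out.push_back( { false, s.substr( pos ) } );
    return out;
}

int main()
{
    std::vector< Piece > p = scan( "lib$(name).a" );
    assert( p.size() == 3 );
    assert( !p[ 0 ].is_var && p[ 0 ].text == "lib" );
    assert( p[ 1 ].is_var && p[ 1 ].text == "name" );
    assert( !p[ 2 ].is_var && p[ 2 ].text == ".a" );
}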
+ */ + +static VAR_PARSE * parse_variable( char const * * string ) +{ + VAR_PARSE_VAR * const result = var_parse_var_new(); + VAR_PARSE_GROUP * const name = result->name; + char const * s = *string; + for ( ; ; ) + { + if ( try_parse_variable( &s, string, name ) ) {} + else if ( s[ 0 ] == ':' ) + { + VAR_PARSE_GROUP * mod; + var_parse_group_maybe_add_constant( name, *string, s ); + ++s; + *string = s; + mod = var_parse_var_new_modifier( result ); + for ( ; ; ) + { + if ( try_parse_variable( &s, string, mod ) ) {} + else if ( s[ 0 ] == ')' ) + { + var_parse_group_maybe_add_constant( mod, *string, s ); + *string = ++s; + return (VAR_PARSE *)result; + } + else if ( s[ 0 ] == '(' ) + { + ++s; + balance_parentheses( &s, string, mod ); + } + else if ( s[ 0 ] == ':' ) + { + var_parse_group_maybe_add_constant( mod, *string, s ); + *string = ++s; + mod = var_parse_var_new_modifier( result ); + } + else if ( s[ 0 ] == '[' ) + { + parse_error("unexpected subscript"); + ++s; + } + else if ( s[ 0 ] == '\0' ) + { + parse_error( "unbalanced parentheses" ); + var_parse_group_maybe_add_constant( mod, *string, s ); + *string = s; + return (VAR_PARSE *)result; + } + else + ++s; + } + } + else if ( s[ 0 ] == '[' ) + { + VAR_PARSE_GROUP * subscript = var_parse_group_new(); + result->subscript = subscript; + var_parse_group_maybe_add_constant( name, *string, s ); + *string = ++s; + for ( ; ; ) + { + if ( try_parse_variable( &s, string, subscript ) ) {} + else if ( s[ 0 ] == ']' ) + { + var_parse_group_maybe_add_constant( subscript, *string, s ); + *string = ++s; + if ( s[ 0 ] != ')' && s[ 0 ] != ':' && s[ 0 ] != '\0' ) + parse_error( "unexpected text following []" ); + break; + } + else if ( isdigit( s[ 0 ] ) || s[ 0 ] == '-' ) + { + ++s; + } + else if ( s[ 0 ] == '\0' ) + { + parse_error( "malformed subscript" ); + break; + } + else + { + parse_error( "malformed subscript" ); + ++s; + } + } + } + else if ( s[ 0 ] == ')' ) + { + var_parse_group_maybe_add_constant( name, *string, s ); + *string = ++s; + return (VAR_PARSE *)result; + } + else if ( s[ 0 ] == '(' ) + { + ++s; + balance_parentheses( &s, string, name ); + } + else if ( s[ 0 ] == '\0' ) + { + parse_error( "unbalanced parentheses" ); + var_parse_group_maybe_add_constant( name, *string, s ); + *string = s; + return (VAR_PARSE *)result; + } + else + ++s; + } +} + +static void parse_var_string( char const * first, char const * last, + struct dynamic_array * out ) +{ + char const * saved = first; + while ( first != last ) + { + /* Handle whitespace. */ + while ( first != last && isspace( *first ) ) ++first; + if ( saved != first ) + { + VAR_PARSE_GROUP * const group = var_parse_group_new(); + var_parse_group_maybe_add_constant( group, saved, first ); + saved = first; + dynamic_array_push( out, group ); + } + if ( first == last ) break; + + /* Handle non-whitespace */ + { + VAR_PARSE_GROUP * group = var_parse_group_new(); + for ( ; ; ) + { + if ( first == last || isspace( *first ) ) + { + var_parse_group_maybe_add_constant( group, saved, first ); + saved = first; + break; + } + if ( try_parse_variable( &first, &saved, group ) ) + assert( first <= last ); + else + ++first; + } + dynamic_array_push( out, group ); + } + } +} + +/* + * Given that *s_ points to the character after a "(", parses up to the matching + * ")". *string should point to the first unemitted character before *s_. + * + * When the function returns, *s_ will point to the character after the ")", and + * *string will point to the first unemitted character before *s_. 
The range + * from *string to *s_ does not contain any variables that need to be expanded. + */ + +void balance_parentheses( char const * * s_, char const * * string, + VAR_PARSE_GROUP * out) +{ + int32_t depth = 1; + char const * s = *s_; + for ( ; ; ) + { + if ( try_parse_variable( &s, string, out ) ) { } + else if ( s[ 0 ] == ':' || s[ 0 ] == '[' ) + { + parse_error( "unbalanced parentheses" ); + ++s; + } + else if ( s[ 0 ] == '\0' ) + { + parse_error( "unbalanced parentheses" ); + break; + } + else if ( s[ 0 ] == ')' ) + { + ++s; + if ( --depth == 0 ) break; + } + else if ( s[ 0 ] == '(' ) + { + ++depth; + ++s; + } + else + ++s; + } + *s_ = s; +} + + +/* + * Main compile. + */ + +#define RESULT_STACK 0 +#define RESULT_RETURN 1 +#define RESULT_NONE 2 + +static void compile_parse( PARSE * parse, compiler * c, int32_t result_location ); +static struct arg_list * arg_list_compile( PARSE * parse, int32_t * num_arguments ); + +static void compile_condition( PARSE * parse, compiler * c, int32_t branch_true, int32_t label ) +{ + assert( parse->type == PARSE_EVAL ); + switch ( parse->num ) + { + case EXPR_EXISTS: + compile_parse( parse->left, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_NOT_EMPTY, label ); + else + compile_emit_branch( c, INSTR_JUMP_EMPTY, label ); + break; + + case EXPR_EQUALS: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_EQ, label ); + else + compile_emit_branch( c, INSTR_JUMP_NE, label ); + break; + + case EXPR_NOTEQ: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_NE, label ); + else + compile_emit_branch( c, INSTR_JUMP_EQ, label ); + break; + + case EXPR_LESS: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_LT, label ); + else + compile_emit_branch( c, INSTR_JUMP_GE, label ); + break; + + case EXPR_LESSEQ: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_LE, label ); + else + compile_emit_branch( c, INSTR_JUMP_GT, label ); + break; + + case EXPR_MORE: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_GT, label ); + else + compile_emit_branch( c, INSTR_JUMP_LE, label ); + break; + + case EXPR_MOREEQ: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_GE, label ); + else + compile_emit_branch( c, INSTR_JUMP_LT, label ); + break; + + case EXPR_IN: + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + if ( branch_true ) + compile_emit_branch( c, INSTR_JUMP_IN, label ); + else + compile_emit_branch( c, INSTR_JUMP_NOT_IN, label ); + break; + + case EXPR_AND: + if ( branch_true ) + { + int32_t f = compile_new_label( c ); + compile_condition( parse->left, c, 0, f ); + compile_condition( parse->right, c, 1, label ); + compile_set_label( c, f ); + } + else + { + compile_condition( parse->left, c, 0, label ); + compile_condition( parse->right, c, 0, label ); + } + break; + + case EXPR_OR: + if ( branch_true ) + { + compile_condition( parse->left, c, 1, label ); + 
compile_condition( parse->right, c, 1, label ); + } + else + { + int32_t t = compile_new_label( c ); + compile_condition( parse->left, c, 1, t ); + compile_condition( parse->right, c, 0, label ); + compile_set_label( c, t ); + } + break; + + case EXPR_NOT: + compile_condition( parse->left, c, !branch_true, label ); + break; + } +} + +static void adjust_result( compiler * c, int32_t actual_location, + int32_t desired_location ) +{ + if ( actual_location == desired_location ) + ; + else if ( actual_location == RESULT_STACK && desired_location == RESULT_RETURN ) + compile_emit( c, INSTR_SET_RESULT, 0 ); + else if ( actual_location == RESULT_STACK && desired_location == RESULT_NONE ) + compile_emit( c, INSTR_POP, 0 ); + else if ( actual_location == RESULT_RETURN && desired_location == RESULT_STACK ) + compile_emit( c, INSTR_PUSH_RESULT, 0 ); + else if ( actual_location == RESULT_RETURN && desired_location == RESULT_NONE ) + ; + else if ( actual_location == RESULT_NONE && desired_location == RESULT_STACK ) + compile_emit( c, INSTR_PUSH_EMPTY, 0 ); + else if ( actual_location == RESULT_NONE && desired_location == RESULT_RETURN ) + { + compile_emit( c, INSTR_PUSH_EMPTY, 0 ); + compile_emit( c, INSTR_SET_RESULT, 0 ); + } + else + assert( !"invalid result location" ); +} + +static void compile_append_chain( PARSE * parse, compiler * c ) +{ + assert( parse->type == PARSE_APPEND ); + if ( parse->left->type == PARSE_NULL ) + compile_parse( parse->right, c, RESULT_STACK ); + else + { + if ( parse->left->type == PARSE_APPEND ) + compile_append_chain( parse->left, c ); + else + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit( c, INSTR_PUSH_APPEND, 0 ); + } +} + +static void compile_emit_debug(compiler * c, int32_t line) +{ +#ifdef JAM_DEBUGGER + if ( debug_is_debugging() ) + compile_emit( c, INSTR_DEBUG_LINE, line ); +#endif +} + +static void compile_parse( PARSE * parse, compiler * c, int32_t result_location ) +{ + compile_emit_debug(c, parse->line); + if ( parse->type == PARSE_APPEND ) + { + compile_append_chain( parse, c ); + adjust_result( c, RESULT_STACK, result_location ); + } + else if ( parse->type == PARSE_EVAL ) + { + /* FIXME: This is only needed because of the bizarre parsing of + * conditions. + */ + if ( parse->num == EXPR_EXISTS ) + compile_parse( parse->left, c, result_location ); + else + { + int32_t f = compile_new_label( c ); + int32_t end = compile_new_label( c ); + + out_printf( "%s:%d: Conditional used as list (check operator " + "precedence).\n", object_str( parse->file ), parse->line ); + + /* Emit the condition */ + compile_condition( parse, c, 0, f ); + compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, + constant_true ) ); + compile_emit_branch( c, INSTR_JUMP, end ); + compile_set_label( c, f ); + compile_emit( c, INSTR_PUSH_EMPTY, 0 ); + compile_set_label( c, end ); + adjust_result( c, RESULT_STACK, result_location ); + } + } + else if ( parse->type == PARSE_FOREACH ) + { + int32_t var = compile_emit_constant( c, parse->string ); + int32_t top = compile_new_label( c ); + int32_t end = compile_new_label( c ); + int32_t continue_ = compile_new_label( c ); + + /* + * Evaluate the list. 
+ */ + compile_parse( parse->left, c, RESULT_STACK ); + + /* Localize the loop variable */ + if ( parse->num ) + { + compile_emit( c, INSTR_PUSH_EMPTY, 0 ); + compile_emit( c, INSTR_PUSH_LOCAL, var ); + compile_emit( c, INSTR_SWAP, 1 ); + compile_push_cleanup( c, INSTR_POP_LOCAL, var ); + } + + compile_emit( c, INSTR_FOR_INIT, 0 ); + compile_set_label( c, top ); + compile_emit_branch( c, INSTR_FOR_LOOP, end ); + compile_emit_debug( c, parse->line ); + compile_emit( c, INSTR_SET, var ); + + compile_push_break_scope( c, end ); + compile_push_cleanup( c, INSTR_FOR_POP, 0 ); + compile_push_continue_scope( c, continue_ ); + + /* Run the loop body */ + compile_parse( parse->right, c, RESULT_NONE ); + + compile_pop_continue_scope( c ); + compile_pop_cleanup( c ); + compile_pop_break_scope( c ); + + compile_set_label( c, continue_ ); + compile_emit_branch( c, INSTR_JUMP, top ); + compile_set_label( c, end ); + + if ( parse->num ) + { + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_LOCAL, var ); + } + + adjust_result( c, RESULT_NONE, result_location); + } + else if ( parse->type == PARSE_IF ) + { + int32_t f = compile_new_label( c ); + /* Emit the condition */ + compile_condition( parse->left, c, 0, f ); + /* Emit the if block */ + compile_parse( parse->right, c, result_location ); + if ( parse->third->type != PARSE_NULL || result_location != RESULT_NONE ) + { + /* Emit the else block */ + int32_t end = compile_new_label( c ); + compile_emit_branch( c, INSTR_JUMP, end ); + compile_set_label( c, f ); + compile_parse( parse->third, c, result_location ); + compile_set_label( c, end ); + } + else + compile_set_label( c, f ); + + } + else if ( parse->type == PARSE_WHILE ) + { + int32_t nested_result = result_location == RESULT_NONE + ? RESULT_NONE + : RESULT_RETURN; + int32_t test = compile_new_label( c ); + int32_t top = compile_new_label( c ); + int32_t end = compile_new_label( c ); + /* Make sure that we return an empty list if the loop runs zero times. + */ + adjust_result( c, RESULT_NONE, nested_result ); + /* Jump to the loop test. */ + compile_emit_branch( c, INSTR_JUMP, test ); + compile_set_label( c, top ); + /* Emit the loop body. */ + compile_push_break_scope( c, end ); + compile_push_continue_scope( c, test ); + compile_parse( parse->right, c, nested_result ); + compile_pop_continue_scope( c ); + compile_pop_break_scope( c ); + /* Emit the condition. */ + compile_set_label( c, test ); + compile_condition( parse->left, c, 1, top ); + compile_set_label( c, end ); + + adjust_result( c, nested_result, result_location ); + } + else if ( parse->type == PARSE_INCLUDE ) + { + compile_parse( parse->left, c, RESULT_STACK ); + compile_emit( c, INSTR_INCLUDE, 0 ); + compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 ); + adjust_result( c, RESULT_NONE, result_location ); + } + else if ( parse->type == PARSE_MODULE ) + { + int32_t const nested_result = result_location == RESULT_NONE + ? RESULT_NONE + : RESULT_RETURN; + compile_parse( parse->left, c, RESULT_STACK ); + compile_emit( c, INSTR_PUSH_MODULE, 0 ); + compile_push_cleanup( c, INSTR_POP_MODULE, 0 ); + compile_parse( parse->right, c, nested_result ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_MODULE, 0 ); + adjust_result( c, nested_result, result_location ); + } + else if ( parse->type == PARSE_CLASS ) + { + /* Evaluate the class name. */ + compile_parse( parse->left->right, c, RESULT_STACK ); + /* Evaluate the base classes. 
*/ + if ( parse->left->left ) + compile_parse( parse->left->left->right, c, RESULT_STACK ); + else + compile_emit( c, INSTR_PUSH_EMPTY, 0 ); + compile_emit( c, INSTR_CLASS, 0 ); + compile_push_cleanup( c, INSTR_POP_MODULE, 0 ); + compile_parse( parse->right, c, RESULT_NONE ); + compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_MODULE, 0 ); + + adjust_result( c, RESULT_NONE, result_location ); + } + else if ( parse->type == PARSE_LIST ) + { + OBJECT * const o = parse->string; + char const * s = object_str( o ); + VAR_PARSE_GROUP * group; + current_file = object_str( parse->file ); + current_line = parse->line; + group = parse_expansion( &s ); + var_parse_group_compile( group, c ); + var_parse_group_free( group ); + adjust_result( c, RESULT_STACK, result_location ); + } + else if ( parse->type == PARSE_LOCAL ) + { + int32_t nested_result = result_location == RESULT_NONE + ? RESULT_NONE + : RESULT_RETURN; + /* This should be left recursive group of compile_appends. */ + PARSE * vars = parse->left; + + /* Special case an empty list of vars */ + if ( vars->type == PARSE_NULL ) + { + compile_parse( parse->right, c, RESULT_NONE ); + compile_parse( parse->third, c, result_location ); + nested_result = result_location; + } + /* Check whether there is exactly one variable with a constant name. */ + else if ( vars->left->type == PARSE_NULL && + vars->right->type == PARSE_LIST ) + { + char const * s = object_str( vars->right->string ); + VAR_PARSE_GROUP * group; + current_file = object_str( parse->file ); + current_line = parse->line; + group = parse_expansion( &s ); + if ( group->elems->size == 1 && dynamic_array_at( VAR_PARSE *, + group->elems, 0 )->type == VAR_PARSE_TYPE_STRING ) + { + int32_t const name = compile_emit_constant( c, ( + (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, + group->elems, 0 ) )->s ); + var_parse_group_free( group ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit_debug(c, parse->line); + compile_emit( c, INSTR_PUSH_LOCAL, name ); + compile_push_cleanup( c, INSTR_POP_LOCAL, name ); + compile_parse( parse->third, c, nested_result ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_LOCAL, name ); + } + else + { + var_parse_group_compile( group, c ); + var_parse_group_free( group ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit_debug(c, parse->line); + compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 ); + compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 ); + compile_parse( parse->third, c, nested_result ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 ); + } + } + else + { + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit_debug(c, parse->line); + compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 ); + compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 ); + compile_parse( parse->third, c, nested_result ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 ); + } + adjust_result( c, nested_result, result_location ); + } + else if ( parse->type == PARSE_ON ) + { + if ( parse->right->type == PARSE_APPEND && + parse->right->left->type == PARSE_NULL && + parse->right->right->type == PARSE_LIST ) + { + /* [ on $(target) return $(variable) ] */ + PARSE * value = parse->right->right; + OBJECT * const o = value->string; + char const * s = object_str( o ); + VAR_PARSE_GROUP * group; + OBJECT * varname = 0; + current_file = object_str( value->file ); + current_line = value->line; + group = 
parse_expansion( &s ); + if ( group->elems->size == 1 ) + { + VAR_PARSE * one = dynamic_array_at( VAR_PARSE *, group->elems, 0 ); + if ( one->type == VAR_PARSE_TYPE_VAR ) + { + VAR_PARSE_VAR * var = ( VAR_PARSE_VAR * )one; + if ( var->modifiers->size == 0 && !var->subscript && var->name->elems->size == 1 ) + { + VAR_PARSE * name = dynamic_array_at( VAR_PARSE *, var->name->elems, 0 ); + if ( name->type == VAR_PARSE_TYPE_STRING ) + { + varname = ( ( VAR_PARSE_STRING * )name )->s; + } + } + } + } + if ( varname ) + { + /* We have one variable with a fixed name and no modifiers. */ + compile_parse( parse->left, c, RESULT_STACK ); + compile_emit( c, INSTR_GET_ON, compile_emit_constant( c, varname ) ); + } + else + { + /* Too complex. Fall back on push/pop. */ + int32_t end = compile_new_label( c ); + compile_parse( parse->left, c, RESULT_STACK ); + compile_emit_branch( c, INSTR_PUSH_ON, end ); + compile_push_cleanup( c, INSTR_POP_ON, 0 ); + var_parse_group_compile( group, c ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_ON, 0 ); + compile_set_label( c, end ); + } + var_parse_group_free( group ); + } + else + { + int32_t end = compile_new_label( c ); + compile_parse( parse->left, c, RESULT_STACK ); + compile_emit_branch( c, INSTR_PUSH_ON, end ); + compile_push_cleanup( c, INSTR_POP_ON, 0 ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_pop_cleanup( c ); + compile_emit( c, INSTR_POP_ON, 0 ); + compile_set_label( c, end ); + } + adjust_result( c, RESULT_STACK, result_location ); + } + else if ( parse->type == PARSE_RULE ) + { + PARSE * p; + int32_t n = 0; + VAR_PARSE_GROUP * group; + char const * s = object_str( parse->string ); + + if ( parse->left->left || parse->left->right->type != PARSE_NULL ) + for ( p = parse->left; p; p = p->left ) + { + compile_parse( p->right, c, RESULT_STACK ); + ++n; + } + + current_file = object_str( parse->file ); + current_line = parse->line; + group = parse_expansion( &s ); + + if ( group->elems->size == 2 && + dynamic_array_at( VAR_PARSE *, group->elems, 0 )->type == VAR_PARSE_TYPE_VAR && + dynamic_array_at( VAR_PARSE *, group->elems, 1 )->type == VAR_PARSE_TYPE_STRING && + ( object_str( ( (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 1 ) )->s )[ 0 ] == '.' 
) ) + { + VAR_PARSE_STRING * access = (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 1 ); + OBJECT * member = object_new( object_str( access->s ) + 1 ); + /* Emit the object */ + var_parse_var_compile( (VAR_PARSE_VAR *)dynamic_array_at( VAR_PARSE *, group->elems, 0 ), c ); + var_parse_group_free( group ); + compile_emit( c, INSTR_CALL_MEMBER_RULE, n ); + compile_emit( c, compile_emit_constant( c, member ), parse->line ); + object_free( member ); + } + else + { + var_parse_group_compile( group, c ); + var_parse_group_free( group ); + compile_emit( c, INSTR_CALL_RULE, n ); + compile_emit( c, compile_emit_constant( c, parse->string ), parse->line ); + } + + adjust_result( c, RESULT_STACK, result_location ); + } + else if ( parse->type == PARSE_RULES ) + { + do compile_parse( parse->left, c, RESULT_NONE ); + while ( ( parse = parse->right )->type == PARSE_RULES ); + compile_parse( parse, c, result_location ); + } + else if ( parse->type == PARSE_SET ) + { + PARSE * vars = parse->left; + uint32_t op_code; + uint32_t op_code_group; + + switch ( parse->num ) + { + case ASSIGN_APPEND: op_code = INSTR_APPEND; op_code_group = INSTR_APPEND_GROUP; break; + case ASSIGN_DEFAULT: op_code = INSTR_DEFAULT; op_code_group = INSTR_DEFAULT_GROUP; break; + default: op_code = INSTR_SET; op_code_group = INSTR_SET_GROUP; break; + } + + /* Check whether there is exactly one variable with a constant name. */ + if ( vars->type == PARSE_LIST ) + { + char const * s = object_str( vars->string ); + VAR_PARSE_GROUP * group; + current_file = object_str( parse->file ); + current_line = parse->line; + group = parse_expansion( &s ); + if ( group->elems->size == 1 && dynamic_array_at( VAR_PARSE *, + group->elems, 0 )->type == VAR_PARSE_TYPE_STRING ) + { + int32_t const name = compile_emit_constant( c, ( + (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, + group->elems, 0 ) )->s ); + var_parse_group_free( group ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit_debug(c, parse->line); + if ( result_location != RESULT_NONE ) + { + compile_emit( c, INSTR_SET_RESULT, 1 ); + } + compile_emit( c, op_code, name ); + } + else + { + var_parse_group_compile( group, c ); + var_parse_group_free( group ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit_debug(c, parse->line); + if ( result_location != RESULT_NONE ) + { + compile_emit( c, INSTR_SET_RESULT, 1 ); + } + compile_emit( c, op_code_group, 0 ); + } + } + else + { + compile_parse( parse->left, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + compile_emit_debug(c, parse->line); + if ( result_location != RESULT_NONE ) + { + compile_emit( c, INSTR_SET_RESULT, 1 ); + } + compile_emit( c, op_code_group, 0 ); + } + if ( result_location != RESULT_NONE ) + { + adjust_result( c, RESULT_RETURN, result_location ); + } + } + else if ( parse->type == PARSE_SETCOMP ) + { + int32_t n_args; + struct arg_list * args = arg_list_compile( parse->right, &n_args ); + int32_t const rule_id = compile_emit_rule( c, parse->string, parse->left, + n_args, args, parse->num ); + compile_emit( c, INSTR_RULE, rule_id ); + adjust_result( c, RESULT_NONE, result_location ); + } + else if ( parse->type == PARSE_SETEXEC ) + { + int32_t const actions_id = compile_emit_actions( c, parse ); + compile_parse( parse->left, c, RESULT_STACK ); + compile_emit( c, INSTR_ACTIONS, actions_id ); + adjust_result( c, RESULT_NONE, result_location ); + } + else if ( parse->type == PARSE_SETTINGS ) + { + compile_parse( parse->left, c, RESULT_STACK ); + 
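/* This PARSE_SETTINGS branch compiles Jam's target-scoped assignments, e.g.
 * "DEFINES on $(exe) += NDEBUG ;": the variable names, the value and the
 * target list are evaluated onto the stack, then one of INSTR_SET_ON,
 * INSTR_APPEND_ON or INSTR_DEFAULT_ON is chosen from parse->num. The run-time
 * effect, restated over a plain map as a standalone sketch (Settings and
 * AssignOp are illustrative names, not the engine's SETTINGS chain):
 */

#include <cstdio>
#include <map>
#include <string>
#include <vector>

using List = std::vector<std::string>;
using Settings = std::map<std::string, List>;    /* one target's variable settings */

enum class AssignOp { Set, Append, Default };

void assign_on( Settings & target, const std::string & var, const List & value,
    AssignOp op )
{
    List & slot = target[ var ];
    switch ( op )
    {
        case AssignOp::Set:                      /* "on ... ="  */
            slot = value;
            break;
        case AssignOp::Append:                   /* "on ... +=" */
            slot.insert( slot.end(), value.begin(), value.end() );
            break;
        case AssignOp::Default:                  /* "on ... ?=" sets only if unset */
            if ( slot.empty() )
                slot = value;
            break;
    }
}

int main()
{
    Settings exe;
    assign_on( exe, "DEFINES", { "NDEBUG" }, AssignOp::Set );
    assign_on( exe, "DEFINES", { "BOOST_ALL_NO_LIB" }, AssignOp::Append );
    assign_on( exe, "DEFINES", { "IGNORED" }, AssignOp::Default );
    for ( const std::string & v : exe[ "DEFINES" ] )
        std::printf( "%s\n", v.c_str() );        /* NDEBUG, BOOST_ALL_NO_LIB */
    return 0;
}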
compile_parse( parse->third, c, RESULT_STACK ); + compile_parse( parse->right, c, RESULT_STACK ); + + compile_emit_debug(c, parse->line); + switch ( parse->num ) + { + case ASSIGN_APPEND: compile_emit( c, INSTR_APPEND_ON, 0 ); break; + case ASSIGN_DEFAULT: compile_emit( c, INSTR_DEFAULT_ON, 0 ); break; + default: compile_emit( c, INSTR_SET_ON, 0 ); break; + } + + adjust_result( c, RESULT_STACK, result_location ); + } + else if ( parse->type == PARSE_SWITCH ) + { + int32_t const switch_end = compile_new_label( c ); + compile_parse( parse->left, c, RESULT_STACK ); + + for ( parse = parse->right; parse; parse = parse->right ) + { + int32_t const id = compile_emit_constant( c, parse->left->string ); + int32_t const next_case = compile_new_label( c ); + compile_emit( c, INSTR_PUSH_CONSTANT, id ); + compile_emit_branch( c, INSTR_JUMP_NOT_GLOB, next_case ); + compile_parse( parse->left->left, c, result_location ); + compile_emit_branch( c, INSTR_JUMP, switch_end ); + compile_set_label( c, next_case ); + } + compile_emit( c, INSTR_POP, 0 ); + adjust_result( c, RESULT_NONE, result_location ); + compile_set_label( c, switch_end ); + } + else if ( parse->type == PARSE_RETURN ) + { + compile_parse( parse->left, c, RESULT_RETURN ); + compile_emit_cleanups( c, 0 ); + compile_emit( c, INSTR_RETURN, 0 ); /* 0 for return in the middle of a function. */ + } + else if ( parse->type == PARSE_BREAK ) + { + compile_emit_loop_jump( c, LOOP_INFO_BREAK ); + } + else if ( parse->type == PARSE_CONTINUE ) + { + compile_emit_loop_jump( c, LOOP_INFO_CONTINUE ); + } + else if ( parse->type == PARSE_NULL ) + adjust_result( c, RESULT_NONE, result_location ); + else + assert( !"unknown PARSE type." ); +} + +OBJECT * function_rulename( FUNCTION * function ) +{ + return function->rulename; +} + +void function_set_rulename( FUNCTION * function, OBJECT * rulename ) +{ + function->rulename = rulename; +} + +void function_location( FUNCTION * function_, OBJECT * * file, int32_t * line ) +{ + if ( function_->type == FUNCTION_BUILTIN ) + { + *file = constant_builtin; + *line = -1; + } +#ifdef HAVE_PYTHON + if ( function_->type == FUNCTION_PYTHON ) + { + *file = constant_builtin; + *line = -1; + } +#endif + else + { + JAM_FUNCTION * function = (JAM_FUNCTION *)function_; + assert( function_->type == FUNCTION_JAM ); + *file = function->file; + *line = function->line; + } +} + +static struct arg_list * arg_list_compile_builtin( char const * * args, + int32_t * num_arguments ); + +FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int32_t flags ), + int32_t flags, char const * * args ) +{ + BUILTIN_FUNCTION * result = (BUILTIN_FUNCTION*)BJAM_MALLOC( sizeof( BUILTIN_FUNCTION ) ); + result->base.type = FUNCTION_BUILTIN; + result->base.reference_count = 1; + result->base.rulename = 0; + result->base.formal_arguments = arg_list_compile_builtin( args, + &result->base.num_formal_arguments ); + result->func = func; + result->flags = flags; + return (FUNCTION *)result; +} + +FUNCTION * function_compile( PARSE * parse ) +{ + compiler c[ 1 ]; + JAM_FUNCTION * result; + compiler_init( c ); + compile_parse( parse, c, RESULT_RETURN ); + compile_emit( c, INSTR_RETURN, 1 ); + result = compile_to_function( c ); + compiler_free( c ); + result->file = object_copy( parse->file ); + result->line = parse->line; + return (FUNCTION *)result; +} + +FUNCTION * function_compile_actions( char const * actions, OBJECT * file, + int32_t line ) +{ + compiler c[ 1 ]; + JAM_FUNCTION * result; + VAR_PARSE_ACTIONS * parse; + current_file = object_str( file 
); + current_line = line; + parse = parse_actions( actions ); + compiler_init( c ); + var_parse_actions_compile( parse, c ); + var_parse_actions_free( parse ); + compile_emit( c, INSTR_RETURN, 1 ); + result = compile_to_function( c ); + compiler_free( c ); + result->file = object_copy( file ); + result->line = line; + return (FUNCTION *)result; +} + +static void argument_list_print( struct arg_list * args, int32_t num_args ); + + +/* Define delimiters for type check elements in argument lists (and return type + * specifications, eventually). + */ +# define TYPE_OPEN_DELIM '[' +# define TYPE_CLOSE_DELIM ']' + +/* + * is_type_name() - true iff the given string represents a type check + * specification. + */ + +int32_t is_type_name( char const * s ) +{ + return s[ 0 ] == TYPE_OPEN_DELIM && s[ strlen( s ) - 1 ] == + TYPE_CLOSE_DELIM; +} + +static void argument_error( char const * message, FUNCTION * procedure, + FRAME * frame, OBJECT * arg ) +{ + extern void print_source_line( FRAME * ); + LOL * actual = frame->args; + backtrace_line( frame->prev ); + out_printf( "*** argument error\n* rule %s ( ", frame->rulename ); + argument_list_print( procedure->formal_arguments, + procedure->num_formal_arguments ); + out_printf( " )\n* called with: ( " ); + lol_print( actual ); + out_printf( " )\n* %s %s\n", message, arg ? object_str ( arg ) : "" ); + function_location( procedure, &frame->file, &frame->line ); + print_source_line( frame ); + out_printf( "see definition of rule '%s' being called\n", frame->rulename ); + backtrace( frame->prev ); + b2::clean_exit( EXITBAD ); +} + +static void type_check_range( OBJECT * type_name, LISTITER iter, LISTITER end, + FRAME * caller, FUNCTION * called, OBJECT * arg_name ) +{ + static module_t * typecheck = 0; + + /* If nothing to check, bail now. */ + if ( iter == end || !type_name ) + return; + + if ( !typecheck ) + typecheck = bindmodule( constant_typecheck ); + + /* If the checking rule can not be found, also bail. */ + if ( !typecheck->rules || !hash_find( typecheck->rules, type_name ) ) + return; + + for ( ; iter != end; iter = list_next( iter ) ) + { + LIST * error; + FRAME frame[ 1 ]; + frame_init( frame ); + frame->module = typecheck; + frame->prev = caller; + frame->prev_user = caller->module->user_module + ? 
caller + : caller->prev_user; + + /* Prepare the argument list */ + lol_add( frame->args, list_new( object_copy( list_item( iter ) ) ) ); + error = evaluate_rule( bindrule( type_name, frame->module ), type_name, frame ); + + if ( !list_empty( error ) ) + argument_error( object_str( list_front( error ) ), called, caller, + arg_name ); + + frame_free( frame ); + } +} + +static void type_check( OBJECT * type_name, LIST * values, FRAME * caller, + FUNCTION * called, OBJECT * arg_name ) +{ + type_check_range( type_name, list_begin( values ), list_end( values ), + caller, called, arg_name ); +} + +void argument_list_check( struct arg_list * formal, int32_t formal_count, + FUNCTION * function, FRAME * frame ) +{ + LOL * all_actual = frame->args; + int32_t i; + + for ( i = 0; i < formal_count; ++i ) + { + LIST * actual = lol_get( all_actual, i ); + LISTITER actual_iter = list_begin( actual ); + LISTITER const actual_end = list_end( actual ); + int32_t j; + for ( j = 0; j < formal[ i ].size; ++j ) + { + struct argument * formal_arg = &formal[ i ].args[ j ]; + + switch ( formal_arg->flags ) + { + case ARG_ONE: + if ( actual_iter == actual_end ) + argument_error( "missing argument", function, frame, + formal_arg->arg_name ); + type_check_range( formal_arg->type_name, actual_iter, + list_next( actual_iter ), frame, function, + formal_arg->arg_name ); + actual_iter = list_next( actual_iter ); + break; + case ARG_OPTIONAL: + if ( actual_iter != actual_end ) + { + type_check_range( formal_arg->type_name, actual_iter, + list_next( actual_iter ), frame, function, + formal_arg->arg_name ); + actual_iter = list_next( actual_iter ); + } + break; + case ARG_PLUS: + if ( actual_iter == actual_end ) + argument_error( "missing argument", function, frame, + formal_arg->arg_name ); + /* fallthrough */ + case ARG_STAR: + type_check_range( formal_arg->type_name, actual_iter, + actual_end, frame, function, formal_arg->arg_name ); + actual_iter = actual_end; + break; + case ARG_VARIADIC: + return; + } + } + + if ( actual_iter != actual_end ) + argument_error( "extra argument", function, frame, list_item( + actual_iter ) ); + } + + for ( ; i < all_actual->count; ++i ) + { + LIST * actual = lol_get( all_actual, i ); + if ( !list_empty( actual ) ) + argument_error( "extra argument", function, frame, list_front( + actual ) ); + } +} + +void argument_list_push( struct arg_list * formal, int32_t formal_count, + FUNCTION * function, FRAME * frame, STACK * s ) +{ + LOL * all_actual = frame->args; + int32_t i; + + for ( i = 0; i < formal_count; ++i ) + { + LIST * actual = lol_get( all_actual, i ); + LISTITER actual_iter = list_begin( actual ); + LISTITER const actual_end = list_end( actual ); + int32_t j; + for ( j = 0; j < formal[ i ].size; ++j ) + { + struct argument * formal_arg = &formal[ i ].args[ j ]; + LIST * value = L0; + + switch ( formal_arg->flags ) + { + case ARG_ONE: + if ( actual_iter == actual_end ) + argument_error( "missing argument", function, frame, + formal_arg->arg_name ); + value = list_new( object_copy( list_item( actual_iter ) ) ); + actual_iter = list_next( actual_iter ); + break; + case ARG_OPTIONAL: + if ( actual_iter == actual_end ) + value = L0; + else + { + value = list_new( object_copy( list_item( actual_iter ) ) ); + actual_iter = list_next( actual_iter ); + } + break; + case ARG_PLUS: + if ( actual_iter == actual_end ) + argument_error( "missing argument", function, frame, + formal_arg->arg_name ); + /* fallthrough */ + case ARG_STAR: + value = list_copy_range( actual, actual_iter, actual_end ); 
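/* The flags handled here map directly onto the markers in a Jam rule
 * signature: a bare name consumes exactly one element (ARG_ONE), "name ?"
 * zero or one (ARG_OPTIONAL), "name +" one or more (ARG_PLUS), "name *" the
 * rest (ARG_STAR), and a lone "*" makes the rule variadic. A standalone
 * sketch of the same matching, assuming made-up Flag/Formal types rather than
 * the engine's struct argument:
 */

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

enum class Flag { One, Optional, Plus, Star };
struct Formal { std::string name; Flag flag; };
using Binding = std::pair<std::string, std::vector<std::string>>;

/* Returns false on a missing argument; unconsumed actuals mean "extra
 * argument", the same two errors argument_list_check() reports. */
bool bind_args( const std::vector<Formal> & formals,
    const std::vector<std::string> & actuals, std::vector<Binding> & out )
{
    size_t i = 0;
    for ( const Formal & f : formals )
    {
        std::vector<std::string> value;
        switch ( f.flag )
        {
            case Flag::One:                      /* exactly one element */
                if ( i == actuals.size() ) return false;
                value.push_back( actuals[ i++ ] );
                break;
            case Flag::Optional:                 /* zero or one */
                if ( i != actuals.size() ) value.push_back( actuals[ i++ ] );
                break;
            case Flag::Plus:                     /* one or more ... */
                if ( i == actuals.size() ) return false;
                /* fall through: consume the rest, as ARG_PLUS does above */
            case Flag::Star:                     /* ... zero or more */
                while ( i != actuals.size() ) value.push_back( actuals[ i++ ] );
                break;
        }
        out.push_back( Binding( f.name, value ) );
    }
    return i == actuals.size();
}

int main()
{
    std::vector<Binding> bound;
    bool ok = bind_args( { { "target", Flag::One }, { "sources", Flag::Star } },
        { "app", "a.cpp", "b.cpp" }, bound );
    std::printf( "%s: %d sources\n", ok ? "ok" : "error",
        (int)bound[ 1 ].second.size() );
    return 0;
}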
+ actual_iter = actual_end; + break; + case ARG_VARIADIC: + return; + } + + type_check( formal_arg->type_name, value, frame, function, + formal_arg->arg_name ); + + if ( formal_arg->index != -1 ) + { + LIST * * const old = &frame->module->fixed_variables[ + formal_arg->index ]; + stack_push( s, *old ); + *old = value; + } + else + stack_push( s, var_swap( frame->module, formal_arg->arg_name, + value ) ); + } + + if ( actual_iter != actual_end ) + argument_error( "extra argument", function, frame, list_item( + actual_iter ) ); + } + + for ( ; i < all_actual->count; ++i ) + { + LIST * const actual = lol_get( all_actual, i ); + if ( !list_empty( actual ) ) + argument_error( "extra argument", function, frame, list_front( + actual ) ); + } +} + +void argument_list_pop( struct arg_list * formal, int32_t formal_count, + FRAME * frame, STACK * s ) +{ + int32_t i; + for ( i = formal_count - 1; i >= 0; --i ) + { + int32_t j; + for ( j = formal[ i ].size - 1; j >= 0 ; --j ) + { + struct argument * formal_arg = &formal[ i ].args[ j ]; + + if ( formal_arg->flags == ARG_VARIADIC ) + continue; + if ( formal_arg->index != -1 ) + { + LIST * const old = stack_pop( s ); + LIST * * const pos = &frame->module->fixed_variables[ + formal_arg->index ]; + list_free( *pos ); + *pos = old; + } + else + var_set( frame->module, formal_arg->arg_name, stack_pop( s ), + VAR_SET ); + } + } +} + + +struct argument_compiler +{ + struct dynamic_array args[ 1 ]; + struct argument arg; + int32_t state; +#define ARGUMENT_COMPILER_START 0 +#define ARGUMENT_COMPILER_FOUND_TYPE 1 +#define ARGUMENT_COMPILER_FOUND_OBJECT 2 +#define ARGUMENT_COMPILER_DONE 3 +}; + + +static void argument_compiler_init( struct argument_compiler * c ) +{ + dynamic_array_init( c->args ); + c->state = ARGUMENT_COMPILER_START; +} + +static void argument_compiler_free( struct argument_compiler * c ) +{ + dynamic_array_free( c->args ); +} + +static void argument_compiler_add( struct argument_compiler * c, OBJECT * arg, + OBJECT * file, int32_t line ) +{ + switch ( c->state ) + { + case ARGUMENT_COMPILER_FOUND_OBJECT: + + if ( object_equal( arg, constant_question_mark ) ) + { + c->arg.flags = ARG_OPTIONAL; + } + else if ( object_equal( arg, constant_plus ) ) + { + c->arg.flags = ARG_PLUS; + } + else if ( object_equal( arg, constant_star ) ) + { + c->arg.flags = ARG_STAR; + } + + dynamic_array_push( c->args, c->arg ); + c->state = ARGUMENT_COMPILER_START; + + if ( c->arg.flags != ARG_ONE ) + break; + /* fall-through */ + + case ARGUMENT_COMPILER_START: + + c->arg.type_name = 0; + c->arg.index = -1; + c->arg.flags = ARG_ONE; + + if ( is_type_name( object_str( arg ) ) ) + { + c->arg.type_name = object_copy( arg ); + c->state = ARGUMENT_COMPILER_FOUND_TYPE; + break; + } + /* fall-through */ + + case ARGUMENT_COMPILER_FOUND_TYPE: + + if ( is_type_name( object_str( arg ) ) ) + { + err_printf( "%s:%d: missing argument name before type name: %s\n", + object_str( file ), line, object_str( arg ) ); + b2::clean_exit( EXITBAD ); + } + + c->arg.arg_name = object_copy( arg ); + if ( object_equal( arg, constant_star ) ) + { + c->arg.flags = ARG_VARIADIC; + dynamic_array_push( c->args, c->arg ); + c->state = ARGUMENT_COMPILER_DONE; + } + else + { + c->state = ARGUMENT_COMPILER_FOUND_OBJECT; + } + break; + + case ARGUMENT_COMPILER_DONE: + break; + } +} + +static void argument_compiler_recurse( struct argument_compiler * c, + PARSE * parse ) +{ + if ( parse->type == PARSE_APPEND ) + { + argument_compiler_recurse( c, parse->left ); + argument_compiler_recurse( c, parse->right 
); + } + else if ( parse->type != PARSE_NULL ) + { + assert( parse->type == PARSE_LIST ); + argument_compiler_add( c, parse->string, parse->file, parse->line ); + } +} + +static struct arg_list arg_compile_impl( struct argument_compiler * c, + OBJECT * file, int32_t line ) +{ + struct arg_list result; + switch ( c->state ) + { + case ARGUMENT_COMPILER_START: + case ARGUMENT_COMPILER_DONE: + break; + case ARGUMENT_COMPILER_FOUND_TYPE: + err_printf( "%s:%d: missing argument name after type name: %s\n", + object_str( file ), line, object_str( c->arg.type_name ) ); + b2::clean_exit( EXITBAD ); + case ARGUMENT_COMPILER_FOUND_OBJECT: + dynamic_array_push( c->args, c->arg ); + break; + } + result.size = c->args->size; + result.args = (struct argument*)BJAM_MALLOC( c->args->size * sizeof( struct argument ) ); + if ( c->args->size != 0 ) + memcpy( result.args, c->args->data, + c->args->size * sizeof( struct argument ) ); + return result; +} + +static struct arg_list arg_compile( PARSE * parse ) +{ + struct argument_compiler c[ 1 ]; + struct arg_list result; + argument_compiler_init( c ); + argument_compiler_recurse( c, parse ); + result = arg_compile_impl( c, parse->file, parse->line ); + argument_compiler_free( c ); + return result; +} + +struct argument_list_compiler +{ + struct dynamic_array args[ 1 ]; +}; + +static void argument_list_compiler_init( struct argument_list_compiler * c ) +{ + dynamic_array_init( c->args ); +} + +static void argument_list_compiler_free( struct argument_list_compiler * c ) +{ + dynamic_array_free( c->args ); +} + +static void argument_list_compiler_add( struct argument_list_compiler * c, + PARSE * parse ) +{ + struct arg_list args = arg_compile( parse ); + dynamic_array_push( c->args, args ); +} + +static void argument_list_compiler_recurse( struct argument_list_compiler * c, + PARSE * parse ) +{ + if ( parse ) + { + argument_list_compiler_add( c, parse->right ); + argument_list_compiler_recurse( c, parse->left ); + } +} + +static struct arg_list * arg_list_compile( PARSE * parse, int32_t * num_arguments ) +{ + if ( parse ) + { + struct argument_list_compiler c[ 1 ]; + struct arg_list * result; + argument_list_compiler_init( c ); + argument_list_compiler_recurse( c, parse ); + *num_arguments = c->args->size; + result = (struct arg_list*)BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) ); + memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list ) + ); + argument_list_compiler_free( c ); + return result; + } + *num_arguments = 0; + return 0; +} + +static struct arg_list * arg_list_compile_builtin( char const * * args, + int32_t * num_arguments ) +{ + if ( args ) + { + struct argument_list_compiler c[ 1 ]; + struct arg_list * result; + argument_list_compiler_init( c ); + while ( *args ) + { + struct argument_compiler arg_comp[ 1 ]; + struct arg_list arg; + argument_compiler_init( arg_comp ); + for ( ; *args; ++args ) + { + OBJECT * token; + if ( strcmp( *args, ":" ) == 0 ) + { + ++args; + break; + } + token = object_new( *args ); + argument_compiler_add( arg_comp, token, constant_builtin, -1 ); + object_free( token ); + } + arg = arg_compile_impl( arg_comp, constant_builtin, -1 ); + dynamic_array_push( c->args, arg ); + argument_compiler_free( arg_comp ); + } + *num_arguments = c->args->size; + result = (struct arg_list *)BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) ); + if ( c->args->size != 0 ) + memcpy( result, c->args->data, + c->args->size * sizeof( struct arg_list ) ); + argument_list_compiler_free( c ); + return result; + } + 
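/* Built-in rules describe their formals to function_builtin() as a flat,
 * NULL-terminated array of C strings in which ":" separates the individual
 * argument lists -- essentially a Jam signature spelled out token by token,
 * as the loop above shows. A standalone sketch of that format (split_spec and
 * the example signature are illustrative, not taken from the engine):
 */

#include <cstdio>
#include <cstring>
#include <string>
#include <vector>

std::vector<std::vector<std::string>> split_spec( char const * const * args )
{
    std::vector<std::vector<std::string>> lists( 1 );
    for ( ; *args; ++args )
    {
        if ( std::strcmp( *args, ":" ) == 0 )
            lists.emplace_back();                /* start the next list */
        else
            lists.back().push_back( *args );
    }
    return lists;
}

int main()
{
    /* Roughly "rule example ( targets * : log ? )" spelled as a spec array. */
    char const * spec[] = { "targets", "*", ":", "log", "?", 0 };
    for ( const auto & list : split_spec( spec ) )
    {
        for ( const auto & tok : list )
            std::printf( "%s ", tok.c_str() );
        std::printf( "|\n" );
    }
    return 0;
}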
*num_arguments = 0; + return 0; +} + +static void argument_list_print( struct arg_list * args, int32_t num_args ) +{ + if ( args ) + { + int32_t i; + for ( i = 0; i < num_args; ++i ) + { + int32_t j; + if ( i ) out_printf( " : " ); + for ( j = 0; j < args[ i ].size; ++j ) + { + struct argument * formal_arg = &args[ i ].args[ j ]; + if ( j ) out_printf( " " ); + if ( formal_arg->type_name ) + out_printf( "%s ", object_str( formal_arg->type_name ) ); + out_printf( "%s", object_str( formal_arg->arg_name ) ); + switch ( formal_arg->flags ) + { + case ARG_OPTIONAL: out_printf( " ?" ); break; + case ARG_PLUS: out_printf( " +" ); break; + case ARG_STAR: out_printf( " *" ); break; + } + } + } + } +} + + +struct arg_list * argument_list_bind_variables( struct arg_list * formal, + int32_t formal_count, module_t * module, int32_t * counter ) +{ + if ( formal ) + { + struct arg_list * result = (struct arg_list *)BJAM_MALLOC( sizeof( + struct arg_list ) * formal_count ); + int32_t i; + + for ( i = 0; i < formal_count; ++i ) + { + int32_t j; + struct argument * args = (struct argument *)BJAM_MALLOC( sizeof( + struct argument ) * formal[ i ].size ); + for ( j = 0; j < formal[ i ].size; ++j ) + { + args[ j ] = formal[ i ].args[ j ]; + if ( args[ j ].type_name ) + args[ j ].type_name = object_copy( args[ j ].type_name ); + args[ j ].arg_name = object_copy( args[ j ].arg_name ); + if ( args[ j ].flags != ARG_VARIADIC ) + args[ j ].index = module_add_fixed_var( module, + args[ j ].arg_name, counter ); + } + result[ i ].args = args; + result[ i ].size = formal[ i ].size; + } + + return result; + } + return 0; +} + + +void argument_list_free( struct arg_list * args, int32_t args_count ) +{ + int32_t i; + for ( i = 0; i < args_count; ++i ) + { + int32_t j; + for ( j = 0; j < args[ i ].size; ++j ) + { + if ( args[ i ].args[ j ].type_name ) + object_free( args[ i ].args[ j ].type_name ); + object_free( args[ i ].args[ j ].arg_name ); + } + BJAM_FREE( args[ i ].args ); + } + BJAM_FREE( args ); +} + + +FUNCTION * function_unbind_variables( FUNCTION * f ) +{ + if ( f->type == FUNCTION_JAM ) + { + JAM_FUNCTION * const func = (JAM_FUNCTION *)f; + return func->generic ? 
func->generic : f; + } +#ifdef HAVE_PYTHON + if ( f->type == FUNCTION_PYTHON ) + return f; +#endif + assert( f->type == FUNCTION_BUILTIN ); + return f; +} + +FUNCTION * function_bind_variables( FUNCTION * f, module_t * module, + int32_t * counter ) +{ + if ( f->type == FUNCTION_BUILTIN ) + return f; +#ifdef HAVE_PYTHON + if ( f->type == FUNCTION_PYTHON ) + return f; +#endif + { + JAM_FUNCTION * func = (JAM_FUNCTION *)f; + JAM_FUNCTION * new_func = (JAM_FUNCTION *)BJAM_MALLOC( sizeof( JAM_FUNCTION ) ); + instruction * code; + int32_t i; + assert( f->type == FUNCTION_JAM ); + memcpy( new_func, func, sizeof( JAM_FUNCTION ) ); + new_func->base.reference_count = 1; + new_func->base.formal_arguments = argument_list_bind_variables( + f->formal_arguments, f->num_formal_arguments, module, counter ); + new_func->code = (instruction *)BJAM_MALLOC( func->code_size * sizeof( instruction ) ); + memcpy( new_func->code, func->code, func->code_size * sizeof( + instruction ) ); + new_func->generic = (FUNCTION *)func; + func = new_func; + for ( i = 0; ; ++i ) + { + OBJECT * key; + int32_t op_code; + code = func->code + i; + switch ( code->op_code ) + { + case INSTR_PUSH_VAR: op_code = INSTR_PUSH_VAR_FIXED; break; + case INSTR_PUSH_LOCAL: op_code = INSTR_PUSH_LOCAL_FIXED; break; + case INSTR_POP_LOCAL: op_code = INSTR_POP_LOCAL_FIXED; break; + case INSTR_SET: op_code = INSTR_SET_FIXED; break; + case INSTR_APPEND: op_code = INSTR_APPEND_FIXED; break; + case INSTR_DEFAULT: op_code = INSTR_DEFAULT_FIXED; break; + case INSTR_RETURN: + if( code->arg == 1 ) return (FUNCTION *)new_func; + else continue; + case INSTR_CALL_MEMBER_RULE: + case INSTR_CALL_RULE: ++i; continue; + case INSTR_PUSH_MODULE: + { + int32_t depth = 1; + ++i; + while ( depth > 0 ) + { + code = func->code + i; + switch ( code->op_code ) + { + case INSTR_PUSH_MODULE: + case INSTR_CLASS: + ++depth; + break; + case INSTR_POP_MODULE: + --depth; + break; + case INSTR_CALL_RULE: + ++i; + break; + } + ++i; + } + --i; + } + default: continue; + } + key = func->constants[ code->arg ]; + if ( !( object_equal( key, constant_TMPDIR ) || + object_equal( key, constant_TMPNAME ) || + object_equal( key, constant_TMPFILE ) || + object_equal( key, constant_STDOUT ) || + object_equal( key, constant_STDERR ) ) ) + { + code->op_code = op_code; + code->arg = module_add_fixed_var( module, key, counter ); + } + } + } +} + +LIST * function_get_variables( FUNCTION * f ) +{ + if ( f->type == FUNCTION_BUILTIN ) + return L0; +#ifdef HAVE_PYTHON + if ( f->type == FUNCTION_PYTHON ) + return L0; +#endif + { + JAM_FUNCTION * func = (JAM_FUNCTION *)f; + LIST * result = L0; + instruction * code; + int32_t i; + assert( f->type == FUNCTION_JAM ); + if ( func->generic ) func = ( JAM_FUNCTION * )func->generic; + + for ( i = 0; ; ++i ) + { + OBJECT * var; + code = func->code + i; + switch ( code->op_code ) + { + case INSTR_PUSH_LOCAL: break; + case INSTR_RETURN: return result; + case INSTR_CALL_MEMBER_RULE: + case INSTR_CALL_RULE: ++i; continue; + case INSTR_PUSH_MODULE: + { + int32_t depth = 1; + ++i; + while ( depth > 0 ) + { + code = func->code + i; + switch ( code->op_code ) + { + case INSTR_PUSH_MODULE: + case INSTR_CLASS: + ++depth; + break; + case INSTR_POP_MODULE: + --depth; + break; + case INSTR_CALL_RULE: + ++i; + break; + } + ++i; + } + --i; + } + default: continue; + } + var = func->constants[ code->arg ]; + if ( !( object_equal( var, constant_TMPDIR ) || + object_equal( var, constant_TMPNAME ) || + object_equal( var, constant_TMPFILE ) || + object_equal( var, 
constant_STDOUT ) || + object_equal( var, constant_STDERR ) ) ) + { + result = list_push_back( result, var ); + } + } + } +} + +void function_refer( FUNCTION * func ) +{ + ++func->reference_count; +} + +void function_free( FUNCTION * function_ ) +{ + int32_t i; + + if ( --function_->reference_count != 0 ) + return; + + if ( function_->formal_arguments ) + argument_list_free( function_->formal_arguments, + function_->num_formal_arguments ); + + if ( function_->type == FUNCTION_JAM ) + { + JAM_FUNCTION * func = (JAM_FUNCTION *)function_; + + BJAM_FREE( func->code ); + + if ( func->generic ) + function_free( func->generic ); + else + { + if ( function_->rulename ) object_free( function_->rulename ); + + for ( i = 0; i < func->num_constants; ++i ) + object_free( func->constants[ i ] ); + BJAM_FREE( func->constants ); + + for ( i = 0; i < func->num_subfunctions; ++i ) + { + object_free( func->functions[ i ].name ); + function_free( func->functions[ i ].code ); + } + BJAM_FREE( func->functions ); + + for ( i = 0; i < func->num_subactions; ++i ) + { + object_free( func->actions[ i ].name ); + function_free( func->actions[ i ].command ); + } + BJAM_FREE( func->actions ); + + object_free( func->file ); + } + } +#ifdef HAVE_PYTHON + else if ( function_->type == FUNCTION_PYTHON ) + { + PYTHON_FUNCTION * func = (PYTHON_FUNCTION *)function_; + Py_DECREF( func->python_function ); + if ( function_->rulename ) object_free( function_->rulename ); + } +#endif + else + { + assert( function_->type == FUNCTION_BUILTIN ); + if ( function_->rulename ) object_free( function_->rulename ); + } + + BJAM_FREE( function_ ); +} + + +/* Alignment check for stack */ + +struct align_var_edits +{ + char ch; + VAR_EDITS e; +}; + +struct align_expansion_item +{ + char ch; + expansion_item e; +}; + +static_assert( + sizeof(struct align_var_edits) <= sizeof(VAR_EDITS) + sizeof(void *), + "sizeof(struct align_var_edits) <= sizeof(VAR_EDITS) + sizeof(void *)" ); +static_assert( + sizeof(struct align_expansion_item) <= sizeof(expansion_item) + sizeof(void *), + "sizeof(struct align_expansion_item) <= sizeof(expansion_item) + sizeof(void *)" ); + +static_assert( sizeof(LIST *) <= sizeof(void *), "sizeof(LIST *) <= sizeof(void *)" ); +static_assert( sizeof(char *) <= sizeof(void *), "sizeof(char *) <= sizeof(void *)" ); + +void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s, + string * out ) +{ + *(string * *)stack_allocate( s, sizeof( string * ) ) = out; + list_free( function_run( function, frame, s ) ); + stack_deallocate( s, sizeof( string * ) ); +} + +// Result is either the filename or contents depending on: +// 1. If the RESPONSE_FILE_SUB == f or not set (it's filename) +// 2. If the RESPONSE_FILE_SUB == c (it's contents) +// 3. If the RESPONSE_FILE_SUB == a (depends on the length of contents) +// Note, returns a *copy* of the filename or contents. 
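/* The "a" (automatic) setting only falls back to an on-disk response file when
 * the expanded contents would not fit on a single command line; the check in
 * the function below allows 256 characters of headroom on top of the contents
 * and compares against the platform limit reported by shell_maxline(). A
 * compact standalone restatement of that decision (choose_response_file_mode
 * is an illustrative name, not part of the engine):
 */

#include <cstring>

/* 'f' = pass the response file name, 'c' = splice the contents inline,
 * 'a' = inline only if contents plus headroom fit within max_cmdline. */
inline char choose_response_file_mode( char requested, char const * contents,
    int max_cmdline )
{
    if ( requested != 'a' )
        return requested;
    return (int)( std::strlen( contents ) + 256 ) > max_cmdline ? 'f' : 'c';
}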
+LIST * function_execute_write_file( + JAM_FUNCTION * function, FRAME * frame, STACK * s, + VAR_EXPANDED filename, LIST * contents ) +{ + LIST * filename_or_contents_result = nullptr; + + char response_file_sub_c = 'f'; + if ( filename.opt_file && filename.opt_content ) + { + LIST * response_file_sub = function_get_named_variable( + function, frame, constant_RESPONSE_FILE_SUB ); + if ( response_file_sub && list_front( response_file_sub ) ) + response_file_sub_c = object_str( list_front( response_file_sub ) )[0]; + list_free( response_file_sub ); + const char * contents_str = object_str( list_front( contents ) ); + if ( response_file_sub_c == 'a' ) + { + if ( int32_t( strlen( contents_str ) + 256 ) > shell_maxline() ) + response_file_sub_c = 'f'; + else + response_file_sub_c = 'c'; + } + } + else if ( filename.opt_file ) + response_file_sub_c = 'f'; + else if ( filename.opt_content ) + response_file_sub_c = 'c'; + if ( response_file_sub_c == 'c' ) + { + filename_or_contents_result = list_copy( contents ); + } + else + { + char const * out = object_str( list_front( filename.inner ) ); + OBJECT * tmp_filename = nullptr; + FILE * out_file = nullptr; + bool out_debug = DEBUG_EXEC != 0; + + /* For stdout/stderr we will create a temp file and generate a + * command that outputs the content as needed. + */ + if ( ( strcmp( "STDOUT", out ) == 0 ) || + ( strcmp( "STDERR", out ) == 0 ) ) + { + int32_t err_redir = strcmp( "STDERR", out ) == 0; + string result[ 1 ]; + + tmp_filename = path_tmpfile(); + + /* Construct os-specific cat command. */ + { + const char * command = "cat"; + const char * quote = "\""; + const char * redirect = "1>&2"; + + #ifdef OS_NT + command = "type"; + quote = "\""; + #elif defined( OS_VMS ) + command = "pipe type"; + quote = ""; + + /* Get tmp file name in os-format. */ + { + string os_filename[ 1 ]; + + string_new( os_filename ); + path_translate_to_os( object_str( tmp_filename ), os_filename ); + object_free( tmp_filename ); + tmp_filename = object_new( os_filename->value ); + string_free( os_filename ); + } + #endif + + string_new( result ); + string_append( result, command ); + string_append( result, " " ); + string_append( result, quote ); + string_append( result, object_str( tmp_filename ) ); + string_append( result, quote ); + if ( err_redir ) + { + string_append( result, " " ); + string_append( result, redirect ); + } + } + + /* Replace STDXXX with the temporary file. */ + filename_or_contents_result = list_new( object_new( result->value ) ); + out = object_str( tmp_filename ); + + string_free( result ); + + /* Make sure temp files created by this get nuked eventually. */ + file_remove_atexit( tmp_filename ); + } + else + { + filename_or_contents_result = list_copy( filename.value ); + } + + if ( !globs.noexec ) + { + string out_name[ 1 ]; + /* Handle "path to file" filenames. 
*/ + if ( ( out[ 0 ] == '"' ) && ( out[ strlen( out ) - 1 ] == '"' ) + ) + { + string_copy( out_name, out + 1 ); + string_truncate( out_name, out_name->size - 1 ); + } + else + string_copy( out_name, out ); + out_file = fopen( out_name->value, "w" ); + + if ( !out_file ) + { + err_printf( "[errno %d] failed to write output file '%s': %s", + errno, out_name->value, strerror(errno) ); + b2::clean_exit( EXITBAD ); + } + string_free( out_name ); + } + + if ( out_debug ) out_printf( "\nfile %s\n", out ); + if ( out_file ) fputs( object_str( list_front( contents ) ), out_file ); + if ( out_debug ) out_puts( object_str( list_front( contents ) ) ); + if ( out_file ) + { + fflush( out_file ); + fclose( out_file ); + } + if ( tmp_filename ) + object_free( tmp_filename ); + + if ( out_debug ) out_putc( '\n' ); + } + + return filename_or_contents_result; +} + +/* + * WARNING: The instruction set is tuned for Jam and is not really generic. Be + * especially careful about stack push/pop. + */ + +LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s ) +{ + JAM_FUNCTION * function; + instruction * code; + LIST * l; + LIST * r; + LIST * result = L0; +#ifndef NDEBUG + void * saved_stack = s->data; +#endif + + PROFILE_ENTER_LOCAL(function_run); + +#ifdef JAM_DEBUGGER + frame->function = function_; +#endif + + if ( function_->type == FUNCTION_BUILTIN ) + { + PROFILE_ENTER_LOCAL(function_run_FUNCTION_BUILTIN); + BUILTIN_FUNCTION const * const f = (BUILTIN_FUNCTION *)function_; + if ( function_->formal_arguments ) + argument_list_check( function_->formal_arguments, + function_->num_formal_arguments, function_, frame ); + + debug_on_enter_function( frame, f->base.rulename, NULL, -1 ); + result = f->func( frame, f->flags ); + debug_on_exit_function( f->base.rulename ); + PROFILE_EXIT_LOCAL(function_run_FUNCTION_BUILTIN); + PROFILE_EXIT_LOCAL(function_run); + return result; + } + +#ifdef HAVE_PYTHON + else if ( function_->type == FUNCTION_PYTHON ) + { + PROFILE_ENTER_LOCAL(function_run_FUNCTION_PYTHON); + PYTHON_FUNCTION * f = (PYTHON_FUNCTION *)function_; + debug_on_enter_function( frame, f->base.rulename, NULL, -1 ); + result = call_python_function( f, frame ); + debug_on_exit_function( f->base.rulename ); + PROFILE_EXIT_LOCAL(function_run_FUNCTION_PYTHON); + PROFILE_EXIT_LOCAL(function_run); + return result; + } +#endif + + assert( function_->type == FUNCTION_JAM ); + + if ( function_->formal_arguments ) + argument_list_push( function_->formal_arguments, + function_->num_formal_arguments, function_, frame, s ); + + function = (JAM_FUNCTION *)function_; + debug_on_enter_function( frame, function->base.rulename, function->file, function->line ); + code = function->code; + for ( ; ; ) + { + switch ( code->op_code ) + { + + /* + * Basic stack manipulation + */ + + case INSTR_PUSH_EMPTY: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_EMPTY); + stack_push( s, L0 ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_EMPTY); + break; + } + + case INSTR_PUSH_CONSTANT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_CONSTANT); + OBJECT * value = function_get_constant( function, code->arg ); + stack_push( s, list_new( object_copy( value ) ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_CONSTANT); + break; + } + + case INSTR_PUSH_ARG: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_ARG); + stack_push( s, frame_get_local( frame, code->arg ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_ARG); + break; + } + + case INSTR_PUSH_VAR: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_VAR); + stack_push( s, 
function_get_variable( function, frame, code->arg ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_VAR); + break; + } + + case INSTR_PUSH_VAR_FIXED: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_VAR_FIXED); + stack_push( s, list_copy( frame->module->fixed_variables[ code->arg + ] ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_VAR_FIXED); + break; + } + + case INSTR_PUSH_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_GROUP); + LIST * value = L0; + LISTITER iter; + LISTITER end; + l = stack_pop( s ); + for ( iter = list_begin( l ), end = list_end( l ); iter != end; + iter = list_next( iter ) ) + value = list_append( value, function_get_named_variable( + function, frame, list_item( iter ) ) ); + list_free( l ); + stack_push( s, value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_GROUP); + break; + } + + case INSTR_PUSH_APPEND: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_APPEND); + r = stack_pop( s ); + l = stack_pop( s ); + stack_push( s, list_append( l, r ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_APPEND); + break; + } + + case INSTR_SWAP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_SWAP); + l = stack_top( s ); + stack_set( s, 0, stack_at( s, code->arg ) ); + stack_set( s, code->arg, l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_SWAP); + break; + } + + case INSTR_POP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_POP); + list_free( stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_POP); + break; + } + + /* + * Branch instructions + */ + + case INSTR_JUMP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP); + code += code->arg; + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP); + break; + } + + case INSTR_JUMP_EMPTY: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_EMPTY); + l = stack_pop( s ); + if ( !list_cmp( l, L0 ) ) code += code->arg; + list_free( l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_EMPTY); + break; + } + + case INSTR_JUMP_NOT_EMPTY: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NOT_EMPTY); + l = stack_pop( s ); + if ( list_cmp( l, L0 ) ) code += code->arg; + list_free( l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NOT_EMPTY); + break; + } + + case INSTR_JUMP_LT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_LT); + r = stack_pop( s ); + l = stack_pop( s ); + if ( list_cmp( l, r ) < 0 ) code += code->arg; + list_free( l ); + list_free( r ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_LT); + break; + } + + case INSTR_JUMP_LE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_LE); + r = stack_pop( s ); + l = stack_pop( s ); + if ( list_cmp( l, r ) <= 0 ) code += code->arg; + list_free( l ); + list_free( r ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_LE); + break; + } + + case INSTR_JUMP_GT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_GT); + r = stack_pop( s ); + l = stack_pop( s ); + if ( list_cmp( l, r ) > 0 ) code += code->arg; + list_free( l ); + list_free( r ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_GT); + break; + } + + case INSTR_JUMP_GE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_GE); + r = stack_pop( s ); + l = stack_pop( s ); + if ( list_cmp( l, r ) >= 0 ) code += code->arg; + list_free( l ); + list_free( r ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_GE); + break; + } + + case INSTR_JUMP_EQ: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_EQ); + r = stack_pop( s ); + l = stack_pop( s ); + if ( list_cmp( l, r ) == 0 ) code += code->arg; + list_free( l ); + list_free( r ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_EQ); + break; + } + + case INSTR_JUMP_NE: + { + 
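/* All of the JUMP_* opcodes in this interpreter share one convention: the
 * operands are popped and freed, and a taken branch simply adds code->arg to
 * the instruction pointer, i.e. jump targets are stored as relative offsets.
 * A tiny standalone interpreter loop using the same relative-branch idea (the
 * opcodes and the Ins struct are invented for the sketch):
 */

#include <cstdio>
#include <vector>

struct Ins { int op; int arg; };  /* 0 = PUSH arg, 1 = JUMP_IF_ZERO, 2 = PRINT, 3 = HALT */

void run( const std::vector<Ins> & program )
{
    std::vector<int> stack;
    for ( const Ins * code = program.data(); ; ++code )
    {
        switch ( code->op )
        {
            case 0:
                stack.push_back( code->arg );
                break;
            case 1:
            {
                int v = stack.back();
                stack.pop_back();
                if ( v == 0 ) code += code->arg;  /* relative jump when taken */
                break;
            }
            case 2:
                std::printf( "%d\n", stack.back() );
                stack.pop_back();
                break;
            case 3:
                return;
        }
    }
}

int main()
{
    /* Pushes 0, so the conditional jump skips the "PUSH 99"; prints 42. */
    run( { { 0, 0 }, { 1, 1 }, { 0, 99 }, { 0, 42 }, { 2, 0 }, { 3, 0 } } );
    return 0;
}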
PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NE); + r = stack_pop(s); + l = stack_pop(s); + if ( list_cmp(l, r) != 0 ) code += code->arg; + list_free(l); + list_free(r); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NE); + break; + } + + case INSTR_JUMP_IN: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_IN); + r = stack_pop(s); + l = stack_pop(s); + if ( list_is_sublist( l, r ) ) code += code->arg; + list_free(l); + list_free(r); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_IN); + break; + } + + case INSTR_JUMP_NOT_IN: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NOT_IN); + r = stack_pop( s ); + l = stack_pop( s ); + if ( !list_is_sublist( l, r ) ) code += code->arg; + list_free( l ); + list_free( r ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NOT_IN); + break; + } + + /* + * For + */ + + case INSTR_FOR_INIT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_FOR_INIT); + l = stack_top( s ); + *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) = + list_begin( l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_FOR_INIT); + break; + } + + case INSTR_FOR_LOOP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_FOR_LOOP); + LISTITER iter = *(LISTITER *)stack_get( s ); + stack_deallocate( s, sizeof( LISTITER ) ); + l = stack_top( s ); + if ( iter == list_end( l ) ) + { + list_free( stack_pop( s ) ); + code += code->arg; + } + else + { + r = list_new( object_copy( list_item( iter ) ) ); + iter = list_next( iter ); + *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) = iter; + stack_push( s, r ); + } + PROFILE_EXIT_LOCAL(function_run_INSTR_FOR_LOOP); + break; + } + + case INSTR_FOR_POP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_FOR_POP); + stack_deallocate( s, sizeof( LISTITER ) ); + list_free( stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_FOR_POP); + break; + } + + /* + * Switch + */ + + case INSTR_JUMP_NOT_GLOB: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_JUMP_NOT_GLOB); + char const * pattern; + char const * match; + l = stack_pop( s ); + r = stack_top( s ); + pattern = list_empty( l ) ? "" : object_str( list_front( l ) ); + match = list_empty( r ) ? 
"" : object_str( list_front( r ) ); + if ( glob( pattern, match ) ) + code += code->arg; + else + list_free( stack_pop( s ) ); + list_free( l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_JUMP_NOT_GLOB); + break; + } + + /* + * Return + */ + + case INSTR_SET_RESULT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_SET_RESULT); + list_free( result ); + if ( !code->arg ) + result = stack_pop( s ); + else + result = list_copy( stack_top( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_SET_RESULT); + break; + } + + case INSTR_PUSH_RESULT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_RESULT); + stack_push( s, result ); + result = L0; + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_RESULT); + break; + } + + case INSTR_RETURN: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_RETURN); + if ( function_->formal_arguments ) + argument_list_pop( function_->formal_arguments, + function_->num_formal_arguments, frame, s ); +#ifndef NDEBUG + if ( !( saved_stack == s->data ) ) + { + frame->file = function->file; + frame->line = function->line; + backtrace_line( frame ); + out_printf( "error: stack check failed.\n" ); + backtrace( frame ); + assert( saved_stack == s->data ); + } + assert( saved_stack == s->data ); +#endif + debug_on_exit_function( function->base.rulename ); + PROFILE_EXIT_LOCAL(function_run_INSTR_RETURN); + PROFILE_EXIT_LOCAL(function_run); + return result; + } + + /* + * Local variables + */ + + case INSTR_PUSH_LOCAL: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_LOCAL); + LIST * value = stack_pop( s ); + stack_push( s, function_swap_variable( function, frame, code->arg, + value ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_LOCAL); + break; + } + + case INSTR_POP_LOCAL: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_POP_LOCAL); + function_set_variable( function, frame, code->arg, stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_POP_LOCAL); + break; + } + + case INSTR_PUSH_LOCAL_FIXED: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_LOCAL_FIXED); + LIST * value = stack_pop( s ); + LIST * * ptr = &frame->module->fixed_variables[ code->arg ]; + assert( code->arg < frame->module->num_fixed_variables ); + stack_push( s, *ptr ); + *ptr = value; + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_LOCAL_FIXED); + break; + } + + case INSTR_POP_LOCAL_FIXED: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_POP_LOCAL_FIXED); + LIST * value = stack_pop( s ); + LIST * * ptr = &frame->module->fixed_variables[ code->arg ]; + assert( code->arg < frame->module->num_fixed_variables ); + list_free( *ptr ); + *ptr = value; + PROFILE_EXIT_LOCAL(function_run_INSTR_POP_LOCAL_FIXED); + break; + } + + case INSTR_PUSH_LOCAL_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_LOCAL_GROUP); + LIST * const value = stack_pop( s ); + LISTITER iter; + LISTITER end; + l = stack_pop( s ); + for ( iter = list_begin( l ), end = list_end( l ); iter != end; + iter = list_next( iter ) ) + stack_push( s, function_swap_named_variable( function, frame, + list_item( iter ), list_copy( value ) ) ); + list_free( value ); + stack_push( s, l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_LOCAL_GROUP); + break; + } + + case INSTR_POP_LOCAL_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_POP_LOCAL_GROUP); + LISTITER iter; + LISTITER end; + r = stack_pop( s ); + l = list_reverse( r ); + list_free( r ); + for ( iter = list_begin( l ), end = list_end( l ); iter != end; + iter = list_next( iter ) ) + function_set_named_variable( function, frame, list_item( iter ), + stack_pop( s ) ); + list_free( l ); + 
PROFILE_EXIT_LOCAL(function_run_INSTR_POP_LOCAL_GROUP); + break; + } + + /* + * on $(TARGET) variables + */ + + case INSTR_PUSH_ON: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_ON); + LIST * targets = stack_top( s ); + if ( !list_empty( targets ) ) + { + /* FIXME: push the state onto the stack instead of using + * pushsettings. + */ + TARGET * t = bindtarget( list_front( targets ) ); + pushsettings( frame->module, t->settings ); + } + else + { + /* [ on $(TARGET) ... ] is ignored if $(TARGET) is empty. */ + list_free( stack_pop( s ) ); + stack_push( s, L0 ); + code += code->arg; + } + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_ON); + break; + } + + case INSTR_POP_ON: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_POP_ON); + LIST * result = stack_pop( s ); + LIST * targets = stack_pop( s ); + if ( !list_empty( targets ) ) + { + TARGET * t = bindtarget( list_front( targets ) ); + popsettings( frame->module, t->settings ); + } + list_free( targets ); + stack_push( s, result ); + PROFILE_EXIT_LOCAL(function_run_INSTR_POP_ON); + break; + } + + case INSTR_SET_ON: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_SET_ON); + LIST * targets = stack_pop( s ); + LIST * value = stack_pop( s ); + LIST * vars = stack_pop( s ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * t = bindtarget( list_item( iter ) ); + LISTITER vars_iter = list_begin( vars ); + LISTITER const vars_end = list_end( vars ); + for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter + ) ) + t->settings = addsettings( t->settings, VAR_SET, list_item( + vars_iter ), list_copy( value ) ); + } + list_free( vars ); + list_free( targets ); + stack_push( s, value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_SET_ON); + break; + } + + case INSTR_APPEND_ON: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_ON); + LIST * targets = stack_pop( s ); + LIST * value = stack_pop( s ); + LIST * vars = stack_pop( s ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * const t = bindtarget( list_item( iter ) ); + LISTITER vars_iter = list_begin( vars ); + LISTITER const vars_end = list_end( vars ); + for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter + ) ) + t->settings = addsettings( t->settings, VAR_APPEND, + list_item( vars_iter ), list_copy( value ) ); + } + list_free( vars ); + list_free( targets ); + stack_push( s, value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_ON); + break; + } + + case INSTR_DEFAULT_ON: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT_ON); + LIST * targets = stack_pop( s ); + LIST * value = stack_pop( s ); + LIST * vars = stack_pop( s ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * t = bindtarget( list_item( iter ) ); + LISTITER vars_iter = list_begin( vars ); + LISTITER const vars_end = list_end( vars ); + for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter + ) ) + t->settings = addsettings( t->settings, VAR_DEFAULT, + list_item( vars_iter ), list_copy( value ) ); + } + list_free( vars ); + list_free( targets ); + stack_push( s, value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT_ON); + break; + } + + /* [ on $(target) return $(variable) ] */ + case INSTR_GET_ON: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_GET_ON); + LIST * targets = stack_pop( s ); + LIST * result = L0; + if 
( !list_empty( targets ) ) + { + OBJECT * varname = function->constants[ code->arg ]; + TARGET * t = bindtarget( list_front( targets ) ); + SETTINGS * s = t->settings; + int32_t found = 0; + for ( ; s != 0; s = s->next ) + { + if ( object_equal( s->symbol, varname ) ) + { + result = s->value; + found = 1; + break; + } + } + if ( !found ) + { + result = var_get( frame->module, varname ) ; + } + } + list_free( targets ); + stack_push( s, list_copy( result ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_GET_ON); + break; + } + + /* + * Variable setting + */ + + case INSTR_SET: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_SET); + function_set_variable( function, frame, code->arg, + stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_SET); + break; + } + + case INSTR_APPEND: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND); + function_append_variable( function, frame, code->arg, + stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND); + break; + } + + case INSTR_DEFAULT: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT); + function_default_variable( function, frame, code->arg, + stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT); + break; + } + + case INSTR_SET_FIXED: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_SET_FIXED); + LIST * * ptr = &frame->module->fixed_variables[ code->arg ]; + assert( code->arg < frame->module->num_fixed_variables ); + list_free( *ptr ); + *ptr = stack_pop( s ); + PROFILE_EXIT_LOCAL(function_run_INSTR_SET_FIXED); + break; + } + + case INSTR_APPEND_FIXED: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_FIXED); + LIST * * ptr = &frame->module->fixed_variables[ code->arg ]; + assert( code->arg < frame->module->num_fixed_variables ); + *ptr = list_append( *ptr, stack_pop( s ) ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_FIXED); + break; + } + + case INSTR_DEFAULT_FIXED: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT_FIXED); + LIST * * ptr = &frame->module->fixed_variables[ code->arg ]; + LIST * value = stack_pop( s ); + assert( code->arg < frame->module->num_fixed_variables ); + if ( list_empty( *ptr ) ) + *ptr = value; + else + list_free( value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT_FIXED); + break; + } + + case INSTR_SET_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_SET_GROUP); + LIST * value = stack_pop( s ); + LIST * vars = stack_pop( s ); + LISTITER iter = list_begin( vars ); + LISTITER const end = list_end( vars ); + for ( ; iter != end; iter = list_next( iter ) ) + function_set_named_variable( function, frame, list_item( iter ), + list_copy( value ) ); + list_free( vars ); + list_free( value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_SET_GROUP); + break; + } + + case INSTR_APPEND_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_GROUP); + LIST * value = stack_pop( s ); + LIST * vars = stack_pop( s ); + LISTITER iter = list_begin( vars ); + LISTITER const end = list_end( vars ); + for ( ; iter != end; iter = list_next( iter ) ) + function_append_named_variable( function, frame, list_item( iter + ), list_copy( value ) ); + list_free( vars ); + list_free( value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_GROUP); + break; + } + + case INSTR_DEFAULT_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_DEFAULT_GROUP); + LIST * value = stack_pop( s ); + LIST * vars = stack_pop( s ); + LISTITER iter = list_begin( vars ); + LISTITER const end = list_end( vars ); + for ( ; iter != end; iter = list_next( iter ) ) + function_default_named_variable( function, frame, list_item( + iter ), 
list_copy( value ) ); + list_free( vars ); + list_free( value ); + PROFILE_EXIT_LOCAL(function_run_INSTR_DEFAULT_GROUP); + break; + } + + /* + * Rules + */ + + case INSTR_CALL_RULE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_CALL_RULE); + char const * unexpanded = object_str( function_get_constant( + function, code[ 1 ].op_code ) ); + LIST * result = function_call_rule( function, frame, s, code->arg, + unexpanded, function->file, code[ 1 ].arg ); + stack_push( s, result ); + ++code; + PROFILE_EXIT_LOCAL(function_run_INSTR_CALL_RULE); + break; + } + + case INSTR_CALL_MEMBER_RULE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_CALL_MEMBER_RULE); + OBJECT * rule_name = function_get_constant( function, code[1].op_code ); + LIST * result = function_call_member_rule( function, frame, s, code->arg, rule_name, function->file, code[1].arg ); + stack_push( s, result ); + ++code; + PROFILE_EXIT_LOCAL(function_run_INSTR_CALL_MEMBER_RULE); + break; + } + + case INSTR_RULE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_RULE); + function_set_rule( function, frame, s, code->arg ); + PROFILE_EXIT_LOCAL(function_run_INSTR_RULE); + break; + } + + case INSTR_ACTIONS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_ACTIONS); + function_set_actions( function, frame, s, code->arg ); + PROFILE_EXIT_LOCAL(function_run_INSTR_ACTIONS); + break; + } + + /* + * Variable expansion + */ + + case INSTR_APPLY_MODIFIERS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_MODIFIERS); + int32_t n; + int32_t i; + l = stack_pop( s ); + n = expand_modifiers( s, code->arg ); + stack_push( s, l ); + VAR_EXPANDED m = apply_modifiers( s, n ); + l = m.value; + list_free( m.inner ); + list_free( stack_pop( s ) ); + stack_deallocate( s, n * sizeof( VAR_EDITS ) ); + for ( i = 0; i < code->arg; ++i ) + list_free( stack_pop( s ) ); /* pop modifiers */ + stack_push( s, l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_MODIFIERS); + break; + } + + case INSTR_APPLY_INDEX: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX); + l = apply_subscript( s ); + list_free( stack_pop( s ) ); + list_free( stack_pop( s ) ); + stack_push( s, l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX); + break; + } + + case INSTR_APPLY_INDEX_MODIFIERS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS); + int32_t i; + int32_t n; + l = stack_pop( s ); + r = stack_pop( s ); + n = expand_modifiers( s, code->arg ); + stack_push( s, r ); + stack_push( s, l ); + l = apply_subscript_and_modifiers( s, n ); + list_free( stack_pop( s ) ); + list_free( stack_pop( s ) ); + stack_deallocate( s, n * sizeof( VAR_EDITS ) ); + for ( i = 0; i < code->arg; ++i ) + list_free( stack_pop( s ) ); /* pop modifiers */ + stack_push( s, l ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS); + break; + } + + case INSTR_APPLY_MODIFIERS_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_MODIFIERS_GROUP); + int32_t i; + LIST * const vars = stack_pop( s ); + int32_t const n = expand_modifiers( s, code->arg ); + LIST * result = L0; + LISTITER iter = list_begin( vars ); + LISTITER const end = list_end( vars ); + for ( ; iter != end; iter = list_next( iter ) ) + { + stack_push( s, function_get_named_variable( function, frame, + list_item( iter ) ) ); + VAR_EXPANDED m = apply_modifiers( s, n ); + result = m.value; + list_free( m.inner ); + list_free( stack_pop( s ) ); + } + list_free( vars ); + stack_deallocate( s, n * sizeof( VAR_EDITS ) ); + for ( i = 0; i < code->arg; ++i ) + list_free( stack_pop( s ) ); /* pop modifiers */ + stack_push( s, result 
); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_MODIFIERS_GROUP); + break; + } + + case INSTR_APPLY_INDEX_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX_GROUP); + LIST * vars = stack_pop( s ); + LIST * result = L0; + LISTITER iter = list_begin( vars ); + LISTITER const end = list_end( vars ); + for ( ; iter != end; iter = list_next( iter ) ) + { + stack_push( s, function_get_named_variable( function, frame, + list_item( iter ) ) ); + result = list_append( result, apply_subscript( s ) ); + list_free( stack_pop( s ) ); + } + list_free( vars ); + list_free( stack_pop( s ) ); + stack_push( s, result ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX_GROUP); + break; + } + + case INSTR_APPLY_INDEX_MODIFIERS_GROUP: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS_GROUP); + int32_t i; + LIST * const vars = stack_pop( s ); + LIST * const r = stack_pop( s ); + int32_t const n = expand_modifiers( s, code->arg ); + LIST * result = L0; + LISTITER iter = list_begin( vars ); + LISTITER const end = list_end( vars ); + stack_push( s, r ); + for ( ; iter != end; iter = list_next( iter ) ) + { + stack_push( s, function_get_named_variable( function, frame, + list_item( iter ) ) ); + result = list_append( result, apply_subscript_and_modifiers( s, + n ) ); + list_free( stack_pop( s ) ); + } + list_free( stack_pop( s ) ); + list_free( vars ); + stack_deallocate( s, n * sizeof( VAR_EDITS ) ); + for ( i = 0; i < code->arg; ++i ) + list_free( stack_pop( s ) ); /* pop modifiers */ + stack_push( s, result ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPLY_INDEX_MODIFIERS_GROUP); + break; + } + + case INSTR_COMBINE_STRINGS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_COMBINE_STRINGS); + int32_t const buffer_size = code->arg * sizeof( expansion_item ); + LIST * * const stack_pos = (LIST * * const)stack_get( s ); + expansion_item * items = (expansion_item *)stack_allocate( s, buffer_size ); + LIST * result; + int32_t i; + for ( i = 0; i < code->arg; ++i ) + items[ i ].values = stack_pos[ i ]; + result = expand( items, code->arg ); + stack_deallocate( s, buffer_size ); + for ( i = 0; i < code->arg; ++i ) + list_free( stack_pop( s ) ); + stack_push( s, result ); + PROFILE_EXIT_LOCAL(function_run_INSTR_COMBINE_STRINGS); + break; + } + + case INSTR_GET_GRIST: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_GET_GRIST); + LIST * vals = stack_pop( s ); + LIST * result = L0; + LISTITER iter, end; + + for ( iter = list_begin( vals ), end = list_end( vals ); iter != end; ++iter ) + { + OBJECT * new_object; + const char * value = object_str( list_item( iter ) ); + const char * p; + if ( value[ 0 ] == '<' && ( p = strchr( value, '>' ) ) ) + { + if( p[ 1 ] ) + new_object = object_new_range( value, int32_t(p - value + 1) ); + else + new_object = object_copy( list_item( iter ) ); + } + else + { + new_object = object_copy( constant_empty ); + } + result = list_push_back( result, new_object ); + } + + list_free( vals ); + stack_push( s, result ); + PROFILE_EXIT_LOCAL(function_run_INSTR_GET_GRIST); + break; + } + + case INSTR_INCLUDE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_INCLUDE); + LIST * nt = stack_pop( s ); + if ( !list_empty( nt ) ) + { + TARGET * const t = bindtarget( list_front( nt ) ); + list_free( nt ); + + /* DWA 2001/10/22 - Perforce Jam cleared the arguments here, + * which prevented an included file from being treated as part + * of the body of a rule. I did not see any reason to do that, + * so I lifted the restriction. 
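The GET_GRIST case above reduces each list element to its leading grist: a value that starts with "<" keeps everything up to and including the first ">", and anything else becomes the empty string. A standalone sketch of that rule, using std::string instead of the engine's OBJECT lists (illustrative only, not engine code):

    #include <string>

    // Sketch of the per-element reduction INSTR_GET_GRIST performs.
    static std::string get_grist_sketch( std::string const & value )
    {
        if ( !value.empty() && value[ 0 ] == '<' )
        {
            std::string::size_type gt = value.find( '>' );
            if ( gt != std::string::npos )
                return value.substr( 0, gt + 1 );   // keep the "<...>" prefix
        }
        return std::string();                       // no grist: empty string
    }

    // get_grist_sketch( "<pch>on" )   == "<pch>"
    // get_grist_sketch( "<target>" )  == "<target>"
    // get_grist_sketch( "plain.cpp" ) == ""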
+ */ + + /* Bind the include file under the influence of "on-target" + * variables. Though they are targets, include files are not + * built with make(). + */ + + pushsettings( root_module(), t->settings ); + /* We do not expect that a file to be included is generated by + * some action. Therefore, pass 0 as third argument. If the name + * resolves to a directory, let it error out. + */ + object_free( t->boundname ); + t->boundname = search( t->name, &t->time, 0, 0 ); + popsettings( root_module(), t->settings ); + + parse_file( t->boundname, frame ); +#ifdef JAM_DEBUGGER + frame->function = function_; +#endif + } + PROFILE_EXIT_LOCAL(function_run_INSTR_INCLUDE); + break; + } + + /* + * Classes and modules + */ + + case INSTR_PUSH_MODULE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_PUSH_MODULE); + LIST * const module_name = stack_pop( s ); + module_t * const outer_module = frame->module; + frame->module = !list_empty( module_name ) + ? bindmodule( list_front( module_name ) ) + : root_module(); + list_free( module_name ); + *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) = + outer_module; + PROFILE_EXIT_LOCAL(function_run_INSTR_PUSH_MODULE); + break; + } + + case INSTR_POP_MODULE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_POP_MODULE); + module_t * const outer_module = *(module_t * *)stack_get( s ); + stack_deallocate( s, sizeof( module_t * ) ); + frame->module = outer_module; + PROFILE_EXIT_LOCAL(function_run_INSTR_POP_MODULE); + break; + } + + case INSTR_CLASS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_CLASS); + LIST * bases = stack_pop( s ); + LIST * name = stack_pop( s ); + OBJECT * class_module = make_class_module( name, bases, frame ); + + module_t * const outer_module = frame->module; + frame->module = bindmodule( class_module ); + object_free( class_module ); + + *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) = + outer_module; + PROFILE_EXIT_LOCAL(function_run_INSTR_CLASS); + break; + } + + case INSTR_BIND_MODULE_VARIABLES: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_BIND_MODULE_VARIABLES); + module_bind_variables( frame->module ); + PROFILE_EXIT_LOCAL(function_run_INSTR_BIND_MODULE_VARIABLES); + break; + } + + case INSTR_APPEND_STRINGS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_APPEND_STRINGS); + string buf[ 1 ]; + string_new( buf ); + combine_strings( s, code->arg, buf ); + stack_push( s, list_new( object_new( buf->value ) ) ); + string_free( buf ); + PROFILE_EXIT_LOCAL(function_run_INSTR_APPEND_STRINGS); + break; + } + + // WRITE_FILE( LIST*1 filename, LIST*1 modifiers[N], LIST*1 contents ) + case INSTR_WRITE_FILE: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_WRITE_FILE); + // Get expanded filename. + LIST * filename = nullptr; + { + expansion_item ei = { stack_pop( s ) }; + filename = expand( &ei, 1 ); + list_free( ei.values ); + } + // Apply modifiers to "raw" filename. + VAR_EXPANDED filename_mod = eval_modifiers( s, filename, code->arg ); + // Get contents. + LIST * contents = stack_pop( s ); + // Write out the contents file, or expand the contents, as needed. + LIST * filename_or_contents = function_execute_write_file( function, frame, s, filename_mod, contents ); + // The result that gets replaced into the @() space. 
+ stack_push( s, filename_or_contents ); + list_free( filename_mod.value ); + list_free( filename_mod.inner ); + list_free( contents ); + PROFILE_EXIT_LOCAL(function_run_INSTR_WRITE_FILE); + break; + } + + case INSTR_OUTPUT_STRINGS: + { + PROFILE_ENTER_LOCAL(function_run_INSTR_OUTPUT_STRINGS); + string * const buf = *(string * *)( (char *)stack_get( s ) + ( + code->arg * sizeof( LIST * ) ) ); + combine_strings( s, code->arg, buf ); + PROFILE_EXIT_LOCAL(function_run_INSTR_OUTPUT_STRINGS); + break; + } + + case INSTR_DEBUG_LINE: + { + debug_on_instruction( frame, function->file, code->arg ); + break; + } + + } + ++code; + } + + PROFILE_EXIT_LOCAL(function_run); +} + + +#ifdef HAVE_PYTHON + +static struct arg_list * arg_list_compile_python( PyObject * bjam_signature, + int32_t * num_arguments ) +{ + if ( bjam_signature ) + { + struct argument_list_compiler c[ 1 ]; + struct arg_list * result; + Py_ssize_t s; + Py_ssize_t i; + argument_list_compiler_init( c ); + + s = PySequence_Size( bjam_signature ); + for ( i = 0; i < s; ++i ) + { + struct argument_compiler arg_comp[ 1 ]; + struct arg_list arg; + PyObject * v = PySequence_GetItem( bjam_signature, i ); + Py_ssize_t j; + Py_ssize_t inner; + argument_compiler_init( arg_comp ); + + inner = PySequence_Size( v ); + for ( j = 0; j < inner; ++j ) + argument_compiler_add( arg_comp, object_new( PyString_AsString( + PySequence_GetItem( v, j ) ) ), constant_builtin, -1 ); + + arg = arg_compile_impl( arg_comp, constant_builtin, -1 ); + dynamic_array_push( c->args, arg ); + argument_compiler_free( arg_comp ); + Py_DECREF( v ); + } + + *num_arguments = c->args->size; + result = (struct arg_list *)BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) ); + memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list ) + ); + argument_list_compiler_free( c ); + return result; + } + *num_arguments = 0; + return 0; +} + +FUNCTION * function_python( PyObject * function, PyObject * bjam_signature ) +{ + PYTHON_FUNCTION * result = (PYTHON_FUNCTION *)BJAM_MALLOC( sizeof( PYTHON_FUNCTION ) ); + + result->base.type = FUNCTION_PYTHON; + result->base.reference_count = 1; + result->base.rulename = 0; + result->base.formal_arguments = arg_list_compile_python( bjam_signature, + &result->base.num_formal_arguments ); + Py_INCREF( function ); + result->python_function = function; + + return (FUNCTION *)result; +} + + +static void argument_list_to_python( struct arg_list * formal, int32_t formal_count, + FUNCTION * function, FRAME * frame, PyObject * kw ) +{ + LOL * all_actual = frame->args; + int32_t i; + + for ( i = 0; i < formal_count; ++i ) + { + LIST * actual = lol_get( all_actual, i ); + LISTITER actual_iter = list_begin( actual ); + LISTITER const actual_end = list_end( actual ); + int32_t j; + for ( j = 0; j < formal[ i ].size; ++j ) + { + struct argument * formal_arg = &formal[ i ].args[ j ]; + PyObject * value; + LIST * l; + + switch ( formal_arg->flags ) + { + case ARG_ONE: + if ( actual_iter == actual_end ) + argument_error( "missing argument", function, frame, + formal_arg->arg_name ); + type_check_range( formal_arg->type_name, actual_iter, list_next( + actual_iter ), frame, function, formal_arg->arg_name ); + value = PyString_FromString( object_str( list_item( actual_iter + ) ) ); + actual_iter = list_next( actual_iter ); + break; + case ARG_OPTIONAL: + if ( actual_iter == actual_end ) + value = 0; + else + { + type_check_range( formal_arg->type_name, actual_iter, + list_next( actual_iter ), frame, function, + formal_arg->arg_name ); + value = 
PyString_FromString( object_str( list_item( + actual_iter ) ) ); + actual_iter = list_next( actual_iter ); + } + break; + case ARG_PLUS: + if ( actual_iter == actual_end ) + argument_error( "missing argument", function, frame, + formal_arg->arg_name ); + /* fallthrough */ + case ARG_STAR: + type_check_range( formal_arg->type_name, actual_iter, + actual_end, frame, function, formal_arg->arg_name ); + l = list_copy_range( actual, actual_iter, actual_end ); + value = list_to_python( l ); + list_free( l ); + actual_iter = actual_end; + break; + case ARG_VARIADIC: + return; + } + + if ( value ) + { + PyObject * key = PyString_FromString( object_str( + formal_arg->arg_name ) ); + PyDict_SetItem( kw, key, value ); + Py_DECREF( key ); + Py_DECREF( value ); + } + } + + if ( actual_iter != actual_end ) + argument_error( "extra argument", function, frame, list_item( + actual_iter ) ); + } + + for ( ; i < all_actual->count; ++i ) + { + LIST * const actual = lol_get( all_actual, i ); + if ( !list_empty( actual ) ) + argument_error( "extra argument", function, frame, list_front( + actual ) ); + } +} + + +/* Given a Python object, return a string to use in Jam code instead of the said + * object. + * + * If the object is a string, use the string value. + * If the object implemenets __jam_repr__ method, use that. + * Otherwise return 0. + */ + +OBJECT * python_to_string( PyObject * value ) +{ + if ( PyString_Check( value ) ) + return object_new( PyString_AS_STRING( value ) ); + + /* See if this instance defines the special __jam_repr__ method. */ + if ( PyInstance_Check( value ) + && PyObject_HasAttrString( value, "__jam_repr__" ) ) + { + PyObject * repr = PyObject_GetAttrString( value, "__jam_repr__" ); + if ( repr ) + { + PyObject * arguments2 = PyTuple_New( 0 ); + PyObject * value2 = PyObject_Call( repr, arguments2, 0 ); + Py_DECREF( repr ); + Py_DECREF( arguments2 ); + if ( PyString_Check( value2 ) ) + return object_new( PyString_AS_STRING( value2 ) ); + Py_DECREF( value2 ); + } + } + return 0; +} + + +static module_t * python_module() +{ + static module_t * python = 0; + if ( !python ) + python = bindmodule( constant_python ); + return python; +} + + +static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame ) +{ + LIST * result = 0; + PyObject * arguments = 0; + PyObject * kw = NULL; + int32_t i; + PyObject * py_result; + FRAME * prev_frame_before_python_call; + + if ( function->base.formal_arguments ) + { + arguments = PyTuple_New( 0 ); + kw = PyDict_New(); + argument_list_to_python( function->base.formal_arguments, + function->base.num_formal_arguments, &function->base, frame, kw ); + } + else + { + arguments = PyTuple_New( frame->args->count ); + for ( i = 0; i < frame->args->count; ++i ) + PyTuple_SetItem( arguments, i, list_to_python( lol_get( frame->args, + i ) ) ); + } + + frame->module = python_module(); + + prev_frame_before_python_call = frame_before_python_call; + frame_before_python_call = frame; + py_result = PyObject_Call( function->python_function, arguments, kw ); + frame_before_python_call = prev_frame_before_python_call; + Py_DECREF( arguments ); + Py_XDECREF( kw ); + if ( py_result != NULL ) + { + if ( PyList_Check( py_result ) ) + { + int32_t size = PyList_Size( py_result ); + int32_t i; + for ( i = 0; i < size; ++i ) + { + OBJECT * s = python_to_string( PyList_GetItem( py_result, i ) ); + if ( !s ) + err_printf( + "Non-string object returned by Python call.\n" ); + else + result = list_push_back( result, s ); + } + } + else if ( py_result == Py_None ) + { + 
result = L0; + } + else + { + OBJECT * const s = python_to_string( py_result ); + if ( s ) + result = list_new( s ); + else + /* We have tried all we could. Return empty list. There are + * cases, e.g. feature.feature function that should return a + * value for the benefit of Python code and which also can be + * called by Jam code, where no sensible value can be returned. + * We cannot even emit a warning, since there would be a pile of + * them. + */ + result = L0; + } + + Py_DECREF( py_result ); + } + else + { + PyErr_Print(); + err_printf( "Call failed\n" ); + } + + return result; +} + +#endif + + +void function_done( void ) +{ + BJAM_FREE( stack ); +} diff --git a/src/boost/tools/build/src/engine/function.h b/src/boost/tools/build/src/engine/function.h new file mode 100644 index 000000000..afa0277bb --- /dev/null +++ b/src/boost/tools/build/src/engine/function.h @@ -0,0 +1,53 @@ +/* + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2011 Steven Watanabe + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef FUNCTION_SW20111123_H +#define FUNCTION_SW20111123_H + +#include "config.h" +#include "object.h" +#include "frames.h" +#include "lists.h" +#include "parse.h" +#include "jam_strings.h" + +typedef struct _function FUNCTION; +typedef struct _stack STACK; + +typedef FUNCTION* function_ptr; +typedef STACK* stack_ptr; + +STACK * stack_global( void ); +void stack_push( STACK * s, LIST * l ); +LIST * stack_pop( STACK * s ); + +FUNCTION * function_compile( PARSE * parse ); +FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int32_t flags ), int32_t flags, const char * * args ); +void function_refer( FUNCTION * ); +void function_free( FUNCTION * ); +OBJECT * function_rulename( FUNCTION * ); +void function_set_rulename( FUNCTION *, OBJECT * ); +void function_location( FUNCTION *, OBJECT * *, int32_t * ); +LIST * function_run( FUNCTION * function, FRAME * frame, STACK * s ); + +FUNCTION * function_compile_actions( const char * actions, OBJECT * file, int32_t line ); +void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s, string * out ); + +FUNCTION * function_bind_variables( FUNCTION * f, module_t * module, int32_t * counter ); +FUNCTION * function_unbind_variables( FUNCTION * f ); + +LIST * function_get_variables( FUNCTION * f ); + +void function_done( void ); + +#ifdef HAVE_PYTHON + +FUNCTION * function_python( PyObject * function, PyObject * bjam_signature ); + +#endif + +#endif diff --git a/src/boost/tools/build/src/engine/glob.cpp b/src/boost/tools/build/src/engine/glob.cpp new file mode 100644 index 000000000..bec00ee56 --- /dev/null +++ b/src/boost/tools/build/src/engine/glob.cpp @@ -0,0 +1,152 @@ +/* + * Copyright 1994 Christopher Seiwald. All rights reserved. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * glob.c - match a string against a simple pattern + * + * Understands the following patterns: + * + * * any number of characters + * ? 
any single character + * [a-z] any single character in the range a-z + * [^a-z] any single character not in the range a-z + * \x match x + * + * External functions: + * + * glob() - match a string against a simple pattern + * + * Internal functions: + * + * globchars() - build a bitlist to check for character group match + */ + +# include "jam.h" + +# define CHECK_BIT( tab, bit ) ( tab[ (bit)/8 ] & (1<<( (bit)%8 )) ) +# define BITLISTSIZE 16 /* bytes used for [chars] in compiled expr */ + +static void globchars( const char * s, const char * e, char * b ); + + +/* + * glob() - match a string against a simple pattern. + */ + +int glob( const char * c, const char * s ) +{ + char bitlist[ BITLISTSIZE ]; + const char * here; + + for ( ; ; ) + switch ( *c++ ) + { + case '\0': + return *s ? -1 : 0; + + case '?': + if ( !*s++ ) + return 1; + break; + + case '[': + /* Scan for matching ]. */ + + here = c; + do if ( !*c++ ) return 1; + while ( ( here == c ) || ( *c != ']' ) ); + ++c; + + /* Build character class bitlist. */ + + globchars( here, c, bitlist ); + + if ( !CHECK_BIT( bitlist, *(const unsigned char *)s ) ) + return 1; + ++s; + break; + + case '*': + here = s; + + while ( *s ) + ++s; + + /* Try to match the rest of the pattern in a recursive */ + /* call. If the match fails we'll back up chars, retrying. */ + + while ( s != here ) + { + int r; + + /* A fast path for the last token in a pattern. */ + r = *c ? glob( c, s ) : *s ? -1 : 0; + + if ( !r ) + return 0; + if ( r < 0 ) + return 1; + --s; + } + break; + + case '\\': + /* Force literal match of next char. */ + if ( !*c || ( *s++ != *c++ ) ) + return 1; + break; + + default: + if ( *s++ != c[ -1 ] ) + return 1; + break; + } +} + + +/* + * globchars() - build a bitlist to check for character group match. + */ + +static void globchars( const char * s, const char * e, char * b ) +{ + int neg = 0; + + memset( b, '\0', BITLISTSIZE ); + + if ( *s == '^' ) + { + ++neg; + ++s; + } + + while ( s < e ) + { + int c; + + if ( ( s + 2 < e ) && ( s[1] == '-' ) ) + { + for ( c = s[0]; c <= s[2]; ++c ) + b[ c/8 ] |= ( 1 << ( c % 8 ) ); + s += 3; + } + else + { + c = *s++; + b[ c/8 ] |= ( 1 << ( c % 8 ) ); + } + } + + if ( neg ) + { + int i; + for ( i = 0; i < BITLISTSIZE; ++i ) + b[ i ] ^= 0377; + } + + /* Do not include \0 in either $[chars] or $[^chars]. */ + b[0] &= 0376; +} diff --git a/src/boost/tools/build/src/engine/guess_toolset.bat b/src/boost/tools/build/src/engine/guess_toolset.bat new file mode 100644 index 000000000..09f7808b0 --- /dev/null +++ b/src/boost/tools/build/src/engine/guess_toolset.bat @@ -0,0 +1,107 @@ +@ECHO OFF + +REM ~ Copyright 2002-2018 Rene Rivera. +REM ~ Distributed under the Boost Software License, Version 1.0. +REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +if "_%1_" == "_yacc_" goto Guess_Yacc +if "_%1_" == "_test_path_" ( + shift + goto Test_Path) +goto Guess + + +:Clear_Error +ver >nul +goto :eof + + +:Test_Path +REM Tests for the given executable file presence in the directories in the PATH +REM environment variable. Additionally sets FOUND_PATH to the path of the +REM found file. +call :Clear_Error +setlocal +set test=%~$PATH:1 +endlocal +if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1 +goto :eof + + +:Guess +REM Let vswhere tell us where msvc is at, if available. 
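The glob() matcher above returns 0 when the string matches the pattern and a non-zero value otherwise. A minimal self-check, assuming it is compiled and linked together with glob.cpp (the prototype is repeated here rather than pulled from jam.h):

    #include <assert.h>

    int glob( const char * pattern, const char * s );   /* from glob.cpp */

    int main()
    {
        assert( glob( "*.cpp",      "hash.cpp" ) == 0 );  /* '*' spans any run   */
        assert( glob( "h?sh.*",     "hash.cpp" ) == 0 );  /* '?' is one char     */
        assert( glob( "[a-f]*.cpp", "zzz.cpp"  ) != 0 );  /* class mismatch      */
        assert( glob( "\\*.cpp",    "*.cpp"    ) == 0 );  /* '\\' forces literal */
        return 0;
    }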
+call :Clear_Error +call vswhere_usability_wrapper.cmd +call :Clear_Error +if NOT "_%VS170COMNTOOLS%_" == "__" ( + set "B2_TOOLSET=vc143" + set "B2_TOOLSET_ROOT=%VS170COMNTOOLS%..\..\VC\" + goto :eof) +if NOT "_%VS160COMNTOOLS%_" == "__" ( + set "B2_TOOLSET=vc142" + set "B2_TOOLSET_ROOT=%VS160COMNTOOLS%..\..\VC\" + goto :eof) +if NOT "_%VS150COMNTOOLS%_" == "__" ( + set "B2_TOOLSET=vc141" + set "B2_TOOLSET_ROOT=%VS150COMNTOOLS%..\..\VC\" + goto :eof) +REM VSUNKCOMNTOOLS represents unknown but detected version from vswhere +if NOT "_%VSUNKCOMNTOOLS%_" == "__" ( + set "B2_TOOLSET=vcunk" + set "B2_TOOLSET_ROOT=%VSUNKCOMNTOOLS%..\..\VC\" + goto :eof) +if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" ( + set "B2_TOOLSET=vc141" + set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Enterprise\VC\" + exit /b 0) +if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Professional\VC\Auxiliary\Build\vcvarsall.bat" ( + set "B2_TOOLSET=vc141" + set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Professional\VC\" + exit /b 0) +if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" ( + set "B2_TOOLSET=vc141" + set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio\2017\Community\VC\" + exit /b 0) +if NOT "_%VS140COMNTOOLS%_" == "__" ( + set "B2_TOOLSET=vc14" + set "B2_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\" + exit /b 0) +if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 14.0\VC\VCVARSALL.BAT" ( + set "B2_TOOLSET=vc14" + set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 14.0\VC\" + exit /b 0) +if NOT "_%VS120COMNTOOLS%_" == "__" ( + set "B2_TOOLSET=vc12" + set "B2_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\" + exit /b 0) +if EXIST "%VS_ProgramFiles%\Microsoft Visual Studio 12.0\VC\VCVARSALL.BAT" ( + set "B2_TOOLSET=vc12" + set "B2_TOOLSET_ROOT=%VS_ProgramFiles%\Microsoft Visual Studio 12.0\VC\" + exit /b 0) +call :Test_Path cl.exe +if not errorlevel 1 ( + set "B2_TOOLSET=msvc" + set "B2_TOOLSET_ROOT=%FOUND_PATH%..\" + exit /b 0) +call :Test_Path vcvars32.bat +if not errorlevel 1 ( + set "B2_TOOLSET=msvc" + call "%FOUND_PATH%VCVARS32.BAT" + set "B2_TOOLSET_ROOT=%MSVCDir%\" + exit /b 0) +call :Test_Path bcc32c.exe +if not errorlevel 1 ( + set "B2_TOOLSET=borland" + set "B2_TOOLSET_ROOT=%FOUND_PATH%..\" + exit /b 0) +call :Test_Path icl.exe +if not errorlevel 1 ( + set "B2_TOOLSET=intel-win32" + set "B2_TOOLSET_ROOT=%FOUND_PATH%..\" + exit /b 0) +if EXIST "C:\MinGW\bin\gcc.exe" ( + set "B2_TOOLSET=mingw" + set "B2_TOOLSET_ROOT=C:\MinGW\" + exit /b 0) +REM Could not find a suitable toolset +exit /b 1 diff --git a/src/boost/tools/build/src/engine/hash.cpp b/src/boost/tools/build/src/engine/hash.cpp new file mode 100644 index 000000000..9dcd5816b --- /dev/null +++ b/src/boost/tools/build/src/engine/hash.cpp @@ -0,0 +1,388 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. 
+ */ + +/* + * hash.c - simple in-memory hashing routines + * + * External routines: + * hashinit() - initialize a hash table, returning a handle + * hashitem() - find a record in the table, and optionally enter a new one + * hashdone() - free a hash table, given its handle + * + * Internal routines: + * hashrehash() - resize and rebuild hp->tab, the hash table + */ + +#include "jam.h" +#include "hash.h" + +#include "compile.h" +#include "output.h" + +#include + +/* +#define HASH_DEBUG_PROFILE 1 +*/ + +/* Header attached to all hash table data items. */ + +typedef struct item ITEM; +struct item +{ + ITEM * next; +}; + +#define MAX_LISTS 32 + +struct hash +{ + /* + * the hash table, just an array of item pointers + */ + struct + { + int32_t nel; + ITEM * * base; + } tab; + + int32_t bloat; /* tab.nel / items.nel */ + int32_t inel; /* initial number of elements */ + + /* + * the array of records, maintained by these routines - essentially a + * microallocator + */ + struct + { + int32_t more; /* how many more ITEMs fit in lists[ list ] */ + ITEM * free; /* free list of items */ + char * next; /* where to put more ITEMs in lists[ list ] */ + int32_t size; /* sizeof( ITEM ) + aligned datalen */ + int32_t nel; /* total ITEMs held by all lists[] */ + int32_t list; /* index into lists[] */ + + struct + { + int32_t nel; /* total ITEMs held by this list */ + char * base; /* base of ITEMs array */ + } lists[ MAX_LISTS ]; + } items; + + char const * name; /* just for hashstats() */ +}; + +static void hashrehash( struct hash * ); +static void hashstat( struct hash * ); + +static uint32_t hash_keyval( OBJECT * key ) +{ + return object_hash( key ); +} + +#define hash_bucket(hp, keyval) ((hp)->tab.base + ((keyval) % (hp)->tab.nel)) + +#define hash_data_key(data) (*(OBJECT * *)(data)) +#define hash_item_data(item) ((HASHDATA *)((char *)item + sizeof(ITEM))) +#define hash_item_key(item) (hash_data_key(hash_item_data(item))) + + +#define ALIGNED(x) ((x + sizeof(ITEM) - 1) & ~(sizeof(ITEM) - 1)) + +/* + * hashinit() - initialize a hash table, returning a handle + */ + +struct hash * hashinit( int32_t datalen, char const * name ) +{ + struct hash * hp = (struct hash *)BJAM_MALLOC( sizeof( *hp ) ); + + hp->bloat = 3; + hp->tab.nel = 0; + hp->tab.base = 0; + hp->items.more = 0; + hp->items.free = 0; + hp->items.size = sizeof( ITEM ) + ALIGNED( datalen ); + hp->items.list = -1; + hp->items.nel = 0; + hp->inel = 11; /* 47 */ + hp->name = name; + + return hp; +} + + +/* + * hash_search() - Find the hash item for the given data. + * + * Returns a pointer to a hashed item with the given key. If given a 'previous' + * pointer, makes it point to the item prior to the found item in the same + * bucket or to 0 if our item is the first item in its bucket. 
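A minimal usage sketch of this hashing API, with a hypothetical record type. As hash.h later in this patch spells out, the key OBJECT * must be the first member of the stored record, and a slot returned by hash_insert() with *found == 0 is uninitialised, so the caller must set its key:

    #include "hash.h"
    #include "object.h"

    struct counter_rec            /* hypothetical record; the key comes first */
    {
        OBJECT * name;            /* key */
        int      count;
    };

    static void bump( struct hash * table, OBJECT * name )
    {
        int32_t found;
        counter_rec * r = (counter_rec *)hash_insert( table, name, &found );
        if ( !found )
        {
            r->name = object_copy( name );   /* new slot: initialise it, key included */
            r->count = 0;
        }
        ++r->count;
    }

    /* usage:
     *   struct hash * table = hashinit( sizeof( counter_rec ), "counters" );
     *   OBJECT * key = object_new( "hash.cpp" );
     *   bump( table, key );
     *   object_free( key );
     *   hashdone( table );
     */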
+ */ + +static ITEM * hash_search( struct hash * hp, uint32_t keyval, + OBJECT * keydata, ITEM * * previous ) +{ + ITEM * i = *hash_bucket( hp, keyval ); + ITEM * p = 0; + for ( ; i; i = i->next ) + { + if ( object_equal( hash_item_key( i ), keydata ) ) + { + if ( previous ) + *previous = p; + return i; + } + p = i; + } + return 0; +} + + +/* + * hash_insert() - insert a record in the table or return the existing one + */ + +HASHDATA * hash_insert( struct hash * hp, OBJECT * key, int32_t * found ) +{ + ITEM * i; + uint32_t keyval = hash_keyval( key ); + + #ifdef HASH_DEBUG_PROFILE + profile_frame prof[ 1 ]; + if ( DEBUG_PROFILE ) + profile_enter( 0, prof ); + #endif + + if ( !hp->items.more ) + hashrehash( hp ); + + i = hash_search( hp, keyval, key, 0 ); + if ( i ) + *found = 1; + else + { + ITEM * * base = hash_bucket( hp, keyval ); + + /* Try to grab one from the free list. */ + if ( hp->items.free ) + { + i = hp->items.free; + hp->items.free = i->next; + assert( !hash_item_key( i ) ); + } + else + { + i = (ITEM *)hp->items.next; + hp->items.next += hp->items.size; + } + --hp->items.more; + i->next = *base; + *base = i; + *found = 0; + } + + #ifdef HASH_DEBUG_PROFILE + if ( DEBUG_PROFILE ) + profile_exit( prof ); + #endif + + return hash_item_data( i ); +} + + +/* + * hash_find() - find a record in the table or NULL if none exists + */ + +HASHDATA * hash_find( struct hash * hp, OBJECT * key ) +{ + ITEM * i; + uint32_t keyval = hash_keyval( key ); + + #ifdef HASH_DEBUG_PROFILE + profile_frame prof[ 1 ]; + if ( DEBUG_PROFILE ) + profile_enter( 0, prof ); + #endif + + if ( !hp->items.nel ) + { + #ifdef HASH_DEBUG_PROFILE + if ( DEBUG_PROFILE ) + profile_exit( prof ); + #endif + return 0; + } + + i = hash_search( hp, keyval, key, 0 ); + + #ifdef HASH_DEBUG_PROFILE + if ( DEBUG_PROFILE ) + profile_exit( prof ); + #endif + + return i ? hash_item_data( i ) : 0; +} + + +/* + * hashrehash() - resize and rebuild hp->tab, the hash table + */ + +static void hashrehash( struct hash * hp ) +{ + int32_t i = ++hp->items.list; + hp->items.more = i ? 2 * hp->items.nel : hp->inel; + hp->items.next = (char *)BJAM_MALLOC( hp->items.more * hp->items.size ); + hp->items.free = 0; + + hp->items.lists[ i ].nel = hp->items.more; + hp->items.lists[ i ].base = hp->items.next; + hp->items.nel += hp->items.more; + + if ( hp->tab.base ) + BJAM_FREE( (char *)hp->tab.base ); + + hp->tab.nel = hp->items.nel * hp->bloat; + hp->tab.base = (ITEM * *)BJAM_MALLOC( hp->tab.nel * sizeof( ITEM * ) ); + + memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) ); + + for ( i = 0; i < hp->items.list; ++i ) + { + int32_t nel = hp->items.lists[ i ].nel; + char * next = hp->items.lists[ i ].base; + + for ( ; nel--; next += hp->items.size ) + { + ITEM * i = (ITEM *)next; + ITEM * * ip = hp->tab.base + object_hash( hash_item_key( i ) ) % + hp->tab.nel; + /* code currently assumes rehashing only when there are no free + * items + */ + assert( hash_item_key( i ) ); + + i->next = *ip; + *ip = i; + } + } +} + + +void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data + ) +{ + int32_t i; + for ( i = 0; i <= hp->items.list; ++i ) + { + char * next = hp->items.lists[ i ].base; + int32_t nel = hp->items.lists[ i ].nel; + if ( i == hp->items.list ) + nel -= hp->items.more; + + for ( ; nel--; next += hp->items.size ) + { + ITEM * const i = (ITEM *)next; + if ( hash_item_key( i ) != 0 ) /* Do not enumerate freed items. 
*/ + f( hash_item_data( i ), data ); + } + } +} + + +/* + * hash_free() - free a hash table, given its handle + */ + +void hash_free( struct hash * hp ) +{ + int32_t i; + if ( !hp ) + return; + if ( hp->tab.base ) + BJAM_FREE( (char *)hp->tab.base ); + for ( i = 0; i <= hp->items.list; ++i ) + BJAM_FREE( hp->items.lists[ i ].base ); + BJAM_FREE( (char *)hp ); +} + + +static void hashstat( struct hash * hp ) +{ + struct hashstats stats[ 1 ]; + hashstats_init( stats ); + hashstats_add( stats, hp ); + hashstats_print( stats, hp->name ); +} + + +void hashstats_init( struct hashstats * stats ) +{ + stats->count = 0; + stats->num_items = 0; + stats->tab_size = 0; + stats->item_size = 0; + stats->sets = 0; + stats->num_hashes = 0; +} + + +void hashstats_add( struct hashstats * stats, struct hash * hp ) +{ + if ( hp ) + { + ITEM * * tab = hp->tab.base; + int nel = hp->tab.nel; + int count = 0; + int sets = 0; + int i; + + for ( i = 0; i < nel; ++i ) + { + ITEM * item; + int here = 0; + for ( item = tab[ i ]; item; item = item->next ) + ++here; + + count += here; + if ( here > 0 ) + ++sets; + } + + stats->count += count; + stats->sets += sets; + stats->num_items += hp->items.nel; + stats->tab_size += hp->tab.nel; + stats->item_size = hp->items.size; + ++stats->num_hashes; + } +} + + +void hashstats_print( struct hashstats * stats, char const * name ) +{ + out_printf( "%s table: %d+%d+%d (%dK+%luK+%luK) items+table+hash, %f density\n", + name, + stats->count, + stats->num_items, + stats->tab_size, + stats->num_items * stats->item_size / 1024, + (long unsigned)stats->tab_size * sizeof( ITEM * * ) / 1024, + (long unsigned)stats->num_hashes * sizeof( struct hash ) / 1024, + (float)stats->count / (float)stats->sets ); +} + + +void hashdone( struct hash * hp ) +{ + if ( !hp ) + return; + if ( DEBUG_MEM || DEBUG_PROFILE ) + hashstat( hp ); + hash_free( hp ); +} diff --git a/src/boost/tools/build/src/engine/hash.h b/src/boost/tools/build/src/engine/hash.h new file mode 100644 index 000000000..4088894e3 --- /dev/null +++ b/src/boost/tools/build/src/engine/hash.h @@ -0,0 +1,99 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * hash.h - simple in-memory hashing routines + */ + +#ifndef BOOST_JAM_HASH_H +#define BOOST_JAM_HASH_H + +#include "config.h" +#include "object.h" + +/* + * An opaque struct representing an item in the hash table. The first element of + * every struct stored in the table must be an OBJECT * which is treated as the + * key. + */ +typedef struct hashdata HASHDATA; + +/* + * hashinit() - initialize a hash table, returning a handle. + * + * Parameters: + * datalen - item size + * name - used for debugging + */ +struct hash * hashinit( int32_t datalen, char const * name ); + +/* + * hash_free() - free a hash table, given its handle + */ +void hash_free( struct hash * ); +void hashdone( struct hash * ); + +typedef void (* hashenumerate_f)( void *, void * ); + +/* + * hashenumerate() - call f(i, data) on each item, i in the hash table. The + * enumeration order is unspecified. 
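A hedged sketch of an enumeration callback, reusing the hypothetical counter_rec record from the earlier sketch; hashenumerate() hands every live item plus the user data pointer to the callback, in unspecified order:

    #include "output.h"   /* out_printf */

    static void print_rec( void * item, void * data )
    {
        /* counter_rec is the hypothetical record from the sketch above. */
        counter_rec const * r = (counter_rec const *)item;
        out_printf( "%s: %d\n", object_str( r->name ), r->count );
        (void)data;       /* no user data needed here */
    }

    /* usage:  hashenumerate( table, print_rec, 0 ); */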
+ */ +void hashenumerate( struct hash *, void (* f)( void *, void * ), void * data ); + +template +void hash_enumerate( struct hash * h, void (* f)(T *, D *), D * data) +{ + hashenumerate(h, reinterpret_cast(f), data); +} +template +void hash_enumerate( struct hash * h, void (* f)(T *, D *)) +{ + hashenumerate(h, reinterpret_cast(f), nullptr); +} + +/* + * hash_insert() - insert a new item in a hash table, or return an existing one. + * + * Preconditions: + * - hp must be a hash table created by hashinit() + * - key must be an object created by object_new() + * + * Postconditions: + * - if the key does not already exist in the hash table, *found == 0 and the + * result will be a pointer to an uninitialized item. The key of the new + * item must be set to a value equal to key before any further operations on + * the hash table except hashdone(). + * - if the key is present then *found == 1 and the result is a pointer to the + * existing record. + */ +HASHDATA * hash_insert( struct hash *, OBJECT * key, int32_t * found ); + +/* + * hash_find() - find a record in the table or NULL if none exists + */ +HASHDATA * hash_find( struct hash *, OBJECT * key ); + +struct hashstats { + int count; + int num_items; + int tab_size; + int item_size; + int sets; + int num_hashes; +}; + +void hashstats_init( struct hashstats * stats ); +void hashstats_add( struct hashstats * stats, struct hash * ); +void hashstats_print( struct hashstats * stats, char const * name ); + +#endif diff --git a/src/boost/tools/build/src/engine/hcache.cpp b/src/boost/tools/build/src/engine/hcache.cpp new file mode 100644 index 000000000..f46f69bf9 --- /dev/null +++ b/src/boost/tools/build/src/engine/hcache.cpp @@ -0,0 +1,534 @@ +/* + * This file has been donated to Jam. + */ + +/* + * Craig W. McPheeters, Alias|Wavefront. + * + * hcache.c hcache.h - handle caching of #includes in source files. + * + * Create a cache of files scanned for headers. When starting jam, look for the + * cache file and load it if present. When finished the binding phase, create a + * new header cache. The cache contains files, their timestamps and the header + * files found in their scan. During the binding phase of jam, look in the + * header cache first for the headers contained in a file. If the cache is + * present and valid, use its contents. This results in dramatic speedups with + * large projects (e.g. 3min -> 1min startup for one project.) + * + * External routines: + * hcache_init() - read and parse the local .jamdeps file. + * hcache_done() - write a new .jamdeps file. + * hcache() - return list of headers on target. Use cache or do a scan. + * + * The dependency file format is an ASCII file with 1 line per target. Each line + * has the following fields: + * @boundname@ timestamp_sec timestamp_nsec @file@ @file@ @file@ ... 
+ */ + +#include "config.h" + +#ifdef OPT_HEADER_CACHE_EXT + +#include "jam.h" +#include "hcache.h" + +#include "hash.h" +#include "headers.h" +#include "lists.h" +#include "modules.h" +#include "object.h" +#include "parse.h" +#include "regexp.h" +#include "rules.h" +#include "search.h" +#include "timestamp.h" +#include "variable.h" +#include "output.h" + +#include +#include + +typedef struct hcachedata HCACHEDATA ; + +struct hcachedata +{ + OBJECT * boundname; + timestamp time; + LIST * includes; + LIST * hdrscan; /* the HDRSCAN value for this target */ + int age; /* if too old, we will remove it from cache */ + HCACHEDATA * next; +}; + + +static struct hash * hcachehash = 0; +static HCACHEDATA * hcachelist = 0; + +static int queries = 0; +static int hits = 0; + +#define CACHE_FILE_VERSION "version 5" +#define CACHE_RECORD_HEADER "header" +#define CACHE_RECORD_END "end" + + +/* + * Return the name of the header cache file. May return NULL. + * + * The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache + * the result so the user can not change the cache file during header scanning. + */ + +static const char * cache_name( void ) +{ + static OBJECT * name = 0; + if ( !name ) + { + LIST * const hcachevar = var_get( root_module(), constant_HCACHEFILE ); + + if ( !list_empty( hcachevar ) ) + { + TARGET * const t = bindtarget( list_front( hcachevar ) ); + + pushsettings( root_module(), t->settings ); + /* Do not expect the cache file to be generated, so pass 0 as the + * third argument to search. Expect the location to be specified via + * LOCATE, so pass 0 as the fourth argument. + */ + object_free( t->boundname ); + t->boundname = search( t->name, &t->time, 0, 0 ); + popsettings( root_module(), t->settings ); + + name = object_copy( t->boundname ); + } + } + return name ? object_str( name ) : 0; +} + + +/* + * Return the maximum age a cache entry can have before it is purged from the + * cache. + */ + +static int cache_maxage( void ) +{ + int age = 100; + LIST * const var = var_get( root_module(), constant_HCACHEMAXAGE ); + if ( !list_empty( var ) ) + { + age = atoi( object_str( list_front( var ) ) ); + if ( age < 0 ) + age = 0; + } + return age; +} + + +/* + * Read a netstring. The caveat is that the string can not contain ASCII 0. The + * returned value is as returned by object_new(). + */ + +OBJECT * read_netstring( FILE * f ) +{ + unsigned long len; + static char * buf = NULL; + static unsigned long buf_len = 0; + + if ( fscanf( f, " %9lu", &len ) != 1 ) + return NULL; + if ( fgetc( f ) != (int)'\t' ) + return NULL; + + if ( len > 1024 * 64 ) + return NULL; /* sanity check */ + + if ( len > buf_len ) + { + unsigned long new_len = buf_len * 2; + if ( new_len < len ) + new_len = len; + buf = (char *)BJAM_REALLOC( buf, new_len + 1 ); + if ( buf ) + buf_len = new_len; + } + + if ( !buf ) + return NULL; + + if ( fread( buf, 1, len, f ) != len ) + return NULL; + if ( fgetc( f ) != (int)'\n' ) + return NULL; + + buf[ len ] = 0; + return object_new( buf ); +} + + +/* + * Write a netstring. 
+ */ + +void write_netstring( FILE * f, char const * s ) +{ + if ( !s ) + s = ""; + fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s ); +} + + +void hcache_init() +{ + FILE * f; + OBJECT * version = 0; + int header_count = 0; + const char * hcachename; + + if ( hcachehash ) + return; + + hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" ); + + if ( !( hcachename = cache_name() ) ) + return; + + if ( !( f = fopen( hcachename, "rb" ) ) ) + { + if ( errno != ENOENT ) + err_printf( "[errno %d] failed to read hcache file '%s': %s", + errno, hcachename, strerror(errno) ); + return; + } + + version = read_netstring( f ); + + if ( !version || strcmp( object_str( version ), CACHE_FILE_VERSION ) ) + goto bail; + + while ( 1 ) + { + HCACHEDATA cachedata; + HCACHEDATA * c; + OBJECT * record_type = 0; + OBJECT * time_secs_str = 0; + OBJECT * time_nsecs_str = 0; + OBJECT * age_str = 0; + OBJECT * includes_count_str = 0; + OBJECT * hdrscan_count_str = 0; + int i; + int count; + LIST * l; + int found; + + cachedata.boundname = 0; + cachedata.includes = 0; + cachedata.hdrscan = 0; + + record_type = read_netstring( f ); + if ( !record_type ) + { + err_printf( "invalid %s\n", hcachename ); + goto cleanup; + } + if ( !strcmp( object_str( record_type ), CACHE_RECORD_END ) ) + { + object_free( record_type ); + break; + } + if ( strcmp( object_str( record_type ), CACHE_RECORD_HEADER ) ) + { + err_printf( "invalid %s with record separator <%s>\n", + hcachename, record_type ? object_str( record_type ) : "" ); + goto cleanup; + } + + cachedata.boundname = read_netstring( f ); + time_secs_str = read_netstring( f ); + time_nsecs_str = read_netstring( f ); + age_str = read_netstring( f ); + includes_count_str = read_netstring( f ); + + if ( !cachedata.boundname || !time_secs_str || !time_nsecs_str || + !age_str || !includes_count_str ) + { + err_printf( "invalid %s\n", hcachename ); + goto cleanup; + } + + timestamp_init( &cachedata.time, atoi( object_str( time_secs_str ) ), + atoi( object_str( time_nsecs_str ) ) ); + cachedata.age = atoi( object_str( age_str ) ) + 1; + + count = atoi( object_str( includes_count_str ) ); + for ( l = L0, i = 0; i < count; ++i ) + { + OBJECT * const s = read_netstring( f ); + if ( !s ) + { + err_printf( "invalid %s\n", hcachename ); + list_free( l ); + goto cleanup; + } + l = list_push_back( l, s ); + } + cachedata.includes = l; + + hdrscan_count_str = read_netstring( f ); + if ( !hdrscan_count_str ) + { + err_printf( "invalid %s\n", hcachename ); + goto cleanup; + } + + count = atoi( object_str( hdrscan_count_str ) ); + for ( l = L0, i = 0; i < count; ++i ) + { + OBJECT * const s = read_netstring( f ); + if ( !s ) + { + err_printf( "invalid %s\n", hcachename ); + list_free( l ); + goto cleanup; + } + l = list_push_back( l, s ); + } + cachedata.hdrscan = l; + + c = (HCACHEDATA *)hash_insert( hcachehash, cachedata.boundname, &found ) + ; + if ( !found ) + { + c->boundname = cachedata.boundname; + c->includes = cachedata.includes; + c->hdrscan = cachedata.hdrscan; + c->age = cachedata.age; + timestamp_copy( &c->time, &cachedata.time ); + } + else + { + err_printf( "can not insert header cache item, bailing on %s" + "\n", hcachename ); + goto cleanup; + } + + c->next = hcachelist; + hcachelist = c; + + ++header_count; + + object_free( record_type ); + object_free( time_secs_str ); + object_free( time_nsecs_str ); + object_free( age_str ); + object_free( includes_count_str ); + object_free( hdrscan_count_str ); + continue; + +cleanup: + + if ( record_type ) object_free( 
record_type ); + if ( time_secs_str ) object_free( time_secs_str ); + if ( time_nsecs_str ) object_free( time_nsecs_str ); + if ( age_str ) object_free( age_str ); + if ( includes_count_str ) object_free( includes_count_str ); + if ( hdrscan_count_str ) object_free( hdrscan_count_str ); + + if ( cachedata.boundname ) object_free( cachedata.boundname ); + if ( cachedata.includes ) list_free( cachedata.includes ); + if ( cachedata.hdrscan ) list_free( cachedata.hdrscan ); + + goto bail; + } + + if ( DEBUG_HEADER ) + out_printf( "hcache read from file %s\n", hcachename ); + +bail: + if ( version ) + object_free( version ); + fclose( f ); +} + + +void hcache_done() +{ + FILE * f; + HCACHEDATA * c; + int header_count = 0; + const char * hcachename; + int maxage; + + if ( !hcachehash ) + return; + + if ( !( hcachename = cache_name() ) ) + goto cleanup; + + if ( !( f = fopen( hcachename, "wb" ) ) ) + { + err_printf( "[errno %d] failed to write hcache file '%s': %s", + errno, hcachename, strerror(errno) ); + goto cleanup; + } + + maxage = cache_maxage(); + + /* Print out the version. */ + write_netstring( f, CACHE_FILE_VERSION ); + + c = hcachelist; + for ( c = hcachelist; c; c = c->next ) + { + LISTITER iter; + LISTITER end; + char time_secs_str[ 30 ]; + char time_nsecs_str[ 30 ]; + char age_str[ 30 ]; + char includes_count_str[ 30 ]; + char hdrscan_count_str[ 30 ]; + + if ( maxage == 0 ) + c->age = 0; + else if ( c->age > maxage ) + continue; + + sprintf( includes_count_str, "%lu", (long unsigned)list_length( + c->includes ) ); + sprintf( hdrscan_count_str, "%lu", (long unsigned)list_length( + c->hdrscan ) ); + sprintf( time_secs_str, "%lu", (long unsigned)c->time.secs ); + sprintf( time_nsecs_str, "%lu", (long unsigned)c->time.nsecs ); + sprintf( age_str, "%lu", (long unsigned)c->age ); + + write_netstring( f, CACHE_RECORD_HEADER ); + write_netstring( f, object_str( c->boundname ) ); + write_netstring( f, time_secs_str ); + write_netstring( f, time_nsecs_str ); + write_netstring( f, age_str ); + write_netstring( f, includes_count_str ); + for ( iter = list_begin( c->includes ), end = list_end( c->includes ); + iter != end; iter = list_next( iter ) ) + write_netstring( f, object_str( list_item( iter ) ) ); + write_netstring( f, hdrscan_count_str ); + for ( iter = list_begin( c->hdrscan ), end = list_end( c->hdrscan ); + iter != end; iter = list_next( iter ) ) + write_netstring( f, object_str( list_item( iter ) ) ); + fputs( "\n", f ); + ++header_count; + } + write_netstring( f, CACHE_RECORD_END ); + + if ( DEBUG_HEADER ) + out_printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n", + hcachename, header_count, queries ? 
100.0 * hits / queries : 0 ); + + fclose ( f ); + +cleanup: + for ( c = hcachelist; c; c = c->next ) + { + list_free( c->includes ); + list_free( c->hdrscan ); + object_free( c->boundname ); + } + + hcachelist = 0; + if ( hcachehash ) + hashdone( hcachehash ); + hcachehash = 0; +} + + +LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan ) +{ + HCACHEDATA * c; + + ++queries; + + if ( ( c = (HCACHEDATA *)hash_find( hcachehash, t->boundname ) ) ) + { + if ( !timestamp_cmp( &c->time, &t->time ) ) + { + LIST * const l1 = hdrscan; + LIST * const l2 = c->hdrscan; + LISTITER iter1 = list_begin( l1 ); + LISTITER const end1 = list_end( l1 ); + LISTITER iter2 = list_begin( l2 ); + LISTITER const end2 = list_end( l2 ); + while ( iter1 != end1 && iter2 != end2 ) + { + if ( !object_equal( list_item( iter1 ), list_item( iter2 ) ) ) + iter1 = end1; + else + { + iter1 = list_next( iter1 ); + iter2 = list_next( iter2 ); + } + } + if ( iter1 != end1 || iter2 != end2 ) + { + if ( DEBUG_HEADER ) + { + out_printf( "HDRSCAN out of date in cache for %s\n", + object_str( t->boundname ) ); + out_printf(" real : "); + list_print( hdrscan ); + out_printf( "\n cached: " ); + list_print( c->hdrscan ); + out_printf( "\n" ); + } + + list_free( c->includes ); + list_free( c->hdrscan ); + c->includes = L0; + c->hdrscan = L0; + } + else + { + if ( DEBUG_HEADER ) + out_printf( "using header cache for %s\n", object_str( + t->boundname ) ); + c->age = 0; + ++hits; + return list_copy( c->includes ); + } + } + else + { + if ( DEBUG_HEADER ) + out_printf ("header cache out of date for %s\n", object_str( + t->boundname ) ); + list_free( c->includes ); + list_free( c->hdrscan ); + c->includes = L0; + c->hdrscan = L0; + } + } + else + { + int found; + c = (HCACHEDATA *)hash_insert( hcachehash, t->boundname, &found ); + if ( !found ) + { + c->boundname = object_copy( t->boundname ); + c->next = hcachelist; + hcachelist = c; + } + } + + /* 'c' points at the cache entry. Its out of date. */ + { + LIST * const l = headers1( L0, t->boundname, rec, re ); + + timestamp_copy( &c->time, &t->time ); + c->age = 0; + c->includes = list_copy( l ); + c->hdrscan = list_copy( hdrscan ); + + return l; + } +} + +#endif /* OPT_HEADER_CACHE_EXT */ diff --git a/src/boost/tools/build/src/engine/hcache.h b/src/boost/tools/build/src/engine/hcache.h new file mode 100644 index 000000000..95267c267 --- /dev/null +++ b/src/boost/tools/build/src/engine/hcache.h @@ -0,0 +1,20 @@ +/* + * This file is not part of Jam + */ + +/* + * hcache.h - handle #includes in source files + */ +#ifndef HCACHE_H +#define HCACHE_H + +#include "config.h" +#include "lists.h" +#include "regexp.h" +#include "rules.h" + +void hcache_init( void ); +void hcache_done( void ); +LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan ); + +#endif diff --git a/src/boost/tools/build/src/engine/hdrmacro.cpp b/src/boost/tools/build/src/engine/hdrmacro.cpp new file mode 100644 index 000000000..946a5fb44 --- /dev/null +++ b/src/boost/tools/build/src/engine/hdrmacro.cpp @@ -0,0 +1,146 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * hdrmacro.c - handle header files that define macros used in #include + * statements. 
+ * + * we look for lines like "#define MACRO <....>" or '#define MACRO " "' in + * the target file. When found, we then phony up a rule invocation like: + * + * $(HDRRULE) : ; + * + * External routines: + * headers1() - scan a target for "#include MACRO" lines and try to resolve + * them when needed + * + * Internal routines: + * headers1() - using regexp, scan a file and build include LIST + */ + +#include "jam.h" +#include "hdrmacro.h" + +#include "compile.h" +#include "hash.h" +#include "lists.h" +#include "object.h" +#include "parse.h" +#include "rules.h" +#include "jam_strings.h" +#include "subst.h" +#include "variable.h" +#include "output.h" + +#include +#include + +/* this type is used to store a dictionary of file header macros */ +typedef struct header_macro +{ + OBJECT * symbol; + OBJECT * filename; /* we could maybe use a LIST here ?? */ +} HEADER_MACRO; + +static struct hash * header_macros_hash = 0; + + +/* + * headers() - scan a target for include files and call HDRRULE + */ + +#define MAXINC 10 + +void macro_headers( TARGET * t ) +{ + static regexp * re = 0; + FILE * f; + char buf[ 1024 ]; + + if ( DEBUG_HEADER ) + out_printf( "macro header scan for %s\n", object_str( t->name ) ); + + /* This regexp is used to detect lines of the form + * "#define MACRO <....>" or "#define MACRO "....." + * in the header macro files. + */ + if ( !re ) + { + OBJECT * const re_str = object_new( + "^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*" + "[<\"]([^\">]*)[\">].*$" ); + re = regex_compile( re_str ); + object_free( re_str ); + } + + if ( !( f = fopen( object_str( t->boundname ), "r" ) ) ) + { + err_printf( "[errno %d] failed to scan include file '%s': %s", + errno, object_str( t->boundname ), strerror(errno) ); + return; + } + + while ( fgets( buf, sizeof( buf ), f ) ) + { + HEADER_MACRO var; + HEADER_MACRO * v = &var; + + if ( regexec( re, buf ) && re->startp[ 1 ] ) + { + OBJECT * symbol; + int found; + /* we detected a line that looks like "#define MACRO filename */ + ( (char *)re->endp[ 1 ] )[ 0 ] = '\0'; + ( (char *)re->endp[ 2 ] )[ 0 ] = '\0'; + + if ( DEBUG_HEADER ) + out_printf( "macro '%s' used to define filename '%s' in '%s'\n", + re->startp[ 1 ], re->startp[ 2 ], object_str( t->boundname ) + ); + + /* add macro definition to hash table */ + if ( !header_macros_hash ) + header_macros_hash = hashinit( sizeof( HEADER_MACRO ), + "hdrmacros" ); + + symbol = object_new( re->startp[ 1 ] ); + v = (HEADER_MACRO *)hash_insert( header_macros_hash, symbol, &found + ); + if ( !found ) + { + v->symbol = symbol; + v->filename = object_new( re->startp[ 2 ] ); /* never freed */ + } + else + object_free( symbol ); + /* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */ + /* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */ + } + } + + fclose( f ); +} + + +OBJECT * macro_header_get( OBJECT * macro_name ) +{ + HEADER_MACRO * v; + if ( header_macros_hash && ( v = (HEADER_MACRO *)hash_find( + header_macros_hash, macro_name ) ) ) + { + if ( DEBUG_HEADER ) + out_printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name + ), object_str( v->filename ) ); + return v->filename; + } + return 0; +} diff --git a/src/boost/tools/build/src/engine/hdrmacro.h b/src/boost/tools/build/src/engine/hdrmacro.h new file mode 100644 index 000000000..1489aef9c --- /dev/null +++ b/src/boost/tools/build/src/engine/hdrmacro.h @@ -0,0 +1,22 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. 
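To illustrate what this scan is for (the file and macro names here are hypothetical): a macro header defines the macro, and a later #include of that macro is resolved through macro_header_get() during header scanning rather than by the ordinary HDRSCAN patterns.

    /* hypothetical macro header, scanned by macro_headers(): */
    #define PROJECT_CONFIG_HDR "config-gcc.h"

    /* hypothetical source file, scanned later by the include scanner: */
    #include PROJECT_CONFIG_HDR   /* macro_header_get() maps PROJECT_CONFIG_HDR
                                     to "config-gcc.h", which is then added to
                                     the include list */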
+ */ + +/* + * hdrmacro.h - parses header files for #define MACRO or + * #define MACRO "filename" definitions + */ + +#ifndef HDRMACRO_SW20111118_H +#define HDRMACRO_SW20111118_H + +#include "config.h" +#include "object.h" +#include "rules.h" + +void macro_headers( TARGET * ); +OBJECT * macro_header_get( OBJECT * macro_name ); + +#endif diff --git a/src/boost/tools/build/src/engine/headers.cpp b/src/boost/tools/build/src/engine/headers.cpp new file mode 100644 index 000000000..79f393ad7 --- /dev/null +++ b/src/boost/tools/build/src/engine/headers.cpp @@ -0,0 +1,207 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * headers.c - handle #includes in source files + * + * Using regular expressions provided as the variable $(HDRSCAN), headers() + * searches a file for #include files and phonies up a rule invocation: + * $(HDRRULE) : ; + * + * External routines: + * headers() - scan a target for include files and call HDRRULE + * + * Internal routines: + * headers1() - using regexp, scan a file and build include LIST + */ + +#include "jam.h" +#include "headers.h" + +#include "compile.h" +#include "hdrmacro.h" +#include "lists.h" +#include "modules.h" +#include "object.h" +#include "parse.h" +#include "rules.h" +#include "subst.h" +#include "variable.h" +#include "output.h" + +#ifdef OPT_HEADER_CACHE_EXT +# include "hcache.h" +#endif + +#include +#include + +#ifndef OPT_HEADER_CACHE_EXT +static LIST * headers1( LIST *, OBJECT * file, int rec, regexp * re[] ); +#endif + + +/* + * headers() - scan a target for include files and call HDRRULE + */ + +#define MAXINC 10 + +void headers( TARGET * t ) +{ + LIST * hdrscan; + LIST * hdrrule; + #ifndef OPT_HEADER_CACHE_EXT + LIST * headlist = L0; + #endif + regexp * re[ MAXINC ]; + int rec = 0; + LISTITER iter; + LISTITER end; + + hdrscan = var_get( root_module(), constant_HDRSCAN ); + if ( list_empty( hdrscan ) ) + return; + + hdrrule = var_get( root_module(), constant_HDRRULE ); + if ( list_empty( hdrrule ) ) + return; + + if ( DEBUG_HEADER ) + out_printf( "header scan %s\n", object_str( t->name ) ); + + /* Compile all regular expressions in HDRSCAN */ + iter = list_begin( hdrscan ); + end = list_end( hdrscan ); + for ( ; ( rec < MAXINC ) && iter != end; iter = list_next( iter ) ) + { + re[ rec++ ] = regex_compile( list_item( iter ) ); + } + + /* Doctor up call to HDRRULE rule */ + /* Call headers1() to get LIST of included files. */ + { + FRAME frame[ 1 ]; + frame_init( frame ); + lol_add( frame->args, list_new( object_copy( t->name ) ) ); +#ifdef OPT_HEADER_CACHE_EXT + lol_add( frame->args, hcache( t, rec, re, hdrscan ) ); +#else + lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) ); +#endif + + if ( lol_get( frame->args, 1 ) ) + { + OBJECT * rulename = list_front( hdrrule ); + /* The third argument to HDRRULE is the bound name of $(<). */ + lol_add( frame->args, list_new( object_copy( t->boundname ) ) ); + list_free( evaluate_rule( bindrule( rulename, frame->module ), rulename, frame ) ); + } + + /* Clean up. */ + frame_free( frame ); + } +} + + +/* + * headers1() - using regexp, scan a file and build include LIST. 
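+ *
+ * (Illustrative sketch with assumed Jam-level names, not taken from this
+ * tree: a Jambase or Jamfile typically arranges something like
+ *     HDRSCAN on $(t) = "^[ ]*#[ ]*include[ ]*[<\"]([^\">]*)[\">]" ;
+ *     HDRRULE on $(t) = HdrRule ;
+ * so that every filename collected here is passed to that rule by
+ * headers() above.)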
+ */ + +#ifndef OPT_HEADER_CACHE_EXT +static +#endif +LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] ) +{ + FILE * f; + char buf[ 1024 ]; + int i; + static regexp * re_macros = 0; + +#ifdef OPT_IMPROVED_PATIENCE_EXT + static int count = 0; + ++count; + if ( ( ( count == 100 ) || !( count % 1000 ) ) && DEBUG_MAKE ) + { + out_printf( "...patience...\n" ); + out_flush(); + } +#endif + + /* The following regexp is used to detect cases where a file is included + * through a line like "#include MACRO". + */ + if ( re_macros == 0 ) + { + OBJECT * const re_str = object_new( + "#[ \t]*include[ \t]*([A-Za-z][A-Za-z0-9_]*).*$" ); + re_macros = regex_compile( re_str ); + object_free( re_str ); + } + + if ( !( f = fopen( object_str( file ), "r" ) ) ) + { + /* No source files will be generated when -n flag is passed */ + if ( !globs.noexec || errno != ENOENT ) + err_printf( "[errno %d] failed to scan file '%s': %s", + errno, object_str( file ), strerror(errno) ); + return l; + } + + while ( fgets( buf, sizeof( buf ), f ) ) + { + for ( i = 0; i < rec; ++i ) + if ( regexec( re[ i ], buf ) && re[ i ]->startp[ 1 ] ) + { + ( (char *)re[ i ]->endp[ 1 ] )[ 0 ] = '\0'; + if ( DEBUG_HEADER ) + out_printf( "header found: %s\n", re[ i ]->startp[ 1 ] ); + l = list_push_back( l, object_new( re[ i ]->startp[ 1 ] ) ); + } + + /* Special treatment for #include MACRO. */ + if ( regexec( re_macros, buf ) && re_macros->startp[ 1 ] ) + { + OBJECT * header_filename; + OBJECT * macro_name; + + ( (char *)re_macros->endp[ 1 ] )[ 0 ] = '\0'; + + if ( DEBUG_HEADER ) + out_printf( "macro header found: %s", re_macros->startp[ 1 ] ); + + macro_name = object_new( re_macros->startp[ 1 ] ); + header_filename = macro_header_get( macro_name ); + object_free( macro_name ); + if ( header_filename ) + { + if ( DEBUG_HEADER ) + out_printf( " resolved to '%s'\n", object_str( header_filename ) + ); + l = list_push_back( l, object_copy( header_filename ) ); + } + else + { + if ( DEBUG_HEADER ) + out_printf( " ignored !!\n" ); + } + } + } + + fclose( f ); + return l; +} + + +void regerror( char const * s ) +{ + out_printf( "re error %s\n", s ); +} diff --git a/src/boost/tools/build/src/engine/headers.h b/src/boost/tools/build/src/engine/headers.h new file mode 100644 index 000000000..a875c2d87 --- /dev/null +++ b/src/boost/tools/build/src/engine/headers.h @@ -0,0 +1,26 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * headers.h - handle #includes in source files + */ + +#ifndef HEADERS_SW20111118_H +#define HEADERS_SW20111118_H + +#include "config.h" +#include "object.h" +#include "rules.h" +#include "regexp.h" + +void headers( TARGET * t ); + +#ifdef OPT_HEADER_CACHE_EXT +struct regexp; +LIST * headers1( LIST *l, OBJECT * file, int rec, struct regexp *re[] ); +#endif + +#endif diff --git a/src/boost/tools/build/src/engine/jam.cpp b/src/boost/tools/build/src/engine/jam.cpp new file mode 100644 index 000000000..5c3baff7c --- /dev/null +++ b/src/boost/tools/build/src/engine/jam.cpp @@ -0,0 +1,723 @@ +/* + * /+\ + * +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * \+/ + * + * This file is part of jam. + * + * License is hereby granted to use this software and distribute it freely, as + * long as this copyright notice is retained and modifications are clearly + * marked. + * + * ALL WARRANTIES ARE HEREBY DISCLAIMED. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. 
+ * Copyright 2018 Rene Rivera + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * jam.c - make redux + * + * See Jam.html for usage information. + * + * These comments document the code. + * + * The top half of the code is structured such: + * + * jam + * / | \ + * +---+ | \ + * / | \ + * jamgram option \ + * / | \ \ + * / | \ \ + * / | \ | + * scan | compile make + * | | / | \ / | \ + * | | / | \ / | \ + * | | / | \ / | \ + * jambase parse | rules search make1 + * | | | \ + * | | | \ + * | | | \ + * builtins timestamp command execute + * | + * | + * | + * filesys + * + * + * The support routines are called by all of the above, but themselves are + * layered thus: + * + * variable|expand + * / | | + * / | | + * / | | + * lists | pathsys + * \ | + * \ hash + * \ | + * \ | + * \ | + * \ | + * \ | + * object + * + * Roughly, the modules are: + * + * builtins.c - jam's built-in rules + * command.c - maintain lists of commands + * compile.c - compile parsed jam statements + * exec*.c - execute a shell script on a specific OS + * file*.c - scan directories and archives on a specific OS + * hash.c - simple in-memory hashing routines + * hdrmacro.c - handle header file parsing for filename macro definitions + * headers.c - handle #includes in source files + * jamgram.y - jam grammar + * lists.c - maintain lists of strings + * make.c - bring a target up to date, once rules are in place + * make1.c - execute command to bring targets up to date + * object.c - string manipulation routines + * option.c - command line option processing + * parse.c - make and destroy parse trees as driven by the parser + * path*.c - manipulate file names on a specific OS + * hash.c - simple in-memory hashing routines + * regexp.c - Henry Spencer's regexp + * rules.c - access to RULEs, TARGETs, and ACTIONs + * scan.c - the jam yacc scanner + * search.c - find a target along $(SEARCH) or $(LOCATE) + * timestamp.c - get the timestamp of a file or archive member + * variable.c - handle jam multi-element variables + */ + + +#include "jam.h" + +#include "patchlevel.h" + +/* Keep JAMVERSYM in sync with VERSION. */ +/* It can be accessed as $(JAMVERSION) in the Jamfile. */ +#define JAM_STRINGIZE(X) JAM_DO_STRINGIZE(X) +#define JAM_DO_STRINGIZE(X) #X +#define VERSION_MAJOR_SYM JAM_STRINGIZE(VERSION_MAJOR) +#define VERSION_MINOR_SYM JAM_STRINGIZE(VERSION_MINOR) +#define VERSION_PATCH_SYM JAM_STRINGIZE(VERSION_PATCH) +#define VERSION VERSION_MAJOR_SYM "." VERSION_MINOR_SYM +#define JAMVERSYM "JAMVERSION=" VERSION + +#include "builtins.h" +#include "class.h" +#include "compile.h" +#include "constants.h" +#include "debugger.h" +#include "filesys.h" +#include "function.h" +#include "hcache.h" +#include "lists.h" +#include "make.h" +#include "object.h" +#include "option.h" +#include "output.h" +#include "parse.h" +#include "cwd.h" +#include "rules.h" +#include "scan.h" +#include "search.h" +#include "startup.h" +#include "jam_strings.h" +#include "timestamp.h" +#include "variable.h" +#include "execcmd.h" +#include "sysinfo.h" + +#include +#include + +/* Macintosh is "special" */ +#ifdef OS_MAC +# include +#endif + +/* And UNIX for this. 
*/ +#ifdef unix +# include +# include +#endif + +struct globs globs = +{ + 0, /* noexec */ + 1, /* jobs */ + 0, /* quitquick */ + 0, /* newestfirst */ + 0, /* pipes action stdout and stderr merged to action output */ +#ifdef OS_MAC + { 0, 0 }, /* debug - suppress tracing output */ +#else + { 0, 1 }, /* debug ... */ +#endif + 0, /* output commands, not run them */ + 0, /* action timeout */ + 0 /* maximum buffer size zero is all output */ +}; + +/* Symbols to be defined as true for use in Jambase. */ +static const char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 }; + + +/* on Win32-LCC */ +#if defined( OS_NT ) && defined( __LCC__ ) +# define use_environ _environ +#endif + +#if defined( __MWERKS__) +# define use_environ _environ + extern char * * _environ; +#endif + +#ifndef use_environ +# define use_environ environ +# if !defined( __WATCOM__ ) && !defined( OS_OS2 ) && !defined( OS_NT ) + extern char **environ; +# endif +#endif + +#if YYDEBUG != 0 + extern int yydebug; +#endif + +#ifndef NDEBUG +static void run_unit_tests() +{ +# if defined( USE_EXECNT ) + extern void execnt_unit_test(); + execnt_unit_test(); +# endif + string_unit_test(); +} +#endif + +int anyhow = 0; + +#ifdef HAVE_PYTHON + extern PyObject * bjam_call ( PyObject * self, PyObject * args ); + extern PyObject * bjam_import_rule ( PyObject * self, PyObject * args ); + extern PyObject * bjam_define_action( PyObject * self, PyObject * args ); + extern PyObject * bjam_variable ( PyObject * self, PyObject * args ); + extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args ); + extern PyObject * bjam_caller ( PyObject * self, PyObject * args ); + int python_optimize = 1; /* Set Python optimzation on by default */ +#endif + +void regex_done(); + +char const * saved_argv0; + +static void usage( const char * progname ) +{ + err_printf("\nusage: %s [ options ] targets...\n\n", progname); + + err_printf("-a Build all targets, even if they are current.\n"); + err_printf("-dx Set the debug level to x (0-13,console,mi).\n"); + err_printf("-fx Read x instead of bootstrap.\n"); + /* err_printf( "-g Build from newest sources first.\n" ); */ + err_printf("-jx Run up to x shell commands concurrently.\n"); + err_printf("-lx Limit actions to x number of seconds after which they are stopped.\n"); + err_printf("-mx Maximum target output saved (kb), default is to save all output.\n"); + err_printf("-n Don't actually execute the updating actions.\n"); + err_printf("-ox Mirror all output to file x.\n"); + err_printf("-px x=0, pipes action stdout and stderr merged into action output.\n"); + err_printf("-q Quit quickly as soon as a target fails.\n"); + err_printf("-sx=y Set variable x=y, overriding environment.\n"); + err_printf("-tx Rebuild x, even if it is up-to-date.\n"); + err_printf("-v Print the version of jam and exit.\n"); +#ifdef HAVE_PYTHON + err_printf("-z Disable Python Optimization and enable asserts\n"); +#endif + err_printf("--x Option is ignored.\n\n"); + + b2::clean_exit( EXITBAD ); +} + +int guarded_main( int argc, char * * argv ) +{ + int n; + char * s; + struct bjam_option optv[ N_OPTS ]; + int status = 0; + int arg_c = argc; + char * * arg_v = argv; + char const * progname = argv[ 0 ]; + module_t * environ_module; + int is_debugger; + b2::system_info sys_info; + + saved_argv0 = argv[ 0 ]; + last_update_now_status = 0; + +#ifdef JAM_DEBUGGER + + is_debugger = 0; + + if ( getoptions( argc - 1, argv + 1, "-:l:m:d:j:p:f:gs:t:ano:qv", optv ) < 0 ) + usage( progname ); + + if ( ( s = getoptval( optv, 'd', 0 ) ) ) + 
{ + if ( strcmp( s, "mi" ) == 0 ) + { + debug_interface = DEBUG_INTERFACE_MI; + is_debugger = 1; + } + else if ( strcmp( s, "console" ) == 0 ) + { + debug_interface = DEBUG_INTERFACE_CONSOLE; + is_debugger = 1; + } + } + +#if NT + + if ( argc >= 3 ) + { + /* Check whether this instance is being run by the debugger. */ + size_t opt_len = strlen( debugger_opt ); + if ( strncmp( argv[ 1 ], debugger_opt, opt_len ) == 0 && + strncmp( argv[ 2 ], debugger_opt, opt_len ) == 0 ) + { + debug_init_handles( argv[ 1 ] + opt_len, argv[ 2 ] + opt_len ); + /* Fix up argc/argv to hide the internal options */ + arg_c = argc = (argc - 2); + argv[ 2 ] = argv[ 0 ]; + arg_v = argv = (argv + 2); + debug_interface = DEBUG_INTERFACE_CHILD; + } + } + + if ( is_debugger ) + { + return debugger(); + } + +#else + + if ( is_debugger ) + { + if ( setjmp( debug_child_data.jmp ) != 0 ) + { + arg_c = argc = debug_child_data.argc; + arg_v = argv = (char * *)debug_child_data.argv; + debug_interface = DEBUG_INTERFACE_CHILD; + } + else + { + return debugger(); + } + } + +#endif + +#endif + + --argc; + ++argv; + + #ifdef HAVE_PYTHON + #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qvz" + #else + #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qv" + #endif + + if ( getoptions( argc, argv, OPTSTRING, optv ) < 0 ) + { + usage( progname ); + } + + /* Set default parallel jobs to match cpu threads. This can be overridden + the usual way with -jX or PARALLELISM env var. */ + globs.jobs = sys_info.cpu_thread_count(); + + /* Version info. */ + if ( ( s = getoptval( optv, 'v', 0 ) ) ) + { + out_printf( "B2 Version %s. %s.\n", VERSION, OSMINOR ); + out_printf( " Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.\n" ); + out_printf( " Copyright 2001 David Turner.\n" ); + out_printf( " Copyright 2001-2004 David Abrahams.\n" ); + out_printf( " Copyright 2002-2019 Rene Rivera.\n" ); + out_printf( " Copyright 2003-2015 Vladimir Prus.\n" ); + out_printf( "\n DEFAULTS: jobs = %i\n", globs.jobs); + return EXITOK; + } + + /* Pick up interesting options. */ + if ( ( s = getoptval( optv, 'n', 0 ) ) ) + { + ++globs.noexec; + globs.debug[ 2 ] = 1; + } + + if ( ( s = getoptval( optv, 'p', 0 ) ) ) + { + /* Undocumented -p3 (acts like both -p1 -p2) means separate pipe action + * stdout and stderr. + */ + globs.pipe_action = atoi( s ); + if ( globs.pipe_action < 0 || 3 < globs.pipe_action ) + { + err_printf( "Invalid pipe descriptor '%d', valid values are -p[0..3]." + "\n", globs.pipe_action ); + b2::clean_exit( EXITBAD ); + } + } + + if ( ( s = getoptval( optv, 'q', 0 ) ) ) + globs.quitquick = 1; + + if ( ( s = getoptval( optv, 'a', 0 ) ) ) + anyhow++; + + if ( ( s = getoptval( optv, 'j', 0 ) ) ) + { + globs.jobs = atoi( s ); + if ( globs.jobs < 1 ) + { + err_printf( "Invalid value for the '-j' option.\n" ); + b2::clean_exit( EXITBAD ); + } + } + + if ( ( s = getoptval( optv, 'g', 0 ) ) ) + globs.newestfirst = 1; + + if ( ( s = getoptval( optv, 'l', 0 ) ) ) + globs.timeout = atoi( s ); + + if ( ( s = getoptval( optv, 'm', 0 ) ) ) + globs.max_buf = atoi( s ) * 1024; /* convert to kb */ + + #ifdef HAVE_PYTHON + if ( ( s = getoptval( optv, 'z', 0 ) ) ) + python_optimize = 0; /* disable python optimization */ + #endif + + /* Turn on/off debugging */ + for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n ) + { + int i; + + /* First -d, turn off defaults. 
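+         * (So, as an assumed illustration of the handling below: a bare
+         * '-d2' enables levels 1 and 2, while '-d+2' enables level 2 only.)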
*/ + if ( !n ) + for ( i = 0; i < DEBUG_MAX; ++i ) + globs.debug[i] = 0; + + i = atoi( s ); + + if ( ( i < 0 ) || ( i >= DEBUG_MAX ) ) + { + out_printf( "Invalid debug level '%s'.\n", s ); + continue; + } + + /* n turns on levels 1-n. */ + /* +n turns on level n. */ + if ( *s == '+' ) + globs.debug[ i ] = 1; + else while ( i ) + globs.debug[ i-- ] = 1; + } + + /* If an output file is specified, set globs.out to that. */ + if ( ( s = getoptval( optv, 'o', 0 ) ) ) + { + if ( !( globs.out = fopen( s, "w" ) ) ) + { + err_printf( "[errno %d] failed to write output file '%s': %s", + errno, s, strerror(errno) ); + b2::clean_exit( EXITBAD ); + } + /* ++globs.noexec; */ + } + + { + PROFILE_ENTER( MAIN ); + +#ifdef HAVE_PYTHON + { + PROFILE_ENTER( MAIN_PYTHON ); + Py_OptimizeFlag = python_optimize; + Py_Initialize(); + { + static PyMethodDef BjamMethods[] = { + {"call", bjam_call, METH_VARARGS, + "Call the specified bjam rule."}, + {"import_rule", bjam_import_rule, METH_VARARGS, + "Imports Python callable to bjam."}, + {"define_action", bjam_define_action, METH_VARARGS, + "Defines a command line action."}, + {"variable", bjam_variable, METH_VARARGS, + "Obtains a variable from bjam's global module."}, + {"backtrace", bjam_backtrace, METH_VARARGS, + "Returns bjam backtrace from the last call into Python."}, + {"caller", bjam_caller, METH_VARARGS, + "Returns the module from which the last call into Python is made."}, + {NULL, NULL, 0, NULL} + }; + + Py_InitModule( "bjam", BjamMethods ); + } + PROFILE_EXIT( MAIN_PYTHON ); + } +#endif + +#ifndef NDEBUG + run_unit_tests(); +#endif +#if YYDEBUG != 0 + if ( DEBUG_PARSE ) + yydebug = 1; +#endif + + /* Set JAMDATE. */ + { + timestamp current; + timestamp_current( ¤t ); + var_set( root_module(), constant_JAMDATE, list_new( outf_time( + ¤t ) ), VAR_SET ); + } + + /* Set JAM_VERSION. */ + var_set( root_module(), constant_JAM_VERSION, + list_push_back( list_push_back( list_new( + object_new( VERSION_MAJOR_SYM ) ), + object_new( VERSION_MINOR_SYM ) ), + object_new( VERSION_PATCH_SYM ) ), + VAR_SET ); + + /* Set JAMUNAME. */ +#ifdef unix + { + struct utsname u; + + if ( uname( &u ) >= 0 ) + { + var_set( root_module(), constant_JAMUNAME, + list_push_back( + list_push_back( + list_push_back( + list_push_back( + list_new( + object_new( u.sysname ) ), + object_new( u.nodename ) ), + object_new( u.release ) ), + object_new( u.version ) ), + object_new( u.machine ) ), VAR_SET ); + } + } +#endif /* unix */ + + /* Set JAM_TIMESTAMP_RESOLUTION. */ + { + timestamp fmt_resolution[ 1 ]; + file_supported_fmt_resolution( fmt_resolution ); + var_set( root_module(), constant_JAM_TIMESTAMP_RESOLUTION, list_new( + object_new( timestamp_timestr( fmt_resolution ) ) ), VAR_SET ); + } + + /* Load up environment variables. */ + + /* First into the global module, with splitting, for backward + * compatibility. + */ + var_defines( root_module(), use_environ, 1 ); + + environ_module = bindmodule( constant_ENVIRON ); + /* Then into .ENVIRON, without splitting. */ + var_defines( environ_module, use_environ, 0 ); + + /* + * Jam defined variables OS & OSPLAT. We load them after environment, so + * that setting OS in environment does not change Jam's notion of the + * current platform. + */ + var_defines( root_module(), othersyms, 1 ); + + /* Load up variables set on command line. 
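+     * (For example, assuming an invocation like 'b2 -sTOOLSET=gcc', the
+     * loop below defines TOOLSET=gcc both in the global module, with list
+     * splitting, and in the .ENVIRON module, without splitting, matching
+     * the environment handling above.)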
*/ + for ( n = 0; ( s = getoptval( optv, 's', n ) ); ++n ) + { + char * symv[ 2 ]; + symv[ 0 ] = s; + symv[ 1 ] = 0; + var_defines( root_module(), symv, 1 ); + var_defines( environ_module, symv, 0 ); + } + + /* Set the ARGV to reflect the complete list of arguments of invocation. + */ + for ( n = 0; n < arg_c; ++n ) + var_set( root_module(), constant_ARGV, list_new( object_new( + arg_v[ n ] ) ), VAR_APPEND ); + + /* Initialize built-in rules. */ + load_builtins(); + b2::startup::load_builtins(); + + /* Add the targets in the command line to the update list. */ + for ( n = 1; n < arg_c; ++n ) + { + if ( arg_v[ n ][ 0 ] == '-' ) + { + const char * f = "-:l:d:j:f:gs:t:ano:qv"; + for ( ; *f; ++f ) if ( *f == arg_v[ n ][ 1 ] ) break; + if ( f[0] && f[1] && ( f[ 1 ] == ':' ) && ( arg_v[ n ][ 2 ] == '\0' ) ) ++n; + } + else + { + OBJECT * const target = object_new( arg_v[ n ] ); + mark_target_for_updating( target ); + object_free( target ); + } + } + + /* The build system may set the PARALLELISM variable to override -j + * options. + */ + { + LIST * const p = var_get( root_module(), constant_PARALLELISM ); + if ( !list_empty( p ) ) + { + int const j = atoi( object_str( list_front( p ) ) ); + if ( j < 1 ) + out_printf( "Invalid value of PARALLELISM: %s.\n", + object_str( list_front( p ) ) ); + else + globs.jobs = j; + } + } + + /* KEEP_GOING overrides -q option. */ + { + LIST * const p = var_get( root_module(), constant_KEEP_GOING ); + if ( !list_empty( p ) ) + globs.quitquick = atoi( object_str( list_front( p ) ) ) ? 0 : 1; + } + + + if ( list_empty( targets_to_update() ) ) + mark_target_for_updating( constant_all ); + + /* Parse ruleset. */ + { + FRAME frame[ 1 ]; + frame_init( frame ); + for ( n = 0; ( s = getoptval( optv, 'f', n ) ); ++n ) + { + OBJECT * const filename = object_new( s ); + parse_file( filename, frame ); + object_free( filename ); + } + + if ( !n ) + status = b2::startup::bootstrap(frame) ? 0 : 13; + } + + /* FIXME: What shall we do if builtin_update_now, + * the sole place setting last_update_now_status, + * failed earlier? + */ + + if ( status == 0 ) + status = yyanyerrors(); + if ( status == 0 ) + { + /* Manually touch -t targets. */ + for ( n = 0; ( s = getoptval( optv, 't', n ) ); ++n ) + { + OBJECT * const target = object_new( s ); + touch_target( target ); + object_free( target ); + } + + /* Now make target. */ + { + PROFILE_ENTER( MAIN_MAKE ); + LIST * const targets = targets_to_update(); + if ( !list_empty( targets ) ) + status |= make( targets, anyhow ); + else + status = last_update_now_status; + PROFILE_EXIT( MAIN_MAKE ); + } + } + + PROFILE_EXIT( MAIN ); + } + + return status ? EXITBAD : EXITOK; +} + +int main( int argc, char * * argv ) +{ + BJAM_MEM_INIT(); + +#ifdef OS_MAC + InitGraf( &qd.thePort ); +#endif + + cwd_init(); + constants_init(); + + int result = EXIT_SUCCESS; + try + { + result = guarded_main( argc, argv ); + } + catch ( b2::exit_result exit_code ) + { + result = (int)exit_code; + } + + if ( DEBUG_PROFILE ) + profile_dump(); + +#ifdef OPT_HEADER_CACHE_EXT + hcache_done(); +#endif + + clear_targets_to_update(); + + /* Widely scattered cleanup. */ + property_set_done(); + exec_done(); + file_done(); + rules_done(); + timestamp_done(); + search_done(); + class_done(); + modules_done(); + regex_done(); + cwd_done(); + path_done(); + function_done(); + list_done(); + constants_done(); + object_done(); + + /* Close log out. 
*/ + if ( globs.out ) + fclose( globs.out ); + +#ifdef HAVE_PYTHON + Py_Finalize(); +#endif + + BJAM_MEM_CLOSE(); + + return result; +} diff --git a/src/boost/tools/build/src/engine/jam.h b/src/boost/tools/build/src/engine/jam.h new file mode 100644 index 000000000..34a6fbeaa --- /dev/null +++ b/src/boost/tools/build/src/engine/jam.h @@ -0,0 +1,533 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * jam.h - includes and globals for jam + */ + +#ifndef JAM_H_VP_2003_08_01 +#define JAM_H_VP_2003_08_01 + +#include "config.h" + +#ifdef HAVE_PYTHON +#include +#endif + +/* Assume popen support is available unless known otherwise. */ +#define HAVE_POPEN 1 + +/* + * VMS, OPENVMS + */ + +#ifdef VMS + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define OSMINOR "OS=VMS" +#define OSMAJOR "VMS=true" +#define OS_VMS +#define MAXLINE 1024 /* longest 'together' actions */ +#define PATH_DELIM '/' /* use CRTL POSIX-style handling */ +#define SPLITPATH ',' +#define EXITOK EXIT_SUCCESS +#define EXITBAD EXIT_FAILURE +#define DOWNSHIFT_PATHS + +/* This may be inaccurate. */ +#ifndef __DECC +#define OSPLAT "OSPLAT=VAX" +#endif + +#define glob jam_glob /* use jam's glob, not CRTL's */ + +#endif + +/* + * Windows NT + */ + +#ifdef NT + +#include +#include +#include +#ifndef __MWERKS__ + #include +#endif +#include +#include +#include +#include +#include + +#define OSMAJOR "NT=true" +#define OSMINOR "OS=NT" +#define OS_NT +#define SPLITPATH ';' +#define MAXLINE (undefined__see_execnt_c) /* max chars per command line */ +#define USE_EXECNT +#define USE_PATHNT +#define PATH_DELIM '\\' + +/* AS400 cross-compile from NT. */ + +#ifdef AS400 + #undef OSMINOR + #undef OSMAJOR + #define OSMAJOR "AS400=true" + #define OSMINOR "OS=AS400" + #define OS_AS400 +#endif + +/* Metrowerks Standard Library on Windows. */ + +#ifdef __MSL__ + #undef HAVE_POPEN +#endif + +#endif /* #ifdef NT */ + + +/* + * Windows MingW32 + */ + +#ifdef MINGW + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define OSMAJOR "MINGW=true" +#define OSMINOR "OS=MINGW" +#define OS_NT +#define SPLITPATH ';' +#define MAXLINE 996 /* max chars per command line */ +#define USE_EXECUNIX +#define USE_PATHNT +#define PATH_DELIM '\\' + +#endif /* #ifdef MINGW */ + + +/* + * God fearing UNIX. 
+ */ + +#ifndef OSMINOR + +#define OSMAJOR "UNIX=true" +#define USE_EXECUNIX +#define USE_FILEUNIX +#define USE_PATHUNIX +#define PATH_DELIM '/' + +#ifdef _AIX + #define unix + #define MAXLINE 23552 /* 24k - 1k, max chars per command line */ + #define OSMINOR "OS=AIX" + #define OS_AIX + #define NO_VFORK +#endif +#ifdef AMIGA + #define OSMINOR "OS=AMIGA" + #define OS_AMIGA +#endif +#ifdef __BEOS__ + #define unix + #define OSMINOR "OS=BEOS" + #define OS_BEOS + #define NO_VFORK +#endif +#ifdef __bsdi__ + #define OSMINOR "OS=BSDI" + #define OS_BSDI +#endif +#if defined (COHERENT) && defined (_I386) + #define OSMINOR "OS=COHERENT" + #define OS_COHERENT + #define NO_VFORK +#endif +#if defined(__cygwin__) || defined(__CYGWIN__) + #define OSMINOR "OS=CYGWIN" + #define OS_CYGWIN +#endif +#if defined(__FreeBSD__) && !defined(__DragonFly__) + #define OSMINOR "OS=FREEBSD" + #define OS_FREEBSD +#endif +#ifdef __DragonFly__ + #define OSMINOR "OS=DRAGONFLYBSD" + #define OS_DRAGONFLYBSD +#endif +#ifdef __DGUX__ + #define OSMINOR "OS=DGUX" + #define OS_DGUX +#endif +#ifdef __GNU__ + #define OSMINOR "OS=HURD" + #define OS_HURD +#endif +#ifdef __hpux + #define OSMINOR "OS=HPUX" + #define OS_HPUX +#endif +#ifdef __HAIKU__ + #define unix + #define OSMINOR "OS=HAIKU" + #define OS_HAIKU +#endif +#ifdef __OPENNT + #define unix + #define OSMINOR "OS=INTERIX" + #define OS_INTERIX + #define NO_VFORK +#endif +#ifdef __sgi + #define OSMINOR "OS=IRIX" + #define OS_IRIX + #define NO_VFORK +#endif +#ifdef __ISC + #define OSMINOR "OS=ISC" + #define OS_ISC + #define NO_VFORK +#endif +#if defined(linux) || defined(__linux) || \ + defined(__linux__) || defined(__gnu_linux__) + #define OSMINOR "OS=LINUX" + #define OS_LINUX +#endif +#ifdef __Lynx__ + #define OSMINOR "OS=LYNX" + #define OS_LYNX + #define NO_VFORK + #define unix +#endif +#ifdef __MACHTEN__ + #define OSMINOR "OS=MACHTEN" + #define OS_MACHTEN +#endif +#ifdef mpeix + #define unix + #define OSMINOR "OS=MPEIX" + #define OS_MPEIX + #define NO_VFORK +#endif +#ifdef __MVS__ + #define unix + #define OSMINOR "OS=MVS" + #define OS_MVS +#endif +#ifdef _ATT4 + #define OSMINOR "OS=NCR" + #define OS_NCR +#endif +#ifdef __NetBSD__ + #define unix + #define OSMINOR "OS=NETBSD" + #define OS_NETBSD + #define NO_VFORK +#endif +#ifdef __QNX__ + #define unix + #ifdef __QNXNTO__ + #define OSMINOR "OS=QNXNTO" + #define OS_QNXNTO + #else + #define OSMINOR "OS=QNX" + #define OS_QNX + #define NO_VFORK + #define MAXLINE 996 /* max chars per command line */ + #endif +#endif +#ifdef NeXT + #ifdef __APPLE__ + #define OSMINOR "OS=RHAPSODY" + #define OS_RHAPSODY + #else + #define OSMINOR "OS=NEXT" + #define OS_NEXT + #endif +#endif +#ifdef __APPLE__ + #define unix + #define OSMINOR "OS=MACOSX" + #define OS_MACOSX +#endif +#ifdef __osf__ + #ifndef unix + #define unix + #endif + #define OSMINOR "OS=OSF" + #define OS_OSF +#endif +#ifdef _SEQUENT_ + #define OSMINOR "OS=PTX" + #define OS_PTX +#endif +#ifdef M_XENIX + #define OSMINOR "OS=SCO" + #define OS_SCO + #define NO_VFORK +#endif +#ifdef sinix + #define unix + #define OSMINOR "OS=SINIX" + #define OS_SINIX +#endif +#ifdef sun + #if defined(__svr4__) || defined(__SVR4) + #define OSMINOR "OS=SOLARIS" + #define OS_SOLARIS + #else + #define OSMINOR "OS=SUNOS" + #define OS_SUNOS + #endif +#endif +#ifdef ultrix + #define OSMINOR "OS=ULTRIX" + #define OS_ULTRIX +#endif +#ifdef _UNICOS + #define OSMINOR "OS=UNICOS" + #define OS_UNICOS +#endif +#if defined(__USLC__) && !defined(M_XENIX) + #define OSMINOR "OS=UNIXWARE" + #define OS_UNIXWARE +#endif +#ifdef 
__OpenBSD__ + #define OSMINOR "OS=OPENBSD" + #define OS_OPENBSD + #ifndef unix + #define unix + #endif +#endif +#if defined (__FreeBSD_kernel__) && !defined(__FreeBSD__) + #define OSMINOR "OS=KFREEBSD" + #define OS_KFREEBSD +#endif +#ifndef OSMINOR + #define OSMINOR "OS=UNKNOWN" +#endif + +/* All the UNIX includes */ + +#include + +#ifndef OS_MPEIX + #include +#endif + +#include +#include +#include +#include +#include +#include +#include + +#ifndef OS_QNX + #include +#endif + +#ifndef OS_ULTRIX + #include +#endif + +#if !defined( OS_BSDI ) && \ + !defined( OS_FREEBSD ) && \ + !defined( OS_DRAGONFLYBSD ) && \ + !defined( OS_NEXT ) && \ + !defined( OS_MACHTEN ) && \ + !defined( OS_MACOSX ) && \ + !defined( OS_RHAPSODY ) && \ + !defined( OS_MVS ) && \ + !defined( OS_OPENBSD ) + #include +#endif + +#endif /* #ifndef OSMINOR */ + + +/* + * OSPLAT definitions - suppressed when it is a one-of-a-kind. + */ + +#if defined( _M_PPC ) || \ + defined( PPC ) || \ + defined( ppc ) || \ + defined( __powerpc__ ) || \ + defined( __ppc__ ) + #define OSPLAT "OSPLAT=PPC" +#endif + +#if defined( _ALPHA_ ) || \ + defined( __alpha__ ) + #define OSPLAT "OSPLAT=AXP" +#endif + +#if defined( _i386_ ) || \ + defined( __i386__ ) || \ + defined( __i386 ) || \ + defined( _M_IX86 ) + #define OSPLAT "OSPLAT=X86" +#endif + +#if defined( __ia64__ ) || \ + defined( __IA64__ ) || \ + defined( __ia64 ) + #define OSPLAT "OSPLAT=IA64" +#endif + +#if defined( __x86_64__ ) || \ + defined( __amd64__ ) || \ + defined( _M_AMD64 ) + #define OSPLAT "OSPLAT=X86_64" +#endif + +#if defined( __sparc__ ) || \ + defined( __sparc ) + #define OSPLAT "OSPLAT=SPARC" +#endif + +#ifdef __mips__ + #if _MIPS_SIM == _MIPS_SIM_ABI64 + #define OSPLAT "OSPLAT=MIPS64" + #elif _MIPS_SIM == _MIPS_SIM_ABI32 + #define OSPLAT "OSPLAT=MIPS32" + #endif +#endif + +#if defined( __arm__ ) || \ + defined( __aarch64__ ) + #define OSPLAT "OSPLAT=ARM" +#endif + +#ifdef __s390__ + #define OSPLAT "OSPLAT=390" +#endif + +#ifdef __hppa + #define OSPLAT "OSPLAT=PARISC" +#endif + +#ifndef OSPLAT + #define OSPLAT "" +#endif + + +/* + * Jam implementation misc. + */ + +#ifndef MAXLINE + #define MAXLINE 102400 /* max chars per command line */ +#endif + +#ifndef EXITOK + #define EXITOK 0 + #define EXITBAD 1 +#endif + +#ifndef SPLITPATH + #define SPLITPATH ':' +#endif + +/* You probably do not need to muck with these. */ + +#define MAXSYM 1024 /* longest symbol in the environment */ +#define MAXJPATH 1024 /* longest filename */ + +#define MAXARGC 32 /* words in $(JAMSHELL) */ + +/* Jam private definitions below. */ + +#define DEBUG_MAX 14 + + +struct globs +{ + int noexec; + int jobs; + int quitquick; + int newestfirst; /* build newest sources first */ + int pipe_action; + char debug[ DEBUG_MAX ]; + FILE * out; /* mirror output here */ + long timeout; /* number of seconds to limit actions to, + * default 0 for no limit. 
+ */ + int dart; /* output build and test results formatted for + * Dart + */ + int max_buf; /* maximum amount of output saved from target + * (kb) + */ +}; + +extern struct globs globs; + +#define DEBUG_MAKE ( globs.debug[ 1 ] ) /* show actions when executed */ +#define DEBUG_MAKEQ ( globs.debug[ 2 ] ) /* show even quiet actions */ +#define DEBUG_EXEC ( globs.debug[ 2 ] ) /* show text of actons */ +#define DEBUG_MAKEPROG ( globs.debug[ 3 ] ) /* show make0 progress */ +#define DEBUG_BIND ( globs.debug[ 3 ] ) /* show when files bound */ + +#define DEBUG_EXECCMD ( globs.debug[ 4 ] ) /* show execcmds()'s work */ + +#define DEBUG_COMPILE ( globs.debug[ 5 ] ) /* show rule invocations */ + +#define DEBUG_HEADER ( globs.debug[ 6 ] ) /* show result of header scan */ +#define DEBUG_BINDSCAN ( globs.debug[ 6 ] ) /* show result of dir scan */ +#define DEBUG_SEARCH ( globs.debug[ 6 ] ) /* show binding attempts */ + +#define DEBUG_VARSET ( globs.debug[ 7 ] ) /* show variable settings */ +#define DEBUG_VARGET ( globs.debug[ 8 ] ) /* show variable fetches */ +#define DEBUG_VAREXP ( globs.debug[ 8 ] ) /* show variable expansions */ +#define DEBUG_IF ( globs.debug[ 8 ] ) /* show 'if' calculations */ +#define DEBUG_LISTS ( globs.debug[ 9 ] ) /* show list manipulation */ +#define DEBUG_SCAN ( globs.debug[ 9 ] ) /* show scanner tokens */ +#define DEBUG_MEM ( globs.debug[ 9 ] ) /* show memory use */ + +#define DEBUG_PROFILE ( globs.debug[ 10 ] ) /* dump rule execution times */ +#define DEBUG_PARSE ( globs.debug[ 11 ] ) /* debug parsing */ +#define DEBUG_GRAPH ( globs.debug[ 12 ] ) /* debug dependencies */ +#define DEBUG_FATE ( globs.debug[ 13 ] ) /* show fate changes in make0() */ + +/* Everyone gets the memory definitions. */ +#include "mem.h" + +/* They also get the profile functions. */ +#include "debug.h" + +#endif diff --git a/src/boost/tools/build/src/engine/jam_strings.cpp b/src/boost/tools/build/src/engine/jam_strings.cpp new file mode 100644 index 000000000..814a5679c --- /dev/null +++ b/src/boost/tools/build/src/engine/jam_strings.cpp @@ -0,0 +1,240 @@ +/* Copyright David Abrahams 2004. Distributed under the Boost */ +/* Software License, Version 1.0. (See accompanying */ +/* file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) */ + +#include "jam.h" +#include "jam_strings.h" + +#include +#include +#include + + +#ifndef NDEBUG +# define JAM_STRING_MAGIC ((char)0xcf) +# define JAM_STRING_MAGIC_SIZE 4 +static void assert_invariants( string * self ) +{ + int i; + + if ( self->value == 0 ) + { + assert( self->size == 0 ); + assert( self->capacity == 0 ); + assert( self->opt[ 0 ] == 0 ); + return; + } + + assert( self->size < self->capacity ); + assert( ( self->capacity <= int32_t(sizeof( self->opt )) ) == ( self->value == self->opt ) ); + assert( self->value[ self->size ] == 0 ); + /* String objects modified manually after construction to contain embedded + * '\0' characters are considered structurally valid. 
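+     *
+     * (Summary of the invariants checked here: short strings live in the
+     * in-object 'opt' buffer, so value == opt exactly while capacity fits
+     * within sizeof( opt ); longer strings move to a heap block allocated
+     * with JAM_STRING_MAGIC_SIZE extra guard bytes past 'capacity', which
+     * the loop below re-verifies against JAM_STRING_MAGIC.)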
+ */ + assert( strlen( self->value ) <= size_t(self->size) ); + + for ( i = 0; i < 4; ++i ) + { + assert( self->magic[ i ] == JAM_STRING_MAGIC ); + assert( self->value[ self->capacity + i ] == JAM_STRING_MAGIC ); + } +} +#else +# define JAM_STRING_MAGIC_SIZE 0 +# define assert_invariants(x) do {} while (0) +#endif + + +void string_new( string * s ) +{ + s->value = s->opt; + s->size = 0; + s->capacity = sizeof( s->opt ); + s->opt[ 0 ] = 0; +#ifndef NDEBUG + memset( s->magic, JAM_STRING_MAGIC, sizeof( s->magic ) ); +#endif + assert_invariants( s ); +} + + +void string_free( string * s ) +{ + assert_invariants( s ); + if ( s->value != s->opt ) + BJAM_FREE( s->value ); + string_new( s ); +} + + +static void string_reserve_internal( string * self, int32_t capacity ) +{ + if ( self->value == self->opt ) + { + self->value = (char *)BJAM_MALLOC_ATOMIC( size_t(capacity) + + JAM_STRING_MAGIC_SIZE ); + self->value[ 0 ] = 0; + size_t opt_size = sizeof(self->opt); // Workaround sizeof in strncat warning. + strncat( self->value, self->opt, opt_size ); + assert( strlen( self->value ) <= size_t(self->capacity) && "Regression test" ); + } + else + { + self->value = (char *)BJAM_REALLOC( self->value, size_t(capacity) + + JAM_STRING_MAGIC_SIZE ); + } +#ifndef NDEBUG + memcpy( self->value + capacity, self->magic, JAM_STRING_MAGIC_SIZE ); +#endif + self->capacity = capacity; +} + + +void string_reserve( string * self, int32_t capacity ) +{ + assert_invariants( self ); + if ( capacity <= self->capacity ) + return; + string_reserve_internal( self, capacity ); + assert_invariants( self ); +} + + +static void maybe_reserve( string * self, int32_t new_size ) +{ + int32_t capacity = self->capacity; + if ( capacity <= new_size ) + { + int32_t new_capacity = capacity; + while ( new_capacity <= new_size ) + new_capacity <<= 1; + string_reserve_internal( self, new_capacity ); + } +} + + +void string_append( string * self, char const * rhs ) +{ + int32_t rhs_size = int32_t(strlen( rhs )); + int32_t new_size = self->size + rhs_size; + assert_invariants( self ); + + maybe_reserve( self, new_size ); + + memcpy( self->value + self->size, rhs, size_t(rhs_size) + 1 ); + self->size = new_size; + + assert_invariants( self ); +} + + +void string_append_range( string * self, char const * start, char const * finish ) +{ + int32_t rhs_size = int32_t(finish - start); + int32_t new_size = self->size + rhs_size; + assert_invariants( self ); + + maybe_reserve( self, new_size ); + + if ( start != finish ) + memcpy( self->value + self->size, start, size_t(rhs_size) ); + self->size = new_size; + self->value[ new_size ] = 0; + + assert_invariants( self ); +} + + +void string_copy( string * s, char const * rhs ) +{ + string_new( s ); + string_append( s, rhs ); +} + +void string_truncate( string * self, int32_t n ) +{ + assert_invariants( self ); + assert( n <= self->capacity ); + self->value[ self->size = n ] = 0; + assert_invariants( self ); +} + + +void string_pop_back( string * self ) +{ + string_truncate( self, self->size - 1 ); +} + + +void string_push_back( string * self, char x ) +{ + string_append_range( self, &x, &x + 1 ); +} + + +char string_back( string * self ) +{ + assert_invariants( self ); + return self->value[ self->size - 1 ]; +} + +void string_rtrim( string * self ) +{ + char *p; + assert_invariants( self ); + p = self->value + self->size - 1; + for ( ; p >= self->value && ( *p == '\0' || isspace( *p ) ); *p-- = 0 ); +} + +#ifndef NDEBUG +void string_unit_test() +{ + { + string s[ 1 ]; + unsigned long i; + unsigned long 
const limit = sizeof( s->opt ) * 2 + 2; + string_new( s ); + assert( s->value == s->opt ); + for ( i = 0; i < limit; ++i ) + { + string_push_back( s, (char)( i + 1 ) ); + assert( s->size == int32_t(i + 1) ); + } + assert( s->size == int32_t(limit) ); + assert( s->value != s->opt ); + for ( i = 0; i < limit; ++i ) + assert( s->value[ i ] == (char)( i + 1 ) ); + string_free( s ); + } + + { + const char * const original = " \n\t\v Foo \r\n\v \tBar\n\n\r\r\t\n\v\t \t"; + string copy[ 1 ]; + string_copy( copy, original ); + assert( !strcmp( copy->value, original ) ); + assert( copy->size == int32_t(strlen( original )) ); + string_free( copy ); + } + + { + const char * const foo = "Foo "; + string foo_copy[ 1 ]; + string_copy( foo_copy, foo ); + string_rtrim( foo_copy ); + assert( !strcmp( foo_copy->value, "Foo" ) ); + + string_rtrim( foo_copy ); + assert( !strcmp( foo_copy->value, "Foo" ) ); + } + { + const char * const bar = "Bar\0\0\0"; + string bar_copy[ 1 ]; + string_copy( bar_copy, bar ); + string_rtrim( bar_copy ); + assert( !strcmp( bar_copy->value, "Bar" ) ); + + string_rtrim( bar_copy ); + assert( !strcmp( bar_copy->value, "Bar" ) ); + } +} +#endif diff --git a/src/boost/tools/build/src/engine/jam_strings.h b/src/boost/tools/build/src/engine/jam_strings.h new file mode 100644 index 000000000..9f7fd43f0 --- /dev/null +++ b/src/boost/tools/build/src/engine/jam_strings.h @@ -0,0 +1,38 @@ +/* + * Copyright 2004. David Abrahams + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef JAM_STRINGS_DWA20011024_H +#define JAM_STRINGS_DWA20011024_H + +#include "config.h" +#include + +typedef struct string +{ + char * value; + int32_t size; + int32_t capacity; + char opt[ 32 ]; +#ifndef NDEBUG + char magic[ 4 ]; +#endif +} string; + +void string_new( string * ); +void string_copy( string *, char const * ); +void string_free( string * ); +void string_append( string *, char const * ); +void string_append_range( string *, char const *, char const * ); +void string_push_back( string * s, char x ); +void string_reserve( string *, int32_t ); +void string_truncate( string *, int32_t ); +void string_pop_back( string * ); +char string_back( string * ); +void string_rtrim( string * ); +void string_unit_test(); + +#endif diff --git a/src/boost/tools/build/src/engine/jamgram.cpp b/src/boost/tools/build/src/engine/jamgram.cpp new file mode 100644 index 000000000..0cda88047 --- /dev/null +++ b/src/boost/tools/build/src/engine/jamgram.cpp @@ -0,0 +1,2287 @@ +/* A Bison parser, made by GNU Bison 3.8.2. */ + +/* Bison implementation for Yacc-like parsers in C + + Copyright (C) 1984, 1989-1990, 2000-2015, 2018-2021 Free Software Foundation, + Inc. + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
*/ + +/* As a special exception, you may create a larger work that contains + part or all of the Bison parser skeleton and distribute that work + under terms of your choice, so long as that work isn't itself a + parser generator using the skeleton or a modified version thereof + as a parser skeleton. Alternatively, if you modify or redistribute + the parser skeleton itself, you may (at your option) remove this + special exception, which will cause the skeleton and the resulting + Bison output files to be licensed under the GNU General Public + License without this special exception. + + This special exception was added by the Free Software Foundation in + version 2.2 of Bison. */ + +/* C LALR(1) parser skeleton written by Richard Stallman, by + simplifying the original so-called "semantic" parser. */ + +/* DO NOT RELY ON FEATURES THAT ARE NOT DOCUMENTED in the manual, + especially those whose name start with YY_ or yy_. They are + private implementation details that can be changed or removed. */ + +/* All symbols defined below should begin with yy or YY, to avoid + infringing on user name space. This should be done even for local + variables, as they might otherwise be expanded by user macros. + There are some unavoidable exceptions within include files to + define necessary library symbols; they are noted "INFRINGES ON + USER NAME SPACE" below. */ + +/* Identify Bison output, and Bison version. */ +#define YYBISON 30802 + +/* Bison version string. */ +#define YYBISON_VERSION "3.8.2" + +/* Skeleton name. */ +#define YYSKELETON_NAME "yacc.c" + +/* Pure parsers. */ +#define YYPURE 0 + +/* Push parsers. */ +#define YYPUSH 0 + +/* Pull parsers. */ +#define YYPULL 1 + + + + +/* First part of user prologue. */ +#line 98 "src/engine/jamgram.y" + +#include "jam.h" + +#include "lists.h" +#include "parse.h" +#include "scan.h" +#include "compile.h" +#include "object.h" +#include "rules.h" + +# define YYINITDEPTH 5000 /* for C++ parsing */ +# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */ + +# define F0 -1 +# define P0 (PARSE *)0 +# define S0 (OBJECT *)0 + +# define pappend( l,r ) parse_make( PARSE_APPEND,l,r,P0,S0,S0,0 ) +# define peval( c,l,r ) parse_make( PARSE_EVAL,l,r,P0,S0,S0,c ) +# define pfor( s,l,r,x ) parse_make( PARSE_FOREACH,l,r,P0,s,S0,x ) +# define pif( l,r,t ) parse_make( PARSE_IF,l,r,t,S0,S0,0 ) +# define pincl( l ) parse_make( PARSE_INCLUDE,l,P0,P0,S0,S0,0 ) +# define plist( s ) parse_make( PARSE_LIST,P0,P0,P0,s,S0,0 ) +# define plocal( l,r,t ) parse_make( PARSE_LOCAL,l,r,t,S0,S0,0 ) +# define pmodule( l,r ) parse_make( PARSE_MODULE,l,r,P0,S0,S0,0 ) +# define pclass( l,r ) parse_make( PARSE_CLASS,l,r,P0,S0,S0,0 ) +# define pnull() parse_make( PARSE_NULL,P0,P0,P0,S0,S0,0 ) +# define pon( l,r ) parse_make( PARSE_ON,l,r,P0,S0,S0,0 ) +# define prule( s,p ) parse_make( PARSE_RULE,p,P0,P0,s,S0,0 ) +# define prules( l,r ) parse_make( PARSE_RULES,l,r,P0,S0,S0,0 ) +# define pset( l,r,a ) parse_make( PARSE_SET,l,r,P0,S0,S0,a ) +# define pset1( l,r,t,a ) parse_make( PARSE_SETTINGS,l,r,t,S0,S0,a ) +# define psetc( s,p,a,l ) parse_make( PARSE_SETCOMP,p,a,P0,s,S0,l ) +# define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f ) +# define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 ) +# define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 ) +# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 ) +# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 ) +# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 ) + +# define pnode( 
l,r ) parse_make( F0,l,r,P0,S0,S0,0 ) +# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 ) + + +#line 116 "src/engine/jamgram.cpp" + +# ifndef YY_CAST +# ifdef __cplusplus +# define YY_CAST(Type, Val) static_cast (Val) +# define YY_REINTERPRET_CAST(Type, Val) reinterpret_cast (Val) +# else +# define YY_CAST(Type, Val) ((Type) (Val)) +# define YY_REINTERPRET_CAST(Type, Val) ((Type) (Val)) +# endif +# endif +# ifndef YY_NULLPTR +# if defined __cplusplus +# if 201103L <= __cplusplus +# define YY_NULLPTR nullptr +# else +# define YY_NULLPTR 0 +# endif +# else +# define YY_NULLPTR ((void*)0) +# endif +# endif + +#include "jamgram.hpp" +/* Symbol kind. */ +enum yysymbol_kind_t +{ + YYSYMBOL_YYEMPTY = -2, + YYSYMBOL_YYEOF = 0, /* "end of file" */ + YYSYMBOL_YYerror = 1, /* error */ + YYSYMBOL_YYUNDEF = 2, /* "invalid token" */ + YYSYMBOL__BANG_t = 3, /* _BANG_t */ + YYSYMBOL__BANG_EQUALS_t = 4, /* _BANG_EQUALS_t */ + YYSYMBOL__AMPER_t = 5, /* _AMPER_t */ + YYSYMBOL__AMPERAMPER_t = 6, /* _AMPERAMPER_t */ + YYSYMBOL__LPAREN_t = 7, /* _LPAREN_t */ + YYSYMBOL__RPAREN_t = 8, /* _RPAREN_t */ + YYSYMBOL__PLUS_EQUALS_t = 9, /* _PLUS_EQUALS_t */ + YYSYMBOL__COLON_t = 10, /* _COLON_t */ + YYSYMBOL__SEMIC_t = 11, /* _SEMIC_t */ + YYSYMBOL__LANGLE_t = 12, /* _LANGLE_t */ + YYSYMBOL__LANGLE_EQUALS_t = 13, /* _LANGLE_EQUALS_t */ + YYSYMBOL__EQUALS_t = 14, /* _EQUALS_t */ + YYSYMBOL__RANGLE_t = 15, /* _RANGLE_t */ + YYSYMBOL__RANGLE_EQUALS_t = 16, /* _RANGLE_EQUALS_t */ + YYSYMBOL__QUESTION_EQUALS_t = 17, /* _QUESTION_EQUALS_t */ + YYSYMBOL__LBRACKET_t = 18, /* _LBRACKET_t */ + YYSYMBOL__RBRACKET_t = 19, /* _RBRACKET_t */ + YYSYMBOL_ACTIONS_t = 20, /* ACTIONS_t */ + YYSYMBOL_BIND_t = 21, /* BIND_t */ + YYSYMBOL_BREAK_t = 22, /* BREAK_t */ + YYSYMBOL_CASE_t = 23, /* CASE_t */ + YYSYMBOL_CLASS_t = 24, /* CLASS_t */ + YYSYMBOL_CONTINUE_t = 25, /* CONTINUE_t */ + YYSYMBOL_DEFAULT_t = 26, /* DEFAULT_t */ + YYSYMBOL_ELSE_t = 27, /* ELSE_t */ + YYSYMBOL_EXISTING_t = 28, /* EXISTING_t */ + YYSYMBOL_FOR_t = 29, /* FOR_t */ + YYSYMBOL_IF_t = 30, /* IF_t */ + YYSYMBOL_IGNORE_t = 31, /* IGNORE_t */ + YYSYMBOL_IN_t = 32, /* IN_t */ + YYSYMBOL_INCLUDE_t = 33, /* INCLUDE_t */ + YYSYMBOL_LOCAL_t = 34, /* LOCAL_t */ + YYSYMBOL_MODULE_t = 35, /* MODULE_t */ + YYSYMBOL_ON_t = 36, /* ON_t */ + YYSYMBOL_PIECEMEAL_t = 37, /* PIECEMEAL_t */ + YYSYMBOL_QUIETLY_t = 38, /* QUIETLY_t */ + YYSYMBOL_RETURN_t = 39, /* RETURN_t */ + YYSYMBOL_RULE_t = 40, /* RULE_t */ + YYSYMBOL_SWITCH_t = 41, /* SWITCH_t */ + YYSYMBOL_TOGETHER_t = 42, /* TOGETHER_t */ + YYSYMBOL_UPDATED_t = 43, /* UPDATED_t */ + YYSYMBOL_WHILE_t = 44, /* WHILE_t */ + YYSYMBOL__LBRACE_t = 45, /* _LBRACE_t */ + YYSYMBOL__BAR_t = 46, /* _BAR_t */ + YYSYMBOL__BARBAR_t = 47, /* _BARBAR_t */ + YYSYMBOL__RBRACE_t = 48, /* _RBRACE_t */ + YYSYMBOL_ARG = 49, /* ARG */ + YYSYMBOL_STRING = 50, /* STRING */ + YYSYMBOL_YYACCEPT = 51, /* $accept */ + YYSYMBOL_run = 52, /* run */ + YYSYMBOL_block = 53, /* block */ + YYSYMBOL_rules = 54, /* rules */ + YYSYMBOL_55_1 = 55, /* $@1 */ + YYSYMBOL_56_2 = 56, /* $@2 */ + YYSYMBOL_null = 57, /* null */ + YYSYMBOL_assign_list_opt = 58, /* assign_list_opt */ + YYSYMBOL_59_3 = 59, /* $@3 */ + YYSYMBOL_arglist_opt = 60, /* arglist_opt */ + YYSYMBOL_local_opt = 61, /* local_opt */ + YYSYMBOL_else_opt = 62, /* else_opt */ + YYSYMBOL_rule = 63, /* rule */ + YYSYMBOL_64_4 = 64, /* $@4 */ + YYSYMBOL_65_5 = 65, /* $@5 */ + YYSYMBOL_66_6 = 66, /* $@6 */ + YYSYMBOL_67_7 = 67, /* $@7 */ + YYSYMBOL_68_8 = 68, /* $@8 */ + YYSYMBOL_69_9 = 69, /* $@9 */ + 
YYSYMBOL_70_10 = 70, /* $@10 */ + YYSYMBOL_71_11 = 71, /* $@11 */ + YYSYMBOL_72_12 = 72, /* $@12 */ + YYSYMBOL_73_13 = 73, /* $@13 */ + YYSYMBOL_74_14 = 74, /* $@14 */ + YYSYMBOL_75_15 = 75, /* $@15 */ + YYSYMBOL_76_16 = 76, /* $@16 */ + YYSYMBOL_77_17 = 77, /* $@17 */ + YYSYMBOL_78_18 = 78, /* $@18 */ + YYSYMBOL_79_19 = 79, /* $@19 */ + YYSYMBOL_80_20 = 80, /* $@20 */ + YYSYMBOL_81_21 = 81, /* $@21 */ + YYSYMBOL_82_22 = 82, /* $@22 */ + YYSYMBOL_83_23 = 83, /* $@23 */ + YYSYMBOL_84_24 = 84, /* $@24 */ + YYSYMBOL_85_25 = 85, /* $@25 */ + YYSYMBOL_86_26 = 86, /* $@26 */ + YYSYMBOL_assign = 87, /* assign */ + YYSYMBOL_expr = 88, /* expr */ + YYSYMBOL_89_27 = 89, /* $@27 */ + YYSYMBOL_90_28 = 90, /* $@28 */ + YYSYMBOL_91_29 = 91, /* $@29 */ + YYSYMBOL_92_30 = 92, /* $@30 */ + YYSYMBOL_93_31 = 93, /* $@31 */ + YYSYMBOL_94_32 = 94, /* $@32 */ + YYSYMBOL_95_33 = 95, /* $@33 */ + YYSYMBOL_96_34 = 96, /* $@34 */ + YYSYMBOL_97_35 = 97, /* $@35 */ + YYSYMBOL_98_36 = 98, /* $@36 */ + YYSYMBOL_99_37 = 99, /* $@37 */ + YYSYMBOL_100_38 = 100, /* $@38 */ + YYSYMBOL_101_39 = 101, /* $@39 */ + YYSYMBOL_cases = 102, /* cases */ + YYSYMBOL_case = 103, /* case */ + YYSYMBOL_104_40 = 104, /* $@40 */ + YYSYMBOL_105_41 = 105, /* $@41 */ + YYSYMBOL_lol = 106, /* lol */ + YYSYMBOL_list = 107, /* list */ + YYSYMBOL_listp = 108, /* listp */ + YYSYMBOL_arg = 109, /* arg */ + YYSYMBOL_110_42 = 110, /* @42 */ + YYSYMBOL_func = 111, /* func */ + YYSYMBOL_112_43 = 112, /* $@43 */ + YYSYMBOL_113_44 = 113, /* $@44 */ + YYSYMBOL_114_45 = 114, /* $@45 */ + YYSYMBOL_eflags = 115, /* eflags */ + YYSYMBOL_eflag = 116, /* eflag */ + YYSYMBOL_bindlist = 117, /* bindlist */ + YYSYMBOL_118_46 = 118 /* $@46 */ +}; +typedef enum yysymbol_kind_t yysymbol_kind_t; + + + + +#ifdef short +# undef short +#endif + +/* On compilers that do not define __PTRDIFF_MAX__ etc., make sure + and (if available) are included + so that the code can choose integer types of a good width. */ + +#ifndef __PTRDIFF_MAX__ +# include /* INFRINGES ON USER NAME SPACE */ +# if defined __STDC_VERSION__ && 199901 <= __STDC_VERSION__ +# include /* INFRINGES ON USER NAME SPACE */ +# define YY_STDINT_H +# endif +#endif + +/* Narrow types that promote to a signed type and that can represent a + signed or unsigned integer of at least N bits. In tables they can + save space and decrease cache pressure. Promoting to a signed type + helps avoid bugs in integer arithmetic. */ + +#ifdef __INT_LEAST8_MAX__ +typedef __INT_LEAST8_TYPE__ yytype_int8; +#elif defined YY_STDINT_H +typedef int_least8_t yytype_int8; +#else +typedef signed char yytype_int8; +#endif + +#ifdef __INT_LEAST16_MAX__ +typedef __INT_LEAST16_TYPE__ yytype_int16; +#elif defined YY_STDINT_H +typedef int_least16_t yytype_int16; +#else +typedef short yytype_int16; +#endif + +/* Work around bug in HP-UX 11.23, which defines these macros + incorrectly for preprocessor constants. This workaround can likely + be removed in 2023, as HPE has promised support for HP-UX 11.23 + (aka HP-UX 11i v2) only through the end of 2022; see Table 2 of + . 
*/ +#ifdef __hpux +# undef UINT_LEAST8_MAX +# undef UINT_LEAST16_MAX +# define UINT_LEAST8_MAX 255 +# define UINT_LEAST16_MAX 65535 +#endif + +#if defined __UINT_LEAST8_MAX__ && __UINT_LEAST8_MAX__ <= __INT_MAX__ +typedef __UINT_LEAST8_TYPE__ yytype_uint8; +#elif (!defined __UINT_LEAST8_MAX__ && defined YY_STDINT_H \ + && UINT_LEAST8_MAX <= INT_MAX) +typedef uint_least8_t yytype_uint8; +#elif !defined __UINT_LEAST8_MAX__ && UCHAR_MAX <= INT_MAX +typedef unsigned char yytype_uint8; +#else +typedef short yytype_uint8; +#endif + +#if defined __UINT_LEAST16_MAX__ && __UINT_LEAST16_MAX__ <= __INT_MAX__ +typedef __UINT_LEAST16_TYPE__ yytype_uint16; +#elif (!defined __UINT_LEAST16_MAX__ && defined YY_STDINT_H \ + && UINT_LEAST16_MAX <= INT_MAX) +typedef uint_least16_t yytype_uint16; +#elif !defined __UINT_LEAST16_MAX__ && USHRT_MAX <= INT_MAX +typedef unsigned short yytype_uint16; +#else +typedef int yytype_uint16; +#endif + +#ifndef YYPTRDIFF_T +# if defined __PTRDIFF_TYPE__ && defined __PTRDIFF_MAX__ +# define YYPTRDIFF_T __PTRDIFF_TYPE__ +# define YYPTRDIFF_MAXIMUM __PTRDIFF_MAX__ +# elif defined PTRDIFF_MAX +# ifndef ptrdiff_t +# include /* INFRINGES ON USER NAME SPACE */ +# endif +# define YYPTRDIFF_T ptrdiff_t +# define YYPTRDIFF_MAXIMUM PTRDIFF_MAX +# else +# define YYPTRDIFF_T long +# define YYPTRDIFF_MAXIMUM LONG_MAX +# endif +#endif + +#ifndef YYSIZE_T +# ifdef __SIZE_TYPE__ +# define YYSIZE_T __SIZE_TYPE__ +# elif defined size_t +# define YYSIZE_T size_t +# elif defined __STDC_VERSION__ && 199901 <= __STDC_VERSION__ +# include /* INFRINGES ON USER NAME SPACE */ +# define YYSIZE_T size_t +# else +# define YYSIZE_T unsigned +# endif +#endif + +#define YYSIZE_MAXIMUM \ + YY_CAST (YYPTRDIFF_T, \ + (YYPTRDIFF_MAXIMUM < YY_CAST (YYSIZE_T, -1) \ + ? YYPTRDIFF_MAXIMUM \ + : YY_CAST (YYSIZE_T, -1))) + +#define YYSIZEOF(X) YY_CAST (YYPTRDIFF_T, sizeof (X)) + + +/* Stored state numbers (used for stacks). */ +typedef yytype_uint8 yy_state_t; + +/* State numbers in computations. */ +typedef int yy_state_fast_t; + +#ifndef YY_ +# if defined YYENABLE_NLS && YYENABLE_NLS +# if ENABLE_NLS +# include /* INFRINGES ON USER NAME SPACE */ +# define YY_(Msgid) dgettext ("bison-runtime", Msgid) +# endif +# endif +# ifndef YY_ +# define YY_(Msgid) Msgid +# endif +#endif + + +#ifndef YY_ATTRIBUTE_PURE +# if defined __GNUC__ && 2 < __GNUC__ + (96 <= __GNUC_MINOR__) +# define YY_ATTRIBUTE_PURE __attribute__ ((__pure__)) +# else +# define YY_ATTRIBUTE_PURE +# endif +#endif + +#ifndef YY_ATTRIBUTE_UNUSED +# if defined __GNUC__ && 2 < __GNUC__ + (7 <= __GNUC_MINOR__) +# define YY_ATTRIBUTE_UNUSED __attribute__ ((__unused__)) +# else +# define YY_ATTRIBUTE_UNUSED +# endif +#endif + +/* Suppress unused-variable warnings by "using" E. */ +#if ! defined lint || defined __GNUC__ +# define YY_USE(E) ((void) (E)) +#else +# define YY_USE(E) /* empty */ +#endif + +/* Suppress an incorrect diagnostic about yylval being uninitialized. */ +#if defined __GNUC__ && ! 
defined __ICC && 406 <= __GNUC__ * 100 + __GNUC_MINOR__ +# if __GNUC__ * 100 + __GNUC_MINOR__ < 407 +# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN \ + _Pragma ("GCC diagnostic push") \ + _Pragma ("GCC diagnostic ignored \"-Wuninitialized\"") +# else +# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN \ + _Pragma ("GCC diagnostic push") \ + _Pragma ("GCC diagnostic ignored \"-Wuninitialized\"") \ + _Pragma ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"") +# endif +# define YY_IGNORE_MAYBE_UNINITIALIZED_END \ + _Pragma ("GCC diagnostic pop") +#else +# define YY_INITIAL_VALUE(Value) Value +#endif +#ifndef YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN +# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN +# define YY_IGNORE_MAYBE_UNINITIALIZED_END +#endif +#ifndef YY_INITIAL_VALUE +# define YY_INITIAL_VALUE(Value) /* Nothing. */ +#endif + +#if defined __cplusplus && defined __GNUC__ && ! defined __ICC && 6 <= __GNUC__ +# define YY_IGNORE_USELESS_CAST_BEGIN \ + _Pragma ("GCC diagnostic push") \ + _Pragma ("GCC diagnostic ignored \"-Wuseless-cast\"") +# define YY_IGNORE_USELESS_CAST_END \ + _Pragma ("GCC diagnostic pop") +#endif +#ifndef YY_IGNORE_USELESS_CAST_BEGIN +# define YY_IGNORE_USELESS_CAST_BEGIN +# define YY_IGNORE_USELESS_CAST_END +#endif + + +#define YY_ASSERT(E) ((void) (0 && (E))) + +#if !defined yyoverflow + +/* The parser invokes alloca or malloc; define the necessary symbols. */ + +# ifdef YYSTACK_USE_ALLOCA +# if YYSTACK_USE_ALLOCA +# ifdef __GNUC__ +# define YYSTACK_ALLOC __builtin_alloca +# elif defined __BUILTIN_VA_ARG_INCR +# include /* INFRINGES ON USER NAME SPACE */ +# elif defined _AIX +# define YYSTACK_ALLOC __alloca +# elif defined _MSC_VER +# include /* INFRINGES ON USER NAME SPACE */ +# define alloca _alloca +# else +# define YYSTACK_ALLOC alloca +# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS +# include /* INFRINGES ON USER NAME SPACE */ + /* Use EXIT_SUCCESS as a witness for stdlib.h. */ +# ifndef EXIT_SUCCESS +# define EXIT_SUCCESS 0 +# endif +# endif +# endif +# endif +# endif + +# ifdef YYSTACK_ALLOC + /* Pacify GCC's 'empty if-body' warning. */ +# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) +# ifndef YYSTACK_ALLOC_MAXIMUM + /* The OS might guarantee only one guard page at the bottom of the stack, + and a page size can be as small as 4096 bytes. So we cannot safely + invoke alloca (N) if N exceeds 4096. Use a slightly smaller number + to allow for a few compiler-allocated temporary stack slots. */ +# define YYSTACK_ALLOC_MAXIMUM 4032 /* reasonable circa 2006 */ +# endif +# else +# define YYSTACK_ALLOC YYMALLOC +# define YYSTACK_FREE YYFREE +# ifndef YYSTACK_ALLOC_MAXIMUM +# define YYSTACK_ALLOC_MAXIMUM YYSIZE_MAXIMUM +# endif +# if (defined __cplusplus && ! defined EXIT_SUCCESS \ + && ! ((defined YYMALLOC || defined malloc) \ + && (defined YYFREE || defined free))) +# include /* INFRINGES ON USER NAME SPACE */ +# ifndef EXIT_SUCCESS +# define EXIT_SUCCESS 0 +# endif +# endif +# ifndef YYMALLOC +# define YYMALLOC malloc +# if ! defined malloc && ! defined EXIT_SUCCESS +void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */ +# endif +# endif +# ifndef YYFREE +# define YYFREE free +# if ! defined free && ! defined EXIT_SUCCESS +void free (void *); /* INFRINGES ON USER NAME SPACE */ +# endif +# endif +# endif +#endif /* !defined yyoverflow */ + +#if (! defined yyoverflow \ + && (! defined __cplusplus \ + || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL))) + +/* A type that is properly aligned for any stack member. 
*/ +union yyalloc +{ + yy_state_t yyss_alloc; + YYSTYPE yyvs_alloc; +}; + +/* The size of the maximum gap between one aligned stack and the next. */ +# define YYSTACK_GAP_MAXIMUM (YYSIZEOF (union yyalloc) - 1) + +/* The size of an array large to enough to hold all stacks, each with + N elements. */ +# define YYSTACK_BYTES(N) \ + ((N) * (YYSIZEOF (yy_state_t) + YYSIZEOF (YYSTYPE)) \ + + YYSTACK_GAP_MAXIMUM) + +# define YYCOPY_NEEDED 1 + +/* Relocate STACK from its old location to the new one. The + local variables YYSIZE and YYSTACKSIZE give the old and new number of + elements in the stack, and YYPTR gives the new location of the + stack. Advance YYPTR to a properly aligned location for the next + stack. */ +# define YYSTACK_RELOCATE(Stack_alloc, Stack) \ + do \ + { \ + YYPTRDIFF_T yynewbytes; \ + YYCOPY (&yyptr->Stack_alloc, Stack, yysize); \ + Stack = &yyptr->Stack_alloc; \ + yynewbytes = yystacksize * YYSIZEOF (*Stack) + YYSTACK_GAP_MAXIMUM; \ + yyptr += yynewbytes / YYSIZEOF (*yyptr); \ + } \ + while (0) + +#endif + +#if defined YYCOPY_NEEDED && YYCOPY_NEEDED +/* Copy COUNT objects from SRC to DST. The source and destination do + not overlap. */ +# ifndef YYCOPY +# if defined __GNUC__ && 1 < __GNUC__ +# define YYCOPY(Dst, Src, Count) \ + __builtin_memcpy (Dst, Src, YY_CAST (YYSIZE_T, (Count)) * sizeof (*(Src))) +# else +# define YYCOPY(Dst, Src, Count) \ + do \ + { \ + YYPTRDIFF_T yyi; \ + for (yyi = 0; yyi < (Count); yyi++) \ + (Dst)[yyi] = (Src)[yyi]; \ + } \ + while (0) +# endif +# endif +#endif /* !YYCOPY_NEEDED */ + +/* YYFINAL -- State number of the termination state. */ +#define YYFINAL 42 +/* YYLAST -- Last index in YYTABLE. */ +#define YYLAST 242 + +/* YYNTOKENS -- Number of terminals. */ +#define YYNTOKENS 51 +/* YYNNTS -- Number of nonterminals. */ +#define YYNNTS 68 +/* YYNRULES -- Number of rules. */ +#define YYNRULES 121 +/* YYNSTATES -- Number of states. */ +#define YYNSTATES 207 + +/* YYMAXUTOK -- Last valid token kind. */ +#define YYMAXUTOK 305 + + +/* YYTRANSLATE(TOKEN-NUM) -- Symbol number corresponding to TOKEN-NUM + as returned by yylex, with out-of-bounds checking. */ +#define YYTRANSLATE(YYX) \ + (0 <= (YYX) && (YYX) <= YYMAXUTOK \ + ? YY_CAST (yysymbol_kind_t, yytranslate[YYX]) \ + : YYSYMBOL_YYUNDEF) + +/* YYTRANSLATE[TOKEN-NUM] -- Symbol number corresponding to TOKEN-NUM + as returned by yylex. */ +static const yytype_int8 yytranslate[] = +{ + 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 1, 2, 3, 4, + 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, + 45, 46, 47, 48, 49, 50 +}; + +#if YYDEBUG +/* YYRLINE[YYN] -- Source line where rule number YYN was defined. 
*/ +static const yytype_int16 yyrline[] = +{ + 0, 145, 145, 147, 158, 160, 164, 166, 168, 168, + 168, 173, 176, 176, 178, 182, 185, 188, 191, 194, + 197, 199, 201, 201, 203, 203, 205, 205, 207, 207, + 207, 209, 209, 211, 213, 215, 215, 215, 217, 217, + 217, 219, 219, 219, 221, 221, 221, 223, 223, 223, + 225, 225, 225, 227, 227, 227, 227, 229, 232, 234, + 231, 243, 245, 247, 249, 256, 258, 258, 260, 260, + 262, 262, 264, 264, 266, 266, 268, 268, 270, 270, + 272, 272, 274, 274, 276, 276, 278, 278, 280, 280, + 282, 282, 294, 295, 299, 299, 299, 308, 310, 320, + 325, 326, 330, 332, 332, 341, 341, 343, 343, 345, + 345, 356, 357, 361, 363, 365, 367, 369, 371, 381, + 382, 382 +}; +#endif + +/** Accessing symbol of state STATE. */ +#define YY_ACCESSING_SYMBOL(State) YY_CAST (yysymbol_kind_t, yystos[State]) + +#if YYDEBUG || 0 +/* The user-facing name of the symbol whose (internal) number is + YYSYMBOL. No bounds checking. */ +static const char *yysymbol_name (yysymbol_kind_t yysymbol) YY_ATTRIBUTE_UNUSED; + +/* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. + First, the terminals, then, starting at YYNTOKENS, nonterminals. */ +static const char *const yytname[] = +{ + "\"end of file\"", "error", "\"invalid token\"", "_BANG_t", + "_BANG_EQUALS_t", "_AMPER_t", "_AMPERAMPER_t", "_LPAREN_t", "_RPAREN_t", + "_PLUS_EQUALS_t", "_COLON_t", "_SEMIC_t", "_LANGLE_t", + "_LANGLE_EQUALS_t", "_EQUALS_t", "_RANGLE_t", "_RANGLE_EQUALS_t", + "_QUESTION_EQUALS_t", "_LBRACKET_t", "_RBRACKET_t", "ACTIONS_t", + "BIND_t", "BREAK_t", "CASE_t", "CLASS_t", "CONTINUE_t", "DEFAULT_t", + "ELSE_t", "EXISTING_t", "FOR_t", "IF_t", "IGNORE_t", "IN_t", "INCLUDE_t", + "LOCAL_t", "MODULE_t", "ON_t", "PIECEMEAL_t", "QUIETLY_t", "RETURN_t", + "RULE_t", "SWITCH_t", "TOGETHER_t", "UPDATED_t", "WHILE_t", "_LBRACE_t", + "_BAR_t", "_BARBAR_t", "_RBRACE_t", "ARG", "STRING", "$accept", "run", + "block", "rules", "$@1", "$@2", "null", "assign_list_opt", "$@3", + "arglist_opt", "local_opt", "else_opt", "rule", "$@4", "$@5", "$@6", + "$@7", "$@8", "$@9", "$@10", "$@11", "$@12", "$@13", "$@14", "$@15", + "$@16", "$@17", "$@18", "$@19", "$@20", "$@21", "$@22", "$@23", "$@24", + "$@25", "$@26", "assign", "expr", "$@27", "$@28", "$@29", "$@30", "$@31", + "$@32", "$@33", "$@34", "$@35", "$@36", "$@37", "$@38", "$@39", "cases", + "case", "$@40", "$@41", "lol", "list", "listp", "arg", "@42", "func", + "$@43", "$@44", "$@45", "eflags", "eflag", "bindlist", "$@46", YY_NULLPTR +}; + +static const char * +yysymbol_name (yysymbol_kind_t yysymbol) +{ + return yytname[yysymbol]; +} +#endif + +#define YYPACT_NINF (-119) + +#define yypact_value_is_default(Yyn) \ + ((Yyn) == YYPACT_NINF) + +#define YYTABLE_NINF (-25) + +#define yytable_value_is_error(Yyn) \ + 0 + +/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing + STATE-NUM. 
*/ +static const yytype_int16 yypact[] = +{ + 140, -119, -119, 1, -119, 2, -18, -119, -119, -23, + -119, -9, -119, -119, -119, 140, 12, 31, -119, 4, + 140, 77, -17, 186, -119, -119, -119, -119, -7, 3, + -119, -119, -119, -119, 177, -119, -119, 3, -5, -119, + -119, -119, -119, -119, -119, -119, -119, -119, 33, -119, + -119, -9, -119, 29, -119, -119, -119, -119, -119, -119, + 35, -119, 14, 50, -9, 34, -119, -119, 23, 39, + 52, 53, 40, -119, 66, 45, 94, -119, 67, 30, + -119, -119, -119, 16, -119, -119, -119, 47, -119, -119, + -119, -119, 3, 3, -119, -119, -119, -119, -119, -119, + -119, -119, -119, -119, -119, -119, -119, -119, -119, 84, + -119, -119, -119, 51, -119, -119, 32, 105, -119, -119, + -119, -119, -119, 140, -119, -119, -119, 68, 3, 3, + 3, 3, 3, 3, 3, 3, 140, 3, 3, -119, + -119, -119, 140, 95, 140, 110, -119, -119, -119, -119, + -119, 69, 73, 87, -119, 89, 139, 139, -119, -119, + 89, -119, -119, 90, 226, 226, -119, -119, 140, 91, + -119, 97, 95, 98, -119, -119, -119, -119, -119, -119, + -119, -119, 108, -119, -119, 88, -119, -119, -119, 141, + 177, 145, 102, 140, 177, -119, 149, -119, -119, -119, + -119, 115, -119, -119, -119, 140, -119 +}; + +/* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM. + Performed when YYTABLE does not specify something else to do. Zero + means the default is an error. */ +static const yytype_int8 yydefact[] = +{ + 2, 103, 111, 0, 47, 0, 18, 41, 22, 8, + 44, 0, 31, 38, 50, 11, 102, 0, 3, 0, + 6, 0, 0, 0, 33, 100, 34, 17, 0, 0, + 100, 100, 100, 102, 18, 100, 100, 0, 0, 5, + 4, 100, 1, 53, 7, 62, 61, 63, 0, 28, + 26, 0, 105, 0, 118, 115, 117, 116, 114, 113, + 119, 112, 0, 97, 99, 0, 88, 90, 0, 65, + 0, 11, 0, 57, 0, 0, 51, 21, 0, 0, + 64, 100, 100, 0, 100, 104, 120, 0, 48, 100, + 101, 35, 0, 0, 68, 78, 80, 70, 72, 66, + 74, 76, 42, 82, 84, 86, 23, 12, 14, 0, + 45, 32, 39, 0, 25, 54, 0, 0, 109, 107, + 106, 100, 58, 11, 98, 100, 89, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 11, 0, 0, 100, + 100, 9, 11, 92, 11, 16, 29, 27, 100, 100, + 121, 0, 0, 0, 91, 69, 79, 81, 71, 73, + 67, 75, 77, 0, 83, 85, 87, 13, 11, 0, + 94, 0, 92, 0, 100, 55, 100, 110, 108, 59, + 49, 36, 20, 10, 46, 0, 40, 93, 52, 0, + 18, 0, 0, 11, 18, 43, 0, 15, 56, 30, + 60, 0, 19, 95, 37, 11, 96 +}; + +/* YYPGOTO[NTERM-NUM]. */ +static const yytype_int16 yypgoto[] = +{ + -119, -119, -118, 25, -119, -119, 96, -119, -119, -119, + 160, -119, -33, -119, -119, -119, -119, -119, -119, -119, + -119, -119, -119, -119, -119, -119, -119, -119, -119, -119, + -119, -119, -119, -119, -119, -119, 55, -4, -119, -119, + -119, -119, -119, -119, -119, -119, -119, -119, -119, -119, + -119, 5, -119, -119, -119, -27, -28, -119, 0, -119, + -119, -119, -119, -119, -119, -119, -119, -119 +}; + +/* YYDEFGOTO[NTERM-NUM]. */ +static const yytype_uint8 yydefgoto[] = +{ + 0, 17, 38, 39, 31, 168, 40, 109, 140, 175, + 19, 195, 20, 30, 41, 82, 81, 176, 35, 125, + 193, 36, 143, 29, 136, 32, 142, 25, 123, 37, + 113, 79, 145, 190, 151, 192, 50, 68, 133, 128, + 131, 132, 134, 135, 129, 130, 137, 138, 139, 92, + 93, 171, 172, 185, 205, 62, 63, 64, 69, 22, + 53, 84, 149, 148, 23, 61, 87, 121 +}; + +/* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If + positive, shift that token. If negative, reduce the rule whose + number is the opposite. If YYTABLE_NINF, syntax error. 
*/ +static const yytype_int16 yytable[] = +{ + 21, 73, 70, 71, 72, 152, 66, 74, 75, 1, + 67, 34, 24, 26, 78, 21, 27, -17, 163, 51, + 21, 1, -24, -24, 169, 18, 173, 94, 95, 96, + -24, 42, 52, 76, 21, 97, 98, 99, 100, 101, + 33, 45, 65, 77, 43, 44, 46, 80, 85, 47, + 183, 83, 33, 116, 117, 118, 86, 120, 48, 88, + 89, -24, 124, 106, 90, 119, 91, 107, 102, 103, + 104, 105, 94, 95, 96, 201, 154, 111, 114, 115, + 97, 98, 99, 100, 101, 110, 45, 206, 126, 127, + 112, 46, 122, 150, 47, 141, 144, 153, 94, 95, + 96, 97, 98, 48, 100, 101, 97, 98, 99, 100, + 101, 166, 167, 49, 103, 104, 147, 174, 170, 179, + 177, 180, 178, 21, 155, 156, 157, 158, 159, 160, + 161, 162, 181, 164, 165, 194, 21, 196, 182, 184, + 103, 104, 21, 94, 21, 186, 188, 189, 191, 197, + 200, 97, 98, 99, 100, 101, 199, 198, 1, 203, + 2, 202, 3, 204, 4, 5, 28, 108, 21, 6, + 7, 146, 0, 8, 9, 10, 11, 187, 0, 12, + -18, 13, 0, 0, 14, 15, 0, 0, 0, 16, + 21, 0, 0, 21, 21, 1, 0, 2, 0, 3, + 0, 4, 5, 0, 0, 21, 6, 7, 0, 0, + 8, 27, 10, 11, 54, 0, 12, 55, 13, 0, + 0, 14, 15, 56, 57, 0, 16, 0, 58, 59, + 94, 95, 96, 0, 0, 60, 0, 0, 97, 98, + 99, 100, 101 +}; + +static const yytype_int16 yycheck[] = +{ + 0, 34, 30, 31, 32, 123, 3, 35, 36, 18, + 7, 11, 11, 11, 41, 15, 34, 40, 136, 36, + 20, 18, 10, 11, 142, 0, 144, 4, 5, 6, + 18, 0, 49, 37, 34, 12, 13, 14, 15, 16, + 49, 9, 49, 48, 40, 20, 14, 14, 19, 17, + 168, 51, 49, 81, 82, 39, 21, 84, 26, 45, + 10, 49, 89, 11, 64, 49, 32, 14, 45, 46, + 47, 32, 4, 5, 6, 193, 8, 11, 11, 49, + 12, 13, 14, 15, 16, 45, 9, 205, 92, 93, + 45, 14, 45, 121, 17, 11, 45, 125, 4, 5, + 6, 12, 13, 26, 15, 16, 12, 13, 14, 15, + 16, 139, 140, 36, 46, 47, 11, 7, 23, 50, + 148, 48, 149, 123, 128, 129, 130, 131, 132, 133, + 134, 135, 45, 137, 138, 27, 136, 49, 48, 48, + 46, 47, 142, 4, 144, 48, 48, 174, 176, 8, + 48, 12, 13, 14, 15, 16, 11, 190, 18, 10, + 20, 194, 22, 48, 24, 25, 6, 71, 168, 29, + 30, 116, -1, 33, 34, 35, 36, 172, -1, 39, + 40, 41, -1, -1, 44, 45, -1, -1, -1, 49, + 190, -1, -1, 193, 194, 18, -1, 20, -1, 22, + -1, 24, 25, -1, -1, 205, 29, 30, -1, -1, + 33, 34, 35, 36, 28, -1, 39, 31, 41, -1, + -1, 44, 45, 37, 38, -1, 49, -1, 42, 43, + 4, 5, 6, -1, -1, 49, -1, -1, 12, 13, + 14, 15, 16 +}; + +/* YYSTOS[STATE-NUM] -- The symbol kind of the accessing symbol of + state STATE-NUM. */ +static const yytype_int8 yystos[] = +{ + 0, 18, 20, 22, 24, 25, 29, 30, 33, 34, + 35, 36, 39, 41, 44, 45, 49, 52, 54, 61, + 63, 109, 110, 115, 11, 78, 11, 34, 61, 74, + 64, 55, 76, 49, 109, 69, 72, 80, 53, 54, + 57, 65, 0, 40, 54, 9, 14, 17, 26, 36, + 87, 36, 49, 111, 28, 31, 37, 38, 42, 43, + 49, 116, 106, 107, 108, 49, 3, 7, 88, 109, + 107, 107, 107, 63, 107, 107, 88, 48, 106, 82, + 14, 67, 66, 109, 112, 19, 21, 117, 45, 10, + 109, 32, 100, 101, 4, 5, 6, 12, 13, 14, + 15, 16, 45, 46, 47, 32, 11, 14, 57, 58, + 45, 11, 45, 81, 11, 49, 107, 107, 39, 49, + 106, 118, 45, 79, 106, 70, 88, 88, 90, 95, + 96, 91, 92, 89, 93, 94, 75, 97, 98, 99, + 59, 11, 77, 73, 45, 83, 87, 11, 114, 113, + 107, 85, 53, 107, 8, 88, 88, 88, 88, 88, + 88, 88, 88, 53, 88, 88, 107, 107, 56, 53, + 23, 102, 103, 53, 7, 60, 68, 107, 106, 50, + 48, 45, 48, 53, 48, 104, 48, 102, 48, 106, + 84, 107, 86, 71, 27, 62, 49, 8, 63, 11, + 48, 53, 63, 10, 48, 105, 53 +}; + +/* YYR1[RULE-NUM] -- Symbol kind of the left-hand side of rule RULE-NUM. 
*/ +static const yytype_int8 yyr1[] = +{ + 0, 51, 52, 52, 53, 53, 54, 54, 55, 56, + 54, 57, 59, 58, 58, 60, 60, 61, 61, 62, + 62, 63, 64, 63, 65, 63, 66, 63, 67, 68, + 63, 69, 63, 63, 63, 70, 71, 63, 72, 73, + 63, 74, 75, 63, 76, 77, 63, 78, 79, 63, + 80, 81, 63, 82, 83, 84, 63, 63, 85, 86, + 63, 87, 87, 87, 87, 88, 89, 88, 90, 88, + 91, 88, 92, 88, 93, 88, 94, 88, 95, 88, + 96, 88, 97, 88, 98, 88, 99, 88, 100, 88, + 101, 88, 102, 102, 104, 105, 103, 106, 106, 107, + 108, 108, 109, 110, 109, 112, 111, 113, 111, 114, + 111, 115, 115, 116, 116, 116, 116, 116, 116, 117, + 118, 117 +}; + +/* YYR2[RULE-NUM] -- Number of symbols on the right-hand side of rule RULE-NUM. */ +static const yytype_int8 yyr2[] = +{ + 0, 2, 0, 1, 1, 1, 1, 2, 0, 0, + 7, 0, 0, 3, 1, 3, 0, 1, 0, 2, + 0, 3, 0, 4, 0, 4, 0, 5, 0, 0, + 8, 0, 4, 2, 2, 0, 0, 10, 0, 0, + 7, 0, 0, 8, 0, 0, 7, 0, 0, 7, + 0, 0, 7, 0, 0, 0, 8, 3, 0, 0, + 9, 1, 1, 1, 2, 1, 0, 4, 0, 4, + 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, + 0, 4, 0, 4, 0, 4, 0, 4, 0, 3, + 0, 4, 0, 2, 0, 0, 6, 1, 3, 1, + 0, 2, 1, 0, 4, 0, 3, 0, 5, 0, + 5, 0, 2, 1, 1, 1, 1, 1, 1, 0, + 0, 3 +}; + + +enum { YYENOMEM = -2 }; + +#define yyerrok (yyerrstatus = 0) +#define yyclearin (yychar = YYEMPTY) + +#define YYACCEPT goto yyacceptlab +#define YYABORT goto yyabortlab +#define YYERROR goto yyerrorlab +#define YYNOMEM goto yyexhaustedlab + + +#define YYRECOVERING() (!!yyerrstatus) + +#define YYBACKUP(Token, Value) \ + do \ + if (yychar == YYEMPTY) \ + { \ + yychar = (Token); \ + yylval = (Value); \ + YYPOPSTACK (yylen); \ + yystate = *yyssp; \ + goto yybackup; \ + } \ + else \ + { \ + yyerror (YY_("syntax error: cannot back up")); \ + YYERROR; \ + } \ + while (0) + +/* Backward compatibility with an undocumented macro. + Use YYerror or YYUNDEF. */ +#define YYERRCODE YYUNDEF + + +/* Enable debugging if requested. */ +#if YYDEBUG + +# ifndef YYFPRINTF +# include /* INFRINGES ON USER NAME SPACE */ +# define YYFPRINTF fprintf +# endif + +# define YYDPRINTF(Args) \ +do { \ + if (yydebug) \ + YYFPRINTF Args; \ +} while (0) + + + + +# define YY_SYMBOL_PRINT(Title, Kind, Value, Location) \ +do { \ + if (yydebug) \ + { \ + YYFPRINTF (stderr, "%s ", Title); \ + yy_symbol_print (stderr, \ + Kind, Value); \ + YYFPRINTF (stderr, "\n"); \ + } \ +} while (0) + + +/*-----------------------------------. +| Print this symbol's value on YYO. | +`-----------------------------------*/ + +static void +yy_symbol_value_print (FILE *yyo, + yysymbol_kind_t yykind, YYSTYPE const * const yyvaluep) +{ + FILE *yyoutput = yyo; + YY_USE (yyoutput); + if (!yyvaluep) + return; + YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN + YY_USE (yykind); + YY_IGNORE_MAYBE_UNINITIALIZED_END +} + + +/*---------------------------. +| Print this symbol on YYO. | +`---------------------------*/ + +static void +yy_symbol_print (FILE *yyo, + yysymbol_kind_t yykind, YYSTYPE const * const yyvaluep) +{ + YYFPRINTF (yyo, "%s %s (", + yykind < YYNTOKENS ? "token" : "nterm", yysymbol_name (yykind)); + + yy_symbol_value_print (yyo, yykind, yyvaluep); + YYFPRINTF (yyo, ")"); +} + +/*------------------------------------------------------------------. +| yy_stack_print -- Print the state stack from its BOTTOM up to its | +| TOP (included). 
| +`------------------------------------------------------------------*/ + +static void +yy_stack_print (yy_state_t *yybottom, yy_state_t *yytop) +{ + YYFPRINTF (stderr, "Stack now"); + for (; yybottom <= yytop; yybottom++) + { + int yybot = *yybottom; + YYFPRINTF (stderr, " %d", yybot); + } + YYFPRINTF (stderr, "\n"); +} + +# define YY_STACK_PRINT(Bottom, Top) \ +do { \ + if (yydebug) \ + yy_stack_print ((Bottom), (Top)); \ +} while (0) + + +/*------------------------------------------------. +| Report that the YYRULE is going to be reduced. | +`------------------------------------------------*/ + +static void +yy_reduce_print (yy_state_t *yyssp, YYSTYPE *yyvsp, + int yyrule) +{ + int yylno = yyrline[yyrule]; + int yynrhs = yyr2[yyrule]; + int yyi; + YYFPRINTF (stderr, "Reducing stack by rule %d (line %d):\n", + yyrule - 1, yylno); + /* The symbols being reduced. */ + for (yyi = 0; yyi < yynrhs; yyi++) + { + YYFPRINTF (stderr, " $%d = ", yyi + 1); + yy_symbol_print (stderr, + YY_ACCESSING_SYMBOL (+yyssp[yyi + 1 - yynrhs]), + &yyvsp[(yyi + 1) - (yynrhs)]); + YYFPRINTF (stderr, "\n"); + } +} + +# define YY_REDUCE_PRINT(Rule) \ +do { \ + if (yydebug) \ + yy_reduce_print (yyssp, yyvsp, Rule); \ +} while (0) + +/* Nonzero means print parse trace. It is left uninitialized so that + multiple parsers can coexist. */ +int yydebug; +#else /* !YYDEBUG */ +# define YYDPRINTF(Args) ((void) 0) +# define YY_SYMBOL_PRINT(Title, Kind, Value, Location) +# define YY_STACK_PRINT(Bottom, Top) +# define YY_REDUCE_PRINT(Rule) +#endif /* !YYDEBUG */ + + +/* YYINITDEPTH -- initial size of the parser's stacks. */ +#ifndef YYINITDEPTH +# define YYINITDEPTH 200 +#endif + +/* YYMAXDEPTH -- maximum size the stacks can grow to (effective only + if the built-in stack extension method is used). + + Do not make this value too large; the results are undefined if + YYSTACK_ALLOC_MAXIMUM < YYSTACK_BYTES (YYMAXDEPTH) + evaluated with infinite-precision integer arithmetic. */ + +#ifndef YYMAXDEPTH +# define YYMAXDEPTH 10000 +#endif + + + + + + +/*-----------------------------------------------. +| Release the memory associated to this symbol. | +`-----------------------------------------------*/ + +static void +yydestruct (const char *yymsg, + yysymbol_kind_t yykind, YYSTYPE *yyvaluep) +{ + YY_USE (yyvaluep); + if (!yymsg) + yymsg = "Deleting"; + YY_SYMBOL_PRINT (yymsg, yykind, yyvaluep, yylocationp); + + YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN + YY_USE (yykind); + YY_IGNORE_MAYBE_UNINITIALIZED_END +} + + +/* Lookahead token kind. */ +int yychar; + +/* The semantic value of the lookahead symbol. */ +YYSTYPE yylval; +/* Number of syntax errors so far. */ +int yynerrs; + + + + +/*----------. +| yyparse. | +`----------*/ + +int +yyparse (void) +{ + yy_state_fast_t yystate = 0; + /* Number of tokens to shift before error messages enabled. */ + int yyerrstatus = 0; + + /* Refer to the stacks through separate pointers, to allow yyoverflow + to reallocate them elsewhere. */ + + /* Their size. */ + YYPTRDIFF_T yystacksize = YYINITDEPTH; + + /* The state stack: array, bottom, top. */ + yy_state_t yyssa[YYINITDEPTH]; + yy_state_t *yyss = yyssa; + yy_state_t *yyssp = yyss; + + /* The semantic value stack: array, bottom, top. */ + YYSTYPE yyvsa[YYINITDEPTH]; + YYSTYPE *yyvs = yyvsa; + YYSTYPE *yyvsp = yyvs; + + int yyn; + /* The return value of yyparse. */ + int yyresult; + /* Lookahead symbol kind. */ + yysymbol_kind_t yytoken = YYSYMBOL_YYEMPTY; + /* The variables used to return semantic value and location from the + action routines. 
*/ + YYSTYPE yyval; + + + +#define YYPOPSTACK(N) (yyvsp -= (N), yyssp -= (N)) + + /* The number of symbols on the RHS of the reduced rule. + Keep to zero when no symbol should be popped. */ + int yylen = 0; + + YYDPRINTF ((stderr, "Starting parse\n")); + + yychar = YYEMPTY; /* Cause a token to be read. */ + + goto yysetstate; + + +/*------------------------------------------------------------. +| yynewstate -- push a new state, which is found in yystate. | +`------------------------------------------------------------*/ +yynewstate: + /* In all cases, when you get here, the value and location stacks + have just been pushed. So pushing a state here evens the stacks. */ + yyssp++; + + +/*--------------------------------------------------------------------. +| yysetstate -- set current state (the top of the stack) to yystate. | +`--------------------------------------------------------------------*/ +yysetstate: + YYDPRINTF ((stderr, "Entering state %d\n", yystate)); + YY_ASSERT (0 <= yystate && yystate < YYNSTATES); + YY_IGNORE_USELESS_CAST_BEGIN + *yyssp = YY_CAST (yy_state_t, yystate); + YY_IGNORE_USELESS_CAST_END + YY_STACK_PRINT (yyss, yyssp); + + if (yyss + yystacksize - 1 <= yyssp) +#if !defined yyoverflow && !defined YYSTACK_RELOCATE + YYNOMEM; +#else + { + /* Get the current used size of the three stacks, in elements. */ + YYPTRDIFF_T yysize = yyssp - yyss + 1; + +# if defined yyoverflow + { + /* Give user a chance to reallocate the stack. Use copies of + these so that the &'s don't force the real ones into + memory. */ + yy_state_t *yyss1 = yyss; + YYSTYPE *yyvs1 = yyvs; + + /* Each stack pointer address is followed by the size of the + data in use in that stack, in bytes. This used to be a + conditional around just the two extra args, but that might + be undefined if yyoverflow is a macro. */ + yyoverflow (YY_("memory exhausted"), + &yyss1, yysize * YYSIZEOF (*yyssp), + &yyvs1, yysize * YYSIZEOF (*yyvsp), + &yystacksize); + yyss = yyss1; + yyvs = yyvs1; + } +# else /* defined YYSTACK_RELOCATE */ + /* Extend the stack our own way. */ + if (YYMAXDEPTH <= yystacksize) + YYNOMEM; + yystacksize *= 2; + if (YYMAXDEPTH < yystacksize) + yystacksize = YYMAXDEPTH; + + { + yy_state_t *yyss1 = yyss; + union yyalloc *yyptr = + YY_CAST (union yyalloc *, + YYSTACK_ALLOC (YY_CAST (YYSIZE_T, YYSTACK_BYTES (yystacksize)))); + if (! yyptr) + YYNOMEM; + YYSTACK_RELOCATE (yyss_alloc, yyss); + YYSTACK_RELOCATE (yyvs_alloc, yyvs); +# undef YYSTACK_RELOCATE + if (yyss1 != yyssa) + YYSTACK_FREE (yyss1); + } +# endif + + yyssp = yyss + yysize - 1; + yyvsp = yyvs + yysize - 1; + + YY_IGNORE_USELESS_CAST_BEGIN + YYDPRINTF ((stderr, "Stack size increased to %ld\n", + YY_CAST (long, yystacksize))); + YY_IGNORE_USELESS_CAST_END + + if (yyss + yystacksize - 1 <= yyssp) + YYABORT; + } +#endif /* !defined yyoverflow && !defined YYSTACK_RELOCATE */ + + + if (yystate == YYFINAL) + YYACCEPT; + + goto yybackup; + + +/*-----------. +| yybackup. | +`-----------*/ +yybackup: + /* Do appropriate processing given the current state. Read a + lookahead token if we need one and don't already have one. */ + + /* First try to decide what to do without reference to lookahead token. */ + yyn = yypact[yystate]; + if (yypact_value_is_default (yyn)) + goto yydefault; + + /* Not known => get a lookahead token if don't already have one. */ + + /* YYCHAR is either empty, or end-of-input, or a valid lookahead. 
*/ + if (yychar == YYEMPTY) + { + YYDPRINTF ((stderr, "Reading a token\n")); + yychar = yylex (); + } + + if (yychar <= YYEOF) + { + yychar = YYEOF; + yytoken = YYSYMBOL_YYEOF; + YYDPRINTF ((stderr, "Now at end of input.\n")); + } + else if (yychar == YYerror) + { + /* The scanner already issued an error message, process directly + to error recovery. But do not keep the error token as + lookahead, it is too special and may lead us to an endless + loop in error recovery. */ + yychar = YYUNDEF; + yytoken = YYSYMBOL_YYerror; + goto yyerrlab1; + } + else + { + yytoken = YYTRANSLATE (yychar); + YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc); + } + + /* If the proper action on seeing token YYTOKEN is to reduce or to + detect an error, take that action. */ + yyn += yytoken; + if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken) + goto yydefault; + yyn = yytable[yyn]; + if (yyn <= 0) + { + if (yytable_value_is_error (yyn)) + goto yyerrlab; + yyn = -yyn; + goto yyreduce; + } + + /* Count tokens shifted since error; after three, turn off error + status. */ + if (yyerrstatus) + yyerrstatus--; + + /* Shift the lookahead token. */ + YY_SYMBOL_PRINT ("Shifting", yytoken, &yylval, &yylloc); + yystate = yyn; + YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN + *++yyvsp = yylval; + YY_IGNORE_MAYBE_UNINITIALIZED_END + + /* Discard the shifted token. */ + yychar = YYEMPTY; + goto yynewstate; + + +/*-----------------------------------------------------------. +| yydefault -- do the default action for the current state. | +`-----------------------------------------------------------*/ +yydefault: + yyn = yydefact[yystate]; + if (yyn == 0) + goto yyerrlab; + goto yyreduce; + + +/*-----------------------------. +| yyreduce -- do a reduction. | +`-----------------------------*/ +yyreduce: + /* yyn is the number of a rule to reduce with. */ + yylen = yyr2[yyn]; + + /* If YYLEN is nonzero, implement the default value of the action: + '$$ = $1'. + + Otherwise, the following line sets YYVAL to garbage. + This behavior is undocumented and Bison + users should not rely upon it. Assigning to YYVAL + unconditionally makes the parser a bit smaller, and it avoids a + GCC warning that YYVAL may be used uninitialized. 
*/ + yyval = yyvsp[1-yylen]; + + + YY_REDUCE_PRINT (yyn); + switch (yyn) + { + case 3: /* run: rules */ +#line 148 "src/engine/jamgram.y" + { parse_save( yyvsp[0].parse ); } +#line 1384 "src/engine/jamgram.cpp" + break; + + case 4: /* block: null */ +#line 159 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; } +#line 1390 "src/engine/jamgram.cpp" + break; + + case 5: /* block: rules */ +#line 161 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; } +#line 1396 "src/engine/jamgram.cpp" + break; + + case 6: /* rules: rule */ +#line 165 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; } +#line 1402 "src/engine/jamgram.cpp" + break; + + case 7: /* rules: rule rules */ +#line 167 "src/engine/jamgram.y" + { yyval.parse = prules( yyvsp[-1].parse, yyvsp[0].parse ); } +#line 1408 "src/engine/jamgram.cpp" + break; + + case 8: /* $@1: %empty */ +#line 168 "src/engine/jamgram.y" + { yymode( SCAN_ASSIGN ); } +#line 1414 "src/engine/jamgram.cpp" + break; + + case 9: /* $@2: %empty */ +#line 168 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1420 "src/engine/jamgram.cpp" + break; + + case 10: /* rules: LOCAL_t $@1 list assign_list_opt _SEMIC_t $@2 block */ +#line 169 "src/engine/jamgram.y" + { yyval.parse = plocal( yyvsp[-4].parse, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1426 "src/engine/jamgram.cpp" + break; + + case 11: /* null: %empty */ +#line 173 "src/engine/jamgram.y" + { yyval.parse = pnull(); } +#line 1432 "src/engine/jamgram.cpp" + break; + + case 12: /* $@3: %empty */ +#line 176 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1438 "src/engine/jamgram.cpp" + break; + + case 13: /* assign_list_opt: _EQUALS_t $@3 list */ +#line 177 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_SET; } +#line 1444 "src/engine/jamgram.cpp" + break; + + case 14: /* assign_list_opt: null */ +#line 179 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; yyval.number = ASSIGN_APPEND; } +#line 1450 "src/engine/jamgram.cpp" + break; + + case 15: /* arglist_opt: _LPAREN_t lol _RPAREN_t */ +#line 183 "src/engine/jamgram.y" + { yyval.parse = yyvsp[-1].parse; } +#line 1456 "src/engine/jamgram.cpp" + break; + + case 16: /* arglist_opt: %empty */ +#line 185 "src/engine/jamgram.y" + { yyval.parse = P0; } +#line 1462 "src/engine/jamgram.cpp" + break; + + case 17: /* local_opt: LOCAL_t */ +#line 189 "src/engine/jamgram.y" + { yyval.number = 1; } +#line 1468 "src/engine/jamgram.cpp" + break; + + case 18: /* local_opt: %empty */ +#line 191 "src/engine/jamgram.y" + { yyval.number = 0; } +#line 1474 "src/engine/jamgram.cpp" + break; + + case 19: /* else_opt: ELSE_t rule */ +#line 195 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; } +#line 1480 "src/engine/jamgram.cpp" + break; + + case 20: /* else_opt: %empty */ +#line 197 "src/engine/jamgram.y" + { yyval.parse = pnull(); } +#line 1486 "src/engine/jamgram.cpp" + break; + + case 21: /* rule: _LBRACE_t block _RBRACE_t */ +#line 200 "src/engine/jamgram.y" + { yyval.parse = yyvsp[-1].parse; } +#line 1492 "src/engine/jamgram.cpp" + break; + + case 22: /* $@4: %empty */ +#line 201 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1498 "src/engine/jamgram.cpp" + break; + + case 23: /* rule: INCLUDE_t $@4 list _SEMIC_t */ +#line 202 "src/engine/jamgram.y" + { yyval.parse = pincl( yyvsp[-1].parse ); yymode( SCAN_NORMAL ); } +#line 1504 "src/engine/jamgram.cpp" + break; + + case 24: /* $@5: %empty */ +#line 203 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1510 
"src/engine/jamgram.cpp" + break; + + case 25: /* rule: ARG $@5 lol _SEMIC_t */ +#line 204 "src/engine/jamgram.y" + { yyval.parse = prule( yyvsp[-3].string, yyvsp[-1].parse ); yymode( SCAN_NORMAL ); } +#line 1516 "src/engine/jamgram.cpp" + break; + + case 26: /* $@6: %empty */ +#line 205 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1522 "src/engine/jamgram.cpp" + break; + + case 27: /* rule: arg assign $@6 list _SEMIC_t */ +#line 206 "src/engine/jamgram.y" + { yyval.parse = pset( yyvsp[-4].parse, yyvsp[-1].parse, yyvsp[-3].number ); yymode( SCAN_NORMAL ); } +#line 1528 "src/engine/jamgram.cpp" + break; + + case 28: /* $@7: %empty */ +#line 207 "src/engine/jamgram.y" + { yymode( SCAN_ASSIGN ); } +#line 1534 "src/engine/jamgram.cpp" + break; + + case 29: /* $@8: %empty */ +#line 207 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1540 "src/engine/jamgram.cpp" + break; + + case 30: /* rule: arg ON_t $@7 list assign $@8 list _SEMIC_t */ +#line 208 "src/engine/jamgram.y" + { yyval.parse = pset1( yyvsp[-7].parse, yyvsp[-4].parse, yyvsp[-1].parse, yyvsp[-3].number ); yymode( SCAN_NORMAL ); } +#line 1546 "src/engine/jamgram.cpp" + break; + + case 31: /* $@9: %empty */ +#line 209 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1552 "src/engine/jamgram.cpp" + break; + + case 32: /* rule: RETURN_t $@9 list _SEMIC_t */ +#line 210 "src/engine/jamgram.y" + { yyval.parse = preturn( yyvsp[-1].parse ); yymode( SCAN_NORMAL ); } +#line 1558 "src/engine/jamgram.cpp" + break; + + case 33: /* rule: BREAK_t _SEMIC_t */ +#line 212 "src/engine/jamgram.y" + { yyval.parse = pbreak(); } +#line 1564 "src/engine/jamgram.cpp" + break; + + case 34: /* rule: CONTINUE_t _SEMIC_t */ +#line 214 "src/engine/jamgram.y" + { yyval.parse = pcontinue(); } +#line 1570 "src/engine/jamgram.cpp" + break; + + case 35: /* $@10: %empty */ +#line 215 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1576 "src/engine/jamgram.cpp" + break; + + case 36: /* $@11: %empty */ +#line 215 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1582 "src/engine/jamgram.cpp" + break; + + case 37: /* rule: FOR_t local_opt ARG IN_t $@10 list _LBRACE_t $@11 block _RBRACE_t */ +#line 216 "src/engine/jamgram.y" + { yyval.parse = pfor( yyvsp[-7].string, yyvsp[-4].parse, yyvsp[-1].parse, yyvsp[-8].number ); } +#line 1588 "src/engine/jamgram.cpp" + break; + + case 38: /* $@12: %empty */ +#line 217 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1594 "src/engine/jamgram.cpp" + break; + + case 39: /* $@13: %empty */ +#line 217 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1600 "src/engine/jamgram.cpp" + break; + + case 40: /* rule: SWITCH_t $@12 list _LBRACE_t $@13 cases _RBRACE_t */ +#line 218 "src/engine/jamgram.y" + { yyval.parse = pswitch( yyvsp[-4].parse, yyvsp[-1].parse ); } +#line 1606 "src/engine/jamgram.cpp" + break; + + case 41: /* $@14: %empty */ +#line 219 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1612 "src/engine/jamgram.cpp" + break; + + case 42: /* $@15: %empty */ +#line 219 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1618 "src/engine/jamgram.cpp" + break; + + case 43: /* rule: IF_t $@14 expr _LBRACE_t $@15 block _RBRACE_t else_opt */ +#line 220 "src/engine/jamgram.y" + { yyval.parse = pif( yyvsp[-5].parse, yyvsp[-2].parse, yyvsp[0].parse ); } +#line 1624 "src/engine/jamgram.cpp" + break; + + case 44: /* $@16: %empty */ +#line 221 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1630 "src/engine/jamgram.cpp" + break; + + case 45: /* $@17: 
%empty */ +#line 221 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1636 "src/engine/jamgram.cpp" + break; + + case 46: /* rule: MODULE_t $@16 list _LBRACE_t $@17 block _RBRACE_t */ +#line 222 "src/engine/jamgram.y" + { yyval.parse = pmodule( yyvsp[-4].parse, yyvsp[-1].parse ); } +#line 1642 "src/engine/jamgram.cpp" + break; + + case 47: /* $@18: %empty */ +#line 223 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1648 "src/engine/jamgram.cpp" + break; + + case 48: /* $@19: %empty */ +#line 223 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1654 "src/engine/jamgram.cpp" + break; + + case 49: /* rule: CLASS_t $@18 lol _LBRACE_t $@19 block _RBRACE_t */ +#line 224 "src/engine/jamgram.y" + { yyval.parse = pclass( yyvsp[-4].parse, yyvsp[-1].parse ); } +#line 1660 "src/engine/jamgram.cpp" + break; + + case 50: /* $@20: %empty */ +#line 225 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1666 "src/engine/jamgram.cpp" + break; + + case 51: /* $@21: %empty */ +#line 225 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1672 "src/engine/jamgram.cpp" + break; + + case 52: /* rule: WHILE_t $@20 expr $@21 _LBRACE_t block _RBRACE_t */ +#line 226 "src/engine/jamgram.y" + { yyval.parse = pwhile( yyvsp[-4].parse, yyvsp[-1].parse ); } +#line 1678 "src/engine/jamgram.cpp" + break; + + case 53: /* $@22: %empty */ +#line 227 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1684 "src/engine/jamgram.cpp" + break; + + case 54: /* $@23: %empty */ +#line 227 "src/engine/jamgram.y" + { yymode( SCAN_PARAMS ); } +#line 1690 "src/engine/jamgram.cpp" + break; + + case 55: /* $@24: %empty */ +#line 227 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1696 "src/engine/jamgram.cpp" + break; + + case 56: /* rule: local_opt RULE_t $@22 ARG $@23 arglist_opt $@24 rule */ +#line 228 "src/engine/jamgram.y" + { yyval.parse = psetc( yyvsp[-4].string, yyvsp[0].parse, yyvsp[-2].parse, yyvsp[-7].number ); } +#line 1702 "src/engine/jamgram.cpp" + break; + + case 57: /* rule: ON_t arg rule */ +#line 230 "src/engine/jamgram.y" + { yyval.parse = pon( yyvsp[-1].parse, yyvsp[0].parse ); } +#line 1708 "src/engine/jamgram.cpp" + break; + + case 58: /* $@25: %empty */ +#line 232 "src/engine/jamgram.y" + { yymode( SCAN_STRING ); } +#line 1714 "src/engine/jamgram.cpp" + break; + + case 59: /* $@26: %empty */ +#line 234 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1720 "src/engine/jamgram.cpp" + break; + + case 60: /* rule: ACTIONS_t eflags ARG bindlist _LBRACE_t $@25 STRING $@26 _RBRACE_t */ +#line 236 "src/engine/jamgram.y" + { yyval.parse = psete( yyvsp[-6].string,yyvsp[-5].parse,yyvsp[-2].string,yyvsp[-7].number ); } +#line 1726 "src/engine/jamgram.cpp" + break; + + case 61: /* assign: _EQUALS_t */ +#line 244 "src/engine/jamgram.y" + { yyval.number = ASSIGN_SET; } +#line 1732 "src/engine/jamgram.cpp" + break; + + case 62: /* assign: _PLUS_EQUALS_t */ +#line 246 "src/engine/jamgram.y" + { yyval.number = ASSIGN_APPEND; } +#line 1738 "src/engine/jamgram.cpp" + break; + + case 63: /* assign: _QUESTION_EQUALS_t */ +#line 248 "src/engine/jamgram.y" + { yyval.number = ASSIGN_DEFAULT; } +#line 1744 "src/engine/jamgram.cpp" + break; + + case 64: /* assign: DEFAULT_t _EQUALS_t */ +#line 250 "src/engine/jamgram.y" + { yyval.number = ASSIGN_DEFAULT; } +#line 1750 "src/engine/jamgram.cpp" + break; + + case 65: /* expr: arg */ +#line 257 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_EXISTS, yyvsp[0].parse, pnull() ); yymode( SCAN_COND ); } +#line 1756 
"src/engine/jamgram.cpp" + break; + + case 66: /* $@27: %empty */ +#line 258 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1762 "src/engine/jamgram.cpp" + break; + + case 67: /* expr: expr _EQUALS_t $@27 expr */ +#line 259 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_EQUALS, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1768 "src/engine/jamgram.cpp" + break; + + case 68: /* $@28: %empty */ +#line 260 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1774 "src/engine/jamgram.cpp" + break; + + case 69: /* expr: expr _BANG_EQUALS_t $@28 expr */ +#line 261 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_NOTEQ, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1780 "src/engine/jamgram.cpp" + break; + + case 70: /* $@29: %empty */ +#line 262 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1786 "src/engine/jamgram.cpp" + break; + + case 71: /* expr: expr _LANGLE_t $@29 expr */ +#line 263 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_LESS, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1792 "src/engine/jamgram.cpp" + break; + + case 72: /* $@30: %empty */ +#line 264 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1798 "src/engine/jamgram.cpp" + break; + + case 73: /* expr: expr _LANGLE_EQUALS_t $@30 expr */ +#line 265 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_LESSEQ, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1804 "src/engine/jamgram.cpp" + break; + + case 74: /* $@31: %empty */ +#line 266 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1810 "src/engine/jamgram.cpp" + break; + + case 75: /* expr: expr _RANGLE_t $@31 expr */ +#line 267 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_MORE, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1816 "src/engine/jamgram.cpp" + break; + + case 76: /* $@32: %empty */ +#line 268 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1822 "src/engine/jamgram.cpp" + break; + + case 77: /* expr: expr _RANGLE_EQUALS_t $@32 expr */ +#line 269 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_MOREEQ, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1828 "src/engine/jamgram.cpp" + break; + + case 78: /* $@33: %empty */ +#line 270 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1834 "src/engine/jamgram.cpp" + break; + + case 79: /* expr: expr _AMPER_t $@33 expr */ +#line 271 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_AND, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1840 "src/engine/jamgram.cpp" + break; + + case 80: /* $@34: %empty */ +#line 272 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1846 "src/engine/jamgram.cpp" + break; + + case 81: /* expr: expr _AMPERAMPER_t $@34 expr */ +#line 273 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_AND, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1852 "src/engine/jamgram.cpp" + break; + + case 82: /* $@35: %empty */ +#line 274 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1858 "src/engine/jamgram.cpp" + break; + + case 83: /* expr: expr _BAR_t $@35 expr */ +#line 275 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_OR, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1864 "src/engine/jamgram.cpp" + break; + + case 84: /* $@36: %empty */ +#line 276 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1870 "src/engine/jamgram.cpp" + break; + + case 85: /* expr: expr _BARBAR_t $@36 expr */ +#line 277 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_OR, yyvsp[-3].parse, yyvsp[0].parse ); } +#line 1876 "src/engine/jamgram.cpp" + break; + + case 86: /* $@37: %empty */ +#line 278 "src/engine/jamgram.y" + { 
yymode( SCAN_PUNCT ); } +#line 1882 "src/engine/jamgram.cpp" + break; + + case 87: /* expr: arg IN_t $@37 list */ +#line 279 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_IN, yyvsp[-3].parse, yyvsp[0].parse ); yymode( SCAN_COND ); } +#line 1888 "src/engine/jamgram.cpp" + break; + + case 88: /* $@38: %empty */ +#line 280 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1894 "src/engine/jamgram.cpp" + break; + + case 89: /* expr: _BANG_t $@38 expr */ +#line 281 "src/engine/jamgram.y" + { yyval.parse = peval( EXPR_NOT, yyvsp[0].parse, pnull() ); } +#line 1900 "src/engine/jamgram.cpp" + break; + + case 90: /* $@39: %empty */ +#line 282 "src/engine/jamgram.y" + { yymode( SCAN_CONDB ); } +#line 1906 "src/engine/jamgram.cpp" + break; + + case 91: /* expr: _LPAREN_t $@39 expr _RPAREN_t */ +#line 283 "src/engine/jamgram.y" + { yyval.parse = yyvsp[-1].parse; } +#line 1912 "src/engine/jamgram.cpp" + break; + + case 92: /* cases: %empty */ +#line 294 "src/engine/jamgram.y" + { yyval.parse = P0; } +#line 1918 "src/engine/jamgram.cpp" + break; + + case 93: /* cases: case cases */ +#line 296 "src/engine/jamgram.y" + { yyval.parse = pnode( yyvsp[-1].parse, yyvsp[0].parse ); } +#line 1924 "src/engine/jamgram.cpp" + break; + + case 94: /* $@40: %empty */ +#line 299 "src/engine/jamgram.y" + { yymode( SCAN_CASE ); } +#line 1930 "src/engine/jamgram.cpp" + break; + + case 95: /* $@41: %empty */ +#line 299 "src/engine/jamgram.y" + { yymode( SCAN_NORMAL ); } +#line 1936 "src/engine/jamgram.cpp" + break; + + case 96: /* case: CASE_t $@40 ARG _COLON_t $@41 block */ +#line 300 "src/engine/jamgram.y" + { yyval.parse = psnode( yyvsp[-3].string, yyvsp[0].parse ); } +#line 1942 "src/engine/jamgram.cpp" + break; + + case 97: /* lol: list */ +#line 309 "src/engine/jamgram.y" + { yyval.parse = pnode( P0, yyvsp[0].parse ); } +#line 1948 "src/engine/jamgram.cpp" + break; + + case 98: /* lol: list _COLON_t lol */ +#line 311 "src/engine/jamgram.y" + { yyval.parse = pnode( yyvsp[0].parse, yyvsp[-2].parse ); } +#line 1954 "src/engine/jamgram.cpp" + break; + + case 99: /* list: listp */ +#line 321 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; } +#line 1960 "src/engine/jamgram.cpp" + break; + + case 100: /* listp: %empty */ +#line 325 "src/engine/jamgram.y" + { yyval.parse = pnull(); } +#line 1966 "src/engine/jamgram.cpp" + break; + + case 101: /* listp: listp arg */ +#line 327 "src/engine/jamgram.y" + { yyval.parse = pappend( yyvsp[-1].parse, yyvsp[0].parse ); } +#line 1972 "src/engine/jamgram.cpp" + break; + + case 102: /* arg: ARG */ +#line 331 "src/engine/jamgram.y" + { yyval.parse = plist( yyvsp[0].string ); } +#line 1978 "src/engine/jamgram.cpp" + break; + + case 103: /* @42: %empty */ +#line 332 "src/engine/jamgram.y" + { yyval.number = yymode( SCAN_CALL ); } +#line 1984 "src/engine/jamgram.cpp" + break; + + case 104: /* arg: _LBRACKET_t @42 func _RBRACKET_t */ +#line 333 "src/engine/jamgram.y" + { yyval.parse = yyvsp[-1].parse; yymode( yyvsp[-2].number ); } +#line 1990 "src/engine/jamgram.cpp" + break; + + case 105: /* $@43: %empty */ +#line 341 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 1996 "src/engine/jamgram.cpp" + break; + + case 106: /* func: ARG $@43 lol */ +#line 342 "src/engine/jamgram.y" + { yyval.parse = prule( yyvsp[-2].string, yyvsp[0].parse ); } +#line 2002 "src/engine/jamgram.cpp" + break; + + case 107: /* $@44: %empty */ +#line 343 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 2008 "src/engine/jamgram.cpp" + break; + + case 108: /* func: ON_t arg ARG 
$@44 lol */ +#line 344 "src/engine/jamgram.y" + { yyval.parse = pon( yyvsp[-3].parse, prule( yyvsp[-2].string, yyvsp[0].parse ) ); } +#line 2014 "src/engine/jamgram.cpp" + break; + + case 109: /* $@45: %empty */ +#line 345 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 2020 "src/engine/jamgram.cpp" + break; + + case 110: /* func: ON_t arg RETURN_t $@45 list */ +#line 346 "src/engine/jamgram.y" + { yyval.parse = pon( yyvsp[-3].parse, yyvsp[0].parse ); } +#line 2026 "src/engine/jamgram.cpp" + break; + + case 111: /* eflags: %empty */ +#line 356 "src/engine/jamgram.y" + { yyval.number = 0; } +#line 2032 "src/engine/jamgram.cpp" + break; + + case 112: /* eflags: eflags eflag */ +#line 358 "src/engine/jamgram.y" + { yyval.number = yyvsp[-1].number | yyvsp[0].number; } +#line 2038 "src/engine/jamgram.cpp" + break; + + case 113: /* eflag: UPDATED_t */ +#line 362 "src/engine/jamgram.y" + { yyval.number = EXEC_UPDATED; } +#line 2044 "src/engine/jamgram.cpp" + break; + + case 114: /* eflag: TOGETHER_t */ +#line 364 "src/engine/jamgram.y" + { yyval.number = EXEC_TOGETHER; } +#line 2050 "src/engine/jamgram.cpp" + break; + + case 115: /* eflag: IGNORE_t */ +#line 366 "src/engine/jamgram.y" + { yyval.number = EXEC_IGNORE; } +#line 2056 "src/engine/jamgram.cpp" + break; + + case 116: /* eflag: QUIETLY_t */ +#line 368 "src/engine/jamgram.y" + { yyval.number = EXEC_QUIETLY; } +#line 2062 "src/engine/jamgram.cpp" + break; + + case 117: /* eflag: PIECEMEAL_t */ +#line 370 "src/engine/jamgram.y" + { yyval.number = EXEC_PIECEMEAL; } +#line 2068 "src/engine/jamgram.cpp" + break; + + case 118: /* eflag: EXISTING_t */ +#line 372 "src/engine/jamgram.y" + { yyval.number = EXEC_EXISTING; } +#line 2074 "src/engine/jamgram.cpp" + break; + + case 119: /* bindlist: %empty */ +#line 381 "src/engine/jamgram.y" + { yyval.parse = pnull(); } +#line 2080 "src/engine/jamgram.cpp" + break; + + case 120: /* $@46: %empty */ +#line 382 "src/engine/jamgram.y" + { yymode( SCAN_PUNCT ); } +#line 2086 "src/engine/jamgram.cpp" + break; + + case 121: /* bindlist: BIND_t $@46 list */ +#line 383 "src/engine/jamgram.y" + { yyval.parse = yyvsp[0].parse; } +#line 2092 "src/engine/jamgram.cpp" + break; + + +#line 2096 "src/engine/jamgram.cpp" + + default: break; + } + /* User semantic actions sometimes alter yychar, and that requires + that yytoken be updated with the new translation. We take the + approach of translating immediately before every use of yytoken. + One alternative is translating here after every semantic action, + but that translation would be missed if the semantic action invokes + YYABORT, YYACCEPT, or YYERROR immediately after altering yychar or + if it invokes YYBACKUP. In the case of YYABORT or YYACCEPT, an + incorrect destructor might then be invoked immediately. In the + case of YYERROR or YYBACKUP, subsequent parser actions might lead + to an incorrect destructor call or verbose syntax error message + before the lookahead is translated. */ + YY_SYMBOL_PRINT ("-> $$ =", YY_CAST (yysymbol_kind_t, yyr1[yyn]), &yyval, &yyloc); + + YYPOPSTACK (yylen); + yylen = 0; + + *++yyvsp = yyval; + + /* Now 'shift' the result of the reduction. Determine what state + that goes to, based on the state we popped back to and the rule + number reduced by. */ + { + const int yylhs = yyr1[yyn] - YYNTOKENS; + const int yyi = yypgoto[yylhs] + *yyssp; + yystate = (0 <= yyi && yyi <= YYLAST && yycheck[yyi] == *yyssp + ? yytable[yyi] + : yydefgoto[yylhs]); + } + + goto yynewstate; + + +/*--------------------------------------. 
+| yyerrlab -- here on detecting error. | +`--------------------------------------*/ +yyerrlab: + /* Make sure we have latest lookahead translation. See comments at + user semantic actions for why this is necessary. */ + yytoken = yychar == YYEMPTY ? YYSYMBOL_YYEMPTY : YYTRANSLATE (yychar); + /* If not already recovering from an error, report this error. */ + if (!yyerrstatus) + { + ++yynerrs; + yyerror (YY_("syntax error")); + } + + if (yyerrstatus == 3) + { + /* If just tried and failed to reuse lookahead token after an + error, discard it. */ + + if (yychar <= YYEOF) + { + /* Return failure if at end of input. */ + if (yychar == YYEOF) + YYABORT; + } + else + { + yydestruct ("Error: discarding", + yytoken, &yylval); + yychar = YYEMPTY; + } + } + + /* Else will try to reuse lookahead token after shifting the error + token. */ + goto yyerrlab1; + + +/*---------------------------------------------------. +| yyerrorlab -- error raised explicitly by YYERROR. | +`---------------------------------------------------*/ +yyerrorlab: + /* Pacify compilers when the user code never invokes YYERROR and the + label yyerrorlab therefore never appears in user code. */ + if (0) + YYERROR; + ++yynerrs; + + /* Do not reclaim the symbols of the rule whose action triggered + this YYERROR. */ + YYPOPSTACK (yylen); + yylen = 0; + YY_STACK_PRINT (yyss, yyssp); + yystate = *yyssp; + goto yyerrlab1; + + +/*-------------------------------------------------------------. +| yyerrlab1 -- common code for both syntax error and YYERROR. | +`-------------------------------------------------------------*/ +yyerrlab1: + yyerrstatus = 3; /* Each real token shifted decrements this. */ + + /* Pop stack until we find a state that shifts the error token. */ + for (;;) + { + yyn = yypact[yystate]; + if (!yypact_value_is_default (yyn)) + { + yyn += YYSYMBOL_YYerror; + if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYSYMBOL_YYerror) + { + yyn = yytable[yyn]; + if (0 < yyn) + break; + } + } + + /* Pop the current state because it cannot handle the error token. */ + if (yyssp == yyss) + YYABORT; + + + yydestruct ("Error: popping", + YY_ACCESSING_SYMBOL (yystate), yyvsp); + YYPOPSTACK (1); + yystate = *yyssp; + YY_STACK_PRINT (yyss, yyssp); + } + + YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN + *++yyvsp = yylval; + YY_IGNORE_MAYBE_UNINITIALIZED_END + + + /* Shift the error token. */ + YY_SYMBOL_PRINT ("Shifting", YY_ACCESSING_SYMBOL (yyn), yyvsp, yylsp); + + yystate = yyn; + goto yynewstate; + + +/*-------------------------------------. +| yyacceptlab -- YYACCEPT comes here. | +`-------------------------------------*/ +yyacceptlab: + yyresult = 0; + goto yyreturnlab; + + +/*-----------------------------------. +| yyabortlab -- YYABORT comes here. | +`-----------------------------------*/ +yyabortlab: + yyresult = 1; + goto yyreturnlab; + + +/*-----------------------------------------------------------. +| yyexhaustedlab -- YYNOMEM (memory exhaustion) comes here. | +`-----------------------------------------------------------*/ +yyexhaustedlab: + yyerror (YY_("memory exhausted")); + yyresult = 2; + goto yyreturnlab; + + +/*----------------------------------------------------------. +| yyreturnlab -- parsing is finished, clean up and return. | +`----------------------------------------------------------*/ +yyreturnlab: + if (yychar != YYEMPTY) + { + /* Make sure we have latest lookahead translation. See comments at + user semantic actions for why this is necessary. 
*/
+      yytoken = YYTRANSLATE (yychar);
+      yydestruct ("Cleanup: discarding lookahead",
+                  yytoken, &yylval);
+    }
+  /* Do not reclaim the symbols of the rule whose action triggered
+     this YYABORT or YYACCEPT. */
+  YYPOPSTACK (yylen);
+  YY_STACK_PRINT (yyss, yyssp);
+  while (yyssp != yyss)
+    {
+      yydestruct ("Cleanup: popping",
+                  YY_ACCESSING_SYMBOL (+*yyssp), yyvsp);
+      YYPOPSTACK (1);
+    }
+#ifndef yyoverflow
+  if (yyss != yyssa)
+    YYSTACK_FREE (yyss);
+#endif
+
+  return yyresult;
+}
+
diff --git a/src/boost/tools/build/src/engine/jamgram.hpp b/src/boost/tools/build/src/engine/jamgram.hpp
new file mode 100644
index 000000000..f836468c6
--- /dev/null
+++ b/src/boost/tools/build/src/engine/jamgram.hpp
@@ -0,0 +1,176 @@
+/* A Bison parser, made by GNU Bison 3.8.2. */
+
+/* Bison interface for Yacc-like parsers in C
+
+   Copyright (C) 1984, 1989-1990, 2000-2015, 2018-2021 Free Software Foundation,
+   Inc.
+
+   This program is free software: you can redistribute it and/or modify
+   it under the terms of the GNU General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program.  If not, see <https://www.gnu.org/licenses/>. */
+
+/* As a special exception, you may create a larger work that contains
+   part or all of the Bison parser skeleton and distribute that work
+   under terms of your choice, so long as that work isn't itself a
+   parser generator using the skeleton or a modified version thereof
+   as a parser skeleton.  Alternatively, if you modify or redistribute
+   the parser skeleton itself, you may (at your option) remove this
+   special exception, which will cause the skeleton and the resulting
+   Bison output files to be licensed under the GNU General Public
+   License without this special exception.
+
+   This special exception was added by the Free Software Foundation in
+   version 2.2 of Bison. */
+
+/* DO NOT RELY ON FEATURES THAT ARE NOT DOCUMENTED in the manual,
+   especially those whose name start with YY_ or yy_.  They are
+   private implementation details that can be changed or removed. */
+
+#ifndef YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED
+# define YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED
+/* Debug traces. */
+#ifndef YYDEBUG
+# define YYDEBUG 0
+#endif
+#if YYDEBUG
+extern int yydebug;
+#endif
+
+/* Token kinds.
*/ +#ifndef YYTOKENTYPE +# define YYTOKENTYPE + enum yytokentype + { + YYEMPTY = -2, + YYEOF = 0, /* "end of file" */ + YYerror = 256, /* error */ + YYUNDEF = 257, /* "invalid token" */ + _BANG_t = 258, /* _BANG_t */ + _BANG_EQUALS_t = 259, /* _BANG_EQUALS_t */ + _AMPER_t = 260, /* _AMPER_t */ + _AMPERAMPER_t = 261, /* _AMPERAMPER_t */ + _LPAREN_t = 262, /* _LPAREN_t */ + _RPAREN_t = 263, /* _RPAREN_t */ + _PLUS_EQUALS_t = 264, /* _PLUS_EQUALS_t */ + _COLON_t = 265, /* _COLON_t */ + _SEMIC_t = 266, /* _SEMIC_t */ + _LANGLE_t = 267, /* _LANGLE_t */ + _LANGLE_EQUALS_t = 268, /* _LANGLE_EQUALS_t */ + _EQUALS_t = 269, /* _EQUALS_t */ + _RANGLE_t = 270, /* _RANGLE_t */ + _RANGLE_EQUALS_t = 271, /* _RANGLE_EQUALS_t */ + _QUESTION_EQUALS_t = 272, /* _QUESTION_EQUALS_t */ + _LBRACKET_t = 273, /* _LBRACKET_t */ + _RBRACKET_t = 274, /* _RBRACKET_t */ + ACTIONS_t = 275, /* ACTIONS_t */ + BIND_t = 276, /* BIND_t */ + BREAK_t = 277, /* BREAK_t */ + CASE_t = 278, /* CASE_t */ + CLASS_t = 279, /* CLASS_t */ + CONTINUE_t = 280, /* CONTINUE_t */ + DEFAULT_t = 281, /* DEFAULT_t */ + ELSE_t = 282, /* ELSE_t */ + EXISTING_t = 283, /* EXISTING_t */ + FOR_t = 284, /* FOR_t */ + IF_t = 285, /* IF_t */ + IGNORE_t = 286, /* IGNORE_t */ + IN_t = 287, /* IN_t */ + INCLUDE_t = 288, /* INCLUDE_t */ + LOCAL_t = 289, /* LOCAL_t */ + MODULE_t = 290, /* MODULE_t */ + ON_t = 291, /* ON_t */ + PIECEMEAL_t = 292, /* PIECEMEAL_t */ + QUIETLY_t = 293, /* QUIETLY_t */ + RETURN_t = 294, /* RETURN_t */ + RULE_t = 295, /* RULE_t */ + SWITCH_t = 296, /* SWITCH_t */ + TOGETHER_t = 297, /* TOGETHER_t */ + UPDATED_t = 298, /* UPDATED_t */ + WHILE_t = 299, /* WHILE_t */ + _LBRACE_t = 300, /* _LBRACE_t */ + _BAR_t = 301, /* _BAR_t */ + _BARBAR_t = 302, /* _BARBAR_t */ + _RBRACE_t = 303, /* _RBRACE_t */ + ARG = 304, /* ARG */ + STRING = 305 /* STRING */ + }; + typedef enum yytokentype yytoken_kind_t; +#endif +/* Token kinds. */ +#define YYEMPTY -2 +#define YYEOF 0 +#define YYerror 256 +#define YYUNDEF 257 +#define _BANG_t 258 +#define _BANG_EQUALS_t 259 +#define _AMPER_t 260 +#define _AMPERAMPER_t 261 +#define _LPAREN_t 262 +#define _RPAREN_t 263 +#define _PLUS_EQUALS_t 264 +#define _COLON_t 265 +#define _SEMIC_t 266 +#define _LANGLE_t 267 +#define _LANGLE_EQUALS_t 268 +#define _EQUALS_t 269 +#define _RANGLE_t 270 +#define _RANGLE_EQUALS_t 271 +#define _QUESTION_EQUALS_t 272 +#define _LBRACKET_t 273 +#define _RBRACKET_t 274 +#define ACTIONS_t 275 +#define BIND_t 276 +#define BREAK_t 277 +#define CASE_t 278 +#define CLASS_t 279 +#define CONTINUE_t 280 +#define DEFAULT_t 281 +#define ELSE_t 282 +#define EXISTING_t 283 +#define FOR_t 284 +#define IF_t 285 +#define IGNORE_t 286 +#define IN_t 287 +#define INCLUDE_t 288 +#define LOCAL_t 289 +#define MODULE_t 290 +#define ON_t 291 +#define PIECEMEAL_t 292 +#define QUIETLY_t 293 +#define RETURN_t 294 +#define RULE_t 295 +#define SWITCH_t 296 +#define TOGETHER_t 297 +#define UPDATED_t 298 +#define WHILE_t 299 +#define _LBRACE_t 300 +#define _BAR_t 301 +#define _BARBAR_t 302 +#define _RBRACE_t 303 +#define ARG 304 +#define STRING 305 + +/* Value type. */ +#if ! defined YYSTYPE && ! 
defined YYSTYPE_IS_DECLARED +typedef int YYSTYPE; +# define YYSTYPE_IS_TRIVIAL 1 +# define YYSTYPE_IS_DECLARED 1 +#endif + + +extern YYSTYPE yylval; + + +int yyparse (void); + + +#endif /* !YY_YY_SRC_ENGINE_JAMGRAM_HPP_INCLUDED */ diff --git a/src/boost/tools/build/src/engine/jamgram.y b/src/boost/tools/build/src/engine/jamgram.y new file mode 100644 index 000000000..4e5839381 --- /dev/null +++ b/src/boost/tools/build/src/engine/jamgram.y @@ -0,0 +1,386 @@ +%token _BANG_t +%token _BANG_EQUALS_t +%token _AMPER_t +%token _AMPERAMPER_t +%token _LPAREN_t +%token _RPAREN_t +%token _PLUS_EQUALS_t +%token _COLON_t +%token _SEMIC_t +%token _LANGLE_t +%token _LANGLE_EQUALS_t +%token _EQUALS_t +%token _RANGLE_t +%token _RANGLE_EQUALS_t +%token _QUESTION_EQUALS_t +%token _LBRACKET_t +%token _RBRACKET_t +%token ACTIONS_t +%token BIND_t +%token BREAK_t +%token CASE_t +%token CLASS_t +%token CONTINUE_t +%token DEFAULT_t +%token ELSE_t +%token EXISTING_t +%token FOR_t +%token IF_t +%token IGNORE_t +%token IN_t +%token INCLUDE_t +%token LOCAL_t +%token MODULE_t +%token ON_t +%token PIECEMEAL_t +%token QUIETLY_t +%token RETURN_t +%token RULE_t +%token SWITCH_t +%token TOGETHER_t +%token UPDATED_t +%token WHILE_t +%token _LBRACE_t +%token _BAR_t +%token _BARBAR_t +%token _RBRACE_t +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + */ + +/* + * jamgram.yy - jam grammar + * + * 04/13/94 (seiwald) - added shorthand L0 for null list pointer + * 06/01/94 (seiwald) - new 'actions existing' does existing sources + * 08/23/94 (seiwald) - Support for '+=' (append to variable) + * 08/31/94 (seiwald) - Allow ?= as alias for "default =". + * 09/15/94 (seiwald) - if conditionals take only single arguments, so + * that 'if foo == bar' gives syntax error (use =). + * 02/11/95 (seiwald) - when scanning arguments to rules, only treat + * punctuation keywords as keywords. All arg lists + * are terminated with punctuation keywords. + * + * 09/11/00 (seiwald) - Support for function calls: + * + * Rules now return lists (LIST *), rather than void. + * + * New "[ rule ]" syntax evals rule into a LIST. + * + * Lists are now generated by compile_list() and + * compile_append(), and any other rule that indirectly + * makes a list, rather than being built directly here, + * so that lists values can contain rule evaluations. + * + * New 'return' rule sets the return value, though + * other statements also may have return values. + * + * 'run' production split from 'block' production so + * that empty blocks can be handled separately. 
+ */ + +%token ARG STRING + +%left _BARBAR_t _BAR_t +%left _AMPERAMPER_t _AMPER_t +%left _EQUALS_t _BANG_EQUALS_t IN_t +%left _LANGLE_t _LANGLE_EQUALS_t _RANGLE_t _RANGLE_EQUALS_t +%left _BANG_t + +%{ +#include "jam.h" + +#include "lists.h" +#include "parse.h" +#include "scan.h" +#include "compile.h" +#include "object.h" +#include "rules.h" + +# define YYINITDEPTH 5000 /* for C++ parsing */ +# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */ + +# define F0 -1 +# define P0 (PARSE *)0 +# define S0 (OBJECT *)0 + +# define pappend( l,r ) parse_make( PARSE_APPEND,l,r,P0,S0,S0,0 ) +# define peval( c,l,r ) parse_make( PARSE_EVAL,l,r,P0,S0,S0,c ) +# define pfor( s,l,r,x ) parse_make( PARSE_FOREACH,l,r,P0,s,S0,x ) +# define pif( l,r,t ) parse_make( PARSE_IF,l,r,t,S0,S0,0 ) +# define pincl( l ) parse_make( PARSE_INCLUDE,l,P0,P0,S0,S0,0 ) +# define plist( s ) parse_make( PARSE_LIST,P0,P0,P0,s,S0,0 ) +# define plocal( l,r,t ) parse_make( PARSE_LOCAL,l,r,t,S0,S0,0 ) +# define pmodule( l,r ) parse_make( PARSE_MODULE,l,r,P0,S0,S0,0 ) +# define pclass( l,r ) parse_make( PARSE_CLASS,l,r,P0,S0,S0,0 ) +# define pnull() parse_make( PARSE_NULL,P0,P0,P0,S0,S0,0 ) +# define pon( l,r ) parse_make( PARSE_ON,l,r,P0,S0,S0,0 ) +# define prule( s,p ) parse_make( PARSE_RULE,p,P0,P0,s,S0,0 ) +# define prules( l,r ) parse_make( PARSE_RULES,l,r,P0,S0,S0,0 ) +# define pset( l,r,a ) parse_make( PARSE_SET,l,r,P0,S0,S0,a ) +# define pset1( l,r,t,a ) parse_make( PARSE_SETTINGS,l,r,t,S0,S0,a ) +# define psetc( s,p,a,l ) parse_make( PARSE_SETCOMP,p,a,P0,s,S0,l ) +# define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f ) +# define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 ) +# define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 ) +# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 ) +# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 ) +# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 ) + +# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 ) +# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 ) + +%} + +%% + +run : /* empty */ + /* do nothing */ + | rules + { parse_save( $1.parse ); } + ; + +/* + * block - zero or more rules + * rules - one or more rules + * rule - any one of jam's rules + * right-recursive so rules execute in order. 
+ */ + +block : null + { $$.parse = $1.parse; } + | rules + { $$.parse = $1.parse; } + ; + +rules : rule + { $$.parse = $1.parse; } + | rule rules + { $$.parse = prules( $1.parse, $2.parse ); } + | LOCAL_t { yymode( SCAN_ASSIGN ); } list assign_list_opt _SEMIC_t { yymode( SCAN_NORMAL ); } block + { $$.parse = plocal( $3.parse, $4.parse, $7.parse ); } + ; + +null : /* empty */ + { $$.parse = pnull(); } + ; + +assign_list_opt : _EQUALS_t { yymode( SCAN_PUNCT ); } list + { $$.parse = $3.parse; $$.number = ASSIGN_SET; } + | null + { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; } + ; + +arglist_opt : _LPAREN_t lol _RPAREN_t + { $$.parse = $2.parse; } + | + { $$.parse = P0; } + ; + +local_opt : LOCAL_t + { $$.number = 1; } + | /* empty */ + { $$.number = 0; } + ; + +else_opt : ELSE_t rule + { $$.parse = $2.parse; } + | /* empty */ + { $$.parse = pnull(); } + +rule : _LBRACE_t block _RBRACE_t + { $$.parse = $2.parse; } + | INCLUDE_t { yymode( SCAN_PUNCT ); } list _SEMIC_t + { $$.parse = pincl( $3.parse ); yymode( SCAN_NORMAL ); } + | ARG { yymode( SCAN_PUNCT ); } lol _SEMIC_t + { $$.parse = prule( $1.string, $3.parse ); yymode( SCAN_NORMAL ); } + | arg assign { yymode( SCAN_PUNCT ); } list _SEMIC_t + { $$.parse = pset( $1.parse, $4.parse, $2.number ); yymode( SCAN_NORMAL ); } + | arg ON_t { yymode( SCAN_ASSIGN ); } list assign { yymode( SCAN_PUNCT ); } list _SEMIC_t + { $$.parse = pset1( $1.parse, $4.parse, $7.parse, $5.number ); yymode( SCAN_NORMAL ); } + | RETURN_t { yymode( SCAN_PUNCT ); } list _SEMIC_t + { $$.parse = preturn( $3.parse ); yymode( SCAN_NORMAL ); } + | BREAK_t _SEMIC_t + { $$.parse = pbreak(); } + | CONTINUE_t _SEMIC_t + { $$.parse = pcontinue(); } + | FOR_t local_opt ARG IN_t { yymode( SCAN_PUNCT ); } list _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t + { $$.parse = pfor( $3.string, $6.parse, $9.parse, $2.number ); } + | SWITCH_t { yymode( SCAN_PUNCT ); } list _LBRACE_t { yymode( SCAN_NORMAL ); } cases _RBRACE_t + { $$.parse = pswitch( $3.parse, $6.parse ); } + | IF_t { yymode( SCAN_CONDB ); } expr _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t else_opt + { $$.parse = pif( $3.parse, $6.parse, $8.parse ); } + | MODULE_t { yymode( SCAN_PUNCT ); } list _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t + { $$.parse = pmodule( $3.parse, $6.parse ); } + | CLASS_t { yymode( SCAN_PUNCT ); } lol _LBRACE_t { yymode( SCAN_NORMAL ); } block _RBRACE_t + { $$.parse = pclass( $3.parse, $6.parse ); } + | WHILE_t { yymode( SCAN_CONDB ); } expr { yymode( SCAN_NORMAL ); } _LBRACE_t block _RBRACE_t + { $$.parse = pwhile( $3.parse, $6.parse ); } + | local_opt RULE_t { yymode( SCAN_PUNCT ); } ARG { yymode( SCAN_PARAMS ); } arglist_opt { yymode( SCAN_NORMAL ); } rule + { $$.parse = psetc( $4.string, $8.parse, $6.parse, $1.number ); } + | ON_t arg rule + { $$.parse = pon( $2.parse, $3.parse ); } + | ACTIONS_t eflags ARG bindlist _LBRACE_t + { yymode( SCAN_STRING ); } + STRING + { yymode( SCAN_NORMAL ); } + _RBRACE_t + { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); } + ; + +/* + * assign - = or += + */ + +assign : _EQUALS_t + { $$.number = ASSIGN_SET; } + | _PLUS_EQUALS_t + { $$.number = ASSIGN_APPEND; } + | _QUESTION_EQUALS_t + { $$.number = ASSIGN_DEFAULT; } + | DEFAULT_t _EQUALS_t + { $$.number = ASSIGN_DEFAULT; } + ; + +/* + * expr - an expression for if + */ +expr : arg + { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); yymode( SCAN_COND ); } + | expr _EQUALS_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_EQUALS, $1.parse, $4.parse ); } + | expr 
_BANG_EQUALS_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_NOTEQ, $1.parse, $4.parse ); } + | expr _LANGLE_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_LESS, $1.parse, $4.parse ); } + | expr _LANGLE_EQUALS_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_LESSEQ, $1.parse, $4.parse ); } + | expr _RANGLE_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_MORE, $1.parse, $4.parse ); } + | expr _RANGLE_EQUALS_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_MOREEQ, $1.parse, $4.parse ); } + | expr _AMPER_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); } + | expr _AMPERAMPER_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); } + | expr _BAR_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); } + | expr _BARBAR_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); } + | arg IN_t { yymode( SCAN_PUNCT ); } list + { $$.parse = peval( EXPR_IN, $1.parse, $4.parse ); yymode( SCAN_COND ); } + | _BANG_t { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_NOT, $3.parse, pnull() ); } + | _LPAREN_t { yymode( SCAN_CONDB ); } expr _RPAREN_t + { $$.parse = $3.parse; } + ; + + +/* + * cases - action elements inside a 'switch' + * case - a single action element inside a 'switch' + * right-recursive rule so cases can be examined in order. + */ + +cases : /* empty */ + { $$.parse = P0; } + | case cases + { $$.parse = pnode( $1.parse, $2.parse ); } + ; + +case : CASE_t { yymode( SCAN_CASE ); } ARG _COLON_t { yymode( SCAN_NORMAL ); } block + { $$.parse = psnode( $3.string, $6.parse ); } + ; + +/* + * lol - list of lists + * right-recursive rule so that lists can be added in order. 
+ */ + +lol : list + { $$.parse = pnode( P0, $1.parse ); } + | list _COLON_t lol + { $$.parse = pnode( $3.parse, $1.parse ); } + ; + +/* + * list - zero or more args in a LIST + * listp - list (in puncutation only mode) + * arg - one ARG or function call + */ + +list : listp + { $$.parse = $1.parse; } + ; + +listp : /* empty */ + { $$.parse = pnull(); } + | listp arg + { $$.parse = pappend( $1.parse, $2.parse ); } + ; + +arg : ARG + { $$.parse = plist( $1.string ); } + | _LBRACKET_t { $$.number = yymode( SCAN_CALL ); } func _RBRACKET_t + { $$.parse = $3.parse; yymode( $2.number ); } + ; + +/* + * func - a function call (inside []) + * This needs to be split cleanly out of 'rule' + */ + +func : ARG { yymode( SCAN_PUNCT ); } lol + { $$.parse = prule( $1.string, $3.parse ); } + | ON_t arg ARG { yymode( SCAN_PUNCT ); } lol + { $$.parse = pon( $2.parse, prule( $3.string, $5.parse ) ); } + | ON_t arg RETURN_t { yymode( SCAN_PUNCT ); } list + { $$.parse = pon( $2.parse, $5.parse ); } + ; + + +/* + * eflags - zero or more modifiers to 'executes' + * eflag - a single modifier to 'executes' + */ + +eflags : /* empty */ + { $$.number = 0; } + | eflags eflag + { $$.number = $1.number | $2.number; } + ; + +eflag : UPDATED_t + { $$.number = EXEC_UPDATED; } + | TOGETHER_t + { $$.number = EXEC_TOGETHER; } + | IGNORE_t + { $$.number = EXEC_IGNORE; } + | QUIETLY_t + { $$.number = EXEC_QUIETLY; } + | PIECEMEAL_t + { $$.number = EXEC_PIECEMEAL; } + | EXISTING_t + { $$.number = EXEC_EXISTING; } + ; + + +/* + * bindlist - list of variable to bind for an action + */ + +bindlist : /* empty */ + { $$.parse = pnull(); } + | BIND_t { yymode( SCAN_PUNCT ); } list + { $$.parse = $3.parse; } + ; + + diff --git a/src/boost/tools/build/src/engine/jamgram.yy b/src/boost/tools/build/src/engine/jamgram.yy new file mode 100644 index 000000000..d6158f771 --- /dev/null +++ b/src/boost/tools/build/src/engine/jamgram.yy @@ -0,0 +1,340 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + */ + +/* + * jamgram.yy - jam grammar + * + * 04/13/94 (seiwald) - added shorthand L0 for null list pointer + * 06/01/94 (seiwald) - new 'actions existing' does existing sources + * 08/23/94 (seiwald) - Support for '+=' (append to variable) + * 08/31/94 (seiwald) - Allow ?= as alias for "default =". + * 09/15/94 (seiwald) - if conditionals take only single arguments, so + * that 'if foo == bar' gives syntax error (use =). + * 02/11/95 (seiwald) - when scanning arguments to rules, only treat + * punctuation keywords as keywords. All arg lists + * are terminated with punctuation keywords. + * + * 09/11/00 (seiwald) - Support for function calls: + * + * Rules now return lists (LIST *), rather than void. + * + * New "[ rule ]" syntax evals rule into a LIST. + * + * Lists are now generated by compile_list() and + * compile_append(), and any other rule that indirectly + * makes a list, rather than being built directly here, + * so that lists values can contain rule evaluations. + * + * New 'return' rule sets the return value, though + * other statements also may have return values. + * + * 'run' production split from 'block' production so + * that empty blocks can be handled separately. 
+ */ + +%token ARG STRING + +%left `||` `|` +%left `&&` `&` +%left `=` `!=` `in` +%left `<` `<=` `>` `>=` +%left `!` + +%{ +#include "jam.h" + +#include "lists.h" +#include "parse.h" +#include "scan.h" +#include "compile.h" +#include "object.h" +#include "rules.h" + +# define YYINITDEPTH 5000 /* for C++ parsing */ +# define YYMAXDEPTH 10000 /* for OSF and other less endowed yaccs */ + +# define F0 -1 +# define P0 (PARSE *)0 +# define S0 (OBJECT *)0 + +# define pappend( l,r ) parse_make( PARSE_APPEND,l,r,P0,S0,S0,0 ) +# define peval( c,l,r ) parse_make( PARSE_EVAL,l,r,P0,S0,S0,c ) +# define pfor( s,l,r,x ) parse_make( PARSE_FOREACH,l,r,P0,s,S0,x ) +# define pif( l,r,t ) parse_make( PARSE_IF,l,r,t,S0,S0,0 ) +# define pincl( l ) parse_make( PARSE_INCLUDE,l,P0,P0,S0,S0,0 ) +# define plist( s ) parse_make( PARSE_LIST,P0,P0,P0,s,S0,0 ) +# define plocal( l,r,t ) parse_make( PARSE_LOCAL,l,r,t,S0,S0,0 ) +# define pmodule( l,r ) parse_make( PARSE_MODULE,l,r,P0,S0,S0,0 ) +# define pclass( l,r ) parse_make( PARSE_CLASS,l,r,P0,S0,S0,0 ) +# define pnull() parse_make( PARSE_NULL,P0,P0,P0,S0,S0,0 ) +# define pon( l,r ) parse_make( PARSE_ON,l,r,P0,S0,S0,0 ) +# define prule( s,p ) parse_make( PARSE_RULE,p,P0,P0,s,S0,0 ) +# define prules( l,r ) parse_make( PARSE_RULES,l,r,P0,S0,S0,0 ) +# define pset( l,r,a ) parse_make( PARSE_SET,l,r,P0,S0,S0,a ) +# define pset1( l,r,t,a ) parse_make( PARSE_SETTINGS,l,r,t,S0,S0,a ) +# define psetc( s,p,a,l ) parse_make( PARSE_SETCOMP,p,a,P0,s,S0,l ) +# define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f ) +# define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 ) +# define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 ) +# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 ) +# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 ) +# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 ) + +# define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 ) +# define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 ) + +%} + +%% + +run : /* empty */ + /* do nothing */ + | rules + { parse_save( $1.parse ); } + ; + +/* + * block - zero or more rules + * rules - one or more rules + * rule - any one of jam's rules + * right-recursive so rules execute in order. 
+ */ + +block : null + { $$.parse = $1.parse; } + | rules + { $$.parse = $1.parse; } + ; + +rules : rule + { $$.parse = $1.parse; } + | rule rules + { $$.parse = prules( $1.parse, $2.parse ); } + | `local` { yymode( SCAN_ASSIGN ); } list assign_list_opt `;` { yymode( SCAN_NORMAL ); } block + { $$.parse = plocal( $3.parse, $4.parse, $7.parse ); } + ; + +null : /* empty */ + { $$.parse = pnull(); } + ; + +assign_list_opt : `=` { yymode( SCAN_PUNCT ); } list + { $$.parse = $3.parse; $$.number = ASSIGN_SET; } + | null + { $$.parse = $1.parse; $$.number = ASSIGN_APPEND; } + ; + +arglist_opt : `(` lol `)` + { $$.parse = $2.parse; } + | + { $$.parse = P0; } + ; + +local_opt : `local` + { $$.number = 1; } + | /* empty */ + { $$.number = 0; } + ; + +else_opt : `else` rule + { $$.parse = $2.parse; } + | /* empty */ + { $$.parse = pnull(); } + +rule : `{` block `}` + { $$.parse = $2.parse; } + | `include` { yymode( SCAN_PUNCT ); } list `;` + { $$.parse = pincl( $3.parse ); yymode( SCAN_NORMAL ); } + | ARG { yymode( SCAN_PUNCT ); } lol `;` + { $$.parse = prule( $1.string, $3.parse ); yymode( SCAN_NORMAL ); } + | arg assign { yymode( SCAN_PUNCT ); } list `;` + { $$.parse = pset( $1.parse, $4.parse, $2.number ); yymode( SCAN_NORMAL ); } + | arg `on` { yymode( SCAN_ASSIGN ); } list assign { yymode( SCAN_PUNCT ); } list `;` + { $$.parse = pset1( $1.parse, $4.parse, $7.parse, $5.number ); yymode( SCAN_NORMAL ); } + | `return` { yymode( SCAN_PUNCT ); } list `;` + { $$.parse = preturn( $3.parse ); yymode( SCAN_NORMAL ); } + | `break` `;` + { $$.parse = pbreak(); } + | `continue` `;` + { $$.parse = pcontinue(); } + | `for` local_opt ARG `in` { yymode( SCAN_PUNCT ); } list `{` { yymode( SCAN_NORMAL ); } block `}` + { $$.parse = pfor( $3.string, $6.parse, $9.parse, $2.number ); } + | `switch` { yymode( SCAN_PUNCT ); } list `{` { yymode( SCAN_NORMAL ); } cases `}` + { $$.parse = pswitch( $3.parse, $6.parse ); } + | `if` { yymode( SCAN_CONDB ); } expr `{` { yymode( SCAN_NORMAL ); } block `}` else_opt + { $$.parse = pif( $3.parse, $6.parse, $8.parse ); } + | `module` { yymode( SCAN_PUNCT ); } list `{` { yymode( SCAN_NORMAL ); } block `}` + { $$.parse = pmodule( $3.parse, $6.parse ); } + | `class` { yymode( SCAN_PUNCT ); } lol `{` { yymode( SCAN_NORMAL ); } block `}` + { $$.parse = pclass( $3.parse, $6.parse ); } + | `while` { yymode( SCAN_CONDB ); } expr { yymode( SCAN_NORMAL ); } `{` block `}` + { $$.parse = pwhile( $3.parse, $6.parse ); } + | local_opt `rule` { yymode( SCAN_PUNCT ); } ARG { yymode( SCAN_PARAMS ); } arglist_opt { yymode( SCAN_NORMAL ); } rule + { $$.parse = psetc( $4.string, $8.parse, $6.parse, $1.number ); } + | `on` arg rule + { $$.parse = pon( $2.parse, $3.parse ); } + | `actions` eflags ARG bindlist `{` + { yymode( SCAN_STRING ); } + STRING + { yymode( SCAN_NORMAL ); } + `}` + { $$.parse = psete( $3.string,$4.parse,$7.string,$2.number ); } + ; + +/* + * assign - = or += + */ + +assign : `=` + { $$.number = ASSIGN_SET; } + | `+=` + { $$.number = ASSIGN_APPEND; } + | `?=` + { $$.number = ASSIGN_DEFAULT; } + | `default` `=` + { $$.number = ASSIGN_DEFAULT; } + ; + +/* + * expr - an expression for if + */ +expr : arg + { $$.parse = peval( EXPR_EXISTS, $1.parse, pnull() ); yymode( SCAN_COND ); } + | expr `=` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_EQUALS, $1.parse, $4.parse ); } + | expr `!=` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_NOTEQ, $1.parse, $4.parse ); } + | expr `<` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_LESS, $1.parse, $4.parse ); } + 
| expr `<=` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_LESSEQ, $1.parse, $4.parse ); } + | expr `>` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_MORE, $1.parse, $4.parse ); } + | expr `>=` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_MOREEQ, $1.parse, $4.parse ); } + | expr `&` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); } + | expr `&&` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_AND, $1.parse, $4.parse ); } + | expr `|` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); } + | expr `||` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_OR, $1.parse, $4.parse ); } + | arg `in` { yymode( SCAN_PUNCT ); } list + { $$.parse = peval( EXPR_IN, $1.parse, $4.parse ); yymode( SCAN_COND ); } + | `!` { yymode( SCAN_CONDB ); } expr + { $$.parse = peval( EXPR_NOT, $3.parse, pnull() ); } + | `(` { yymode( SCAN_CONDB ); } expr `)` + { $$.parse = $3.parse; } + ; + + +/* + * cases - action elements inside a 'switch' + * case - a single action element inside a 'switch' + * right-recursive rule so cases can be examined in order. + */ + +cases : /* empty */ + { $$.parse = P0; } + | case cases + { $$.parse = pnode( $1.parse, $2.parse ); } + ; + +case : `case` { yymode( SCAN_CASE ); } ARG `:` { yymode( SCAN_NORMAL ); } block + { $$.parse = psnode( $3.string, $6.parse ); } + ; + +/* + * lol - list of lists + * right-recursive rule so that lists can be added in order. + */ + +lol : list + { $$.parse = pnode( P0, $1.parse ); } + | list `:` lol + { $$.parse = pnode( $3.parse, $1.parse ); } + ; + +/* + * list - zero or more args in a LIST + * listp - list (in puncutation only mode) + * arg - one ARG or function call + */ + +list : listp + { $$.parse = $1.parse; } + ; + +listp : /* empty */ + { $$.parse = pnull(); } + | listp arg + { $$.parse = pappend( $1.parse, $2.parse ); } + ; + +arg : ARG + { $$.parse = plist( $1.string ); } + | `[` { $$.number = yymode( SCAN_CALL ); } func `]` + { $$.parse = $3.parse; yymode( $2.number ); } + ; + +/* + * func - a function call (inside []) + * This needs to be split cleanly out of 'rule' + */ + +func : ARG { yymode( SCAN_PUNCT ); } lol + { $$.parse = prule( $1.string, $3.parse ); } + | `on` arg ARG { yymode( SCAN_PUNCT ); } lol + { $$.parse = pon( $2.parse, prule( $3.string, $5.parse ) ); } + | `on` arg `return` { yymode( SCAN_PUNCT ); } list + { $$.parse = pon( $2.parse, $5.parse ); } + ; + + +/* + * eflags - zero or more modifiers to 'executes' + * eflag - a single modifier to 'executes' + */ + +eflags : /* empty */ + { $$.number = 0; } + | eflags eflag + { $$.number = $1.number | $2.number; } + ; + +eflag : `updated` + { $$.number = EXEC_UPDATED; } + | `together` + { $$.number = EXEC_TOGETHER; } + | `ignore` + { $$.number = EXEC_IGNORE; } + | `quietly` + { $$.number = EXEC_QUIETLY; } + | `piecemeal` + { $$.number = EXEC_PIECEMEAL; } + | `existing` + { $$.number = EXEC_EXISTING; } + ; + + +/* + * bindlist - list of variable to bind for an action + */ + +bindlist : /* empty */ + { $$.parse = pnull(); } + | `bind` { yymode( SCAN_PUNCT ); } list + { $$.parse = $3.parse; } + ; + + diff --git a/src/boost/tools/build/src/engine/jamgramtab.h b/src/boost/tools/build/src/engine/jamgramtab.h new file mode 100644 index 000000000..38a810871 --- /dev/null +++ b/src/boost/tools/build/src/engine/jamgramtab.h @@ -0,0 +1,46 @@ + { "!", _BANG_t }, + { "!=", _BANG_EQUALS_t }, + { "&", _AMPER_t }, + { "&&", _AMPERAMPER_t }, + { "(", _LPAREN_t }, + { ")", 
_RPAREN_t }, + { "+=", _PLUS_EQUALS_t }, + { ":", _COLON_t }, + { ";", _SEMIC_t }, + { "<", _LANGLE_t }, + { "<=", _LANGLE_EQUALS_t }, + { "=", _EQUALS_t }, + { ">", _RANGLE_t }, + { ">=", _RANGLE_EQUALS_t }, + { "?=", _QUESTION_EQUALS_t }, + { "[", _LBRACKET_t }, + { "]", _RBRACKET_t }, + { "actions", ACTIONS_t }, + { "bind", BIND_t }, + { "break", BREAK_t }, + { "case", CASE_t }, + { "class", CLASS_t }, + { "continue", CONTINUE_t }, + { "default", DEFAULT_t }, + { "else", ELSE_t }, + { "existing", EXISTING_t }, + { "for", FOR_t }, + { "if", IF_t }, + { "ignore", IGNORE_t }, + { "in", IN_t }, + { "include", INCLUDE_t }, + { "local", LOCAL_t }, + { "module", MODULE_t }, + { "on", ON_t }, + { "piecemeal", PIECEMEAL_t }, + { "quietly", QUIETLY_t }, + { "return", RETURN_t }, + { "rule", RULE_t }, + { "switch", SWITCH_t }, + { "together", TOGETHER_t }, + { "updated", UPDATED_t }, + { "while", WHILE_t }, + { "{", _LBRACE_t }, + { "|", _BAR_t }, + { "||", _BARBAR_t }, + { "}", _RBRACE_t }, diff --git a/src/boost/tools/build/src/engine/lists.cpp b/src/boost/tools/build/src/engine/lists.cpp new file mode 100644 index 000000000..abdbeda48 --- /dev/null +++ b/src/boost/tools/build/src/engine/lists.cpp @@ -0,0 +1,455 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * lists.c - maintain lists of objects + */ + +#include "jam.h" +#include "lists.h" +#include "mem.h" +#include "output.h" +#include "startup.h" + +#include + +static int32_t get_bucket( int32_t size ) +{ + int32_t bucket = 0; + while ( size > ( int32_t(1) << bucket ) ) ++bucket; + return bucket; +} + +static LIST * list_alloc( int32_t size ) +{ + int32_t bucket = get_bucket( size ); + return b2::jam::ctor_ptr( BJAM_CALLOC( + 1, sizeof( LIST ) + ( size_t( 1 ) << bucket ) * sizeof( OBJECT * ) ) ); +} + +static void list_dealloc( LIST * l ) +{ + int32_t size = list_length( l ); + LIST * node = l; + + if ( size == 0 ) return; + + b2::jam::free_ptr( node ); +} + +/* + * list_append() - append a list onto another one, returning total + */ + +LIST * list_append( LIST * l, LIST * nl ) +{ + if ( list_empty( l ) ) + return nl; + if ( !list_empty( nl ) ) + { + int32_t l_size = list_length( l ); + int32_t nl_size = list_length( nl ); + int32_t size = l_size + nl_size; + int32_t bucket = get_bucket( size ); + + /* Do we need to reallocate? */ + if ( l_size <= ( int32_t(1) << ( bucket - 1 ) ) ) + { + LIST * result = list_alloc( size ); + memcpy( list_begin( result ), list_begin( l ), l_size * sizeof( + OBJECT * ) ); + list_dealloc( l ); + l = result; + } + + l->impl.size = size; + memcpy( list_begin( l ) + l_size, list_begin( nl ), nl_size * sizeof( + OBJECT * ) ); + list_dealloc( nl ); + } + return l; +} + +LISTITER list_begin( LIST * l ) +{ + return l ? (LISTITER)( (char *)l + sizeof( LIST ) ) : 0; +} + +LISTITER list_end( LIST * l ) +{ + return l ? list_begin( l ) + l->impl.size : 0; +} + +LIST * list_new( OBJECT * value ) +{ + LIST * const head = list_alloc( 1 ) ; + head->impl.size = 1; + list_begin( head )[ 0 ] = value; + return head; +} + +/* + * list_push_back() - tack a string onto the end of a list of strings + */ + +LIST * list_push_back( LIST * head, OBJECT * value ) +{ + int32_t size = list_length( head ); + + if ( DEBUG_LISTS ) + out_printf( "list > %s <\n", object_str( value ) ); + + /* If the size is a power of 2, reallocate. 
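+ * Buckets are powers of two (see get_bucket() above), so the backing
+ * array is reallocated only when the current size exactly fills its
+ * bucket and appending stays amortized O(1).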
*/ + if ( size == 0 ) + { + head = list_alloc( 1 ); + } + else if ( ( ( size - 1 ) & size ) == 0 ) + { + LIST * l = list_alloc( size + 1 ); + memcpy( l, head, sizeof( LIST ) + size * sizeof( OBJECT * ) ); + list_dealloc( head ); + head = l; + } + + list_begin( head )[ size ] = value; + head->impl.size = size + 1; + + return head; +} + + +/* + * list_copy() - copy a whole list of strings (nl) onto end of another (l). + */ + +LIST * list_copy( LIST * l ) +{ + int32_t size = list_length( l ); + int32_t i; + LIST * result; + + if ( size == 0 ) return L0; + + result = list_alloc( size ); + result->impl.size = size; + for ( i = 0; i < size; ++i ) + list_begin( result )[ i ] = object_copy( list_begin( l )[ i ] ); + return result; +} + + +LIST * list_copy_range( LIST * l, LISTITER first, LISTITER last ) +{ + if ( first == last ) + return L0; + else + { + int32_t size = int32_t( last - first ); + LIST * result = list_alloc( size ); + LISTITER dest = list_begin( result ); + result->impl.size = size; + for ( ; first != last; ++first, ++dest ) + *dest = object_copy( *first ); + return result; + } +} + + +/* + * list_sublist() - copy a subset of a list of strings. + */ + +LIST * list_sublist( LIST * l, int32_t start, int32_t count ) +{ + int32_t end = start + count; + int32_t size = list_length( l ); + if ( start >= size ) return L0; + if ( end > size ) end = size; + return list_copy_range( l, list_begin( l ) + start, list_begin( l ) + end ); +} + + +static int32_t str_ptr_compare( void const * va, void const * vb ) +{ + OBJECT * a = *( (OBJECT * *)va ); + OBJECT * b = *( (OBJECT * *)vb ); + return strcmp( object_str( a ), object_str( b ) ); +} + + +LIST * list_sort( LIST * l ) +{ + int32_t len; + LIST * result; + + if ( !l ) + return L0; + + len = list_length( l ); + result = list_copy( l ); + + qsort( list_begin( result ), len, sizeof( OBJECT * ), str_ptr_compare ); + + return result; +} + + +/* + * list_free() - free a list of strings + */ + +void list_free( LIST * head ) +{ + if ( !list_empty( head ) ) + { + LISTITER iter = list_begin( head ); + LISTITER const end = list_end( head ); + for ( ; iter != end; iter = list_next( iter ) ) + object_free( list_item( iter ) ); + list_dealloc( head ); + } +} + + +/* + * list_pop_front() - remove the front element from a list of strings + */ + +LIST * list_pop_front( LIST * l ) +{ + int32_t size = list_length( l ); + assert( size ); + --size; + object_free( list_front( l ) ); + + if ( size == 0 ) + { + list_dealloc( l ); + return L0; + } + + if ( ( ( size - 1 ) & size ) == 0 ) + { + LIST * const nl = list_alloc( size ); + nl->impl.size = size; + memcpy( list_begin( nl ), list_begin( l ) + 1, size * sizeof( OBJECT * ) + ); + list_dealloc( l ); + return nl; + } + + l->impl.size = size; + memmove( list_begin( l ), list_begin( l ) + 1, size * sizeof( OBJECT * ) ); + return l; +} + +LIST * list_reverse( LIST * l ) +{ + int32_t size = list_length( l ); + if ( size == 0 ) return L0; + { + LIST * const result = list_alloc( size ); + int32_t i; + result->impl.size = size; + for ( i = 0; i < size; ++i ) + list_begin( result )[ i ] = object_copy( list_begin( l )[ size - i - + 1 ] ); + return result; + } +} + +int32_t list_cmp( LIST * t, LIST * s ) +{ + int32_t status = 0; + LISTITER t_it = list_begin( t ); + LISTITER const t_end = list_end( t ); + LISTITER s_it = list_begin( s ); + LISTITER const s_end = list_end( s ); + + while ( !status && ( t_it != t_end || s_it != s_end ) ) + { + char const * st = t_it != t_end ? 
object_str( list_item( t_it ) ) : ""; + char const * ss = s_it != s_end ? object_str( list_item( s_it ) ) : ""; + + status = strcmp( st, ss ); + + t_it = t_it != t_end ? list_next( t_it ) : t_it; + s_it = s_it != s_end ? list_next( s_it ) : s_it; + } + + return status; +} + +int32_t list_is_sublist( LIST * sub, LIST * l ) +{ + LISTITER iter = list_begin( sub ); + LISTITER const end = list_end( sub ); + for ( ; iter != end; iter = list_next( iter ) ) + if ( !list_in( l, list_item( iter ) ) ) + return 0; + return 1; +} + +/* + * list_print() - print a list of strings to stdout + */ + +void list_print( LIST * l ) +{ + LISTITER iter = list_begin( l ), end = list_end( l ); + if ( iter != end ) + { + out_printf( "%s", object_str( list_item( iter ) ) ); + iter = list_next( iter ); + for ( ; iter != end; iter = list_next( iter ) ) + out_printf( " %s", object_str( list_item( iter ) ) ); + } +} + + +/* + * list_length() - return the number of items in the list + */ + +int32_t list_length( LIST * l ) +{ + return l ? l->impl.size : 0; +} + + +int32_t list_in( LIST * l, OBJECT * value ) +{ + LISTITER iter = list_begin( l ); + LISTITER end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + if ( object_equal( list_item( iter ), value ) ) + return 1; + return 0; +} + + +LIST * list_unique( LIST * sorted_list ) +{ + LIST * result = L0; + OBJECT * last_added = 0; + + LISTITER iter = list_begin( sorted_list ), end = list_end( sorted_list ); + for ( ; iter != end; iter = list_next( iter ) ) + { + if ( !last_added || !object_equal( list_item( iter ), last_added ) ) + { + result = list_push_back( result, object_copy( list_item( iter ) ) ); + last_added = list_item( iter ); + } + } + return result; +} + +void list_done() +{ +} + + +/* + * lol_init() - initialize a LOL (list of lists). + */ + +void lol_init( LOL * lol ) +{ + lol->count = 0; +} + + +/* + * lol_add() - append a LIST onto an LOL. + */ + +void lol_add( LOL * lol, LIST * l ) +{ + if ( lol->count < LOL_MAX ) + { + lol->list[ lol->count++ ] = l; + return; + } + + err_printf( "lol_add failed due to reached limit of %d elements\n", LOL_MAX ); + b2::clean_exit( EXITBAD ); +} + + +/* + * lol_free() - free the LOL and its LISTs. + */ + +void lol_free( LOL * lol ) +{ + int32_t i; + for ( i = 0; i < lol->count; ++i ) + list_free( lol->list[ i ] ); + lol->count = 0; +} + + +/* + * lol_get() - return one of the LISTs in the LOL. + */ + +LIST * lol_get( LOL * lol, int32_t i ) +{ + return i < lol->count ? lol->list[ i ] : L0; +} + + +/* + * lol_print() - debug print LISTS separated by ":". 
+ */ + +void lol_print( LOL * lol ) +{ + int32_t i; + for ( i = 0; i < lol->count; ++i ) + { + if ( i ) + out_printf( " : " ); + list_print( lol->list[ i ] ); + } +} + +#ifdef HAVE_PYTHON + +PyObject * list_to_python( LIST * l ) +{ + PyObject * result = PyList_New( 0 ); + LISTITER iter = list_begin( l ); + LISTITER const end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + { + PyObject * s = PyString_FromString( object_str( list_item( iter ) ) ); + PyList_Append( result, s ); + Py_DECREF( s ); + } + + return result; +} + +LIST * list_from_python( PyObject * l ) +{ + LIST * result = L0; + + Py_ssize_t n = PySequence_Size( l ); + Py_ssize_t i; + for ( i = 0; i < n; ++i ) + { + PyObject * v = PySequence_GetItem( l, i ); + result = list_push_back( result, object_new( PyString_AsString( v ) ) ); + Py_DECREF( v ); + } + + return result; +} + +#endif diff --git a/src/boost/tools/build/src/engine/lists.h b/src/boost/tools/build/src/engine/lists.h new file mode 100644 index 000000000..15b1f6f4e --- /dev/null +++ b/src/boost/tools/build/src/engine/lists.h @@ -0,0 +1,182 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * lists.h - the LIST structure and routines to manipulate them + * + * The whole of jam relies on lists of objects as a datatype. This module, in + * conjunction with object.c, handles these relatively efficiently. + * + * Structures defined: + * + * LIST - list of OBJECTs + * LOL - list of LISTs + * + * External routines: + * + * list_append() - append a list onto another one, returning total + * list_new() - tack an object onto the end of a list of objects + * list_copy() - copy a whole list of objects + * list_sublist() - copy a subset of a list of objects + * list_free() - free a list of objects + * list_print() - print a list of objects to stdout + * list_length() - return the number of items in the list + * + * lol_init() - initialize a LOL (list of lists) + * lol_add() - append a LIST onto an LOL + * lol_free() - free the LOL and its LISTs + * lol_get() - return one of the LISTs in the LOL + * lol_print() - debug print LISTS separated by ":" + */ + +#ifndef LISTS_DWA20011022_H +#define LISTS_DWA20011022_H + +#include "config.h" +#include "object.h" + +#ifdef HAVE_PYTHON +# include +#endif + +/* + * LIST - list of strings + */ + +struct LIST { + union { + int32_t size; + struct LIST * next; + OBJECT * align; + } impl; + + LIST() + { + this->impl.next = nullptr; + } +}; + +typedef LIST * list_ptr; +typedef OBJECT * * LISTITER; + +/* + * LOL - list of LISTs + */ + +#define LOL_MAX 19 +typedef struct _lol { + int32_t count; + LIST * list[ LOL_MAX ]; +} LOL; + +LIST * list_new( OBJECT * value ); +LIST * list_append( LIST * destination, LIST * source ); +LIST * list_copy( LIST * ); +LIST * list_copy_range( LIST * destination, LISTITER first, LISTITER last ); +void list_free( LIST * head ); +LIST * list_push_back( LIST * head, OBJECT * value ); +void list_print( LIST * ); +int32_t list_length( LIST * ); +LIST * list_sublist( LIST *, int32_t start, int32_t count ); +LIST * list_pop_front( LIST * ); +LIST * list_sort( LIST * ); +LIST * list_unique( LIST * sorted_list ); +int32_t list_in( LIST *, OBJECT * value ); +LIST * 
list_reverse( LIST * ); +int32_t list_cmp( LIST * lhs, LIST * rhs ); +int32_t list_is_sublist( LIST * sub, LIST * l ); +void list_done(); + +LISTITER list_begin( LIST * ); +LISTITER list_end( LIST * ); +#define list_next( it ) ((it) + 1) +#define list_item( it ) (*(it)) +#define list_empty( l ) ((l) == L0) +#define list_front( l ) list_item( list_begin( l ) ) + +#define L0 ((LIST *)0) + +void lol_add( LOL *, LIST * ); +void lol_init( LOL * ); +void lol_free( LOL * ); +LIST * lol_get( LOL *, int i ); +void lol_print( LOL * ); +void lol_build( LOL *, char const * * elements ); + +#ifdef HAVE_PYTHON +PyObject * list_to_python( LIST * ); +LIST * list_from_python( PyObject * ); +#endif + +namespace b2 { namespace jam { + struct list + { + struct iterator + { + inline explicit iterator(LISTITER i) : list_i(i) {} + + inline iterator operator++() + { + list_i = list_next(list_i); + return *this; + } + inline iterator operator++(int) + { + iterator result{*this}; + list_i = list_next(list_i); + return result; + } + inline bool operator==(iterator other) const { return list_i == other.list_i; } + inline bool operator!=(iterator other) const { return list_i != other.list_i; } + inline OBJECT *& operator*() const { return list_item(list_i); } + inline OBJECT ** operator->() const { return &list_item(list_i); } + + private: + + LISTITER list_i; + }; + + friend struct iterator; + + inline list(const list &other) + : list_obj(list_copy(other.list_obj)) {} + inline explicit list(const object &o) + : list_obj(list_new(object_copy(o))) {} + inline explicit list(LIST *l) + : list_obj(list_copy(l)) {} + + inline ~list() { if (list_obj) list_free(list_obj); } + inline LIST* release() + { + LIST* r = list_obj; + list_obj = nullptr; + return r; + } + + inline iterator begin() { return iterator(list_begin(list_obj)); } + inline iterator end() { return iterator(list_end(list_obj)); } + inline bool empty() const { return list_empty(list_obj) || length() == 0; } + inline int32_t length() const { return list_length(list_obj); } + inline list &append(const list &other) + { + list_obj = list_append(list_obj, list_copy(other.list_obj)); + return *this; + } + + private: + + LIST *list_obj = nullptr; + }; +}} + +#endif diff --git a/src/boost/tools/build/src/engine/make.cpp b/src/boost/tools/build/src/engine/make.cpp new file mode 100644 index 000000000..5d3764035 --- /dev/null +++ b/src/boost/tools/build/src/engine/make.cpp @@ -0,0 +1,931 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * make.c - bring a target up to date, once rules are in place. + * + * This modules controls the execution of rules to bring a target and its + * dependencies up to date. It is invoked after the targets, rules, et. al. + * described in rules.h are created by the interpreting jam files. + * + * This file contains the main make() entry point and the first pass make0(). + * The second pass, make1(), which actually does the command execution, is in + * make1.c. 
+ * + * External routines: + * make() - make a target, given its name + * + * Internal routines: + * make0() - bind and scan everything to make a TARGET + * make0sort() - reorder TARGETS chain by their time (newest to oldest) + */ + +#include "jam.h" +#include "make.h" + +#include "command.h" +#ifdef OPT_HEADER_CACHE_EXT +# include "hcache.h" +#endif +#include "headers.h" +#include "lists.h" +#include "object.h" +#include "parse.h" +#include "rules.h" +#include "search.h" +#include "timestamp.h" +#include "variable.h" +#include "execcmd.h" +#include "output.h" + +#include + +#ifndef max +# define max(a,b) ((a)>(b)?(a):(b)) +#endif + +static targets_uptr make0sort( targets_uptr c ); + +#ifdef OPT_GRAPH_DEBUG_EXT + static void dependGraphOutput( TARGET * t, int32_t depth ); +#endif + +static char const * target_fate[] = +{ + "init", /* T_FATE_INIT */ + "making", /* T_FATE_MAKING */ + "stable", /* T_FATE_STABLE */ + "newer", /* T_FATE_NEWER */ + "temp", /* T_FATE_ISTMP */ + "touched", /* T_FATE_TOUCHED */ + "rebuild", /* T_FATE_REBUILD */ + "missing", /* T_FATE_MISSING */ + "needtmp", /* T_FATE_NEEDTMP */ + "old", /* T_FATE_OUTDATED */ + "update", /* T_FATE_UPDATE */ + "nofind", /* T_FATE_CANTFIND */ + "nomake" /* T_FATE_CANTMAKE */ +}; + +static char const * target_bind[] = +{ + "unbound", + "missing", + "parents", + "exists", +}; + +#define spaces(x) ( ((const char *)" ") + ( x > 20 ? 0 : 20-x ) ) + + +/* + * make() - make a target, given its name. + */ + +int32_t make( LIST * targets, int32_t anyhow ) +{ + COUNTS counts[ 1 ]; + int32_t status = 0; /* 1 if anything fails */ + +#ifdef OPT_HEADER_CACHE_EXT + hcache_init(); +#endif + + memset( (char *)counts, 0, sizeof( *counts ) ); + + /* Make sure that the tables are set up correctly. + */ + exec_init(); + + /* First bind all targets with LOCATE_TARGET setting. This is needed to + * correctly handle dependencies to generated headers. + */ + bind_explicitly_located_targets(); + + { + LISTITER iter, end; + PROFILE_ENTER( MAKE_MAKE0 ); + for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) ) + { + TARGET * t = bindtarget( list_item( iter ) ); + if ( t->fate == T_FATE_INIT ) + make0( t, 0, 0, counts, anyhow, 0 ); + } + PROFILE_EXIT( MAKE_MAKE0 ); + } + +#ifdef OPT_GRAPH_DEBUG_EXT + if ( DEBUG_GRAPH ) + { + LISTITER iter, end; + for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) ) + dependGraphOutput( bindtarget( list_item( iter ) ), 0 ); + } +#endif + + if ( DEBUG_MAKE ) + { + if ( counts->targets ) + out_printf( "...found %d target%s...\n", counts->targets, + counts->targets > 1 ? "s" : "" ); + if ( counts->temp ) + out_printf( "...using %d temp target%s...\n", counts->temp, + counts->temp > 1 ? "s" : "" ); + if ( counts->updating ) + out_printf( "...updating %d target%s...\n", counts->updating, + counts->updating > 1 ? "s" : "" ); + if ( counts->cantfind ) + out_printf( "...can't find %d target%s...\n", counts->cantfind, + counts->cantfind > 1 ? "s" : "" ); + if ( counts->cantmake ) + out_printf( "...can't make %d target%s...\n", counts->cantmake, + counts->cantmake > 1 ? "s" : "" ); + } + + status = counts->cantfind || counts->cantmake; + + { + PROFILE_ENTER( MAKE_MAKE1 ); + status |= make1( targets ); + PROFILE_EXIT( MAKE_MAKE1 ); + } + + return status; +} + + +/* Force any dependants of t that have already at least begun being visited by + * make0() to be updated. 
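+ * A dependant whose fate is past T_FATE_INIT but still below T_FATE_BUILD
+ * is bumped to T_FATE_UPDATE, and if it has already been fully visited its
+ * own dependants are updated in turn so the rebuild propagates upward.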
+ */ + +static void force_rebuilds( TARGET * t ); + +static void update_dependants( TARGET * t ) +{ + targets_ptr q; + + for ( q = t->dependants.get(); q; q = q->next.get() ) + { + TARGET * p = q->target; + char fate0 = p->fate; + + /* If we have already at least begun visiting it and we are not already + * rebuilding it for other reasons. + */ + if ( ( fate0 != T_FATE_INIT ) && ( fate0 < T_FATE_BUILD ) ) + { + p->fate = T_FATE_UPDATE; + + if ( DEBUG_FATE ) + { + out_printf( "fate change %s from %s to %s (as dependent of %s)\n", + object_str( p->name ), target_fate[ (int32_t) fate0 ], target_fate[ (int32_t) p->fate ], object_str( t->name ) ); + } + + /* If we are done visiting it, go back and make sure its dependants + * get rebuilt. + */ + if ( fate0 > T_FATE_MAKING ) + update_dependants( p ); + } + } + /* Make sure that rebuilds can be chained. */ + force_rebuilds( t ); +} + + +/* + * Make sure that all of t's rebuilds get rebuilt. + */ + +static void force_rebuilds( TARGET * t ) +{ + targets_ptr d; + for ( d = t->rebuilds.get(); d; d = d->next.get() ) + { + TARGET * r = d->target; + + /* If it is not already being rebuilt for other reasons. */ + if ( r->fate < T_FATE_BUILD ) + { + if ( DEBUG_FATE ) + out_printf( "fate change %s from %s to %s (by rebuild)\n", + object_str( r->name ), target_fate[ (int32_t) r->fate ], target_fate[ T_FATE_REBUILD ] ); + + /* Force rebuild it. */ + r->fate = T_FATE_REBUILD; + + /* And make sure its dependants are updated too. */ + update_dependants( r ); + } + } +} + + +int32_t make0rescan( TARGET * t, TARGET * rescanning ) +{ + int32_t result = 0; + targets_ptr c; + + /* Check whether we have already found a cycle. */ + if ( target_scc( t ) == rescanning ) + return 1; + + /* If we have already visited this node, ignore it. */ + if ( t->rescanning == rescanning ) + return 0; + + /* If t is already updated, ignore it. */ + if ( t->scc_root == NULL && t->progress > T_MAKE_ACTIVE ) + return 0; + + t->rescanning = rescanning; + for ( c = t->depends.get(); c; c = c->next.get() ) + { + TARGET * dependency = c->target; + /* Always start at the root of each new strongly connected component. */ + if ( target_scc( dependency ) != target_scc( t ) ) + dependency = target_scc( dependency ); + result |= make0rescan( dependency, rescanning ); + + /* Make sure that we pick up the new include node. */ + if ( c->target->includes == rescanning ) + result = 1; + } + if ( result && t->scc_root == NULL ) + { + t->scc_root = rescanning; + targetentry( rescanning->depends, t ); + } + return result; +} + + +/* + * make0() - bind and scan everything to make a TARGET. + * + * Recursively binds a target, searches for #included headers, calls itself on + * those headers and any dependencies. + */ + +void make0 +( + TARGET * t, + TARGET * p, /* parent */ + int32_t depth, /* for display purposes */ + COUNTS * counts, /* for reporting */ + int32_t anyhow, + TARGET * rescanning +) /* forcibly touch all (real) targets */ +{ + targets_ptr c; + TARGET * ptime = t; + TARGET * located_target = 0; + timestamp last; + timestamp leaf; + timestamp hlast; + int32_t fate; + char const * flag = ""; + SETTINGS * s; + +#ifdef OPT_GRAPH_DEBUG_EXT + int32_t savedFate; + int32_t oldTimeStamp; +#endif + + if ( DEBUG_MAKEPROG ) + out_printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) ); + + /* + * Step 1: Initialize. 
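+ * The target is marked T_FATE_MAKING and its depth recorded; encountering
+ * the MAKING state again on a dependency is how the checks in steps 3a and
+ * 3d below recognise circular references.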
+ */ + + if ( DEBUG_MAKEPROG ) + out_printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) ); + + t->fate = T_FATE_MAKING; + t->depth = depth; + + /* + * Step 2: Under the influence of "on target" variables, bind the target and + * search for headers. + */ + + /* Step 2a: Set "on target" variables. */ + s = copysettings( t->settings ); + pushsettings( root_module(), s ); + + /* Step 2b: Find and timestamp the target file (if it is a file). */ + if ( ( t->binding == T_BIND_UNBOUND ) && !( t->flags & T_FLAG_NOTFILE ) ) + { + OBJECT * another_target; + object_free( t->boundname ); + t->boundname = search( t->name, &t->time, &another_target, + t->flags & T_FLAG_ISFILE ); + /* If it was detected that this target refers to an already existing and + * bound target, we add a dependency so that every target depending on + * us will depend on that other target as well. + */ + if ( another_target ) + located_target = bindtarget( another_target ); + + t->binding = timestamp_empty( &t->time ) + ? T_BIND_MISSING + : T_BIND_EXISTS; + } + + /* INTERNAL, NOTFILE header nodes have the time of their parents. */ + if ( p && ( t->flags & T_FLAG_INTERNAL ) ) + ptime = p; + + /* If temp file does not exist but parent does, use parent. */ + if ( p && ( t->flags & T_FLAG_TEMP ) && + ( t->binding == T_BIND_MISSING ) && + ( p->binding != T_BIND_MISSING ) ) + { + t->binding = T_BIND_PARENTS; + ptime = p; + } + +#ifdef OPT_SEMAPHORE + { + LIST * var = var_get( root_module(), constant_JAM_SEMAPHORE ); + if ( !list_empty( var ) ) + { + TARGET * const semaphore = bindtarget( list_front( var ) ); + semaphore->progress = T_MAKE_SEMAPHORE; + t->semaphore = semaphore; + } + } +#endif + + /* Step 2c: If its a file, search for headers. */ + if ( t->binding == T_BIND_EXISTS ) + headers( t ); + + /* Step 2d: reset "on target" variables. */ + popsettings( root_module(), s ); + freesettings( s ); + + /* + * Pause for a little progress reporting. + */ + + if ( DEBUG_BIND ) + { + if ( !object_equal( t->name, t->boundname ) ) + out_printf( "bind\t--\t%s%s: %s\n", spaces( depth ), + object_str( t->name ), object_str( t->boundname ) ); + + switch ( t->binding ) + { + case T_BIND_UNBOUND: + case T_BIND_MISSING: + case T_BIND_PARENTS: + out_printf( "time\t--\t%s%s: %s\n", spaces( depth ), + object_str( t->name ), target_bind[ (int32_t)t->binding ] ); + break; + + case T_BIND_EXISTS: + out_printf( "time\t--\t%s%s: %s\n", spaces( depth ), + object_str( t->name ), timestamp_str( &t->time ) ); + break; + } + } + + /* + * Step 3: Recursively make0() dependencies & headers. + */ + + /* Step 3a: Recursively make0() dependencies. */ + for ( c = t->depends.get(); c; c = c->next.get() ) + { + int32_t const internal = t->flags & T_FLAG_INTERNAL; + + /* Warn about circular deps, except for includes, which include each + * other alot. 
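+ * During a rescan pass, dependencies that have already been fully visited
+ * are revisited through make0rescan() rather than make0().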
+ */ + if ( c->target->fate == T_FATE_INIT ) + make0( c->target, ptime, depth + 1, counts, anyhow, rescanning ); + else if ( c->target->fate == T_FATE_MAKING && !internal ) + out_printf( "warning: %s depends on itself\n", object_str( + c->target->name ) ); + else if ( c->target->fate != T_FATE_MAKING && rescanning ) + make0rescan( c->target, rescanning ); + if ( rescanning && c->target->includes && c->target->includes->fate != + T_FATE_MAKING ) + make0rescan( target_scc( c->target->includes ), rescanning ); + } + + if ( located_target ) + { + if ( located_target->fate == T_FATE_INIT ) + make0( located_target, ptime, depth + 1, counts, anyhow, rescanning + ); + else if ( located_target->fate != T_FATE_MAKING && rescanning ) + make0rescan( located_target, rescanning ); + } + + /* Step 3b: Recursively make0() internal includes node. */ + if ( t->includes ) + make0( t->includes, p, depth + 1, counts, anyhow, rescanning ); + + /* Step 3c: Add dependencies' includes to our direct dependencies. */ + { + targets_uptr incs; + for ( c = t->depends.get(); c; c = c->next.get() ) + if ( c->target->includes ) + targetentry( incs, c->target->includes ); + t->depends = targetchain( std::move(t->depends), std::move(incs) ); + } + + if ( located_target ) + targetentry( t->depends, located_target ); + + /* Step 3d: Detect cycles. */ + { + int32_t cycle_depth = depth; + for ( c = t->depends.get(); c; c = c->next.get() ) + { + TARGET * scc_root = target_scc( c->target ); + if ( scc_root->fate == T_FATE_MAKING && + ( !scc_root->includes || + scc_root->includes->fate != T_FATE_MAKING ) ) + { + if ( scc_root->depth < cycle_depth ) + { + cycle_depth = scc_root->depth; + t->scc_root = scc_root; + } + } + } + } + + /* + * Step 4: Compute time & fate. + */ + + /* Step 4a: Pick up dependencies' time and fate. */ + timestamp_clear( &last ); + timestamp_clear( &leaf ); + fate = T_FATE_STABLE; + for ( c = t->depends.get(); c; c = c->next.get() ) + { + /* If we are in a different strongly connected component, pull + * timestamps from the root. + */ + if ( c->target->scc_root ) + { + TARGET * const scc_root = target_scc( c->target ); + if ( scc_root != t->scc_root ) + { + timestamp_max( &c->target->leaf, &c->target->leaf, + &scc_root->leaf ); + timestamp_max( &c->target->time, &c->target->time, + &scc_root->time ); + c->target->fate = max( c->target->fate, scc_root->fate ); + } + } + + /* If LEAVES has been applied, we only heed the timestamps of the leaf + * source nodes. + */ + timestamp_max( &leaf, &leaf, &c->target->leaf ); + if ( t->flags & T_FLAG_LEAVES ) + { + timestamp_copy( &last, &leaf ); + continue; + } + timestamp_max( &last, &last, &c->target->time ); + fate = max( fate, c->target->fate ); + +#ifdef OPT_GRAPH_DEBUG_EXT + if ( DEBUG_FATE ) + if ( fate < c->target->fate ) + out_printf( "fate change %s from %s to %s by dependency %s\n", + object_str( t->name ), target_fate[ (int32_t)fate ], + target_fate[ (int32_t)c->target->fate ], object_str( + c->target->name ) ); +#endif + } + + /* Step 4b: Pick up included headers time. */ + + /* + * If a header is newer than a temp source that includes it, the temp source + * will need building. + */ + if ( t->includes ) + timestamp_copy( &hlast, &t->includes->time ); + else + timestamp_clear( &hlast ); + + /* Step 4c: handle NOUPDATE oddity. + * + * If a NOUPDATE file exists, mark it as having eternally old dependencies. + * Do not inherit our fate from our dependencies. Decide fate based only on + * other flags and our binding (done later). 
+ */ + if ( t->flags & T_FLAG_NOUPDATE ) + { +#ifdef OPT_GRAPH_DEBUG_EXT + if ( DEBUG_FATE ) + if ( fate != T_FATE_STABLE ) + out_printf( "fate change %s back to stable, NOUPDATE.\n", + object_str( t->name ) ); +#endif + + timestamp_clear( &last ); + timestamp_clear( &t->time ); + + /* Do not inherit our fate from our dependencies. Decide fate based only + * upon other flags and our binding (done later). + */ + fate = T_FATE_STABLE; + } + + /* Step 4d: Determine fate: rebuild target or what? */ + + /* + In English: + If can not find or make child, can not make target. + If children changed, make target. + If target missing, make it. + If children newer, make target. + If temp's children newer than parent, make temp. + If temp's headers newer than parent, make temp. + If deliberately touched, make it. + If up-to-date temp file present, use it. + If target newer than non-notfile parent, mark target newer. + Otherwise, stable! + + Note this block runs from least to most stable: as we make it further + down the list, the target's fate gets more stable. + */ + +#ifdef OPT_GRAPH_DEBUG_EXT + savedFate = fate; + oldTimeStamp = 0; +#endif + + if ( fate >= T_FATE_BROKEN ) + { + fate = T_FATE_CANTMAKE; + } + else if ( fate >= T_FATE_SPOIL ) + { + fate = T_FATE_UPDATE; + } + else if ( t->binding == T_BIND_MISSING ) + { + fate = T_FATE_MISSING; + } + else if ( t->binding == T_BIND_EXISTS && timestamp_cmp( &last, &t->time ) > + 0 ) + { +#ifdef OPT_GRAPH_DEBUG_EXT + oldTimeStamp = 1; +#endif + fate = T_FATE_OUTDATED; + } + else if ( t->binding == T_BIND_PARENTS && timestamp_cmp( &last, &p->time ) > + 0 ) + { +#ifdef OPT_GRAPH_DEBUG_EXT + oldTimeStamp = 1; +#endif + fate = T_FATE_NEEDTMP; + } + else if ( t->binding == T_BIND_PARENTS && timestamp_cmp( &hlast, &p->time ) + > 0 ) + { + fate = T_FATE_NEEDTMP; + } + else if ( t->flags & T_FLAG_TOUCHED ) + { + fate = T_FATE_TOUCHED; + } + else if ( anyhow && !( t->flags & T_FLAG_NOUPDATE ) ) + { + fate = T_FATE_TOUCHED; + } + else if ( t->binding == T_BIND_EXISTS && ( t->flags & T_FLAG_TEMP ) ) + { + fate = T_FATE_ISTMP; + } + else if ( t->binding == T_BIND_EXISTS && p && p->binding != T_BIND_UNBOUND + && timestamp_cmp( &t->time, &p->time ) > 0 ) + { +#ifdef OPT_GRAPH_DEBUG_EXT + oldTimeStamp = 1; +#endif + fate = T_FATE_NEWER; + } + else + { + fate = T_FATE_STABLE; + } +#ifdef OPT_GRAPH_DEBUG_EXT + if ( DEBUG_FATE && ( fate != savedFate ) ) + { + if ( savedFate == T_FATE_STABLE ) + out_printf( "fate change %s set to %s%s\n", object_str( t->name ), + target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" ); + else + out_printf( "fate change %s from %s to %s%s\n", object_str( t->name ), + target_fate[ savedFate ], target_fate[ fate ], oldTimeStamp ? + " (by timestamp)" : "" ); + } +#endif + + /* Step 4e: Handle missing files. */ + /* If it is missing and there are no actions to create it, boom. */ + /* If we can not make a target we do not care about it, okay. */ + /* We could insist that there are updating actions for all missing */ + /* files, but if they have dependencies we just pretend it is a NOTFILE. 
*/ + + if ( ( fate == T_FATE_MISSING ) && !t->actions && !t->depends ) + { + if ( t->flags & T_FLAG_NOCARE ) + { +#ifdef OPT_GRAPH_DEBUG_EXT + if ( DEBUG_FATE ) + out_printf( "fate change %s to STABLE from %s, " + "no actions, no dependencies and do not care\n", + object_str( t->name ), target_fate[ fate ] ); +#endif + fate = T_FATE_STABLE; + } + else + { + out_printf( "don't know how to make %s\n", object_str( t->name ) ); + fate = T_FATE_CANTFIND; + } + } + + /* Step 4f: Propagate dependencies' time & fate. */ + /* Set leaf time to be our time only if this is a leaf. */ + + timestamp_max( &t->time, &t->time, &last ); + timestamp_copy( &t->leaf, timestamp_empty( &leaf ) ? &t->time : &leaf ); + /* This target's fate may have been updated by virtue of following some + * target's rebuilds list, so only allow it to be increased to the fate we + * have calculated. Otherwise, grab its new fate. + */ + if ( fate > t->fate ) + t->fate = fate; + else + fate = t->fate; + + /* + * Step 4g: If this target needs to be built, make0 all targets + * that are updated by the same actions used to update this target. + * These have already been marked as REBUILDS, and make1 has + * special handling for them. We just need to make sure that + * they get make0ed. + */ + if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) ) + { + ACTIONS * a; + targets_ptr c; + for ( a = t->actions; a; a = a->next ) + { + for ( c = a->action->targets.get(); c; c = c->next.get() ) + { + if ( c->target->fate == T_FATE_INIT ) + { + make0( c->target, ptime, depth + 1, counts, anyhow, rescanning ); + } + } + } + } + + /* Step 4h: If this target needs to be built, force rebuild everything in + * its rebuilds list. + */ + if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) ) + force_rebuilds( t ); + + /* + * Step 5: Sort dependencies by their update time. + */ + + if ( globs.newestfirst ) + t->depends = make0sort( std::move(t->depends) ); + + /* + * Step 6: A little harmless tabulating for tracing purposes. + */ + + /* Do not count or report internal includes nodes. */ + if ( t->flags & T_FLAG_INTERNAL ) + return; + + if ( counts ) + { +#ifdef OPT_IMPROVED_PATIENCE_EXT + ++counts->targets; +#else + if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE ) + { + out_printf( "...patience...\n" ); + out_flush(); + } +#endif + + if ( fate == T_FATE_ISTMP ) + ++counts->temp; + else if ( fate == T_FATE_CANTFIND ) + ++counts->cantfind; + else if ( ( fate == T_FATE_CANTMAKE ) && t->actions ) + ++counts->cantmake; + else if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) && + t->actions ) + ++counts->updating; + } + + if ( !( t->flags & T_FLAG_NOTFILE ) && ( fate >= T_FATE_SPOIL ) ) + flag = "+"; + else if ( t->binding == T_BIND_EXISTS && p && timestamp_cmp( &t->time, + &p->time ) > 0 ) + flag = "*"; + + if ( DEBUG_MAKEPROG ) + out_printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int32_t)t->fate ], + spaces( depth ), object_str( t->name ) ); +} + + +#ifdef OPT_GRAPH_DEBUG_EXT + +static char const * target_name( TARGET * t ) +{ + static char buf[ 1000 ]; + if ( t->flags & T_FLAG_INTERNAL ) + { + sprintf( buf, "%s (internal node)", object_str( t->name ) ); + return buf; + } + return object_str( t->name ); +} + + +/* + * dependGraphOutput() - output the DG after make0 has run. 
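+ * Each target is printed at most once (guarded by T_FLAG_VISITED) together
+ * with its bound location, fate, flags and direct dependencies.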
+ */ + +static void dependGraphOutput( TARGET * t, int32_t depth ) +{ + targets_ptr c; + + if ( ( t->flags & T_FLAG_VISITED ) || !t->name || !t->boundname ) + return; + + t->flags |= T_FLAG_VISITED; + + switch ( t->fate ) + { + case T_FATE_TOUCHED: + case T_FATE_MISSING: + case T_FATE_OUTDATED: + case T_FATE_UPDATE: + out_printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t + ) ); + break; + default: + out_printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t + ) ); + break; + } + + if ( !object_equal( t->name, t->boundname ) ) + out_printf( " %s Loc: %s\n", spaces( depth ), object_str( t->boundname ) + ); + + switch ( t->fate ) + { + case T_FATE_STABLE: + out_printf( " %s : Stable\n", spaces( depth ) ); + break; + case T_FATE_NEWER: + out_printf( " %s : Newer\n", spaces( depth ) ); + break; + case T_FATE_ISTMP: + out_printf( " %s : Up to date temp file\n", spaces( depth ) ); + break; + case T_FATE_NEEDTMP: + out_printf( " %s : Temporary file, to be updated\n", spaces( depth ) + ); + break; + case T_FATE_TOUCHED: + out_printf( " %s : Been touched, updating it\n", spaces( depth ) ); + break; + case T_FATE_MISSING: + out_printf( " %s : Missing, creating it\n", spaces( depth ) ); + break; + case T_FATE_OUTDATED: + out_printf( " %s : Outdated, updating it\n", spaces( depth ) ); + break; + case T_FATE_REBUILD: + out_printf( " %s : Rebuild, updating it\n", spaces( depth ) ); + break; + case T_FATE_UPDATE: + out_printf( " %s : Updating it\n", spaces( depth ) ); + break; + case T_FATE_CANTFIND: + out_printf( " %s : Can not find it\n", spaces( depth ) ); + break; + case T_FATE_CANTMAKE: + out_printf( " %s : Can make it\n", spaces( depth ) ); + break; + } + + if ( t->flags & ~T_FLAG_VISITED ) + { + out_printf( " %s : ", spaces( depth ) ); + if ( t->flags & T_FLAG_TEMP ) out_printf( "TEMPORARY " ); + if ( t->flags & T_FLAG_NOCARE ) out_printf( "NOCARE " ); + if ( t->flags & T_FLAG_NOTFILE ) out_printf( "NOTFILE " ); + if ( t->flags & T_FLAG_TOUCHED ) out_printf( "TOUCHED " ); + if ( t->flags & T_FLAG_LEAVES ) out_printf( "LEAVES " ); + if ( t->flags & T_FLAG_NOUPDATE ) out_printf( "NOUPDATE " ); + out_printf( "\n" ); + } + + for ( c = t->depends.get(); c; c = c->next.get() ) + { + out_printf( " %s : Depends on %s (%s)", spaces( depth ), + target_name( c->target ), target_fate[ (int32_t)c->target->fate ] ); + if ( !timestamp_cmp( &c->target->time, &t->time ) ) + out_printf( " (max time)"); + out_printf( "\n" ); + } + + for ( c = t->depends.get(); c; c = c->next.get() ) + dependGraphOutput( c->target, depth + 1 ); +} +#endif + + +/* + * make0sort() - reorder TARGETS chain by their time (newest to oldest). + * + * This sorts in-place by swapping the target pointers in the chain in a + * rather terrible n-square/2 algorithm. + */ + +static targets_uptr make0sort( targets_uptr chain ) +{ + PROFILE_ENTER( MAKE_MAKE0SORT ); + + // The current tail we put the next newest item. + for ( targets_ptr front = chain.get(); front ; front = front->next.get() ) + { + // Find the maximum time, i.e. most recent in the rest of the chain. + targets_ptr newest = front->next.get(); + for ( targets_ptr rest = newest; rest ; rest = rest->next.get()) + { + if ( timestamp_cmp( &newest->target->time, &rest->target->time ) > 0 ) + newest = rest; + } + // Sort it to the front if needed. 
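+ // Only the TARGET pointers are exchanged; the owning targets_uptr chain
+ // nodes themselves never move.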
+ if ( front != newest ) + std::swap( front->target, newest->target ); + } + + PROFILE_EXIT( MAKE_MAKE0SORT ); + return chain; +} + + +static LIST * targets_to_update_ = L0; + + +void mark_target_for_updating( OBJECT * target ) +{ + targets_to_update_ = list_push_back( targets_to_update_, object_copy( + target ) ); +} + + +LIST * targets_to_update() +{ + return targets_to_update_; +} + + +void clear_targets_to_update() +{ + list_free( targets_to_update_ ); + targets_to_update_ = L0; +} diff --git a/src/boost/tools/build/src/engine/make.h b/src/boost/tools/build/src/engine/make.h new file mode 100644 index 000000000..5fcdb7a2e --- /dev/null +++ b/src/boost/tools/build/src/engine/make.h @@ -0,0 +1,45 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * make.h - bring a target up to date, once rules are in place + */ + +#ifndef MAKE_SW20111118_H +#define MAKE_SW20111118_H + +#include "config.h" +#include "lists.h" +#include "object.h" +#include "rules.h" + +int32_t make( LIST * targets, int32_t anyhow ); +int32_t make1( LIST * t ); + +typedef struct { + int32_t temp; + int32_t updating; + int32_t cantfind; + int32_t cantmake; + int32_t targets; + int32_t made; +} COUNTS ; + + +void make0( TARGET * t, TARGET * p, int32_t depth, COUNTS * counts, int32_t anyhow, + TARGET * rescanning ); + + +/* Specifies that the target should be updated. */ +void mark_target_for_updating( OBJECT * target ); + +/* Returns targets previously passed to mark_target_for_updating(). */ +LIST * targets_to_update(); + +/* Clears/unmarks all targets currently marked for update. */ +void clear_targets_to_update(); + +#endif diff --git a/src/boost/tools/build/src/engine/make1.cpp b/src/boost/tools/build/src/engine/make1.cpp new file mode 100644 index 000000000..332b91cc5 --- /dev/null +++ b/src/boost/tools/build/src/engine/make1.cpp @@ -0,0 +1,1515 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * make1.c - execute commands to bring targets up to date + * + * This module contains make1(), the entry point called by make() to recursively + * descend the dependency graph executing update actions as marked by make0(). + * + * External routines: + * make1() - execute commands to update a TARGET and all of its dependencies + * + * Internal routines, the recursive/asynchronous command executors: + * make1a() - recursively schedules dependency builds and then goes to + * MAKE1B + * make1b() - if nothing is blocking this target's build, proceed to + * MAKE1C + * make1c() - launch target's next command, or go to parents' MAKE1B + * if none + * make1c_closure() - handle command execution completion and go to MAKE1C + * + * Internal support routines: + * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc. 
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>)
+ * make1settings() - for vars with bound values, build up replacement lists
+ * make1bind() - bind targets that weren't bound in dependency analysis
+ */
+
+#include "jam.h"
+#include "make.h"
+
+#include "command.h"
+#include "compile.h"
+#include "execcmd.h"
+#include "headers.h"
+#include "lists.h"
+#include "object.h"
+#include "output.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "variable.h"
+#include "output.h"
+#include "startup.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#if !defined( NT ) || defined( __GNUC__ )
+    #include <unistd.h>  /* for unlink */
+#endif
+
+static CMD      * make1cmds     ( TARGET * );
+static LIST     * make1list     ( LIST *, const targets_uptr &, int32_t flags );
+static SETTINGS * make1settings ( struct module_t *, LIST * vars );
+static void       make1bind     ( TARGET * );
+static void       push_cmds( CMDLIST * cmds, int32_t status );
+static int32_t cmd_sem_lock( TARGET * t );
+static void cmd_sem_unlock( TARGET * t );
+
+static bool targets_contains( const targets_uptr & l, TARGET * t );
+static bool targets_equal( const targets_uptr & l1, const targets_uptr & l2 );
+
+/* Ugly static - it is too hard to carry it through the callbacks. */
+
+static struct
+{
+    int32_t failed;
+    int32_t skipped;
+    int32_t total;
+    int32_t made;
+} counts[ 1 ];
+
+/* Target state. */
+#define T_STATE_MAKE1A 0  /* make1a() should be called */
+#define T_STATE_MAKE1B 1  /* make1b() should be called */
+#define T_STATE_MAKE1C 2  /* make1c() should be called */
+
+typedef struct _state state;
+struct _state
+{
+    state  * prev;      /* previous state on stack */
+    TARGET * t;         /* current target */
+    TARGET * parent;    /* parent argument necessary for MAKE1A */
+    int32_t  curstate;  /* current state */
+};
+
+static void make1a( state * const );
+static void make1b( state * const );
+static void make1c( state const * const );
+
+static void make1c_closure( void * const closure, int32_t status,
+    timing_info const * const, char const * const cmd_stdout,
+    char const * const cmd_stderr, int32_t const cmd_exit_reason );
+
+typedef struct _stack
+{
+    state * stack;
+} stack;
+
+static stack state_stack = { NULL };
+
+static state * state_freelist = NULL;
+
+/* Currently running command counter. */
+static int32_t cmdsrunning;
+
+
+static state * alloc_state()
+{
+    if ( state_freelist )
+    {
+        state * const pState = state_freelist;
+        state_freelist = pState->prev;
+        memset( pState, 0, sizeof( state ) );
+        return pState;
+    }
+    return (state *)BJAM_MALLOC( sizeof( state ) );
+}
+
+
+static void free_state( state * const pState )
+{
+    pState->prev = state_freelist;
+    state_freelist = pState;
+}
+
+
+static void clear_state_freelist()
+{
+    while ( state_freelist )
+    {
+        state * const pState = state_freelist;
+        state_freelist = state_freelist->prev;
+        BJAM_FREE( pState );
+    }
+}
+
+
+static state * current_state( stack * const pStack )
+{
+    return pStack->stack;
+}
+
+
+static void pop_state( stack * const pStack )
+{
+    if ( pStack->stack )
+    {
+        state * const pState = pStack->stack->prev;
+        free_state( pStack->stack );
+        pStack->stack = pState;
+    }
+}
+
+
+static state * push_state( stack * const pStack, TARGET * const t,
+    TARGET * const parent, int32_t const curstate )
+{
+    state * const pState = alloc_state();
+    pState->t = t;
+    pState->parent = parent;
+    pState->prev = pStack->stack;
+    pState->curstate = curstate;
+    return pStack->stack = pState;
+}
+
+
+/*
+ * Pushes a stack onto another stack, effectively reversing the order.
+ */ + +static void push_stack_on_stack( stack * const pDest, stack * const pSrc ) +{ + while ( pSrc->stack ) + { + state * const pState = pSrc->stack; + pSrc->stack = pState->prev; + pState->prev = pDest->stack; + pDest->stack = pState; + } +} + + +/* + * make1() - execute commands to update a list of targets and all of their dependencies + */ + +static int32_t intr = 0; +static int32_t quit = 0; + +int32_t make1( LIST * targets ) +{ + state * pState; + int32_t status = 0; + + memset( (char *)counts, 0, sizeof( *counts ) ); + + { + LISTITER iter, end; + stack temp_stack = { NULL }; + for ( iter = list_begin( targets ), end = list_end( targets ); + iter != end; iter = list_next( iter ) ) + push_state( &temp_stack, bindtarget( list_item( iter ) ), NULL, T_STATE_MAKE1A ); + push_stack_on_stack( &state_stack, &temp_stack ); + } + + /* Clear any state left over from the past */ + quit = 0; + + /* Recursively make the target and its dependencies. */ + + while ( 1 ) + { + while ( ( pState = current_state( &state_stack ) ) ) + { + if ( quit ) + pop_state( &state_stack ); + + switch ( pState->curstate ) + { + case T_STATE_MAKE1A: make1a( pState ); break; + case T_STATE_MAKE1B: make1b( pState ); break; + case T_STATE_MAKE1C: make1c( pState ); break; + default: + assert( !"make1(): Invalid state detected." ); + } + } + if ( !cmdsrunning ) + break; + /* Wait for outstanding commands to finish running. */ + exec_wait(); + } + + clear_state_freelist(); + + /* Talk about it. */ + if ( counts->failed ) + out_printf( "...failed updating %d target%s...\n", counts->failed, + counts->failed > 1 ? "s" : "" ); + if ( DEBUG_MAKE && counts->skipped ) + out_printf( "...skipped %d target%s...\n", counts->skipped, + counts->skipped > 1 ? "s" : "" ); + if ( DEBUG_MAKE && counts->made ) + out_printf( "...updated %d target%s...\n", counts->made, + counts->made > 1 ? "s" : "" ); + + /* If we were interrupted, exit now that all child processes + have finished. */ + if ( intr ) + b2::clean_exit( EXITBAD ); + + { + LISTITER iter, end; + for ( iter = list_begin( targets ), end = list_end( targets ); + iter != end; iter = list_next( iter ) ) + { + /* Check that the target was updated and that the + update succeeded. */ + TARGET * t = bindtarget( list_item( iter ) ); + if (t->progress == T_MAKE_DONE) + { + if (t->status != EXEC_CMD_OK) + status = 1; + } + else if ( ! ( t->progress == T_MAKE_NOEXEC_DONE && globs.noexec ) ) + { + status = 1; + } + } + } + return status; +} + + +/* + * make1a() - recursively schedules dependency builds and then goes to MAKE1B + * + * Called to start processing a specified target. Does nothing if the target is + * already being processed or otherwise starts processing all of its + * dependencies. + */ + +static void make1a( state * const pState ) +{ + TARGET * t = pState->t; + TARGET * const scc_root = target_scc( t ); + + if ( !pState->parent || target_scc( pState->parent ) != scc_root ) + pState->t = t = scc_root; + + /* If the parent is the first to try to build this target or this target is + * in the MAKE1C quagmire, arrange for the parent to be notified when this + * target has been built. + */ + if ( pState->parent && t->progress <= T_MAKE_RUNNING ) + { + TARGET * const parent_scc = target_scc( pState->parent ); + if ( t != parent_scc ) + { + targetentry( t->parents, parent_scc ); + ++parent_scc->asynccnt; + } + } + + /* If the target has been previously updated with -n in effect, and we are + * now ignoring -n, update it for real. E.g. 
if the UPDATE_NOW rule was + * called for it twice - first with the -n option and then without. + */ + if ( !globs.noexec && t->progress == T_MAKE_NOEXEC_DONE ) + t->progress = T_MAKE_INIT; + + /* If this target is already being processed then do nothing. There is no + * need to start processing the same target all over again. + */ + if ( t->progress != T_MAKE_INIT ) + { + pop_state( &state_stack ); + return; + } + + /* Guard against circular dependencies. */ + t->progress = T_MAKE_ONSTACK; + + /* 'asynccnt' counts the dependencies preventing this target from proceeding + * to MAKE1C for actual building. We start off with a count of 1 to prevent + * anything from happening until we can notify all dependencies that they + * are needed. This 1 is then accounted for when we enter MAKE1B ourselves, + * below. Without this if a dependency gets built before we finish + * processing all of our other dependencies our build might be triggered + * prematurely. + */ + t->asynccnt = 1; + + /* Push dependency build requests (to be executed in the natural order). */ + { + stack temp_stack = { NULL }; + targets_ptr c; + for ( c = t->depends.get(); c && !quit; c = c->next.get() ) + push_state( &temp_stack, c->target, t, T_STATE_MAKE1A ); + push_stack_on_stack( &state_stack, &temp_stack ); + } + + t->progress = T_MAKE_ACTIVE; + + /* Once all of our dependencies have started getting processed we can move + * onto MAKE1B. + */ + /* Implementation note: + * In theory this would be done by popping this state before pushing + * dependency target build requests but as a slight optimization we simply + * modify our current state and leave it on the stack instead. + */ + pState->curstate = T_STATE_MAKE1B; +} + + +/* + * make1b() - if nothing is blocking this target's build, proceed to MAKE1C + * + * Called after something stops blocking this target's build, e.g. that all of + * its dependencies have started being processed, one of its dependencies has + * been built or a semaphore this target has been waiting for is free again. + */ + +static void make1b( state * const pState ) +{ + TARGET * const t = pState->t; + TARGET * failed = 0; + char const * failed_name = "dependencies"; + + pop_state( &state_stack ); + + /* If any dependencies are still outstanding, wait until they signal their + * completion by pushing this same state for their parent targets. + */ + if ( --t->asynccnt ) + { + return; + } + + /* Now ready to build target 't', if dependencies built OK. */ + + /* Collect status from dependencies. If -n was passed then act as though all + * dependencies built correctly (the only way they can fail is if UPDATE_NOW + * was called). If the dependencies can not be found or we got an interrupt, + * we can not get here. + */ + if ( !globs.noexec ) + { + targets_ptr c; + for ( c = t->depends.get(); c; c = c->next.get() ) + if ( c->target->status > t->status && !( c->target->flags & + T_FLAG_NOCARE ) ) + { + failed = c->target; + t->status = c->target->status; + } + } + + /* If an internal header node failed to build, we want to output the target + * that it failed on. + */ + if ( failed ) + failed_name = failed->flags & T_FLAG_INTERNAL + ? failed->failed + : object_str( failed->name ); + t->failed = failed_name; + + /* If actions for building any of the dependencies have failed, bail. + * Otherwise, execute all actions to make the current target. 
+ */ + if ( ( t->status == EXEC_CMD_FAIL ) && t->actions ) + { + ++counts->skipped; + if ( ( t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD ) + { + if ( !unlink( object_str( t->boundname ) ) ) + out_printf( "...removing outdated %s\n", object_str( t->boundname ) + ); + } + else + out_printf( "...skipped %s for lack of %s...\n", object_str( t->name ), + failed_name ); + } + + if ( t->status == EXEC_CMD_OK ) + switch ( t->fate ) + { + case T_FATE_STABLE: + case T_FATE_NEWER: + break; + + case T_FATE_CANTFIND: + case T_FATE_CANTMAKE: + t->status = EXEC_CMD_FAIL; + break; + + case T_FATE_ISTMP: + if ( DEBUG_MAKE ) + out_printf( "...using %s...\n", object_str( t->name ) ); + break; + + case T_FATE_TOUCHED: + case T_FATE_MISSING: + case T_FATE_NEEDTMP: + case T_FATE_OUTDATED: + case T_FATE_UPDATE: + case T_FATE_REBUILD: + /* Prepare commands for executing actions scheduled for this target. + * Commands have their embedded variables automatically expanded, + * including making use of any "on target" variables. + */ + if ( t->actions ) + { + ++counts->total; + if ( DEBUG_MAKE && !( counts->total % 100 ) ) + out_printf( "...on %dth target...\n", counts->total ); + + t->cmds = (char *)make1cmds( t ); + /* Update the target's "progress" so MAKE1C processing counts it + * among its successes/failures. + */ + t->progress = T_MAKE_RUNNING; + } + break; + + /* All valid fates should have been accounted for by now. */ + default: + err_printf( "ERROR: %s has bad fate %d", object_str( t->name ), + t->fate ); + b2::clean_exit( b2::exit_result::failure ); + } + + /* Proceed to MAKE1C to begin executing the chain of commands prepared for + * building the target. If we are not going to build the target (e.g. due to + * dependency failures or no commands needing to be run) the chain will be + * empty and MAKE1C processing will directly signal the target's completion. + */ + + if ( t->cmds == NULL || --( ( CMD * )t->cmds )->asynccnt == 0 ) + push_state( &state_stack, t, NULL, T_STATE_MAKE1C ); + else if ( DEBUG_EXECCMD ) + { + CMD * cmd = ( CMD * )t->cmds; + out_printf( "Delaying %s %s: %d targets not ready\n", object_str( cmd->rule->name ), object_str( t->boundname ), cmd->asynccnt ); + } +} + + +/* + * make1c() - launch target's next command, or go to parents' MAKE1B if none + * + * If there are (more) commands to run to build this target (and we have not hit + * an error running earlier comands) we launch the command using exec_cmd(). + * Command execution signals its completion in exec_wait() by calling our + * make1c_closure() callback. + * + * If there are no more commands to run, we collect the status from all the + * actions and report our completion to all the parents. + */ + +static void make1c( state const * const pState ) +{ + TARGET * const t = pState->t; + CMD * const cmd = (CMD *)t->cmds; + int32_t exec_flags = 0; + + if ( cmd ) + { + /* Pop state first in case something below (e.g. exec_cmd(), exec_wait() + * or make1c_closure()) pushes a new state. Note that we must not access + * the popped state data after this as the same stack node might have + * been reused internally for some newly pushed state. + */ + pop_state( &state_stack ); + + if ( cmd->status != EXEC_CMD_OK ) + { + t->cmds = NULL; + push_cmds( cmd->next, cmd->status ); + cmd_free( cmd ); + return; + } + +#ifdef OPT_SEMAPHORE + if ( ! cmd_sem_lock( t ) ) + { + return; + } +#endif + + /* Increment the jobs running counter. 
*/ + ++cmdsrunning; + + if ( ( globs.jobs == 1 ) && ( DEBUG_MAKEQ || + ( DEBUG_MAKE && !( cmd->rule->actions->flags & RULE_QUIETLY ) ) ) ) + { + OBJECT * action = cmd->rule->name; + OBJECT * target = list_front( lol_get( (LOL *)&cmd->args, 0 ) ); + + out_printf( "%s %s\n", object_str( action ), object_str( target ) ); + + /* Print out the command executed if given -d+2. */ + if ( DEBUG_EXEC ) + { + out_puts( cmd->buf->value ); + out_putc( '\n' ); + } + + /* We only need to flush the streams if there's likely to + * be a wait before it finishes. + */ + if ( ! globs.noexec && ! cmd->noop ) + { + out_flush(); + err_flush(); + } + } + else + { + exec_flags |= EXEC_CMD_QUIET; + } + + /* Execute the actual build command or fake it if no-op. */ + if ( globs.noexec || cmd->noop ) + { + timing_info time_info = { 0 }; + timestamp_current( &time_info.start ); + timestamp_copy( &time_info.end, &time_info.start ); + make1c_closure( t, EXEC_CMD_OK, &time_info, "", "", EXIT_OK ); + } + else + { + exec_cmd( cmd->buf, exec_flags, make1c_closure, t, cmd->shell ); + + /* Wait until under the concurrent command count limit. */ + /* FIXME: This wait could be skipped here and moved to just before + * trying to execute a command that would cross the command count + * limit. Note though that this might affect the order in which + * unrelated targets get built and would thus require that all + * affected Boost Build tests be updated. + */ + assert( 0 < globs.jobs ); + while ( cmdsrunning >= globs.jobs ) + exec_wait(); + } + } + else + { + /* Tally success/failure for those we tried to update. */ + if ( t->progress == T_MAKE_RUNNING ) + { + /* Invert OK/FAIL target status when FAIL_EXPECTED has been applied. */ + if ( t->flags & T_FLAG_FAIL_EXPECTED && !globs.noexec ) + { + switch ( t->status ) + { + case EXEC_CMD_FAIL: t->status = EXEC_CMD_OK; break; + case EXEC_CMD_OK: t->status = EXEC_CMD_FAIL; break; + } + + /* Printing failure has to be delayed until the last + * action is completed for FAIL_EXPECTED targets. + * Do it here. + */ + if ( t->status == EXEC_CMD_FAIL ) + { + out_printf( "...failed %s ", object_str( t->actions->action->rule->name ) ); + out_printf( "%s", object_str( t->boundname ) ); + out_printf( "...\n" ); + } + + /* Handle -q */ + if ( t->status == EXEC_CMD_FAIL && globs.quitquick ) + ++quit; + + /* Delete the target on failure. */ + if ( !( t->flags & ( T_FLAG_PRECIOUS | T_FLAG_NOTFILE ) ) && + !unlink( object_str( t->boundname ) ) ) + out_printf( "...removing %s\n", object_str( t->boundname ) ); + } + switch ( t->status ) + { + case EXEC_CMD_OK: ++counts->made; break; + case EXEC_CMD_FAIL: ++counts->failed; break; + } + } + + /* Tell parents their dependency has been built. */ + { + targets_ptr c; + stack temp_stack = { NULL }; + TARGET * additional_includes = NULL; + + t->progress = globs.noexec ? T_MAKE_NOEXEC_DONE : T_MAKE_DONE; + + /* Target has been updated so rescan it for dependencies. */ + if ( t->fate >= T_FATE_MISSING && t->status == EXEC_CMD_OK && + !( t->flags & T_FLAG_INTERNAL ) ) + { + TARGET * saved_includes; + SETTINGS * s; + + /* Clean current includes. */ + saved_includes = t->includes; + t->includes = 0; + + s = copysettings( t->settings ); + pushsettings( root_module(), s ); + headers( t ); + popsettings( root_module(), s ); + freesettings( s ); + + if ( t->includes ) + { + /* Tricky. The parents have already been processed, but they + * have not seen the internal node, because it was just + * created. 
We need to: + * - push MAKE1A states that would have been pushed by the + * parents here + * - make sure all unprocessed parents will pick up the + * new includes + * - make sure processing the additional MAKE1A states is + * done before processing the MAKE1B state for our + * current target (which would mean this target has + * already been built), otherwise the parent would be + * considered built before the additional MAKE1A state + * processing even got a chance to start. + */ + make0( t->includes, t->parents->target, 0, 0, 0, t->includes + ); + /* Link the old includes on to make sure that it gets + * cleaned up correctly. + */ + t->includes->includes = saved_includes; + for ( c = t->dependants.get(); c; c = c->next.get() ) + targetentry( c->target->depends, t->includes ); + /* Will be processed below. */ + additional_includes = t->includes; + } + else + { + t->includes = saved_includes; + } + } + + if ( additional_includes ) + for ( c = t->parents.get(); c; c = c->next.get() ) + push_state( &temp_stack, additional_includes, c->target, + T_STATE_MAKE1A ); + + if ( t->scc_root ) + { + TARGET * const scc_root = target_scc( t ); + assert( scc_root->progress < T_MAKE_DONE ); + for ( c = t->parents.get(); c; c = c->next.get() ) + { + if ( target_scc( c->target ) == scc_root ) + push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B + ); + else + targetentry( scc_root->parents, c->target ); + } + } + else + { + for ( c = t->parents.get(); c; c = c->next.get() ) + push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B ); + } + + /* Must pop state before pushing any more. */ + pop_state( &state_stack ); + + /* Using stacks reverses the order of execution. Reverse it back. */ + push_stack_on_stack( &state_stack, &temp_stack ); + } + } +} + + +/* + * call_timing_rule() - Look up the __TIMING_RULE__ variable on the given + * target, and if non-empty, invoke the rule it names, passing the given + * timing_info. + */ + +static void call_timing_rule( TARGET * target, timing_info const * const time ) +{ + LIST * timing_rule; + + pushsettings( root_module(), target->settings ); + timing_rule = var_get( root_module(), constant_TIMING_RULE ); + popsettings( root_module(), target->settings ); + + if ( !list_empty( timing_rule ) ) + { + /* rule timing-rule ( args * : target : start end user system clock ) */ + + /* Prepare the argument list. */ + FRAME frame[ 1 ]; + OBJECT * rulename = list_front( timing_rule ); + frame_init( frame ); + + /* args * :: $(__TIMING_RULE__[2-]) */ + lol_add( frame->args, list_copy_range( timing_rule, list_next( + list_begin( timing_rule ) ), list_end( timing_rule ) ) ); + + /* target :: the name of the target */ + lol_add( frame->args, list_new( object_copy( target->name ) ) ); + + /* start end user system clock :: info about the action command */ + lol_add( frame->args, list_push_back( list_push_back( list_push_back( list_push_back( list_new( + outf_time( &time->start ) ), + outf_time( &time->end ) ), + outf_double( time->user ) ), + outf_double( time->system ) ), + outf_double( timestamp_delta_seconds(&time->start, &time->end) ) ) + ); + + /* Call the rule. */ + evaluate_rule( bindrule( rulename , root_module() ), rulename, frame ); + + /* Clean up. */ + frame_free( frame ); + } +} + + +/* + * call_action_rule() - Look up the __ACTION_RULE__ variable on the given + * target, and if non-empty, invoke the rule it names, passing the given info, + * timing_info, executed command and command output. 
+ */ + +static void call_action_rule +( + TARGET * target, + int32_t status, + timing_info const * time, + char const * executed_command, + char const * command_output +) +{ + LIST * action_rule; + + pushsettings( root_module(), target->settings ); + action_rule = var_get( root_module(), constant_ACTION_RULE ); + popsettings( root_module(), target->settings ); + + if ( !list_empty( action_rule ) ) + { + /* rule action-rule ( + args * : + target : + command status start end user system : + output ? ) */ + + /* Prepare the argument list. */ + FRAME frame[ 1 ]; + OBJECT * rulename = list_front( action_rule ); + frame_init( frame ); + + /* args * :: $(__ACTION_RULE__[2-]) */ + lol_add( frame->args, list_copy_range( action_rule, list_next( + list_begin( action_rule ) ), list_end( action_rule ) ) ); + + /* target :: the name of the target */ + lol_add( frame->args, list_new( object_copy( target->name ) ) ); + + /* command status start end user system :: info about the action command + */ + lol_add( frame->args, + list_push_back( list_push_back( list_push_back( list_push_back( list_push_back( list_new( + object_new( executed_command ) ), + outf_int( status ) ), + outf_time( &time->start ) ), + outf_time( &time->end ) ), + outf_double( time->user ) ), + outf_double( time->system ) ) ); + + /* output ? :: the output of the action command */ + if ( command_output ) + { + OBJECT * command_output_obj = object_new( command_output ); + char * output_i = (char*)object_str(command_output_obj); + /* Clean the output of control characters. */ + for (; *output_i; ++output_i) + { + if (iscntrl(*output_i) && !isspace(*output_i)) *output_i = '?'; + } + lol_add( frame->args, list_new( command_output_obj ) ); + } + else + lol_add( frame->args, L0 ); + + /* Call the rule. */ + evaluate_rule( bindrule( rulename, root_module() ), rulename, frame ); + + /* Clean up. */ + frame_free( frame ); + } +} + + +/* + * make1c_closure() - handle command execution completion and go to MAKE1C. + * + * Internal function passed as a notification callback for when a command + * finishes getting executed by the OS or called directly when faking that a + * command had been executed by the OS. + * + * Now all we need to do is fiddle with the command exit status and push a new + * MAKE1C state to execute the next command scheduled for building this target + * or close up the target's build process in case there are no more commands + * scheduled for it. On interrupts, we bail heavily. + */ + +static void make1c_closure +( + void * const closure, + int32_t status_orig, + timing_info const * const time, + char const * const cmd_stdout, + char const * const cmd_stderr, + int32_t const cmd_exit_reason +) +{ + TARGET * const t = (TARGET *)closure; + CMD * const cmd = (CMD *)t->cmds; + char const * rule_name = 0; + char const * target_name = 0; + + assert( cmd ); + + --cmdsrunning; + + /* Calculate the target's status from the cmd execution result. */ + { + /* Store the target's status. */ + t->status = status_orig; + + /* Ignore failures for actions marked as 'ignore'. 
*/ + if ( t->status == EXEC_CMD_FAIL && cmd->rule->actions->flags & + RULE_IGNORE ) + t->status = EXEC_CMD_OK; + } + + if ( DEBUG_MAKEQ || + ( DEBUG_MAKE && !( cmd->rule->actions->flags & RULE_QUIETLY ) ) ) + { + rule_name = object_str( cmd->rule->name ); + target_name = object_str( list_front( lol_get( (LOL *)&cmd->args, 0 ) ) + ); + } + + if ( rule_name == NULL || globs.jobs > 1 ) + out_action( rule_name, target_name, cmd->buf->value, cmd_stdout, + cmd_stderr, cmd_exit_reason ); + + /* If the process expired, make user aware with an explicit message, but do + * this only for non-quiet actions. + */ + if ( cmd_exit_reason == EXIT_TIMEOUT && target_name ) + out_printf( "%ld second time limit exceeded\n", globs.timeout ); + + out_flush(); + err_flush(); + + if ( !globs.noexec ) + { + call_timing_rule( t, time ); + if ( DEBUG_EXECCMD ) + out_printf( "%f sec system; %f sec user; %f sec clock\n", + time->system, time->user, + timestamp_delta_seconds(&time->start, &time->end) ); + + /* Assume -p0 is in effect, i.e. cmd_stdout contains merged output. */ + call_action_rule( t, status_orig, time, cmd->buf->value, cmd_stdout ); + } + + /* Print command text on failure. */ + if ( t->status == EXEC_CMD_FAIL && DEBUG_MAKE && + ! ( t->flags & T_FLAG_FAIL_EXPECTED ) ) + { + if ( !DEBUG_EXEC ) + out_printf( "%s\n", cmd->buf->value ); + + out_printf( "...failed %s ", object_str( cmd->rule->name ) ); + list_print( lol_get( (LOL *)&cmd->args, 0 ) ); + out_printf( "...\n" ); + } + + /* On interrupt, set quit so _everything_ fails. Do the same for failed + * commands if we were asked to stop the build in case of any errors. + */ + if ( t->status == EXEC_CMD_INTR ) + { + ++intr; + ++quit; + } + if ( t->status == EXEC_CMD_FAIL && globs.quitquick && + ! ( t->flags & T_FLAG_FAIL_EXPECTED ) ) + ++quit; + + /* If the command was not successful remove all of its targets not marked as + * "precious". + */ + if ( t->status != EXEC_CMD_OK ) + { + LIST * const targets = lol_get( (LOL *)&cmd->args, 0 ); + LISTITER iter = list_begin( targets ); + LISTITER const end = list_end( targets ); + for ( ; iter != end; iter = list_next( iter ) ) + { + char const * const filename = object_str( list_item( iter ) ); + TARGET const * const t = bindtarget( list_item( iter ) ); + if ( !( t->flags & T_FLAG_PRECIOUS ) && !unlink( filename ) ) + out_printf( "...removing %s\n", filename ); + } + } + +#ifdef OPT_SEMAPHORE + /* Release any semaphores used by this action. */ + cmd_sem_unlock( t ); +#endif + + /* Free this command and push the MAKE1C state to execute the next one + * scheduled for building this same target. + */ + t->cmds = NULL; + push_cmds( cmd->next, t->status ); + cmd_free( cmd ); +} + +/* push the next MAKE1C state after a command is run. */ +static void push_cmds( CMDLIST * cmds, int32_t status ) +{ + CMDLIST * cmd_iter; + for( cmd_iter = cmds; cmd_iter; cmd_iter = cmd_iter->next ) + { + if ( cmd_iter->iscmd ) + { + CMD * next_cmd = cmd_iter->impl.cmd; + /* Propagate the command status. */ + if ( next_cmd->status < status ) + next_cmd->status = status; + if ( --next_cmd->asynccnt == 0 ) + { + /* Select the first target associated with the action. + * This is safe because sibling CMDs cannot have targets + * in common. 
+ */ + TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) ); + first_target->cmds = (char *)next_cmd; + push_state( &state_stack, first_target, NULL, T_STATE_MAKE1C ); + } + else if ( DEBUG_EXECCMD ) + { + TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) ); + out_printf( "Delaying %s %s: %d targets not ready\n", object_str( next_cmd->rule->name ), object_str( first_target->boundname ), next_cmd->asynccnt ); + } + } + else + { + /* This is a target that we're finished updating */ + TARGET * updated_target = cmd_iter->impl.t; + if ( updated_target->status < status ) + updated_target->status = status; + updated_target->cmds = NULL; + push_state( &state_stack, updated_target, NULL, T_STATE_MAKE1C ); + } + } +} + + +/* + * swap_settings() - replace the settings from the current module and target + * with those from the new module and target + */ + +static void swap_settings +( + module_t * * current_module, + TARGET * * current_target, + module_t * new_module, + TARGET * new_target +) +{ + if ( ( new_target == *current_target ) && + ( new_module == *current_module ) ) + return; + + if ( *current_target ) + popsettings( *current_module, (*current_target)->settings ); + + if ( new_target ) + pushsettings( new_module, new_target->settings ); + + *current_module = new_module; + *current_target = new_target; +} + + +/* + * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc. + * + * Essentially copies a chain of ACTIONs to a chain of CMDs, grouping + * RULE_TOGETHER actions, splitting RULE_PIECEMEAL actions, and handling + * RULE_NEWSRCS actions. The result is a chain of CMDs which has already had all + * of its embedded variable references expanded and can now be executed using + * exec_cmd(). + */ + +static CMD * make1cmds( TARGET * t ) +{ + CMD * cmds = 0; + CMD * last_cmd = 0; + LIST * shell = L0; + module_t * settings_module = 0; + TARGET * settings_target = 0; + ACTIONS * a0 = 0; + int32_t const running_flag = globs.noexec ? A_RUNNING_NOEXEC : A_RUNNING; + + /* Step through actions. + */ + for ( a0 = t->actions; a0; a0 = a0->next ) + { + RULE * rule = a0->action->rule; + rule_actions_ptr actions = rule->actions; + SETTINGS * boundvars; + LIST * nt; + LIST * ns; + ACTIONS * a1; + + /* Only do rules with commands to execute. + */ + if ( !actions ) + continue; + + if ( a0->action->running >= running_flag ) + { + CMD * first; + /* If this action was skipped either because it was + * combined with another action by RULE_TOGETHER, or + * because all of its sources were filtered out, + * then we don't have anything to do here. + */ + if ( a0->action->first_cmd == NULL ) + continue; + /* This action has already been processed for another target. + * Just set up the dependency graph correctly and move on. + */ + first = (CMD *)a0->action->first_cmd; + if( cmds ) + { + last_cmd->next = cmdlist_append_cmd( last_cmd->next, first ); + } + else + { + cmds = first; + } + last_cmd = (CMD *)a0->action->last_cmd; + continue; + } + + a0->action->running = running_flag; + + /* Make LISTS of targets and sources. If `execute together` has been + * specified for this rule, tack on sources from each instance of this + * rule for this target. 
+ */ + nt = make1list( L0, a0->action->targets, 0 ); + ns = make1list( L0, a0->action->sources, actions->flags ); + if ( actions->flags & RULE_TOGETHER ) + for ( a1 = a0->next; a1; a1 = a1->next ) + if ( a1->action->rule == rule && + a1->action->running < running_flag && + targets_equal( a0->action->targets, a1->action->targets ) ) + { + ns = make1list( ns, a1->action->sources, actions->flags ); + a1->action->running = running_flag; + } + + /* If doing only updated (or existing) sources, but none have been + * updated (or exist), skip this action. + */ + if ( list_empty( ns ) && + ( actions->flags & ( RULE_NEWSRCS | RULE_EXISTING ) ) ) + { + list_free( nt ); + continue; + } + + swap_settings( &settings_module, &settings_target, rule->module, t ); + if ( list_empty( shell ) ) + { + /* shell is per-target */ + shell = var_get( rule->module, constant_JAMSHELL ); + } + + /* If we had 'actions xxx bind vars' we bind the vars now. */ + boundvars = make1settings( rule->module, actions->bindlist ); + pushsettings( rule->module, boundvars ); + + /* + * Build command, starting with all source args. + * + * For actions that allow PIECEMEAL commands, if the constructed command + * string is too long, we retry constructing it with a reduced number of + * source arguments presented. + * + * While reducing slowly takes a bit of compute time to get things just + * right, it is worth it to get as close to maximum allowed command + * string length as possible, because launching the commands we are + * executing is likely to be much more compute intensive. + * + * Note that we loop through at least once, for sourceless actions. + */ + { + int32_t const length = list_length( ns ); + int32_t start = 0; + int32_t chunk = length; + int32_t cmd_count = 0; + targets_uptr semaphores; + targets_ptr targets_iter; + int32_t unique_targets; + do + { + CMD * cmd; + int32_t cmd_check_result; + int32_t cmd_error_length; + int32_t cmd_error_max_length; + int32_t retry = 0; + int32_t accept_command = 0; + + /* Build cmd: cmd_new() takes ownership of its lists. */ + cmd = cmd_new( rule, list_copy( nt ), list_sublist( ns, start, + chunk ), list_copy( shell ) ); + + cmd_check_result = exec_check( cmd->buf, &cmd->shell, + &cmd_error_length, &cmd_error_max_length ); + + if ( cmd_check_result == EXEC_CHECK_OK ) + { + accept_command = 1; + } + else if ( cmd_check_result == EXEC_CHECK_NOOP ) + { + accept_command = 1; + cmd->noop = 1; + } + else if ( ( actions->flags & RULE_PIECEMEAL ) && ( chunk > 1 ) ) + { + /* Too long but splittable. Reduce chunk size slowly and + * retry. + */ + assert( cmd_check_result == EXEC_CHECK_TOO_LONG || + cmd_check_result == EXEC_CHECK_LINE_TOO_LONG ); + chunk = chunk * 9 / 10; + retry = 1; + } + else + { + /* Too long and not splittable. */ + char const * const error_message = cmd_check_result == + EXEC_CHECK_TOO_LONG + ? "is too long" + : "contains a line that is too long"; + assert( cmd_check_result == EXEC_CHECK_TOO_LONG || + cmd_check_result == EXEC_CHECK_LINE_TOO_LONG ); + out_printf( + "%s action %s (%d, max %d):\n", + object_str( rule->name ), error_message, + cmd_error_length, cmd_error_max_length ); + + /* Tell the user what did not fit. */ + out_puts( cmd->buf->value ); + b2::clean_exit( EXITBAD ); + } + + assert( !retry || !accept_command ); + + if ( accept_command ) + { + /* Chain it up. 
*/ + if ( cmds ) + { + last_cmd->next = cmdlist_append_cmd( last_cmd->next, cmd ); + last_cmd = cmd; + } + else + { + cmds = last_cmd = cmd; + } + + if ( cmd_count++ == 0 ) + { + a0->action->first_cmd = cmd; + } + } + else + { + cmd_free( cmd ); + } + + if ( !retry ) + start += chunk; + } + while ( start < length ); + + /* Record the end of the actions cmds */ + a0->action->last_cmd = last_cmd; + + unique_targets = 0; + for ( targets_iter = a0->action->targets.get(); targets_iter; targets_iter = targets_iter->next.get() ) + { + if ( targets_contains( targets_iter->next, targets_iter->target ) ) + continue; + /* Add all targets produced by the action to the update list. */ + push_state( &state_stack, targets_iter->target, NULL, T_STATE_MAKE1A ); + ++unique_targets; + } + /* We need to wait until all the targets agree that + * it's okay to run this action. + */ + ( ( CMD * )a0->action->first_cmd )->asynccnt = unique_targets; + +#if OPT_SEMAPHORE + /* Collect semaphores */ + for ( targets_iter = a0->action->targets.get(); targets_iter; targets_iter = targets_iter->next.get() ) + { + TARGET * sem = targets_iter->target->semaphore; + if ( sem ) + { + if ( ! targets_contains( semaphores, sem ) ) + targetentry( semaphores, sem ); + } + } + ( ( CMD * )a0->action->first_cmd )->lock = semaphores.get(); + ( ( CMD * )a0->action->last_cmd )->unlock = std::move(semaphores); +#endif + } + + /* These were always copied when used. */ + list_free( nt ); + list_free( ns ); + + /* Free variables with values bound by 'actions xxx bind vars'. */ + popsettings( rule->module, boundvars ); + freesettings( boundvars ); + } + + if ( cmds ) + { + last_cmd->next = cmdlist_append_target( last_cmd->next, t ); + } + + swap_settings( &settings_module, &settings_target, 0, 0 ); + return cmds; +} + + +/* + * make1list() - turn a list of targets into a LIST, for $(<) and $(>) + */ + +static LIST * make1list( LIST * l, const targets_uptr & ts, int32_t flags ) +{ + targets_ptr targets = ts.get(); + for ( ; targets; targets = targets->next.get() ) + { + TARGET * t = targets->target; + + if ( t->binding == T_BIND_UNBOUND ) + make1bind( t ); + + if ( ( flags & RULE_EXISTING ) && ( flags & RULE_NEWSRCS ) ) + { + if ( ( t->binding != T_BIND_EXISTS ) && + ( t->fate <= T_FATE_STABLE ) ) + continue; + } + else if ( flags & RULE_EXISTING ) + { + if ( t->binding != T_BIND_EXISTS ) + continue; + } + else if ( flags & RULE_NEWSRCS ) + { + if ( t->fate <= T_FATE_STABLE ) + continue; + } + + /* Prohibit duplicates for RULE_TOGETHER. */ + if ( flags & RULE_TOGETHER ) + { + LISTITER iter = list_begin( l ); + LISTITER const end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + if ( object_equal( list_item( iter ), t->boundname ) ) + break; + if ( iter != end ) + continue; + } + + /* Build new list. 
*/ + l = list_push_back( l, object_copy( t->boundname ) ); + } + + return l; +} + + +/* + * make1settings() - for vars with bound values, build up replacement lists + */ + +static SETTINGS * make1settings( struct module_t * module, LIST * vars ) +{ + SETTINGS * settings = 0; + + LISTITER vars_iter = list_begin( vars ); + LISTITER const vars_end = list_end( vars ); + for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) ) + { + LIST * const l = var_get( module, list_item( vars_iter ) ); + LIST * nl = L0; + LISTITER iter = list_begin( l ); + LISTITER const end = list_end( l ); + + for ( ; iter != end; iter = list_next( iter ) ) + { + TARGET * const t = bindtarget( list_item( iter ) ); + + /* Make sure the target is bound. */ + if ( t->binding == T_BIND_UNBOUND ) + make1bind( t ); + + /* Build a new list. */ + nl = list_push_back( nl, object_copy( t->boundname ) ); + } + + /* Add to settings chain. */ + settings = addsettings( settings, VAR_SET, list_item( vars_iter ), nl ); + } + + return settings; +} + + +/* + * make1bind() - bind targets that were not bound during dependency analysis + * + * Spot the kludge! If a target is not in the dependency tree, it did not get + * bound by make0(), so we have to do it here. Ugly. + */ + +static void make1bind( TARGET * t ) +{ + if ( t->flags & T_FLAG_NOTFILE ) + return; + + pushsettings( root_module(), t->settings ); + object_free( t->boundname ); + t->boundname = search( t->name, &t->time, 0, t->flags & T_FLAG_ISFILE ); + t->binding = timestamp_empty( &t->time ) ? T_BIND_MISSING : T_BIND_EXISTS; + popsettings( root_module(), t->settings ); +} + + +static bool targets_contains( const targets_uptr & ts, TARGET * t ) +{ + targets_ptr l = ts.get(); + for ( ; l; l = l->next.get() ) + { + if ( t == l->target ) + { + return true; + } + } + return false; +} + +static bool targets_equal( const targets_uptr & ts1, const targets_uptr & ts2 ) +{ + targets_ptr l1 = ts1.get(); + targets_ptr l2 = ts2.get(); + for ( ; l1 && l2; l1 = l1->next.get(), l2 = l2->next.get() ) + { + if ( l1->target != l2->target ) + return false; + } + return !l1 && !l2; +} + + +#ifdef OPT_SEMAPHORE + +static int32_t cmd_sem_lock( TARGET * t ) +{ + CMD * cmd = (CMD *)t->cmds; + targets_ptr iter; + /* Check whether all the semaphores required for updating + * this target are free. + */ + for ( iter = cmd->lock; iter; iter = iter->next.get() ) + { + if ( iter->target->asynccnt > 0 ) + { + if ( DEBUG_EXECCMD ) + out_printf( "SEM: %s is busy, delaying launch of %s\n", + object_str( iter->target->name ), object_str( t->name ) ); + targetentry( iter->target->parents, t ); + return 0; + } + } + /* Lock the semaphores. */ + for ( iter = cmd->lock; iter; iter = iter->next.get() ) + { + ++iter->target->asynccnt; + if ( DEBUG_EXECCMD ) + out_printf( "SEM: %s now used by %s\n", object_str( iter->target->name + ), object_str( t->name ) ); + } + /* A cmd only needs to be locked around its execution. + * clearing cmd->lock here makes it safe to call cmd_sem_lock + * twice. + */ + cmd->lock = NULL; + return 1; +} + +static void cmd_sem_unlock( TARGET * t ) +{ + CMD * cmd = ( CMD * )t->cmds; + targets_ptr iter; + /* Release the semaphores. 
 */
+    for ( iter = cmd->unlock.get(); iter; iter = iter->next.get() )
+    {
+        if ( DEBUG_EXECCMD )
+            out_printf( "SEM: %s is now free\n", object_str(
+                iter->target->name ) );
+        --iter->target->asynccnt;
+        assert( iter->target->asynccnt <= 0 );
+    }
+    for ( iter = cmd->unlock.get(); iter; iter = iter->next.get() )
+    {
+        /* Find a waiting target that's ready */
+        while ( iter->target->parents )
+        {
+            TARGET * t1 = iter->target->parents->target;
+
+            iter->target->parents = targets_pop(std::move(iter->target->parents));
+
+            if ( cmd_sem_lock( t1 ) )
+            {
+                push_state( &state_stack, t1, NULL, T_STATE_MAKE1C );
+                break;
+            }
+        }
+    }
+}
+
+#endif
diff --git a/src/boost/tools/build/src/engine/md5.cpp b/src/boost/tools/build/src/engine/md5.cpp
new file mode 100644
index 000000000..2c53be85e
--- /dev/null
+++ b/src/boost/tools/build/src/engine/md5.cpp
@@ -0,0 +1,381 @@
+/*
+  Copyright (C) 1999, 2000, 2002 Aladdin Enterprises.  All rights reserved.
+
+  This software is provided 'as-is', without any express or implied
+  warranty.  In no event will the authors be held liable for any damages
+  arising from the use of this software.
+
+  Permission is granted to anyone to use this software for any purpose,
+  including commercial applications, and to alter it and redistribute it
+  freely, subject to the following restrictions:
+
+  1. The origin of this software must not be misrepresented; you must not
+     claim that you wrote the original software. If you use this software
+     in a product, an acknowledgment in the product documentation would be
+     appreciated but is not required.
+  2. Altered source versions must be plainly marked as such, and must not be
+     misrepresented as being the original software.
+  3. This notice may not be removed or altered from any source distribution.
+
+  L. Peter Deutsch
+  ghost@aladdin.com
+
+ */
+/* $Id: md5.c,v 1.6 2002/04/13 19:20:28 lpd Exp $ */
+/*
+  Independent implementation of MD5 (RFC 1321).
+
+  This code implements the MD5 Algorithm defined in RFC 1321, whose
+  text is available at
+    http://www.ietf.org/rfc/rfc1321.txt
+  The code is derived from the text of the RFC, including the test suite
+  (section A.5) but excluding the rest of Appendix A.  It does not include
+  any code or documentation that is identified in the RFC as being
+  copyrighted.
+
+  The original and principal author of md5.c is L. Peter Deutsch
+  <ghost@aladdin.com>.  Other authors are noted in the change history
+  that follows (in reverse chronological order):
+
+  2002-04-13 lpd Clarified derivation from RFC 1321; now handles byte order
+    either statically or dynamically; added missing #include <string.h>
+    in library.
+  2002-03-11 lpd Corrected argument list for main(), and added int return
+    type, in test program and T value program.
+  2002-02-21 lpd Added missing #include <stdio.h> in test program.
+  2000-07-03 lpd Patched to eliminate warnings about "constant is
+    unsigned in ANSI C, signed in traditional"; made test program
+    self-checking.
+  1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+  1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5).
+  1999-05-03 lpd Original version.
+ */
+
+#include "md5.h"
+#include <string.h>
+
+#undef BYTE_ORDER   /* 1 = big-endian, -1 = little-endian, 0 = unknown */
+#ifdef ARCH_IS_BIG_ENDIAN
+#  define BYTE_ORDER (ARCH_IS_BIG_ENDIAN ?
1 : -1) +#else +# define BYTE_ORDER 0 +#endif + +#define T_MASK ((md5_word_t)~0) +#define T1 /* 0xd76aa478 */ (T_MASK ^ 0x28955b87) +#define T2 /* 0xe8c7b756 */ (T_MASK ^ 0x173848a9) +#define T3 0x242070db +#define T4 /* 0xc1bdceee */ (T_MASK ^ 0x3e423111) +#define T5 /* 0xf57c0faf */ (T_MASK ^ 0x0a83f050) +#define T6 0x4787c62a +#define T7 /* 0xa8304613 */ (T_MASK ^ 0x57cfb9ec) +#define T8 /* 0xfd469501 */ (T_MASK ^ 0x02b96afe) +#define T9 0x698098d8 +#define T10 /* 0x8b44f7af */ (T_MASK ^ 0x74bb0850) +#define T11 /* 0xffff5bb1 */ (T_MASK ^ 0x0000a44e) +#define T12 /* 0x895cd7be */ (T_MASK ^ 0x76a32841) +#define T13 0x6b901122 +#define T14 /* 0xfd987193 */ (T_MASK ^ 0x02678e6c) +#define T15 /* 0xa679438e */ (T_MASK ^ 0x5986bc71) +#define T16 0x49b40821 +#define T17 /* 0xf61e2562 */ (T_MASK ^ 0x09e1da9d) +#define T18 /* 0xc040b340 */ (T_MASK ^ 0x3fbf4cbf) +#define T19 0x265e5a51 +#define T20 /* 0xe9b6c7aa */ (T_MASK ^ 0x16493855) +#define T21 /* 0xd62f105d */ (T_MASK ^ 0x29d0efa2) +#define T22 0x02441453 +#define T23 /* 0xd8a1e681 */ (T_MASK ^ 0x275e197e) +#define T24 /* 0xe7d3fbc8 */ (T_MASK ^ 0x182c0437) +#define T25 0x21e1cde6 +#define T26 /* 0xc33707d6 */ (T_MASK ^ 0x3cc8f829) +#define T27 /* 0xf4d50d87 */ (T_MASK ^ 0x0b2af278) +#define T28 0x455a14ed +#define T29 /* 0xa9e3e905 */ (T_MASK ^ 0x561c16fa) +#define T30 /* 0xfcefa3f8 */ (T_MASK ^ 0x03105c07) +#define T31 0x676f02d9 +#define T32 /* 0x8d2a4c8a */ (T_MASK ^ 0x72d5b375) +#define T33 /* 0xfffa3942 */ (T_MASK ^ 0x0005c6bd) +#define T34 /* 0x8771f681 */ (T_MASK ^ 0x788e097e) +#define T35 0x6d9d6122 +#define T36 /* 0xfde5380c */ (T_MASK ^ 0x021ac7f3) +#define T37 /* 0xa4beea44 */ (T_MASK ^ 0x5b4115bb) +#define T38 0x4bdecfa9 +#define T39 /* 0xf6bb4b60 */ (T_MASK ^ 0x0944b49f) +#define T40 /* 0xbebfbc70 */ (T_MASK ^ 0x4140438f) +#define T41 0x289b7ec6 +#define T42 /* 0xeaa127fa */ (T_MASK ^ 0x155ed805) +#define T43 /* 0xd4ef3085 */ (T_MASK ^ 0x2b10cf7a) +#define T44 0x04881d05 +#define T45 /* 0xd9d4d039 */ (T_MASK ^ 0x262b2fc6) +#define T46 /* 0xe6db99e5 */ (T_MASK ^ 0x1924661a) +#define T47 0x1fa27cf8 +#define T48 /* 0xc4ac5665 */ (T_MASK ^ 0x3b53a99a) +#define T49 /* 0xf4292244 */ (T_MASK ^ 0x0bd6ddbb) +#define T50 0x432aff97 +#define T51 /* 0xab9423a7 */ (T_MASK ^ 0x546bdc58) +#define T52 /* 0xfc93a039 */ (T_MASK ^ 0x036c5fc6) +#define T53 0x655b59c3 +#define T54 /* 0x8f0ccc92 */ (T_MASK ^ 0x70f3336d) +#define T55 /* 0xffeff47d */ (T_MASK ^ 0x00100b82) +#define T56 /* 0x85845dd1 */ (T_MASK ^ 0x7a7ba22e) +#define T57 0x6fa87e4f +#define T58 /* 0xfe2ce6e0 */ (T_MASK ^ 0x01d3191f) +#define T59 /* 0xa3014314 */ (T_MASK ^ 0x5cfebceb) +#define T60 0x4e0811a1 +#define T61 /* 0xf7537e82 */ (T_MASK ^ 0x08ac817d) +#define T62 /* 0xbd3af235 */ (T_MASK ^ 0x42c50dca) +#define T63 0x2ad7d2bb +#define T64 /* 0xeb86d391 */ (T_MASK ^ 0x14792c6e) + + +static void +md5_process(md5_state_t *pms, const md5_byte_t *data /*[64]*/) +{ + md5_word_t + a = pms->abcd[0], b = pms->abcd[1], + c = pms->abcd[2], d = pms->abcd[3]; + md5_word_t t; +#if BYTE_ORDER > 0 + /* Define storage only for big-endian CPUs. */ + md5_word_t X[16]; +#else + /* Define storage for little-endian or both types of CPUs. */ + md5_word_t xbuf[16]; + const md5_word_t *X; +#endif + + { +#if BYTE_ORDER == 0 + /* + * Determine dynamically whether this is a big-endian or + * little-endian machine, since we can use a more efficient + * algorithm on the latter. 
+ */ + static const int w = 1; + + if (*((const md5_byte_t *)&w)) /* dynamic little-endian */ +#endif +#if BYTE_ORDER <= 0 /* little-endian */ + { + /* + * On little-endian machines, we can process properly aligned + * data without copying it. + */ + if (!((data - (const md5_byte_t *)0) & 3)) { + /* data are properly aligned */ + X = (const md5_word_t *)data; + } else { + /* not aligned */ + memcpy(xbuf, data, 64); + X = xbuf; + } + } +#endif +#if BYTE_ORDER == 0 + else /* dynamic big-endian */ +#endif +#if BYTE_ORDER >= 0 /* big-endian */ + { + /* + * On big-endian machines, we must arrange the bytes in the + * right order. + */ + const md5_byte_t *xp = data; + int i; + +# if BYTE_ORDER == 0 + X = xbuf; /* (dynamic only) */ +# else +# define xbuf X /* (static only) */ +# endif + for (i = 0; i < 16; ++i, xp += 4) + xbuf[i] = xp[0] + (xp[1] << 8) + (xp[2] << 16) + (xp[3] << 24); + } +#endif + } + +#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32 - (n)))) + + /* Round 1. */ + /* Let [abcd k s i] denote the operation + a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s). */ +#define F(x, y, z) (((x) & (y)) | (~(x) & (z))) +#define SET(a, b, c, d, k, s, Ti)\ + t = a + F(b,c,d) + X[k] + Ti;\ + a = ROTATE_LEFT(t, s) + b + /* Do the following 16 operations. */ + SET(a, b, c, d, 0, 7, T1); + SET(d, a, b, c, 1, 12, T2); + SET(c, d, a, b, 2, 17, T3); + SET(b, c, d, a, 3, 22, T4); + SET(a, b, c, d, 4, 7, T5); + SET(d, a, b, c, 5, 12, T6); + SET(c, d, a, b, 6, 17, T7); + SET(b, c, d, a, 7, 22, T8); + SET(a, b, c, d, 8, 7, T9); + SET(d, a, b, c, 9, 12, T10); + SET(c, d, a, b, 10, 17, T11); + SET(b, c, d, a, 11, 22, T12); + SET(a, b, c, d, 12, 7, T13); + SET(d, a, b, c, 13, 12, T14); + SET(c, d, a, b, 14, 17, T15); + SET(b, c, d, a, 15, 22, T16); +#undef SET + + /* Round 2. */ + /* Let [abcd k s i] denote the operation + a = b + ((a + G(b,c,d) + X[k] + T[i]) <<< s). */ +#define G(x, y, z) (((x) & (z)) | ((y) & ~(z))) +#define SET(a, b, c, d, k, s, Ti)\ + t = a + G(b,c,d) + X[k] + Ti;\ + a = ROTATE_LEFT(t, s) + b + /* Do the following 16 operations. */ + SET(a, b, c, d, 1, 5, T17); + SET(d, a, b, c, 6, 9, T18); + SET(c, d, a, b, 11, 14, T19); + SET(b, c, d, a, 0, 20, T20); + SET(a, b, c, d, 5, 5, T21); + SET(d, a, b, c, 10, 9, T22); + SET(c, d, a, b, 15, 14, T23); + SET(b, c, d, a, 4, 20, T24); + SET(a, b, c, d, 9, 5, T25); + SET(d, a, b, c, 14, 9, T26); + SET(c, d, a, b, 3, 14, T27); + SET(b, c, d, a, 8, 20, T28); + SET(a, b, c, d, 13, 5, T29); + SET(d, a, b, c, 2, 9, T30); + SET(c, d, a, b, 7, 14, T31); + SET(b, c, d, a, 12, 20, T32); +#undef SET + + /* Round 3. */ + /* Let [abcd k s t] denote the operation + a = b + ((a + H(b,c,d) + X[k] + T[i]) <<< s). */ +#define H(x, y, z) ((x) ^ (y) ^ (z)) +#define SET(a, b, c, d, k, s, Ti)\ + t = a + H(b,c,d) + X[k] + Ti;\ + a = ROTATE_LEFT(t, s) + b + /* Do the following 16 operations. */ + SET(a, b, c, d, 5, 4, T33); + SET(d, a, b, c, 8, 11, T34); + SET(c, d, a, b, 11, 16, T35); + SET(b, c, d, a, 14, 23, T36); + SET(a, b, c, d, 1, 4, T37); + SET(d, a, b, c, 4, 11, T38); + SET(c, d, a, b, 7, 16, T39); + SET(b, c, d, a, 10, 23, T40); + SET(a, b, c, d, 13, 4, T41); + SET(d, a, b, c, 0, 11, T42); + SET(c, d, a, b, 3, 16, T43); + SET(b, c, d, a, 6, 23, T44); + SET(a, b, c, d, 9, 4, T45); + SET(d, a, b, c, 12, 11, T46); + SET(c, d, a, b, 15, 16, T47); + SET(b, c, d, a, 2, 23, T48); +#undef SET + + /* Round 4. */ + /* Let [abcd k s t] denote the operation + a = b + ((a + I(b,c,d) + X[k] + T[i]) <<< s). 
*/ +#define I(x, y, z) ((y) ^ ((x) | ~(z))) +#define SET(a, b, c, d, k, s, Ti)\ + t = a + I(b,c,d) + X[k] + Ti;\ + a = ROTATE_LEFT(t, s) + b + /* Do the following 16 operations. */ + SET(a, b, c, d, 0, 6, T49); + SET(d, a, b, c, 7, 10, T50); + SET(c, d, a, b, 14, 15, T51); + SET(b, c, d, a, 5, 21, T52); + SET(a, b, c, d, 12, 6, T53); + SET(d, a, b, c, 3, 10, T54); + SET(c, d, a, b, 10, 15, T55); + SET(b, c, d, a, 1, 21, T56); + SET(a, b, c, d, 8, 6, T57); + SET(d, a, b, c, 15, 10, T58); + SET(c, d, a, b, 6, 15, T59); + SET(b, c, d, a, 13, 21, T60); + SET(a, b, c, d, 4, 6, T61); + SET(d, a, b, c, 11, 10, T62); + SET(c, d, a, b, 2, 15, T63); + SET(b, c, d, a, 9, 21, T64); +#undef SET + + /* Then perform the following additions. (That is increment each + of the four registers by the value it had before this block + was started.) */ + pms->abcd[0] += a; + pms->abcd[1] += b; + pms->abcd[2] += c; + pms->abcd[3] += d; +} + +void +md5_init(md5_state_t *pms) +{ + pms->count[0] = pms->count[1] = 0; + pms->abcd[0] = 0x67452301; + pms->abcd[1] = /*0xefcdab89*/ T_MASK ^ 0x10325476; + pms->abcd[2] = /*0x98badcfe*/ T_MASK ^ 0x67452301; + pms->abcd[3] = 0x10325476; +} + +void +md5_append(md5_state_t *pms, const md5_byte_t *data, size_t nbytes) +{ + const md5_byte_t *p = data; + size_t left = nbytes; + size_t offset = (pms->count[0] >> 3) & 63; + md5_word_t nbits = (md5_word_t)(nbytes << 3); + + if (nbytes <= 0) + return; + + /* Update the message length. */ + pms->count[1] += md5_word_t(nbytes >> 29); + pms->count[0] += nbits; + if (pms->count[0] < nbits) + pms->count[1]++; + + /* Process an initial partial block. */ + if (offset) { + size_t copy = (offset + nbytes > 64 ? 64 - offset : nbytes); + + memcpy(pms->buf + offset, p, copy); + if (offset + copy < 64) + return; + p += copy; + left -= copy; + md5_process(pms, pms->buf); + } + + /* Process full blocks. */ + for (; left >= 64; p += 64, left -= 64) + md5_process(pms, p); + + /* Process a final partial block. */ + if (left) + memcpy(pms->buf, p, left); +} + +void +md5_finish(md5_state_t *pms, md5_byte_t digest[16]) +{ + static const md5_byte_t pad[64] = { + 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + }; + md5_byte_t data[8]; + int i; + + /* Save the length before padding. */ + for (i = 0; i < 8; ++i) + data[i] = (md5_byte_t)(pms->count[i >> 2] >> ((i & 3) << 3)); + /* Pad to 56 bytes mod 64. */ + md5_append(pms, pad, ((55 - (pms->count[0] >> 3)) & 63) + 1); + /* Append the length. */ + md5_append(pms, data, 8); + for (i = 0; i < 16; ++i) + digest[i] = (md5_byte_t)(pms->abcd[i >> 2] >> ((i & 3) << 3)); +} diff --git a/src/boost/tools/build/src/engine/md5.h b/src/boost/tools/build/src/engine/md5.h new file mode 100644 index 000000000..cbfde5976 --- /dev/null +++ b/src/boost/tools/build/src/engine/md5.h @@ -0,0 +1,93 @@ +/* + Copyright (C) 1999, 2002 Aladdin Enterprises. All rights reserved. + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. 
If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + L. Peter Deutsch + ghost@aladdin.com + + */ +/* $Id: md5.h,v 1.4 2002/04/13 19:20:28 lpd Exp $ */ +/* + Independent implementation of MD5 (RFC 1321). + + This code implements the MD5 Algorithm defined in RFC 1321, whose + text is available at + http://www.ietf.org/rfc/rfc1321.txt + The code is derived from the text of the RFC, including the test suite + (section A.5) but excluding the rest of Appendix A. It does not include + any code or documentation that is identified in the RFC as being + copyrighted. + + The original and principal author of md5.h is L. Peter Deutsch + . Other authors are noted in the change history + that follows (in reverse chronological order): + + 2002-04-13 lpd Removed support for non-ANSI compilers; removed + references to Ghostscript; clarified derivation from RFC 1321; + now handles byte order either statically or dynamically. + 1999-11-04 lpd Edited comments slightly for automatic TOC extraction. + 1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5); + added conditionalization for C++ compilation from Martin + Purschke . + 1999-05-03 lpd Original version. + */ + +#ifndef md5_INCLUDED +# define md5_INCLUDED + +#include + +/* + * This package supports both compile-time and run-time determination of CPU + * byte order. If ARCH_IS_BIG_ENDIAN is defined as 0, the code will be + * compiled to run only on little-endian CPUs; if ARCH_IS_BIG_ENDIAN is + * defined as non-zero, the code will be compiled to run only on big-endian + * CPUs; if ARCH_IS_BIG_ENDIAN is not defined, the code will be compiled to + * run on either big- or little-endian CPUs, but will run slightly less + * efficiently on either one than if ARCH_IS_BIG_ENDIAN is defined. + */ + +typedef unsigned char md5_byte_t; /* 8-bit byte */ +typedef unsigned int md5_word_t; /* 32-bit word */ + +/* Define the state of the MD5 Algorithm. */ +typedef struct md5_state_s { + md5_word_t count[2]; /* message length in bits, lsw first */ + md5_word_t abcd[4]; /* digest buffer */ + md5_byte_t buf[64]; /* accumulate block */ +} md5_state_t; + +#ifdef __cplusplus +extern "C" +{ +#endif + +/* Initialize the algorithm. */ +void md5_init(md5_state_t *pms); + +/* Append a string to the message. */ +void md5_append(md5_state_t *pms, const md5_byte_t *data, size_t nbytes); + +/* Finish the message and return the digest. */ +void md5_finish(md5_state_t *pms, md5_byte_t digest[16]); + +#ifdef __cplusplus +} /* end extern "C" */ +#endif + +#endif /* md5_INCLUDED */ diff --git a/src/boost/tools/build/src/engine/mem.cpp b/src/boost/tools/build/src/engine/mem.cpp new file mode 100644 index 000000000..ee353253a --- /dev/null +++ b/src/boost/tools/build/src/engine/mem.cpp @@ -0,0 +1,8 @@ +/* +Copyright Rene Rivera 2006. +Distributed under the Boost Software License, Version 1.0. 
+(See accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#include "jam.h" diff --git a/src/boost/tools/build/src/engine/mem.h b/src/boost/tools/build/src/engine/mem.h new file mode 100644 index 000000000..41868cc58 --- /dev/null +++ b/src/boost/tools/build/src/engine/mem.h @@ -0,0 +1,170 @@ +/* + * Copyright 2006-2022 RenĂ© Ferdinand Rivera Morell + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef BJAM_MEM_H +#define BJAM_MEM_H + +#include "config.h" + +#include +#include + +#define bjam_malloc_x(s) std::malloc(s) +#define bjam_calloc_x(n, s) std::calloc(n, s) +#define bjam_realloc_x(p, s) std::realloc(p, s) +#define bjam_free_x(p) std::free(p) + +#ifndef bjam_malloc_atomic_x +#define bjam_malloc_atomic_x(s) bjam_malloc_x(s) +#endif +#ifndef bjam_calloc_atomic_x +#define bjam_calloc_atomic_x(n, s) bjam_calloc_x(n, s) +#endif +#ifndef bjam_mem_init_x +#define bjam_mem_init_x() +#endif +#ifndef bjam_mem_close_x +#define bjam_mem_close_x() +#endif +#ifndef bjam_malloc_raw_x +#define bjam_malloc_raw_x(s) bjam_malloc_x(s) +#endif +#ifndef bjam_calloc_raw_x +#define bjam_calloc_raw_x(n, s) bjam_calloc_x(n, s) +#endif +#ifndef bjam_realloc_raw_x +#define bjam_realloc_raw_x(p, s) bjam_realloc_x(p, s) +#endif +#ifndef bjam_free_raw_x +#define bjam_free_raw_x(p) bjam_free_x(p) +#endif + +#ifdef OPT_DEBUG_PROFILE +/* Profile tracing of memory allocations. */ +#include "debug.h" + +#define BJAM_MALLOC(s) (profile_memory(s), bjam_malloc_x(s)) +#define BJAM_MALLOC_ATOMIC(s) (profile_memory(s), bjam_malloc_atomic_x(s)) +#define BJAM_CALLOC(n, s) (profile_memory(n * s), bjam_calloc_x(n, s)) +#define BJAM_CALLOC_ATOMIC(n, s) (profile_memory(n * s), bjam_calloc_atomic_x(n, s)) +#define BJAM_REALLOC(p, s) (profile_memory(s), bjam_realloc_x(p, s)) + +#define BJAM_MALLOC_RAW(s) (profile_memory(s), bjam_malloc_raw_x(s)) +#define BJAM_CALLOC_RAW(n, s) (profile_memory(n * s), bjam_calloc_raw_x(n, s)) +#define BJAM_REALLOC_RAW(p, s) (profile_memory(s), bjam_realloc_raw_x(p, s)) +#else +/* No mem tracing. */ +#define BJAM_MALLOC(s) bjam_malloc_x(s) +#define BJAM_MALLOC_ATOMIC(s) bjam_malloc_atomic_x(s) +#define BJAM_CALLOC(n, s) bjam_calloc_x(n, s) +#define BJAM_CALLOC_ATOMIC(n, s) bjam_calloc_atomic_x(n, s) +#define BJAM_REALLOC(p, s) bjam_realloc_x(p, s) + +#define BJAM_MALLOC_RAW(s) bjam_malloc_raw_x(s) +#define BJAM_CALLOC_RAW(n, s) bjam_calloc_raw_x(n, s) +#define BJAM_REALLOC_RAW(p, s) bjam_realloc_raw_x(p, s) +#endif + +#define BJAM_MEM_INIT() bjam_mem_init_x() +#define BJAM_MEM_CLOSE() bjam_mem_close_x() + +#define BJAM_FREE(p) bjam_free_x(p) +#define BJAM_FREE_RAW(p) bjam_free_raw_x(p) + +namespace b2 { +namespace jam { + + template + T* ctor_ptr(void* p, Args&&... args) + { + std::memset(p, 0, sizeof(T)); + return new (p) T(std::forward(args)...); + } + + template + void dtor_ptr(T* p) + { + p->~T(); + } + + template + T* make_ptr(Args&&... args) + { + return ctor_ptr(BJAM_MALLOC(sizeof(T))); + } + + template + void free_ptr(T* p) + { + dtor_ptr(p); + BJAM_FREE(p); + } + + template + struct unique_jptr_deleter { + void operator()(T* p) const + { + p->~T(); + BJAM_FREE(p); + } + }; + + template + using unique_jptr = std::unique_ptr>; + + template + unique_jptr make_unique_jptr(Args&&... 
args) + { + return unique_jptr(make_ptr(std::forward(args)...)); + } + + template + struct unique_jptr_free { + using free_f = void (*)(T*); + + explicit unique_jptr_free(free_f f) + : free_function(f) + { + } + + unique_jptr_free(unique_jptr_free&& p) + : free_function(std::move(p.free_function)) + { + p.free_function = nullptr; + } + + void operator()(T* p) const + { + // if (free_function) + if (p) + (*free_function)(p); + } + + private: + free_f free_function = nullptr; + }; + + template + using unique_bare_jptr = std::unique_ptr>; + + template + unique_bare_jptr make_unique_bare_jptr(T* p, F f) + { + return unique_bare_jptr(p, unique_jptr_free(f)); + } + + template + unique_bare_jptr make_unique_bare_jptr(T* p, F enter_f, G exit_f) + { + enter_f(p); + return unique_bare_jptr(p, unique_jptr_free(exit_f)); + } + +} +} + +#endif diff --git a/src/boost/tools/build/src/engine/modules.cpp b/src/boost/tools/build/src/engine/modules.cpp new file mode 100644 index 000000000..250c03724 --- /dev/null +++ b/src/boost/tools/build/src/engine/modules.cpp @@ -0,0 +1,434 @@ +/* + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "jam.h" +#include "modules.h" + +#include "hash.h" +#include "lists.h" +#include "native.h" +#include "object.h" +#include "parse.h" +#include "rules.h" +#include "jam_strings.h" +#include "variable.h" + +#include +#include + +static struct hash * module_hash = 0; +static module_t root; + + +module_t * bindmodule( OBJECT * name ) +{ + if ( !name ) + return &root; + + { + PROFILE_ENTER( BINDMODULE ); + + module_t * m; + int found; + + if ( !module_hash ) + module_hash = hashinit( sizeof( module_t ), "modules" ); + + m = (module_t *)hash_insert( module_hash, name, &found ); + if ( !found ) + { + m->name = object_copy( name ); + m->variables = 0; + m->variable_indices = 0; + m->num_fixed_variables = 0; + m->fixed_variables = 0; + m->rules = 0; + m->imported_modules = 0; + m->class_module = 0; + m->native_rules = 0; + m->user_module = 0; + } + + PROFILE_EXIT( BINDMODULE ); + + return m; + } +} + + +/* + * demand_rules() - Get the module's "rules" hash on demand. + */ +struct hash * demand_rules( module_t * m ) +{ + if ( !m->rules ) + m->rules = hashinit( sizeof( RULE ), "rules" ); + return m->rules; +} + + +/* + * delete_module() - wipe out the module's rules and variables. + */ + +static void delete_rule_( void * xrule, void * data ) +{ + rule_free( (RULE *)xrule ); +} + + +static void delete_native_rule( void * xrule, void * data ) +{ + native_rule_t * rule = (native_rule_t *)xrule; + object_free( rule->name ); + if ( rule->procedure ) + function_free( rule->procedure ); +} + + +static void delete_imported_modules( void * xmodule_name, void * data ) +{ + object_free( *(OBJECT * *)xmodule_name ); +} + + +static void free_fixed_variable( void * xvar, void * data ); + +void delete_module( module_t * m ) +{ + /* Clear out all the rules. 
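+     * (It also releases the native rules, the variable table, the
+     * fixed-variable slots and their index table, and the imported-module
+     * set; the module_t itself and its name are left to the caller.)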
*/ + if ( m->rules ) + { + hashenumerate( m->rules, delete_rule_, (void *)0 ); + hash_free( m->rules ); + m->rules = 0; + } + + if ( m->native_rules ) + { + hashenumerate( m->native_rules, delete_native_rule, (void *)0 ); + hash_free( m->native_rules ); + m->native_rules = 0; + } + + if ( m->variables ) + { + var_done( m ); + m->variables = 0; + } + + if ( m->fixed_variables ) + { + int i; + for ( i = 0; i < m->num_fixed_variables; ++i ) + { + list_free( m->fixed_variables[ i ] ); + } + BJAM_FREE( m->fixed_variables ); + m->fixed_variables = 0; + } + + if ( m->variable_indices ) + { + hashenumerate( m->variable_indices, &free_fixed_variable, (void *)0 ); + hash_free( m->variable_indices ); + m->variable_indices = 0; + } + + if ( m->imported_modules ) + { + hashenumerate( m->imported_modules, delete_imported_modules, (void *)0 ); + hash_free( m->imported_modules ); + m->imported_modules = 0; + } +} + + +struct module_stats +{ + OBJECT * module_name; + struct hashstats rules_stats[ 1 ]; + struct hashstats variables_stats[ 1 ]; + struct hashstats variable_indices_stats[ 1 ]; + struct hashstats imported_modules_stats[ 1 ]; +}; + + +static void module_stat( struct hash * hp, OBJECT * module, const char * name ) +{ + if ( hp ) + { + struct hashstats stats[ 1 ]; + string id[ 1 ]; + hashstats_init( stats ); + string_new( id ); + string_append( id, object_str( module ) ); + string_push_back( id, ' ' ); + string_append( id, name ); + + hashstats_add( stats, hp ); + hashstats_print( stats, id->value ); + + string_free( id ); + } +} + + +static void class_module_stat( struct hashstats * stats, OBJECT * module, const char * name ) +{ + if ( stats->item_size ) + { + string id[ 1 ]; + string_new( id ); + string_append( id, object_str( module ) ); + string_append( id, " object " ); + string_append( id, name ); + + hashstats_print( stats, id->value ); + + string_free( id ); + } +} + + +static void stat_module( void * xmodule, void * data ) +{ + module_t *m = (module_t *)xmodule; + + if ( DEBUG_MEM || DEBUG_PROFILE ) + { + struct hash * class_info = (struct hash *)data; + if ( m->class_module ) + { + int found; + struct module_stats * ms = (struct module_stats *)hash_insert( class_info, m->class_module->name, &found ); + if ( !found ) + { + ms->module_name = m->class_module->name; + hashstats_init( ms->rules_stats ); + hashstats_init( ms->variables_stats ); + hashstats_init( ms->variable_indices_stats ); + hashstats_init( ms->imported_modules_stats ); + } + + hashstats_add( ms->rules_stats, m->rules ); + hashstats_add( ms->variables_stats, m->variables ); + hashstats_add( ms->variable_indices_stats, m->variable_indices ); + hashstats_add( ms->imported_modules_stats, m->imported_modules ); + } + else + { + module_stat( m->rules, m->name, "rules" ); + module_stat( m->variables, m->name, "variables" ); + module_stat( m->variable_indices, m->name, "fixed variables" ); + module_stat( m->imported_modules, m->name, "imported modules" ); + } + } + + delete_module( m ); + object_free( m->name ); +} + +static void print_class_stats( void * xstats, void * data ) +{ + struct module_stats * stats = (struct module_stats *)xstats; + class_module_stat( stats->rules_stats, stats->module_name, "rules" ); + class_module_stat( stats->variables_stats, stats->module_name, "variables" ); + class_module_stat( stats->variable_indices_stats, stats->module_name, "fixed variables" ); + class_module_stat( stats->imported_modules_stats, stats->module_name, "imported modules" ); +} + + +static void delete_module_( void * xmodule, void * 
data ) +{ + module_t *m = (module_t *)xmodule; + + delete_module( m ); + object_free( m->name ); +} + + +void modules_done() +{ + if ( module_hash ) + { + if ( DEBUG_MEM || DEBUG_PROFILE ) + { + struct hash * class_hash = hashinit( sizeof( struct module_stats ), "object info" ); + hashenumerate( module_hash, stat_module, (void *)class_hash ); + hashenumerate( class_hash, print_class_stats, (void *)0 ); + hash_free( class_hash ); + } + hashenumerate( module_hash, delete_module_, (void *)0 ); + hashdone( module_hash ); + } + module_hash = 0; + delete_module( &root ); +} + +module_t * root_module() +{ + return &root; +} + + +void import_module( LIST * module_names, module_t * target_module ) +{ + PROFILE_ENTER( IMPORT_MODULE ); + + struct hash * h; + LISTITER iter; + LISTITER end; + + if ( !target_module->imported_modules ) + target_module->imported_modules = hashinit( sizeof( char * ), "imported" + ); + h = target_module->imported_modules; + + iter = list_begin( module_names ); + end = list_end( module_names ); + for ( ; iter != end; iter = list_next( iter ) ) + { + int found; + OBJECT * const s = list_item( iter ); + OBJECT * * const ss = (OBJECT * *)hash_insert( h, s, &found ); + if ( !found ) + *ss = object_copy( s ); + } + + PROFILE_EXIT( IMPORT_MODULE ); +} + + +static void add_module_name( void * r_, void * result_ ) +{ + OBJECT * * const r = (OBJECT * *)r_; + LIST * * const result = (LIST * *)result_; + *result = list_push_back( *result, object_copy( *r ) ); +} + + +LIST * imported_modules( module_t * module ) +{ + LIST * result = L0; + if ( module->imported_modules ) + hashenumerate( module->imported_modules, add_module_name, &result ); + return result; +} + + +FUNCTION * function_bind_variables( FUNCTION *, module_t *, int * counter ); +FUNCTION * function_unbind_variables( FUNCTION * ); + +struct fixed_variable +{ + OBJECT * key; + int n; +}; + +struct bind_vars_t +{ + module_t * module; + int counter; +}; + + +static void free_fixed_variable( void * xvar, void * data ) +{ + object_free( ( (struct fixed_variable *)xvar )->key ); +} + + +static void bind_variables_for_rule( void * xrule, void * xdata ) +{ + RULE * rule = (RULE *)xrule; + struct bind_vars_t * data = (struct bind_vars_t *)xdata; + if ( rule->procedure && rule->module == data->module ) + rule->procedure = function_bind_variables( rule->procedure, + data->module, &data->counter ); +} + + +void module_bind_variables( struct module_t * m ) +{ + if ( m != root_module() && m->rules ) + { + struct bind_vars_t data; + data.module = m; + data.counter = m->num_fixed_variables; + hashenumerate( m->rules, &bind_variables_for_rule, &data ); + module_set_fixed_variables( m, data.counter ); + } +} + + +int module_add_fixed_var( struct module_t * m, OBJECT * name, int * counter ) +{ + struct fixed_variable * v; + int found; + + assert( !m->class_module ); + + if ( !m->variable_indices ) + m->variable_indices = hashinit( sizeof( struct fixed_variable ), "variable index table" ); + + v = (struct fixed_variable *)hash_insert( m->variable_indices, name, &found ); + if ( !found ) + { + v->key = object_copy( name ); + v->n = (*counter)++; + } + + return v->n; +} + + +LIST * var_get_and_clear_raw( module_t * m, OBJECT * name ); + +static void load_fixed_variable( void * xvar, void * data ) +{ + struct fixed_variable * var = (struct fixed_variable *)xvar; + struct module_t * m = (struct module_t *)data; + if ( var->n >= m->num_fixed_variables ) + m->fixed_variables[ var->n ] = var_get_and_clear_raw( m, var->key ); +} + + +void 
module_set_fixed_variables( struct module_t * m, int n_variables ) +{ + /* Reallocate */ + struct hash * variable_indices; + LIST * * fixed_variables = (LIST * *)BJAM_MALLOC( n_variables * sizeof( LIST * ) ); + if ( m->fixed_variables ) + { + memcpy( fixed_variables, m->fixed_variables, m->num_fixed_variables * sizeof( LIST * ) ); + BJAM_FREE( m->fixed_variables ); + } + m->fixed_variables = fixed_variables; + variable_indices = m->class_module + ? m->class_module->variable_indices + : m->variable_indices; + if ( variable_indices ) + hashenumerate( variable_indices, &load_fixed_variable, m ); + m->num_fixed_variables = n_variables; +} + + +int module_get_fixed_var( struct module_t * m_, OBJECT * name ) +{ + struct fixed_variable * v; + struct module_t * m = m_; + + if ( m->class_module ) + m = m->class_module; + + if ( !m->variable_indices ) + return -1; + + v = (struct fixed_variable *)hash_find( m->variable_indices, name ); + return v && v->n < m_->num_fixed_variables ? v->n : -1; +} diff --git a/src/boost/tools/build/src/engine/modules.h b/src/boost/tools/build/src/engine/modules.h new file mode 100644 index 000000000..3a07b11c3 --- /dev/null +++ b/src/boost/tools/build/src/engine/modules.h @@ -0,0 +1,57 @@ +/* + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ +#ifndef MODULES_DWA10182001_H +#define MODULES_DWA10182001_H + +#include "config.h" +#include "lists.h" +#include "object.h" + +typedef struct module_t module_t ; + +typedef module_t * module_ptr; + +struct module_t +{ + OBJECT * name; + struct hash * rules; + struct hash * variables; + struct hash * variable_indices; + int num_fixed_variables; + LIST * * fixed_variables; + struct hash * imported_modules; + module_t * class_module; + struct hash * native_rules; + int user_module; +}; + +module_t * bindmodule( OBJECT * name ); +module_t * root_module(); +void delete_module( module_t * ); + +void import_module( LIST * module_names, module_t * target_module ); +LIST * imported_modules( module_t * ); + +struct hash * demand_rules( module_t * ); + +void module_bind_variables( module_t * ); + +/* + * After calling module_add_fixed_var, module_set_fixed_variables must be called + * before accessing any variables in the module. + */ +int module_add_fixed_var( module_t *, OBJECT * name, int * n ); +void module_set_fixed_variables( module_t *, int n ); + +/* + * Returns the index of the variable or -1 if none exists. + */ +int module_get_fixed_var( module_t *, OBJECT * name ); + +void modules_done(); + +#endif diff --git a/src/boost/tools/build/src/engine/modules/order.cpp b/src/boost/tools/build/src/engine/modules/order.cpp new file mode 100644 index 000000000..527359e17 --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/order.cpp @@ -0,0 +1,159 @@ +/* Copyright 2004. Vladimir Prus + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "../lists.h" +#include "../mem.h" +#include "../native.h" +#include "../object.h" +#include "../jam_strings.h" +#include "../variable.h" + + +/* Use quite klugy approach: when we add order dependency from 'a' to 'b', just + * append 'b' to of value of variable 'a'. 
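+ * As a purely illustrative sketch (hypothetical names, not taken from the
+ * original callers): recording the pair "a" -> "b" amounts to something like
+ *
+ *   var_set( frame->module, object_new( "a" ),
+ *            list_new( object_new( "b" ) ), VAR_APPEND );
+ *
+ * i.e. the module variable "a" accumulates the names that must follow "a",
+ * and order() below reads those variables back with var_get() to rebuild the
+ * dependency graph.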
+ */ +LIST * add_pair( FRAME * frame, int32_t flags ) +{ + LIST * arg = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( arg ); + LISTITER const end = list_end( arg ); + var_set( frame->module, list_item( iter ), list_copy_range( arg, list_next( + iter ), end ), VAR_APPEND ); + return L0; +} + + +/* Given a list and a value, returns position of that value in the list, or -1 + * if not found. + */ +int32_t list_index( LIST * list, OBJECT * value ) +{ + int32_t result = 0; + LISTITER iter = list_begin( list ); + LISTITER const end = list_end( list ); + for ( ; iter != end; iter = list_next( iter ), ++result ) + if ( object_equal( list_item( iter ), value ) ) + return result; + return -1; +} + +enum colors { white, gray, black }; + + +/* Main routine for topological sort. Calls itself recursively on all adjacent + * vertices which were not yet visited. After that, 'current_vertex' is added to + * '*result_ptr'. + */ +void do_ts( int32_t * * graph, int32_t current_vertex, int32_t * colors, int32_t * * result_ptr + ) +{ + int32_t i; + + colors[ current_vertex ] = gray; + for ( i = 0; graph[ current_vertex ][ i ] != -1; ++i ) + { + int32_t adjacent_vertex = graph[ current_vertex ][ i ]; + if ( colors[ adjacent_vertex ] == white ) + do_ts( graph, adjacent_vertex, colors, result_ptr ); + /* The vertex is either black, in which case we do not have to do + * anything, or gray, in which case we have a loop. If we have a loop, + * it is not clear what useful diagnostic we can emit, so we emit + * nothing. + */ + } + colors[ current_vertex ] = black; + **result_ptr = current_vertex; + ( *result_ptr )++; +} + + +static void topological_sort( int32_t * * graph, int32_t num_vertices, int32_t * result ) +{ + int32_t i; + int32_t * colors = ( int32_t * )BJAM_CALLOC( num_vertices, sizeof( int32_t ) ); + for ( i = 0; i < num_vertices; ++i ) + colors[ i ] = white; + + for ( i = num_vertices - 1; i >= 0; --i ) + if ( colors[ i ] == white ) + do_ts( graph, i, colors, &result ); + + BJAM_FREE( colors ); +} + + +LIST * order( FRAME * frame, int32_t flags ) +{ + LIST * arg = lol_get( frame->args, 0 ); + LIST * result = L0; + int32_t src; + LISTITER iter = list_begin( arg ); + LISTITER const end = list_end( arg ); + + /* We need to create a graph of order dependencies between the passed + * objects. We assume there are no duplicates passed to 'add_pair'. + */ + int32_t length = list_length( arg ); + int32_t * * graph = ( int32_t * * )BJAM_CALLOC( length, sizeof( int32_t * ) ); + int32_t * order = ( int32_t * )BJAM_MALLOC( ( length + 1 ) * sizeof( int32_t ) ); + + for ( src = 0; iter != end; iter = list_next( iter ), ++src ) + { + /* For all objects this one depends upon, add elements to 'graph'. 
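+         * For illustration (hypothetical data): with arguments ( a b c ) and
+         * a single recorded pair "a" -> "b", the row built for "a" becomes
+         * { 1, -1 }: 1 is the position of "b" in the argument list and -1
+         * terminates the row, which is how do_ts() knows where to stop.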
*/ + LIST * dependencies = var_get( frame->module, list_item( iter ) ); + int32_t index = 0; + LISTITER dep_iter = list_begin( dependencies ); + LISTITER const dep_end = list_end( dependencies ); + + graph[ src ] = ( int32_t * )BJAM_CALLOC( list_length( dependencies ) + 1, + sizeof( int32_t ) ); + for ( ; dep_iter != dep_end; dep_iter = list_next( dep_iter ) ) + { + int32_t const dst = list_index( arg, list_item( dep_iter ) ); + if ( dst != -1 ) + graph[ src ][ index++ ] = dst; + } + graph[ src ][ index ] = -1; + } + + topological_sort( graph, length, order ); + + { + int32_t index = length - 1; + for ( ; index >= 0; --index ) + { + int32_t i; + LISTITER iter = list_begin( arg ); + for ( i = 0; i < order[ index ]; ++i, iter = list_next( iter ) ); + result = list_push_back( result, object_copy( list_item( iter ) ) ); + } + } + + /* Clean up */ + { + int32_t i; + for ( i = 0; i < length; ++i ) + BJAM_FREE( graph[ i ] ); + BJAM_FREE( graph ); + BJAM_FREE( order ); + } + + return result; +} + + +void init_order() +{ + { + char const * args[] = { "first", "second", 0 }; + declare_native_rule( "class@order", "add-pair", args, add_pair, 1 ); + } + + { + char const * args[] = { "objects", "*", 0 }; + declare_native_rule( "class@order", "order", args, order, 1 ); + } +} diff --git a/src/boost/tools/build/src/engine/modules/path.cpp b/src/boost/tools/build/src/engine/modules/path.cpp new file mode 100644 index 000000000..35d753d5e --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/path.cpp @@ -0,0 +1,25 @@ +/* Copyright Vladimir Prus 2003. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "../constants.h" +#include "../frames.h" +#include "../lists.h" +#include "../native.h" +#include "../filesys.h" + + +LIST * path_exists( FRAME * frame, int flags ) +{ + return file_query( list_front( lol_get( frame->args, 0 ) ) ) ? + list_new( object_copy( constant_true ) ) : L0; +} + + +void init_path() +{ + char const * args[] = { "location", 0 }; + declare_native_rule( "path", "exists", args, path_exists, 1 ); +} diff --git a/src/boost/tools/build/src/engine/modules/property-set.cpp b/src/boost/tools/build/src/engine/modules/property-set.cpp new file mode 100644 index 000000000..b5a11abcf --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/property-set.cpp @@ -0,0 +1,334 @@ +/* + * Copyright 2013 Steven Watanabe + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "../object.h" +#include "../lists.h" +#include "../modules.h" +#include "../rules.h" +#include "../variable.h" +#include "../native.h" +#include "../compile.h" +#include "../mem.h" +#include "../constants.h" +#include "string.h" + +struct ps_map_entry +{ + struct ps_map_entry * next; + LIST * key; + OBJECT * value; +}; + +struct ps_map +{ + struct ps_map_entry * * table; + int32_t table_size; + int32_t num_elems; +}; + +static unsigned list_hash(LIST * key) +{ + unsigned int hash = 0; + LISTITER iter = list_begin( key ), end = list_end( key ); + for ( ; iter != end; ++iter ) + { + hash = hash * 2147059363 + object_hash( list_item( iter ) ); + } + return hash; +} + +static int list_equal( LIST * lhs, LIST * rhs ) +{ + LISTITER lhs_iter, lhs_end, rhs_iter; + if ( list_length( lhs ) != list_length( rhs ) ) + { + return 0; + } + lhs_iter = list_begin( lhs ); + lhs_end = list_end( lhs ); + rhs_iter = list_begin( rhs ); + for ( ; lhs_iter != lhs_end; ++lhs_iter, ++rhs_iter ) + { + if ( ! object_equal( list_item( lhs_iter ), list_item( rhs_iter ) ) ) + { + return 0; + } + } + return 1; +} + +static void ps_map_init( struct ps_map * map ) +{ + int32_t i; + map->table_size = 2; + map->num_elems = 0; + map->table = (struct ps_map_entry * *)BJAM_MALLOC( size_t(map->table_size) * sizeof( struct ps_map_entry * ) ); + for ( i = 0; i < map->table_size; ++i ) + { + map->table[ i ] = NULL; + } +} + +static void ps_map_destroy( struct ps_map * map ) +{ + int32_t i; + for ( i = 0; i < map->table_size; ++i ) + { + struct ps_map_entry * pos; + for ( pos = map->table[ i ]; pos; ) + { + struct ps_map_entry * tmp = pos->next; + object_free( pos->value ); + BJAM_FREE( pos ); + pos = tmp; + } + } + BJAM_FREE( map->table ); +} + +static void ps_map_rehash( struct ps_map * map ) +{ + struct ps_map old = *map; + int32_t i; + map->table = (struct ps_map_entry * *)BJAM_MALLOC( size_t(map->table_size) * 2 * sizeof( struct ps_map_entry * ) ); + map->table_size *= 2; + for ( i = 0; i < map->table_size; ++i ) + { + map->table[ i ] = NULL; + } + for ( i = 0; i < old.table_size; ++i ) + { + struct ps_map_entry * pos; + for ( pos = old.table[ i ]; pos; ) + { + struct ps_map_entry * tmp = pos->next; + + unsigned hash_val = list_hash( pos->key ); + unsigned bucket = hash_val % map->table_size; + pos->next = map->table[ bucket ]; + map->table[ bucket ] = pos; + + pos = tmp; + } + } + BJAM_FREE( old.table ); +} + +static struct ps_map_entry * ps_map_insert(struct ps_map * map, LIST * key) +{ + unsigned hash_val = list_hash( key ); + unsigned bucket = hash_val % map->table_size; + struct ps_map_entry * pos; + for ( pos = map->table[bucket]; pos ; pos = pos->next ) + { + if ( list_equal( pos->key, key ) ) + return pos; + } + + if ( map->num_elems >= map->table_size ) + { + ps_map_rehash( map ); + bucket = hash_val % map->table_size; + } + pos = (struct ps_map_entry *)BJAM_MALLOC( sizeof( struct ps_map_entry ) ); + pos->next = map->table[bucket]; + pos->key = key; + pos->value = 0; + map->table[bucket] = pos; + ++map->num_elems; + return pos; +} + +static struct ps_map all_property_sets; + +LIST * property_set_create( FRAME * frame, int flags ) +{ + LIST * properties = lol_get( frame->args, 0 ); + LIST * sorted = list_sort( properties ); + LIST * unique = list_unique( sorted ); + struct ps_map_entry * pos = ps_map_insert( &all_property_sets, unique ); + list_free( sorted ); + if ( pos->value ) + { + list_free( unique ); + 
return list_new( object_copy( pos->value ) ); + } + else + { + OBJECT * rulename = object_new( "new" ); + OBJECT * varname = object_new( "self.raw" ); + LIST * val = call_rule( rulename, frame, + list_new( object_new( "property-set" ) ), 0 ); + LISTITER iter, end; + object_free( rulename ); + pos->value = object_copy( list_front( val ) ); + var_set( bindmodule( pos->value ), varname, unique, VAR_SET ); + object_free( varname ); + + for ( iter = list_begin( unique ), end = list_end( unique ); iter != end; ++iter ) + { + const char * str = object_str( list_item( iter ) ); + if ( str[ 0 ] != '<' || ! strchr( str, '>' ) ) + { + string message[ 1 ]; + string_new( message ); + string_append( message, "Invalid property: '" ); + string_append( message, str ); + string_append( message, "'" ); + LIST * imports = list_new( object_new( "errors" ) ); + import_module( imports, frame->module ); + rulename = object_new( "errors.error" ); + call_rule( rulename, frame, + list_new( object_new( message->value ) ), 0 ); + /* unreachable */ + string_free( message ); + object_free( list_front( imports ) ); + list_free( imports ); + object_free( rulename ); + } + } + + return val; + } +} + +/* binary search for the property value */ +LIST * property_set_get( FRAME * frame, int flags ) +{ + OBJECT * varname = object_new( "self.raw" ); + LIST * props = var_get( frame->module, varname ); + const char * name = object_str( list_front( lol_get( frame->args, 0 ) ) ); + size_t name_len = strlen( name ); + LISTITER begin, end; + LIST * result = L0; + object_free( varname ); + + /* Assumes random access */ + begin = list_begin( props ), end = list_end( props ); + + while ( 1 ) + { + ptrdiff_t diff = (end - begin); + LISTITER mid = begin + diff / 2; + int res; + if ( diff == 0 ) + { + return L0; + } + res = strncmp( object_str( list_item( mid ) ), name, name_len ); + if ( res < 0 ) + { + begin = mid + 1; + } + else if ( res > 0 ) + { + end = mid; + } + else /* We've found the property */ + { + /* Find the beginning of the group */ + LISTITER tmp = mid; + while ( tmp > begin ) + { + --tmp; + res = strncmp( object_str( list_item( tmp ) ), name, name_len ); + if ( res != 0 ) + { + ++tmp; + break; + } + } + begin = tmp; + /* Find the end of the group */ + tmp = mid + 1; + while ( tmp < end ) + { + res = strncmp( object_str( list_item( tmp ) ), name, name_len ); + if ( res != 0 ) break; + ++tmp; + } + end = tmp; + break; + } + } + + for ( ; begin != end; ++begin ) + { + result = list_push_back( result, + object_new( object_str( list_item( begin ) ) + name_len ) ); + } + + return result; +} + +/* binary search for the property value */ +LIST * property_set_contains_features( FRAME * frame, int flags ) +{ + OBJECT * varname = object_new( "self.raw" ); + LIST * props = var_get( frame->module, varname ); + LIST * features = lol_get( frame->args, 0 ); + LISTITER features_iter = list_begin( features ); + LISTITER features_end = list_end( features ) ; + object_free( varname ); + + for ( ; features_iter != features_end; ++features_iter ) + { + const char * name = object_str( list_item( features_iter ) ); + size_t name_len = strlen( name ); + LISTITER begin, end; + /* Assumes random access */ + begin = list_begin( props ), end = list_end( props ); + + while ( 1 ) + { + ptrdiff_t diff = (end - begin); + LISTITER mid = begin + diff / 2; + int res; + if ( diff == 0 ) + { + /* The feature is missing */ + return L0; + } + res = strncmp( object_str( list_item( mid ) ), name, name_len ); + if ( res < 0 ) + { + begin = mid + 1; + } + else if ( 
res > 0 ) + { + end = mid; + } + else /* We've found the property */ + { + break; + } + } + } + return list_new( object_copy( constant_true ) ); +} + +void init_property_set() +{ + { + char const * args[] = { "raw-properties", "*", 0 }; + declare_native_rule( "property-set", "create", args, property_set_create, 1 ); + } + { + char const * args[] = { "feature", 0 }; + declare_native_rule( "class@property-set", "get", args, property_set_get, 1 ); + } + { + char const * args[] = { "features", "*", 0 }; + declare_native_rule( "class@property-set", "contains-features", args, property_set_contains_features, 1 ); + } + ps_map_init( &all_property_sets ); +} + +void property_set_done() +{ + ps_map_destroy( &all_property_sets ); +} diff --git a/src/boost/tools/build/src/engine/modules/readme.txt b/src/boost/tools/build/src/engine/modules/readme.txt new file mode 100644 index 000000000..2a08ba10e --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/readme.txt @@ -0,0 +1,3 @@ + +This directory constains sources which declare native +rules for B2 modules. \ No newline at end of file diff --git a/src/boost/tools/build/src/engine/modules/regex.cpp b/src/boost/tools/build/src/engine/modules/regex.cpp new file mode 100644 index 000000000..95fe19d14 --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/regex.cpp @@ -0,0 +1,233 @@ +/* + * Copyright 2003. Vladimir Prus + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "../mem.h" +#include "../native.h" +#include "../jam_strings.h" +#include "../subst.h" + +/* +rule split ( string separator ) +{ + local result ; + local s = $(string) ; + + local match = 1 ; + while $(match) + { + match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ; + if $(match) + { + match += "" ; # in case 3rd item was empty - works around MATCH bug + result = $(match[3]) $(result) ; + s = $(match[1]) ; + } + } + return $(s) $(result) ; +} +*/ + +LIST * regex_split( FRAME * frame, int flags ) +{ + LIST * args = lol_get( frame->args, 0 ); + OBJECT * s; + OBJECT * separator; + regexp * re; + const char * pos, * prev; + LIST * result = L0; + LISTITER iter = list_begin( args ); + s = list_item( iter ); + separator = list_item( list_next( iter ) ); + + re = regex_compile( separator ); + + prev = pos = object_str( s ); + while ( regexec( re, pos ) ) + { + result = list_push_back( result, object_new_range( prev, int32_t(re->startp[ 0 ] - prev) ) ); + prev = re->endp[ 0 ]; + /* Handle empty matches */ + if ( *pos == '\0' ) + break; + else if ( pos == re->endp[ 0 ] ) + pos++; + else + pos = re->endp[ 0 ]; + } + + result = list_push_back( result, object_new( pos ) ); + + return result; +} + +/* +rule replace ( + string # The string to modify. + match # The characters to replace. + replacement # The string to replace with. 
+ ) +{ + local result = "" ; + local parts = 1 ; + while $(parts) + { + parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ; + if $(parts) + { + parts += "" ; + result = "$(replacement)$(parts[3])$(result)" ; + string = $(parts[1]) ; + } + } + string ?= "" ; + result = "$(string)$(result)" ; + return $(result) ; +} +*/ + +LIST * regex_replace( FRAME * frame, int flags ) +{ + LIST * args = lol_get( frame->args, 0 ); + OBJECT * s; + OBJECT * match; + OBJECT * replacement; + regexp * re; + const char * pos; + string buf[ 1 ]; + LIST * result; + LISTITER iter = list_begin( args ); + s = list_item( iter ); + iter = list_next( iter ); + match = list_item( iter ); + iter = list_next( iter ); + replacement = list_item(iter ); + + re = regex_compile( match ); + + string_new( buf ); + + pos = object_str( s ); + while ( regexec( re, pos ) ) + { + string_append_range( buf, pos, re->startp[ 0 ] ); + string_append( buf, object_str( replacement ) ); + /* Handle empty matches */ + if ( *pos == '\0' ) + break; + else if ( pos == re->endp[ 0 ] ) + string_push_back( buf, *pos++ ); + else + pos = re->endp[ 0 ]; + } + string_append( buf, pos ); + + result = list_new( object_new( buf->value ) ); + + string_free( buf ); + + return result; +} + +/* +rule transform ( list * : pattern : indices * ) +{ + indices ?= 1 ; + local result ; + for local e in $(list) + { + local m = [ MATCH $(pattern) : $(e) ] ; + if $(m) + { + result += $(m[$(indices)]) ; + } + } + return $(result) ; +} +*/ + +LIST * regex_transform( FRAME * frame, int flags ) +{ + LIST * const l = lol_get( frame->args, 0 ); + LIST * const pattern = lol_get( frame->args, 1 ); + LIST * const indices_list = lol_get( frame->args, 2 ); + int * indices = 0; + int size; + LIST * result = L0; + + if ( !list_empty( indices_list ) ) + { + int * p; + LISTITER iter = list_begin( indices_list ); + LISTITER const end = list_end( indices_list ); + size = list_length( indices_list ); + indices = (int *)BJAM_MALLOC( size * sizeof( int ) ); + for ( p = indices; iter != end; iter = list_next( iter ) ) + *p++ = atoi( object_str( list_item( iter ) ) ); + } + else + { + size = 1; + indices = (int *)BJAM_MALLOC( sizeof( int ) ); + *indices = 1; + } + + { + /* Result is cached and intentionally never freed */ + regexp * const re = regex_compile( list_front( pattern ) ); + + LISTITER iter = list_begin( l ); + LISTITER const end = list_end( l ); + + string buf[ 1 ]; + string_new( buf ); + + for ( ; iter != end; iter = list_next( iter ) ) + { + if ( regexec( re, object_str( list_item( iter ) ) ) ) + { + int i = 0; + for ( ; i < size; ++i ) + { + int const index = indices[ i ]; + /* Skip empty submatches. Not sure it is right in all cases, + * but surely is right for the case for which this routine + * is optimized -- header scanning. 
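+                     * As a hypothetical illustration (the pattern is made up
+                     * for this comment, not taken from the callers): an
+                     * include-scanning pattern such as
+                     *   "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")"
+                     * used with indices 2 and 3 matches only one of the two
+                     * inner groups on any given line, so the empty group is
+                     * skipped here instead of emitting a blank element.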
+ */ + if ( re->startp[ index ] != re->endp[ index ] ) + { + string_append_range( buf, re->startp[ index ], + re->endp[ index ] ); + result = list_push_back( result, object_new( buf->value + ) ); + string_truncate( buf, 0 ); + } + } + } + } + string_free( buf ); + } + + BJAM_FREE( indices ); + return result; +} + + +void init_regex() +{ + { + char const * args[] = { "string", "separator", 0 }; + declare_native_rule( "regex", "split", args, regex_split, 1 ); + } + { + char const * args[] = { "string", "match", "replacement", 0 }; + declare_native_rule( "regex", "replace", args, regex_replace, 1 ); + } + { + char const * args[] = { "list", "*", ":", "pattern", ":", "indices", "*", 0 }; + declare_native_rule( "regex", "transform", args, regex_transform, 2 ); + } +} diff --git a/src/boost/tools/build/src/engine/modules/sequence.cpp b/src/boost/tools/build/src/engine/modules/sequence.cpp new file mode 100644 index 000000000..acf20b827 --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/sequence.cpp @@ -0,0 +1,96 @@ +/* + * Copyright Vladimir Prus 2003. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "../native.h" +#include "../object.h" +#include "../lists.h" +#include "../compile.h" + +#include + + +#ifndef max +# define max(a,b) ((a)>(b)?(a):(b)) +#endif + + +LIST * sequence_select_highest_ranked( FRAME * frame, int flags ) +{ + /* Returns all of 'elements' for which corresponding element in parallel */ + /* list 'rank' is equal to the maximum value in 'rank'. */ + + LIST * const elements = lol_get( frame->args, 0 ); + LIST * const rank = lol_get( frame->args, 1 ); + + LIST * result = L0; + int highest_rank = -1; + + { + LISTITER iter = list_begin( rank ); + LISTITER const end = list_end( rank ); + for ( ; iter != end; iter = list_next( iter ) ) + { + int const current = atoi( object_str( list_item( iter ) ) ); + highest_rank = max( highest_rank, current ); + } + } + + { + LISTITER iter = list_begin( rank ); + LISTITER const end = list_end( rank ); + LISTITER elements_iter = list_begin( elements ); + for ( ; iter != end; iter = list_next( iter ), elements_iter = + list_next( elements_iter ) ) + if ( atoi( object_str( list_item( iter ) ) ) == highest_rank ) + result = list_push_back( result, object_copy( list_item( + elements_iter ) ) ); + } + + return result; +} + +LIST * sequence_transform( FRAME * frame, int flags ) +{ + LIST * function = lol_get( frame->args, 0 ); + LIST * sequence = lol_get( frame->args, 1 ); + LIST * result = L0; + OBJECT * function_name = list_front( function ); + LISTITER args_begin = list_next( list_begin( function ) ), args_end = list_end( function ); + LISTITER iter = list_begin( sequence ), end = list_end( sequence ); + RULE * rule = bindrule( function_name, frame->prev->module ); + + for ( ; iter != end; iter = list_next( iter ) ) + { + FRAME inner[ 1 ]; + + frame_init( inner ); + inner->prev = frame; + inner->prev_user = frame->prev_user; + inner->module = frame->prev->module; + + lol_add( inner->args, list_push_back( list_copy_range( function, args_begin, args_end ), object_copy( list_item( iter ) ) ) ); + result = list_append( result, evaluate_rule( rule, function_name, inner ) ); + + frame_free( inner ); + } + + return result; +} + +void init_sequence() +{ + { + char const * args[] = { "elements", "*", ":", "rank", "*", 0 }; + declare_native_rule( "sequence", "select-highest-ranked", args, + sequence_select_highest_ranked, 1 ); 
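+        /* Illustrative note (hypothetical values): given elements (a b c)
+         * and parallel ranks (1 3 3), the rule registered above returns
+         * (b c), i.e. every element whose rank equals the maximum rank. */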
+ } + { + char const * args[] = { "function", "+", ":", "sequence", "*", 0 }; + declare_native_rule( "sequence", "transform", args, + sequence_transform, 1 ); + } +} diff --git a/src/boost/tools/build/src/engine/modules/set.cpp b/src/boost/tools/build/src/engine/modules/set.cpp new file mode 100644 index 000000000..325f81220 --- /dev/null +++ b/src/boost/tools/build/src/engine/modules/set.cpp @@ -0,0 +1,43 @@ +/* Copyright Vladimir Prus 2003. Distributed under the Boost */ +/* Software License, Version 1.0. (See accompanying */ +/* file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) */ + +#include "../native.h" +#include "../object.h" + +/* + local result = ; + local element ; + for element in $(B) + { + if ! ( $(element) in $(A) ) + { + result += $(element) ; + } + } + return $(result) ; +*/ +LIST *set_difference( FRAME *frame, int flags ) +{ + + LIST* b = lol_get( frame->args, 0 ); + LIST* a = lol_get( frame->args, 1 ); + + LIST* result = L0; + LISTITER iter = list_begin( b ), end = list_end( b ); + for( ; iter != end; iter = list_next( iter ) ) + { + if (!list_in(a, list_item(iter))) + result = list_push_back(result, object_copy(list_item(iter))); + } + return result; +} + +void init_set() +{ + { + const char* args[] = { "B", "*", ":", "A", "*", 0 }; + declare_native_rule("set", "difference", args, set_difference, 1); + } + +} diff --git a/src/boost/tools/build/src/engine/native.cpp b/src/boost/tools/build/src/engine/native.cpp new file mode 100644 index 000000000..157d19185 --- /dev/null +++ b/src/boost/tools/build/src/engine/native.cpp @@ -0,0 +1,34 @@ +/* Copyright 2003. Vladimir Prus + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "native.h" + +#include "hash.h" + +#include + + +void declare_native_rule( char const * module, char const * rule, + char const * * args, LIST * (*f)( FRAME *, int32_t ), int32_t version ) +{ + OBJECT * const module_obj = module ? object_new( module ) : 0 ; + module_t * m = bindmodule( module_obj ); + if ( module_obj ) + object_free( module_obj ); + if ( !m->native_rules ) + m->native_rules = hashinit( sizeof( native_rule_t ), "native rules" ); + + { + OBJECT * const name = object_new( rule ); + int32_t found; + native_rule_t * const np = (native_rule_t *)hash_insert( + m->native_rules, name, &found ); + np->name = name; + assert( !found ); + np->procedure = function_builtin( f, 0, args ); + np->version = version; + } +} diff --git a/src/boost/tools/build/src/engine/native.h b/src/boost/tools/build/src/engine/native.h new file mode 100644 index 000000000..d31c48a0a --- /dev/null +++ b/src/boost/tools/build/src/engine/native.h @@ -0,0 +1,35 @@ +/* Copyright 2003. David Abrahams + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef NATIVE_H_VP_2003_12_09 +#define NATIVE_H_VP_2003_12_09 + +#include "config.h" +#include "function.h" +#include "frames.h" +#include "lists.h" +#include "object.h" + +typedef struct native_rule_t +{ + OBJECT * name; + FUNCTION * procedure; + + /* Version of the interface that the native rule provides. It is possible + * that we want to change the set parameter for existing native rule. In + * that case, version number should be incremented so B2 can check + * for the version it relies on. + * + * Versions are numbered from 1. 
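+ *
+ * For example, the calls in sequence.cpp register their rules with version 1
+ * ( declare_native_rule( "sequence", "transform", args, sequence_transform,
+ * 1 ) ); if the parameters of such a rule ever changed incompatibly, the
+ * constant passed there would be the one to bump.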
+ */ + int32_t version; +} native_rule_t; +/* MSVC debugger gets confused unless the native_rule_t typedef is provided. */ + +void declare_native_rule( char const * module, char const * rule, + char const * * args, LIST * (*f)( FRAME *, int32_t ), int32_t version ); + +#endif diff --git a/src/boost/tools/build/src/engine/object.cpp b/src/boost/tools/build/src/engine/object.cpp new file mode 100644 index 000000000..fc625e6f5 --- /dev/null +++ b/src/boost/tools/build/src/engine/object.cpp @@ -0,0 +1,404 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * Copyright 2011 Steven Watanabe + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * object.c - object manipulation routines + * + * External functions: + * object_new() - create an object from a string + * object_new_range() - create an object from a string of given length + * object_copy() - return a copy of an object + * object_free() - free an object + * object_str() - get the string value of an object + * object_done() - free string tables + * + * This implementation builds a hash table of all strings, so that multiple + * calls of object_new() on the same string allocate memory for the string once. + * Strings are never actually freed. + */ + +#include "jam.h" +#include "object.h" +#include "output.h" + +#include +#include +#include + + +#define OBJECT_MAGIC 0xa762e0e3u + +#ifndef object_copy + +struct hash_header +{ +#ifndef NDEBUG + unsigned int magic; +#endif + unsigned int hash; + struct hash_item * next; +}; + +#endif + +struct hash_item +{ + struct hash_header header; + char data[ 1 ]; +}; + +#define ALLOC_ALIGNMENT (sizeof(struct hash_item) - sizeof(struct hash_header)) + +typedef struct string_set +{ + int32_t num; + int32_t size; + struct hash_item * * data; +} string_set; + +#if !defined(BJAM_NO_MEM_CACHE) || (BJAM_NO_MEM_CACHE == 0) +static string_set strhash; +#endif +static int32_t strtotal = 0; +static int32_t strcount_in = 0; +static int32_t strcount_out = 0; + + +/* + * Immortal string allocator implementation speeds string allocation and cuts + * down on internal fragmentation. + */ + +#define STRING_BLOCK 4096 +typedef struct strblock +{ + struct strblock * next; + char data[ STRING_BLOCK ]; +} strblock; + +static strblock * strblock_chain = 0; + +#if !defined(BJAM_NO_MEM_CACHE) || (BJAM_NO_MEM_CACHE == 0) +/* Storage remaining in the current strblock */ +static char * storage_start = 0; +static char * storage_finish = 0; + + +/* + * allocate() - Allocate n bytes of immortal string storage. + */ + +static char * allocate( int32_t n ) +{ +#ifdef BJAM_NEWSTR_NO_ALLOCATE + return (char *)BJAM_MALLOC( n ); +#else + /* See if we can grab storage from an existing block. */ + int32_t remaining = int32_t(storage_finish - storage_start); + n = ( ( n + ALLOC_ALIGNMENT - 1 ) / ALLOC_ALIGNMENT ) * ALLOC_ALIGNMENT; + if ( remaining >= n ) + { + char * result = storage_start; + storage_start += n; + return result; + } + else /* Must allocate a new block. */ + { + strblock * new_block; + int32_t nalloc = n; + if ( nalloc < STRING_BLOCK ) + nalloc = STRING_BLOCK; + + /* Allocate a new block and link into the chain. */ + new_block = (strblock *)BJAM_MALLOC( offsetof( strblock, data[ 0 ] ) + + size_t(nalloc) * sizeof( new_block->data[ 0 ] ) ); + if ( new_block == 0 ) + return 0; + new_block->next = strblock_chain; + strblock_chain = new_block; + + /* Take future allocations out of the larger remaining space. 
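+         * Worked example with hypothetical sizes: if 50 bytes remain and a
+         * 100-byte request arrives, a fresh 4096-byte block is allocated and
+         * later requests draw from its leftover; if instead a 5000-byte
+         * request arrives, it gets a dedicated 5000-byte block that is fully
+         * consumed, so the old 50-byte remainder stays the active storage.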
*/ + if ( remaining < nalloc - n ) + { + storage_start = new_block->data + n; + storage_finish = new_block->data + nalloc; + } + return new_block->data; + } +#endif +} +#endif + + +static unsigned int hash_keyval( char const * key, int32_t size ) +{ + unsigned int const magic = 2147059363; + unsigned int hash = 0; + + unsigned int i; + for ( i = 0; i < size / sizeof( unsigned int ); ++i ) + { + unsigned int val; + memcpy( &val, key, sizeof( unsigned int ) ); + hash = hash * magic + val; + key += sizeof( unsigned int ); + } + + { + unsigned int val = 0; + memcpy( &val, key, size % sizeof( unsigned int ) ); + hash = hash * magic + val; + } + + return hash + ( hash >> 17 ); +} + + +#if !defined(BJAM_NO_MEM_CACHE) || (BJAM_NO_MEM_CACHE == 0) +static void string_set_init( string_set * set ) +{ + set->size = 0; + set->num = 4; + set->data = (struct hash_item * *)BJAM_MALLOC( set->num * sizeof( struct hash_item * ) ); + memset( set->data, 0, set->num * sizeof( struct hash_item * ) ); +} + + +static void string_set_done( string_set * set ) +{ + BJAM_FREE( set->data ); +} + + +static void string_set_resize( string_set * set ) +{ + string_set new_set; + new_set.num = set->num * 2; + new_set.size = set->size; + new_set.data = (struct hash_item * *)BJAM_MALLOC( sizeof( struct hash_item * + ) * new_set.num ); + memset( new_set.data, 0, sizeof( struct hash_item * ) * new_set.num ); + for ( int32_t i = 0; i < set->num; ++i ) + { + while ( set->data[ i ] ) + { + struct hash_item * temp = set->data[ i ]; + unsigned pos = temp->header.hash % new_set.num; + set->data[ i ] = temp->header.next; + temp->header.next = new_set.data[ pos ]; + new_set.data[ pos ] = temp; + } + } + BJAM_FREE( set->data ); + *set = new_set; +} + + +static char const * string_set_insert( string_set * set, char const * string, + int32_t const size ) +{ + unsigned hash = hash_keyval( string, size ); + unsigned pos = hash % set->num; + + struct hash_item * result; + + for ( result = set->data[ pos ]; result; result = result->header.next ) + if ( !strncmp( result->data, string, size ) && !result->data[ size ] ) + return result->data; + + if ( set->size >= set->num ) + { + string_set_resize( set ); + pos = hash % set->num; + } + + result = (struct hash_item *)allocate( sizeof( struct hash_header ) + size + + 1 ); + result->header.hash = hash; + result->header.next = set->data[ pos ]; +#ifndef NDEBUG + result->header.magic = OBJECT_MAGIC; +#endif + memcpy( result->data, string, size ); + result->data[ size ] = '\0'; + assert( hash_keyval( result->data, size ) == result->header.hash ); + set->data[ pos ] = result; + strtotal += size + 1; + ++set->size; + + return result->data; +} +#endif + + +/* + * object_new_range() - create an object from a string of given length + */ + +OBJECT * object_new_range( char const * const string, int32_t size ) +{ + ++strcount_in; + +#ifdef BJAM_NO_MEM_CACHE + { + struct hash_item * const m = (struct hash_item *)BJAM_MALLOC( sizeof( + struct hash_header ) + size + 1 ); + strtotal += size + 1; + memcpy( m->data, string, size ); + m->data[ size ] = '\0'; +#ifndef NDEBUG + m->header.magic = OBJECT_MAGIC; +#endif + return (OBJECT *)m->data; + } +#else + if ( !strhash.data ) + string_set_init( &strhash ); + return (OBJECT *)string_set_insert( &strhash, string, size ); +#endif +} + + +/* + * object_new() - create an object from a string + */ + +OBJECT * object_new( char const * const string ) +{ + return object_new_range( string, int32_t(strlen( string )) ); +} + + +#ifndef object_copy + +static struct hash_item * 
object_get_item( OBJECT * obj ) +{ + return (struct hash_item *)( (char *)obj - offsetof( struct hash_item, data + ) ); +} + + +static void object_validate( OBJECT * obj ) +{ + assert( obj ); + assert( object_get_item( obj )->header.magic == OBJECT_MAGIC ); +} + + +/* + * object_copy() - return a copy of an object + */ + +OBJECT * object_copy( OBJECT * obj ) +{ + object_validate( obj ); +#ifdef BJAM_NO_MEM_CACHE + return object_new( object_str( obj ) ); +#else + ++strcount_in; + return obj; +#endif +} + + +/* + * object_free() - free an object + */ + +void object_free( OBJECT * obj ) +{ + object_validate( obj ); +#ifdef BJAM_NO_MEM_CACHE + BJAM_FREE( object_get_item( obj ) ); +#endif + ++strcount_out; +} + + +/* + * object_str() - return the OBJECT's internal C string + */ + +char const * object_str( OBJECT * obj ) +{ + object_validate( obj ); + return (char const *)obj; +} + + +/* + * object_equal() - compare two objects + */ + +int object_equal( OBJECT * lhs, OBJECT * rhs ) +{ + object_validate( lhs ); + object_validate( rhs ); +#ifdef BJAM_NO_MEM_CACHE + return !strcmp( object_str( lhs ), object_str( rhs ) ); +#else + assert( ( lhs == rhs ) == !strcmp( object_str( lhs ), object_str( rhs ) ) ); + return lhs == rhs; +#endif +} + + +/* + * object_hash() - returns the hash value of an object + */ + +unsigned int object_hash( OBJECT * obj ) +{ + object_validate( obj ); +#ifdef BJAM_NO_MEM_CACHE + return hash_keyval( object_str( obj ), strlen( object_str( obj ) ) ); +#else + return object_get_item( obj )->header.hash; +#endif +} + +#endif + +/* + * object_done() - free string tables. + */ + +void object_done() +{ +#ifdef BJAM_NEWSTR_NO_ALLOCATE + unsigned i; + for ( i = 0; i < strhash.num; ++i ) + { + while ( strhash.data[ i ] ) + { + struct hash_item * item = strhash.data[ i ]; + strhash.data[ i ] = item->header.next; + BJAM_FREE( item ); + } + } +#else + /* Reclaim string blocks. */ + while ( strblock_chain ) + { + strblock * const n = strblock_chain->next; + BJAM_FREE( strblock_chain ); + strblock_chain = n; + } +#endif + +#if !defined(BJAM_NO_MEM_CACHE) || (BJAM_NO_MEM_CACHE == 0) + string_set_done( &strhash ); +#endif + + if ( DEBUG_MEM ) + { + out_printf( "%dK in strings\n", strtotal / 1024 ); + if ( strcount_in != strcount_out ) + out_printf( "--- %d strings of %d dangling\n", strcount_in - + strcount_out, strcount_in ); + } +} diff --git a/src/boost/tools/build/src/engine/object.h b/src/boost/tools/build/src/engine/object.h new file mode 100644 index 000000000..53df10892 --- /dev/null +++ b/src/boost/tools/build/src/engine/object.h @@ -0,0 +1,80 @@ +/* + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2011 Steven Watanabe + * + * This file is part of Jam - see jam.c for Copyright information. 
+ */ + +/* + * object.h - object manipulation routines + */ + +#ifndef BOOST_JAM_OBJECT_H +#define BOOST_JAM_OBJECT_H + +#include "config.h" +#include +#include + +typedef struct _object OBJECT; + +typedef OBJECT * object_ptr; + +OBJECT * object_new( char const * const ); +OBJECT * object_new_range( char const * const, int32_t size ); +void object_done( void ); + +#if defined(NDEBUG) && !defined(BJAM_NO_MEM_CACHE) + +struct hash_header +{ + unsigned int hash; + struct hash_item * next; +}; + +#define object_str( obj ) ((char const *)(obj)) +#define object_copy( obj ) (obj) +#define object_free( obj ) ((void)0) +#define object_equal( lhs, rhs ) ((lhs) == (rhs)) +#define object_hash( obj ) (((struct hash_header *)((char *)(obj) - sizeof(struct hash_header)))->hash) + +#else + +char const * object_str ( OBJECT * ); +OBJECT * object_copy ( OBJECT * ); +void object_free ( OBJECT * ); +int object_equal( OBJECT *, OBJECT * ); +unsigned int object_hash ( OBJECT * ); + +#endif + +namespace b2 { namespace jam { + + struct object + { + inline object(const object &o) + : obj(object_copy(o.obj)) {} + + inline explicit object(OBJECT *o) + : obj(object_copy(o)) {} + inline explicit object(const char * val) + : obj(object_new(val)) {} + inline explicit object(const std::string &val) + : obj(object_new(val.c_str())) {} + + inline ~object() { if (obj) object_free(obj); } + inline OBJECT * release() { OBJECT *r = obj; obj = nullptr; return r; } + + inline operator OBJECT*() const { return obj; } + inline operator std::string() const { return object_str(obj); } + + inline bool operator==(OBJECT *o) const { return std::strcmp(object_str(obj), object_str(o)) == 0; } + + private: + + OBJECT * obj = nullptr; + }; + +}} + +#endif diff --git a/src/boost/tools/build/src/engine/option.cpp b/src/boost/tools/build/src/engine/option.cpp new file mode 100644 index 000000000..fa9246b02 --- /dev/null +++ b/src/boost/tools/build/src/engine/option.cpp @@ -0,0 +1,94 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +# include "jam.h" +# include "option.h" + +/* + * option.c - command line option processing + * + * {o >o + * \<>) "Process command line options as defined in . + * Return the number of argv[] elements used up by options, + * or -1 if an invalid option flag was given or an argument + * was supplied for an option that does not require one." + */ + +int getoptions( int argc, char * * argv, const char * opts, bjam_option * optv ) +{ + int i; + int optc = N_OPTS; + + memset( (char *)optv, '\0', sizeof( *optv ) * N_OPTS ); + + for ( i = 0; i < argc; ++i ) + { + char *arg; + + if ( ( argv[ i ][ 0 ] != '-' ) || + ( ( argv[ i ][ 1 ] != '-' ) && !isalpha( argv[ i ][ 1 ] ) ) ) + continue; + + if ( !optc-- ) + { + printf( "too many options (%d max)\n", N_OPTS ); + return -1; + } + + for ( arg = &argv[ i ][ 1 ]; *arg; ++arg ) + { + const char * f; + + for ( f = opts; *f; ++f ) + if ( *f == *arg ) + break; + + if ( !*f ) + { + printf( "Invalid option: -%c\n", *arg ); + return -1; + } + + optv->flag = *f; + + if ( f[ 1 ] != ':' ) + { + optv++->val = (char *)"true"; + } + else if ( arg[ 1 ] ) + { + optv++->val = &arg[1]; + break; + } + else if ( ++i < argc ) + { + optv++->val = argv[ i ]; + break; + } + else + { + printf( "option: -%c needs argument\n", *f ); + return -1; + } + } + } + + return i; +} + + +/* + * Name: getoptval() - find an option given its character. 
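+ *
+ * For illustration: getoptval( optv, 'd', 0 ) returns the value recorded for
+ * the first -d occurrence on the command line, getoptval( optv, 'd', 1 ) the
+ * value for the second occurrence, and 0 when there is no such occurrence.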
+ */ + +char * getoptval( bjam_option * optv, char opt, int subopt ) +{ + int i; + for ( i = 0; i < N_OPTS; ++i, ++optv ) + if ( ( optv->flag == opt ) && !subopt-- ) + return optv->val; + return 0; +} diff --git a/src/boost/tools/build/src/engine/option.h b/src/boost/tools/build/src/engine/option.h new file mode 100644 index 000000000..df3063105 --- /dev/null +++ b/src/boost/tools/build/src/engine/option.h @@ -0,0 +1,25 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * option.h - command line option processing + * + * {o >o + * \ -) "Command line option." + */ + +#include "config.h" + +typedef struct bjam_option +{ + char flag; /* filled in by getoption() */ + char * val; /* set to random address if true */ +} bjam_option; + +#define N_OPTS 256 + +int getoptions( int argc, char * * argv, const char * opts, bjam_option * optv ); +char * getoptval( bjam_option * optv, char opt, int subopt ); diff --git a/src/boost/tools/build/src/engine/output.cpp b/src/boost/tools/build/src/engine/output.cpp new file mode 100644 index 000000000..196082c78 --- /dev/null +++ b/src/boost/tools/build/src/engine/output.cpp @@ -0,0 +1,192 @@ +/* + Copyright 2007 Rene Rivera + Distributed under the Boost Software License, Version 1.0. + (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#include "jam.h" +#include "output.h" + +#include +#include +#include + +#include +#include + + +#define bjam_out (stdout) +#define bjam_err (stderr) + +static void out_( char const * data, FILE * const io ) +{ + while ( *data ) + { + size_t const len = strcspn( data, "\r" ); + data += fwrite( data, 1, len, io ); + if ( *data == '\r' ) ++data; + } +} + + +void out_flush() +{ + fflush( bjam_out ); + if ( globs.out ) fflush( globs.out ); +} +void err_flush() +{ + fflush( bjam_err ); + if ( globs.out ) fflush( globs.out ); +} +void out_puts(char const * const s) +{ + fputs( s, bjam_out ); + if ( globs.out ) fputs( s, globs.out ); +} +void err_puts(char const * const s) +{ + fputs( s, bjam_err ); + if ( globs.out ) fputs( s, globs.out ); +} +void out_putc(const char c) +{ + fputc( c, bjam_out ); + if ( globs.out ) fputc( c, globs.out ); +} +void err_putc(const char c) +{ + fputc( c, bjam_err ); + if ( globs.out ) fputc( c, globs.out ); +} +void out_data(char const * const s) +{ + out_( s, bjam_out ); + if ( globs.out ) out_( s, globs.out ); +} +void err_data(char const * const s) +{ + out_( s, bjam_err ); + if ( globs.out ) out_( s, globs.out ); +} +void out_printf(char const * const f, ...) +{ + { + va_list args; + va_start( args, f ); + vfprintf( bjam_out, f, args ); + va_end( args ); + } + if ( globs.out ) + { + va_list args; + va_start( args, f ); + vfprintf( globs.out, f, args ); + va_end( args ); + } +} +void err_printf(char const * const f, ...) +{ + { + va_list args; + va_start( args, f ); + vfprintf( bjam_err, f, args ); + va_end( args ); + } + if ( globs.out ) + { + va_list args; + va_start( args, f ); + vfprintf( globs.out, f, args ); + va_end( args ); + } +} + + +void out_action +( + char const * const action, + char const * const target, + char const * const command, + char const * const out_d, + char const * const err_d, + int const exit_reason +) +{ + /* Print out the action + target line, if the action is quiet the action + * should be null. + */ + if ( action ) + out_printf( "%s %s\n", action, target ); + + /* Print out the command executed if given -d+2. 
*/ + if ( DEBUG_EXEC ) + { + out_puts( command ); + out_putc( '\n' ); + } + + /* Print out the command output, if requested, or if the program failed, but + * only output for non-quiet actions. + */ + if ( action || exit_reason != EXIT_OK ) + { + if ( out_d && + ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) || + ( globs.pipe_action == 0 ) ) ) + out_data( out_d ); + if ( err_d && ( globs.pipe_action & 2 /* STDERR_FILENO */ ) ) + err_data( err_d ); + } +} + + +void errno_puts(char const * const s) +{ + const auto e = errno; + err_printf("[errno %d] %s (%s)\n", e, s, strerror(e)); +} + + +void errno_printf(char const * const f, ...) +{ + const auto e = errno; + std::string s = "[errno "+std::to_string(e)+"] "; + { + va_list args; + va_start( args, f ); + int l = vsnprintf( nullptr, 0, f, args ); + va_end( args ); + va_start( args, f ); + std::unique_ptr r(new char[l+1]); + vsnprintf( r.get(), l+1, f, args ); + va_end( args ); + s += r.get(); + } + s += " ("; + s += strerror(e); + s += ")"; + err_puts(s.c_str()); +} + + +OBJECT * outf_int( int const value ) +{ + char buffer[ 50 ]; + sprintf( buffer, "%i", value ); + return object_new( buffer ); +} + + +OBJECT * outf_double( double const value ) +{ + char buffer[ 50 ]; + sprintf( buffer, "%f", value ); + return object_new( buffer ); +} + + +OBJECT * outf_time( timestamp const * const time ) +{ + return object_new( timestamp_str( time ) ); +} diff --git a/src/boost/tools/build/src/engine/output.h b/src/boost/tools/build/src/engine/output.h new file mode 100644 index 000000000..b81a8d5c4 --- /dev/null +++ b/src/boost/tools/build/src/engine/output.h @@ -0,0 +1,46 @@ +/* + Copyright 2007 Rene Rivera + Distributed under the Boost Software License, Version 1.0. + (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#ifndef BJAM_OUTPUT_H +#define BJAM_OUTPUT_H + +#include "config.h" +#include "object.h" +#include "timestamp.h" + +#define EXIT_OK 0 +#define EXIT_FAIL 1 +#define EXIT_TIMEOUT 2 + +void out_action( + char const * const action, + char const * const target, + char const * const command, + char const * const out_data, + char const * const err_data, + int const exit_reason +); + +void out_flush(); +void err_flush(); +void out_puts(char const * const s); +void err_puts(char const * const s); +void out_putc(const char c); +void err_putc(const char c); +void out_data(char const * const s); +void err_data(char const * const s); +void out_printf(char const * const f, ...); +void err_printf(char const * const f, ...); + +// Output current errno value & description along with given string. +void errno_puts(char const * const s); +void errno_printf(char const * const f, ...); + +OBJECT * outf_int( int const value ); +OBJECT * outf_double( double const value ); +OBJECT * outf_time( timestamp const * const value ); + +#endif diff --git a/src/boost/tools/build/src/engine/parse.cpp b/src/boost/tools/build/src/engine/parse.cpp new file mode 100644 index 000000000..d1588ab75 --- /dev/null +++ b/src/boost/tools/build/src/engine/parse.cpp @@ -0,0 +1,147 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. 
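/* Illustrative sketch, not part of the original patch: errno_printf() above
 * relies on the standard two-pass vsnprintf() idiom - measure with a null
 * buffer, then format into storage of exactly the right size. The helper
 * below restates that idiom on its own; 'format_to_string' is a hypothetical
 * name and <vector>/<cstdarg> are assumed to be available.
 */
static std::string format_to_string( char const * f, ... )
{
    va_list args;
    va_start( args, f );
    int const len = vsnprintf( nullptr, 0, f, args );  /* pass 1: measure */
    va_end( args );
    if ( len < 0 ) return std::string();
    std::vector< char > buf( len + 1 );
    va_start( args, f );
    vsnprintf( buf.data(), buf.size(), f, args );      /* pass 2: format */
    va_end( args );
    return std::string( buf.data(), len );
}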
+ * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "jam.h" +#include "lists.h" +#include "parse.h" +#include "scan.h" +#include "object.h" +#include "modules.h" +#include "frames.h" +#include "function.h" +#include "mem.h" + +/* + * parse.c - make and destroy parse trees as driven by the parser + * + * 09/07/00 (seiwald) - ref count on PARSE to avoid freeing when used, + * as per Matt Armstrong. + * 09/11/00 (seiwald) - structure reworked to reflect that (*func)() + * returns a LIST *. + */ + +static PARSE * yypsave; + +static void parse_impl( FRAME * frame ) +{ + + /* Now parse each block of rules and execute it. Execute it outside of the + * parser so that recursive calls to yyrun() work (no recursive yyparse's). + */ + + for ( ; ; ) + { + PARSE * p; + + /* Filled by yyparse() calling parse_save(). */ + yypsave = 0; + + /* If parse error or empty parse, outta here. */ + if ( yyparse() || !( p = yypsave ) ) + break; + + /* Run the parse tree. */ + auto func = b2::jam::make_unique_bare_jptr( function_compile( p ), function_free ); + parse_free( p ); + list_free( function_run( func.get(), frame, stack_global() ) ); + } + + yyfdone(); +} + + +void parse_file( OBJECT * f, FRAME * frame ) +{ + /* Suspend scan of current file and push this new file in the stream. */ + yyfparse( f ); + + parse_impl( frame ); +} + + +void parse_string( OBJECT * name, const char * * lines, FRAME * frame ) +{ + yysparse( name, lines ); + parse_impl( frame ); +} + + +void parse_save( PARSE * p ) +{ + yypsave = p; +} + + +PARSE * parse_make( + int type, + PARSE * left, + PARSE * right, + PARSE * third, + OBJECT * string, + OBJECT * string1, + int num ) +{ + PARSE * p = (PARSE *)BJAM_MALLOC( sizeof( PARSE ) ); + + p->type = type; + p->left = left; + p->right = right; + p->third = third; + p->string = string; + p->string1 = string1; + p->num = num; + p->refs = 1; + p->rulename = 0; + + if ( left ) + { + p->file = object_copy( left->file ); + p->line = left->line; + } + else + { + yyinput_last_read_token( &p->file, &p->line ); + p->file = object_copy( p->file ); + } + + return p; +} + + +void parse_refer( PARSE * p ) +{ + ++p->refs; +} + + +void parse_free( PARSE * p ) +{ + if ( --p->refs ) + return; + + if ( p->string ) + object_free( p->string ); + if ( p->string1 ) + object_free( p->string1 ); + if ( p->left ) + parse_free( p->left ); + if ( p->right ) + parse_free( p->right ); + if ( p->third ) + parse_free( p->third ); + if ( p->rulename ) + object_free( p->rulename ); + if ( p->file ) + object_free( p->file ); + + BJAM_FREE( (char *)p ); +} diff --git a/src/boost/tools/build/src/engine/parse.h b/src/boost/tools/build/src/engine/parse.h new file mode 100644 index 000000000..c99782e79 --- /dev/null +++ b/src/boost/tools/build/src/engine/parse.h @@ -0,0 +1,81 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * parse.h - make and destroy parse trees as driven by the parser. 
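/* Illustrative sketch, not part of the original patch: the PARSE ownership
 * contract implemented by parse_refer() / parse_free() above. Each
 * parse_refer() adds an owner; each parse_free() drops one, and the node
 * (with its strings and children) is released only when the last owner lets
 * go. 'share_parse' is a hypothetical caller.
 */
static PARSE * share_parse( PARSE * p )
{
    parse_refer( p );   /* second owner: refs 1 -> 2 */
    parse_free( p );    /* original owner done: refs 2 -> 1, node still alive */
    return p;           /* the caller must eventually parse_free() this */
}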
+ */ + +#ifndef PARSE_DWA20011020_H +#define PARSE_DWA20011020_H + +#include "config.h" +#include "frames.h" +#include "lists.h" +#include "modules.h" + + +#define PARSE_APPEND 0 +#define PARSE_FOREACH 1 +#define PARSE_IF 2 +#define PARSE_EVAL 3 +#define PARSE_INCLUDE 4 +#define PARSE_LIST 5 +#define PARSE_LOCAL 6 +#define PARSE_MODULE 7 +#define PARSE_CLASS 8 +#define PARSE_NULL 9 +#define PARSE_ON 10 +#define PARSE_RULE 11 +#define PARSE_RULES 12 +#define PARSE_SET 13 +#define PARSE_SETCOMP 14 +#define PARSE_SETEXEC 15 +#define PARSE_SETTINGS 16 +#define PARSE_SWITCH 17 +#define PARSE_WHILE 18 +#define PARSE_RETURN 19 +#define PARSE_BREAK 20 +#define PARSE_CONTINUE 21 + + +/* + * Parse tree node. + */ + +typedef struct _PARSE PARSE; + +struct _PARSE { + int type; + PARSE * left; + PARSE * right; + PARSE * third; + OBJECT * string; + OBJECT * string1; + int num; + int refs; + OBJECT * rulename; + OBJECT * file; + int line; +}; + +void parse_file( OBJECT *, FRAME * ); +void parse_string( OBJECT * name, const char * * lines, FRAME * frame ); +void parse_save( PARSE * ); + +PARSE * parse_make( int type, PARSE * left, PARSE * right, PARSE * third, + OBJECT * string, OBJECT * string1, int num ); + +void parse_refer( PARSE * ); +void parse_free( PARSE * ); +LIST * parse_evaluate( PARSE *, FRAME * ); + +#endif diff --git a/src/boost/tools/build/src/engine/patchlevel.h b/src/boost/tools/build/src/engine/patchlevel.h new file mode 100644 index 000000000..455f7a12d --- /dev/null +++ b/src/boost/tools/build/src/engine/patchlevel.h @@ -0,0 +1,16 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ +/* +This file is ALSO: +Copyright 2018-2022 Rene Rivera +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + + +#define VERSION_MAJOR 4 +#define VERSION_MINOR 8 +#define VERSION_PATCH 2 diff --git a/src/boost/tools/build/src/engine/pathnt.cpp b/src/boost/tools/build/src/engine/pathnt.cpp new file mode 100644 index 000000000..2e23a1617 --- /dev/null +++ b/src/boost/tools/build/src/engine/pathnt.cpp @@ -0,0 +1,413 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * pathnt.c - NT specific path manipulation support + */ + +#include "jam.h" +#ifdef USE_PATHNT + +#include "pathsys.h" +#include "hash.h" + +#define WIN32_LEAN_AND_MEAN +#include + +#ifdef OS_CYGWIN +# include +# include +# ifdef CYGWIN_VERSION_CYGWIN_CONV +# include +# endif +# include +#endif + +#include +#include + + +/* The definition of this in winnt.h is not ANSI-C compatible. 
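/* Illustrative sketch, not part of the original patch: the patchlevel.h
 * macros above can be combined into a display string with the usual
 * two-level stringizing idiom. The B2_* macro names are hypothetical and
 * used only for this example.
 */
#define B2_STRINGIZE_( x ) #x
#define B2_STRINGIZE( x ) B2_STRINGIZE_( x )
#define B2_VERSION_STRING \
    B2_STRINGIZE( VERSION_MAJOR ) "." \
    B2_STRINGIZE( VERSION_MINOR ) "." \
    B2_STRINGIZE( VERSION_PATCH )
/* With the values above, B2_VERSION_STRING expands to "4.8.2". */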
*/ +#undef INVALID_FILE_ATTRIBUTES +#define INVALID_FILE_ATTRIBUTES ((DWORD)-1) + + +typedef struct path_key_entry +{ + OBJECT * path; + OBJECT * key; + int exists; +} path_key_entry; + +static struct hash * path_key_cache; + + +/* + * path_get_process_id_() + */ + +unsigned long path_get_process_id_( void ) +{ + return GetCurrentProcessId(); +} + + +/* + * path_get_temp_path_() + */ + +void path_get_temp_path_( string * buffer ) +{ + DWORD pathLength = GetTempPathA( 0, NULL ); + string_reserve( buffer, pathLength ); + pathLength = GetTempPathA( pathLength, buffer->value ); + buffer->value[ pathLength - 1 ] = '\0'; + buffer->size = pathLength - 1; +} + + +/* + * canonicWindowsPath() - convert a given path into its canonic/long format + * + * Appends the canonic path to the end of the given 'string' object. + * + * FIXME: This function is still work-in-progress as it originally did not + * necessarily return the canonic path format (could return slightly different + * results for certain equivalent path strings) and could accept paths pointing + * to non-existing file system entities as well. + * + * Caches results internally, automatically caching any parent paths it has to + * convert to their canonic format in the process. + * + * Prerequisites: + * - path given in normalized form, i.e. all of its folder separators have + * already been converted into '\\' + * - path_key_cache path/key mapping cache object already initialized + */ + +static int canonicWindowsPath( char const * const path, int32_t path_length, + string * const out ) +{ + char const * last_element; + int32_t saved_size; + char const * p; + int missing_parent; + + /* This is only called via path_key(), which initializes the cache. */ + assert( path_key_cache ); + + if ( !path_length ) + return 1; + + if ( path_length == 1 && path[ 0 ] == '\\' ) + { + string_push_back( out, '\\' ); + return 1; + } + + if ( path[ 1 ] == ':' && + ( path_length == 2 || + ( path_length == 3 && path[ 2 ] == '\\' ) ) ) + { + string_push_back( out, toupper( path[ 0 ] ) ); + string_push_back( out, ':' ); + string_push_back( out, '\\' ); + return 1; + } + + /* Find last '\\'. */ + for ( p = path + path_length - 1; p >= path && *p != '\\'; --p ); + last_element = p + 1; + + /* Special case '\' && 'D:\' - include trailing '\'. */ + if ( p == path || + (p == path + 2 && path[ 1 ] == ':') ) + ++p; + + missing_parent = 0; + + if ( p >= path ) + { + char const * const dir = path; + const int32_t dir_length = int32_t(p - path); + OBJECT * const dir_obj = object_new_range( dir, dir_length ); + int found; + path_key_entry * const result = (path_key_entry *)hash_insert( + path_key_cache, dir_obj, &found ); + if ( !found ) + { + result->path = dir_obj; + if ( canonicWindowsPath( dir, dir_length, out ) ) + result->exists = 1; + else + result->exists = 0; + result->key = object_new( out->value ); + } + else + { + object_free( dir_obj ); + string_append( out, object_str( result->key ) ); + } + if ( !result->exists ) + missing_parent = 1; + } + + if ( out->size && out->value[ out->size - 1 ] != '\\' ) + string_push_back( out, '\\' ); + + saved_size = out->size; + string_append_range( out, last_element, path + path_length ); + + if ( !missing_parent ) + { + char const * const n = last_element; + int32_t n_length = int32_t(path + path_length - n); + if ( !( n_length == 1 && n[ 0 ] == '.' ) + && !( n_length == 2 && n[ 0 ] == '.' && n[ 1 ] == '.' 
) ) + { + WIN32_FIND_DATAA fd; + HANDLE const hf = FindFirstFileA( out->value, &fd ); + if ( hf != INVALID_HANDLE_VALUE ) + { + string_truncate( out, saved_size ); + string_append( out, fd.cFileName ); + FindClose( hf ); + return 1; + } + } + else + { + return 1; + } + } + return 0; +} + + +/* + * normalize_path() - 'normalizes' the given path for the path-key mapping + * + * The resulting string has nothing to do with 'normalized paths' as used in + * Boost Jam build scripts and the built-in NORMALIZE_PATH rule. It is intended + * to be used solely as an intermediate step when mapping an arbitrary path to + * its canonical representation. + * + * When choosing the intermediate string the important things are for it to be + * inexpensive to calculate and any two paths having different canonical + * representations also need to have different calculated intermediate string + * representations. Any implemented additional rules serve only to simplify + * constructing the canonical path representation from the calculated + * intermediate string. + * + * Implemented returned path rules: + * - use backslashes as path separators + * - lowercase only (since all Windows file systems are case insensitive) + * - trim trailing path separator except in case of a root path, i.e. 'X:\' + */ + +static void normalize_path( string * path ) +{ + char * s; + for ( s = path->value; s < path->value + path->size; ++s ) + *s = *s == '/' ? '\\' : tolower( *s ); + /* Strip trailing "/". */ + if ( path->size && path->size != 3 && path->value[ path->size - 1 ] == '\\' + ) + string_pop_back( path ); +} + + +static path_key_entry * path_key( OBJECT * const path, + int const known_to_be_canonic ) +{ + path_key_entry * result; + int found; + + if ( !path_key_cache ) + path_key_cache = hashinit( sizeof( path_key_entry ), "path to key" ); + + result = (path_key_entry *)hash_insert( path_key_cache, path, &found ); + if ( !found ) + { + OBJECT * normalized; + int32_t normalized_size; + path_key_entry * nresult; + result->path = path; + { + string buf[ 1 ]; + string_copy( buf, object_str( path ) ); + normalize_path( buf ); + normalized = object_new( buf->value ); + normalized_size = buf->size; + string_free( buf ); + } + nresult = (path_key_entry *)hash_insert( path_key_cache, normalized, + &found ); + if ( !found || nresult == result ) + { + nresult->path = normalized; + if ( known_to_be_canonic ) + { + nresult->key = object_copy( path ); + nresult->exists = 1; + } + else + { + string canonic_path[ 1 ]; + string_new( canonic_path ); + if ( canonicWindowsPath( object_str( normalized ), normalized_size, + canonic_path ) ) + nresult->exists = 1; + else + nresult->exists = 0; + nresult->key = object_new( canonic_path->value ); + string_free( canonic_path ); + } + } + else + object_free( normalized ); + if ( nresult != result ) + { + result->path = object_copy( path ); + result->key = object_copy( nresult->key ); + result->exists = nresult->exists; + } + } + + return result; +} + + +/* + * translate_path_cyg2win() - conversion of a cygwin to a Windows path. + * + * FIXME: skip grist + */ + +#ifdef OS_CYGWIN +static int translate_path_cyg2win( string * path ) +{ + int translated = 0; + +#ifdef CYGWIN_VERSION_CYGWIN_CONV + /* Use new Cygwin API added with Cygwin 1.7. Old one had no error + * handling and has been deprecated. 
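/* Illustrative sketch, not part of the original patch: the same mapping
 * rules as normalize_path() above, restated on std::string - forward slashes
 * become backslashes, everything is lowercased, and a trailing separator is
 * kept only for a root such as "x:\". 'key_form' is a hypothetical name.
 */
static std::string key_form( std::string p )
{
    for ( char & c : p )
        c = c == '/' ? '\\' : (char)tolower( (unsigned char)c );
    if ( p.size() && p.size() != 3 && p.back() == '\\' )
        p.pop_back();
    return p;  /* "C:/Foo/" -> "c:\foo", while the root "C:\" stays "c:\" */
}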
+ */ + char * dynamicBuffer = 0; + char buffer[ MAX_PATH + 1001 ]; + char const * result = buffer; + cygwin_conv_path_t const conv_type = CCP_POSIX_TO_WIN_A | CCP_RELATIVE; + ssize_t const apiResult = cygwin_conv_path( conv_type, path->value, + buffer, sizeof( buffer ) / sizeof( *buffer ) ); + assert( apiResult == 0 || apiResult == -1 ); + assert( apiResult || strlen( result ) < sizeof( buffer ) / sizeof( + *buffer ) ); + if ( apiResult ) + { + result = 0; + if ( errno == ENOSPC ) + { + ssize_t const size = cygwin_conv_path( conv_type, path->value, + NULL, 0 ); + assert( size >= -1 ); + if ( size > 0 ) + { + dynamicBuffer = (char *)BJAM_MALLOC_ATOMIC( size ); + if ( dynamicBuffer ) + { + ssize_t const apiResult = cygwin_conv_path( conv_type, + path->value, dynamicBuffer, size ); + assert( apiResult == 0 || apiResult == -1 ); + if ( !apiResult ) + { + result = dynamicBuffer; + assert( strlen( result ) < size ); + } + } + } + } + } +#else /* CYGWIN_VERSION_CYGWIN_CONV */ + /* Use old Cygwin API deprecated with Cygwin 1.7. */ + char result[ MAX_PATH + 1 ]; + cygwin_conv_to_win32_path( path->value, result ); + assert( strlen( result ) <= MAX_PATH ); +#endif /* CYGWIN_VERSION_CYGWIN_CONV */ + + if ( result ) + { + string_truncate( path, 0 ); + string_append( path, result ); + translated = 1; + } + +#ifdef CYGWIN_VERSION_CYGWIN_CONV + if ( dynamicBuffer ) + BJAM_FREE( dynamicBuffer ); +#endif + + return translated; +} +#endif /* OS_CYGWIN */ + + +/* + * path_translate_to_os_() + */ + +int path_translate_to_os_( char const * f, string * file ) +{ + int translated = 0; + + /* by default, pass on the original path */ + string_copy( file, f ); + +#ifdef OS_CYGWIN + translated = translate_path_cyg2win( file ); +#endif + + return translated; +} + + +void path_register_key( OBJECT * canonic_path ) +{ + path_key( canonic_path, 1 ); +} + + +OBJECT * path_as_key( OBJECT * path ) +{ + return object_copy( path_key( path, 0 )->key ); +} + + +static void free_path_key_entry( void * xentry, void * const data ) +{ + path_key_entry * const entry = (path_key_entry *)xentry; + if (entry->path) object_free( entry->path ); + if (entry->key) object_free( entry->key ); +} + + +void path_done( void ) +{ + if ( path_key_cache ) + { + hashenumerate( path_key_cache, &free_path_key_entry, 0 ); + hashdone( path_key_cache ); + } +} + +#endif // USE_PATHNT diff --git a/src/boost/tools/build/src/engine/pathsys.cpp b/src/boost/tools/build/src/engine/pathsys.cpp new file mode 100644 index 000000000..d9a8fcd81 --- /dev/null +++ b/src/boost/tools/build/src/engine/pathsys.cpp @@ -0,0 +1,469 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. 
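/* Illustrative sketch, not part of the original patch: the Cygwin branch
 * above uses a common "fixed buffer first, exact-sized retry on ENOSPC"
 * pattern. 'convert' below is a stand-in for an API shaped like
 * cygwin_conv_path(): it fills a caller buffer, fails with ENOSPC when the
 * buffer is too small, and reports the required size when given no buffer.
 */
static char * convert_with_retry( char const * input,
    ssize_t (*convert)( char const * in, char * out, size_t out_size ) )
{
    char fixed[ MAX_PATH + 1 ];
    if ( convert( input, fixed, sizeof( fixed ) ) == 0 )
        return strdup( fixed );                        /* common case: it fits */
    if ( errno != ENOSPC )
        return NULL;                                   /* genuine failure */
    ssize_t const needed = convert( input, NULL, 0 );  /* ask for the size */
    if ( needed <= 0 )
        return NULL;
    char * const big = (char *)BJAM_MALLOC_ATOMIC( needed );
    if ( big && convert( input, big, (size_t)needed ) == 0 )
        return big;
    if ( big ) BJAM_FREE( big );
    return NULL;
}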
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * pathsys.c - platform independent path manipulation support + * + * External routines: + * path_build() - build a filename given dir/base/suffix/member + * path_parent() - make a PATHNAME point to its parent dir + * path_parse() - split a file name into dir/base/suffix/member + * path_tmpdir() - returns the system dependent temporary folder path + * path_tmpfile() - returns a new temporary path + * path_tmpnam() - returns a new temporary name + * + * File_parse() and path_build() just manipulate a string and a structure; + * they do not make system calls. + */ + +#include "jam.h" + +#include "cwd.h" +#include "filesys.h" +#include "pathsys.h" + +#include +#include + +#include + + +/* Internal OS specific implementation details - have names ending with an + * underscore and are expected to be implemented in an OS specific pathXXX.c + * module. + */ +unsigned long path_get_process_id_( void ); +void path_get_temp_path_( string * buffer ); +int path_translate_to_os_( char const * f, string * file ); + + +/* + * path_parse() - split a file name into dir/base/suffix/member + */ + +void path_parse( char const * file, PATHNAME * f ) +{ + char const * p; + char const * q; + char const * end; + + memset( (char *)f, 0, sizeof( *f ) ); + + /* Look for ''. */ + + if ( ( file[ 0 ] == '<' ) && ( p = strchr( file, '>' ) ) ) + { + f->f_grist.ptr = file; + f->f_grist.len = int32_t(p - file); + file = p + 1; + } + + /* Look for 'dir/'. */ + + p = strrchr( file, '/' ); + +#if PATH_DELIM == '\\' + /* On NT, look for dir\ as well */ + { + char const * p1 = strrchr( p ? p + 1 : file, '\\' ); + if ( p1 ) p = p1; + } +#endif + + if ( p ) + { + f->f_dir.ptr = file; + f->f_dir.len = int32_t(p - file); + + /* Special case for / - dirname is /, not "" */ + if ( !f->f_dir.len ) + ++f->f_dir.len; + +#if PATH_DELIM == '\\' + /* Special case for D:/ - dirname is D:/, not "D:" */ + if ( f->f_dir.len == 2 && file[ 1 ] == ':' ) + ++f->f_dir.len; +#endif + + file = p + 1; + } + + end = file + strlen( file ); + + /* Look for '(member)'. */ + if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) ) + { + f->f_member.ptr = p + 1; + f->f_member.len = int32_t(end - p - 2); + end = p; + } + + /* Look for '.suffix'. This would be memrchr(). */ + p = 0; + for ( q = file; ( q = (char *)memchr( q, '.', end - q ) ); ++q ) + p = q; + if ( p ) + { + f->f_suffix.ptr = p; + f->f_suffix.len = int32_t(end - p); + end = p; + } + + /* Leaves base. */ + f->f_base.ptr = file; + f->f_base.len = int32_t(end - file); +} + + +/* + * is_path_delim() - true iff c is a path delimiter + */ + +static int is_path_delim( char const c ) +{ + return c == PATH_DELIM +#if PATH_DELIM == '\\' + || c == '/' +#endif + ; +} + + +/* + * as_path_delim() - convert c to a path delimiter if it is not one already + */ + +static char as_path_delim( char const c ) +{ + return is_path_delim( c ) ? c : PATH_DELIM; +} + + +/* + * path_build() - build a filename given dir/base/suffix/member + * + * To avoid changing slash direction on NT when reconstituting paths, instead of + * unconditionally appending PATH_DELIM we check the past-the-end character of + * the previous path element. If it is a path delimiter, we append that, and + * only append PATH_DELIM as a last resort. This heuristic is based on the fact + * that PATHNAME objects are usually the result of calling path_parse, which + * leaves the original slashes in the past-the-end position. 
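/* Illustrative sketch, not part of the original patch: what path_parse()
 * above produces for a typical archive-member path. The PATHPART pointers
 * index into the original string and the lengths delimit each part; nothing
 * is copied or NUL-terminated. 'example_parse' is a hypothetical name.
 */
static void example_parse()
{
    PATHNAME f;
    path_parse( "dir/sub/name.cpp(member)", &f );
    /* f.f_dir    -> "dir/sub"  (len 7)
     * f.f_base   -> "name"     (len 4)
     * f.f_suffix -> ".cpp"     (len 4)
     * f.f_member -> "member"   (len 6)
     */
}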
Correctness depends + * on the assumption that all strings are zero terminated, so a past-the-end + * character will always be available. + * + * As an attendant patch, we had to ensure that backslashes are used explicitly + * in 'timestamp.c'. + */ + +void path_build( PATHNAME * f, string * file ) +{ + int check_f; + int check_f_pos; + + file_build1( f, file ); + + /* Do not prepend root if it is '.' or the directory is rooted. */ + check_f = (f->f_root.len + && !( f->f_root.len == 1 && f->f_root.ptr[ 0 ] == '.') + && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '/' )); +#if PATH_DELIM == '\\' + check_f = (check_f + && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '\\' ) + && !( f->f_dir.len && f->f_dir.ptr[ 1 ] == ':' )); +#endif + if (check_f) + { + string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len + ); + /* If 'root' already ends with a path delimiter, do not add another one. + */ + if ( !is_path_delim( f->f_root.ptr[ f->f_root.len - 1 ] ) ) + string_push_back( file, as_path_delim( f->f_root.ptr[ f->f_root.len + ] ) ); + } + + if ( f->f_dir.len ) + string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len ); + + /* Put path separator between dir and file. */ + /* Special case for root dir: do not add another path separator. */ + check_f_pos = (f->f_dir.len && ( f->f_base.len || f->f_suffix.len )); +#if PATH_DELIM == '\\' + check_f_pos = (check_f_pos && !( f->f_dir.len == 3 && f->f_dir.ptr[ 1 ] == ':' )); +#endif + check_f_pos = (check_f_pos && !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[ 0 ]))); + if (check_f_pos) + string_push_back( file, as_path_delim( f->f_dir.ptr[ f->f_dir.len ] ) ); + + if ( f->f_base.len ) + string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len + ); + + if ( f->f_suffix.len ) + string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + + f->f_suffix.len ); + + if ( f->f_member.len ) + { + string_push_back( file, '(' ); + string_append_range( file, f->f_member.ptr, f->f_member.ptr + + f->f_member.len ); + string_push_back( file, ')' ); + } +} + + +/* + * path_parent() - make a PATHNAME point to its parent dir + */ + +void path_parent( PATHNAME * f ) +{ + f->f_base.ptr = f->f_suffix.ptr = f->f_member.ptr = ""; + f->f_base.len = f->f_suffix.len = f->f_member.len = 0; +} + + +/* + * path_tmpdir() - returns the system dependent temporary folder path + * + * Returned value is stored inside a static buffer and should not be modified. + * Returned value does *not* include a trailing path separator. 
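/* Illustrative sketch, not part of the original patch: a parse-edit-rebuild
 * round trip. Because the PATHPART pointers still reference the original
 * zero-terminated string, path_build() can reuse the slash that originally
 * followed the directory part, as described above. 'example_build' is a
 * hypothetical name.
 */
static void example_build()
{
    PATHNAME f;
    string out[ 1 ];
    string_new( out );
    path_parse( "dir/name.cpp", &f );
    f.f_suffix.ptr = ".o";    /* swap the suffix part */
    f.f_suffix.len = 2;
    path_build( &f, out );    /* out->value is now "dir/name.o" */
    string_free( out );
}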
+ */ + +string const * path_tmpdir() +{ + static string buffer[ 1 ]; + static int have_result; + if ( !have_result ) + { + string_new( buffer ); + path_get_temp_path_( buffer ); + have_result = 1; + } + return buffer; +} + + +/* + * path_tmpnam() - returns a new temporary name + */ + +OBJECT * path_tmpnam( void ) +{ + char name_buffer[ 64 ]; + unsigned long const pid = path_get_process_id_(); + static unsigned long t; + if ( !t ) t = time( 0 ) & 0xffff; + t += 1; + sprintf( name_buffer, "jam%lx%lx.000", pid, t ); + return object_new( name_buffer ); +} + + +/* + * path_tmpfile() - returns a new temporary path + */ + +OBJECT * path_tmpfile( void ) +{ + OBJECT * result; + OBJECT * tmpnam; + + string file_path[ 1 ]; + string_copy( file_path, path_tmpdir()->value ); + string_push_back( file_path, PATH_DELIM ); + tmpnam = path_tmpnam(); + string_append( file_path, object_str( tmpnam ) ); + object_free( tmpnam ); + result = object_new( file_path->value ); + string_free( file_path ); + + return result; +} + + +/* + * path_translate_to_os() - translate filename to OS-native path + * + */ + +int path_translate_to_os( char const * f, string * file ) +{ + return path_translate_to_os_( f, file ); +} + + +std::string b2::paths::normalize(const std::string &p) +{ + // We root the path as a sentinel. But we need to remember that we did so + // to un-root afterwards. + std::string result{"/"}; + bool is_rooted = p[0] == '/' || p[0] == '\\'; + result += p; + + // Convert \ into /. On Windows, paths using / and \ are equivalent, and we + // want this function to obtain a canonic representation. + std::replace(result.begin(), result.end(), '\\', '/'); + + int32_t ellipsis = 0; + for (auto end_pos = result.length(); end_pos > 0; ) + { + auto path_pos = result.rfind('/', end_pos-1); + if (path_pos == std::string::npos) break; + if (path_pos == end_pos-1) + { + /* Found a trailing or duplicate '/'. Remove it. */ + result.erase(path_pos, 1); + } + else if ((end_pos-path_pos == 2) && result[path_pos+1] == '.') + { + /* Found '/.'. Remove them all. */ + result.erase(path_pos, 2); + } + else if ((end_pos-path_pos == 3) && result[path_pos+1] == '.' && result[path_pos+2] == '.') + { + /* Found '/..'. Remove them all. */ + result.erase(path_pos, 3); + ellipsis += 1; + } + else if (ellipsis > 0) + { + /* An elided parent path. Remove it. */ + result.erase(path_pos, end_pos-path_pos); + ellipsis -= 1; + } + end_pos = path_pos; + } + + // Now we know that we need to add exactly ellipsis '..' path elements to the + // front and that our string is either empty or has a '/' as its first + // significant character. If we have any ellipsis remaining then the passed + // path must not have been rooted or else it is invalid we return empty. + if (ellipsis > 0) + { + if (is_rooted) return ""; + do result.insert(0, "/.."); while (--ellipsis > 0); + } + + // If we reduced to nothing we return a valid path depending on wether + // the input was rooted or not. + if (result.empty()) return is_rooted ? "/" : "."; + // Return the result without the sentinel if it's not rooted. + if (!is_rooted) return result.substr(1); + + return result; +} + + +/* + * executable_path() + */ + +#if defined(_WIN32) +# define WIN32_LEAN_AND_MEAN +# include +char * executable_path( char const * argv0 ) +{ + char buf[ 1024 ]; + DWORD const ret = GetModuleFileNameA( NULL, buf, sizeof( buf ) ); + return ( !ret || ret == sizeof( buf ) ) ? 
NULL : strdup( buf ); +} +#elif defined(__APPLE__) /* Not tested */ +# include +char *executable_path( char const * argv0 ) +{ + char buf[ 1024 ]; + uint32_t size = sizeof( buf ); + return _NSGetExecutablePath( buf, &size ) ? NULL : strdup( buf ); +} +#elif defined(sun) || defined(__sun) /* Not tested */ +# include +char * executable_path( char const * argv0 ) +{ + const char * execname = getexecname(); + return execname ? strdup( execname ) : NULL; +} +#elif defined(__FreeBSD__) || defined(__FreeBSD_kernel__) +# include +char * executable_path( char const * argv0 ) +{ + int mib[ 4 ] = { CTL_KERN, KERN_PROC, KERN_PROC_PATHNAME, -1 }; + char buf[ 1024 ]; + size_t size = sizeof( buf ); + sysctl( mib, 4, buf, &size, NULL, 0 ); + return ( !size || size == sizeof( buf ) ) ? NULL : strndup( buf, size ); +} +#elif defined(__linux__) || defined(__CYGWIN__) || defined(__GNU__) +# include +char * executable_path( char const * argv0 ) +{ + char buf[ 1024 ]; + ssize_t const ret = readlink( "/proc/self/exe", buf, sizeof( buf ) ); + return ( !ret || ret == sizeof( buf ) ) ? NULL : strndup( buf, ret ); +} +#elif defined(OS_VMS) +# include +char * executable_path( char const * argv0 ) +{ + char * vms_path = NULL; + char * posix_path = NULL; + char * p; + + /* On VMS argv[0] shows absolute path to the image file. + * So, just remove VMS file version and translate path to POSIX-style. + */ + vms_path = strdup( argv0 ); + if ( vms_path && ( p = strchr( vms_path, ';') ) ) *p = '\0'; + posix_path = decc$translate_vms( vms_path ); + if ( vms_path ) free( vms_path ); + + return posix_path > 0 ? strdup( posix_path ) : NULL; +} +#else +char * executable_path( char const * argv0 ) +{ + char * result = nullptr; + /* If argv0 is an absolute path, assume it is the right absolute path. */ + if (!result && b2::paths::is_rooted(argv0)) + result = strdup( argv0 ); + // If argv0 is a relative path, we can compute the absolute one from the + // current working dir. + if (!result && b2::paths::is_relative(argv0)) + { + auto p = b2::paths::normalize(b2::cwd_str()+"/"+argv0); + result = strdup( p.c_str() ); + } + // If it's a bare basename, search the PATH for a match. + if (!result) + { + std::string path_env = getenv( "PATH" ); + std::string::size_type i = 0; + while (i != std::string::npos) + { + std::string::size_type e = path_env.find_first_of(':', i); + std::string p = e == std::string::npos + ? path_env.substr(i) + : path_env.substr(i, e-i); + if (b2::filesys::is_file(p+"/"+argv0)) + { + result = strdup( (p+"/"+argv0).c_str() ); + break; + } + i = e == std::string::npos ? e : e+1; + } + } + return result; +} +#endif diff --git a/src/boost/tools/build/src/engine/pathsys.h b/src/boost/tools/build/src/engine/pathsys.h new file mode 100644 index 000000000..03ac403fa --- /dev/null +++ b/src/boost/tools/build/src/engine/pathsys.h @@ -0,0 +1,123 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* +Copyright 2020 RenĂ© Ferdinand Rivera Morell +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + + +/* + * pathsys.h - PATHNAME struct + */ + +/* + * PATHNAME - a name of a file, broken into dir/base/suffix(member) + * + * - salt to distinguish between targets that would otherwise have the + * same name - it never appears in the bound name of a target. 
+ * + * (member) - archive member name: the syntax is arbitrary, but must agree in + * path_parse(), path_build(). + */ + +#ifndef PATHSYS_VP_20020211_H +#define PATHSYS_VP_20020211_H + +#include "config.h" +#include "object.h" +#include "jam_strings.h" + +#include + + +typedef struct _pathpart +{ + char const * ptr; + int32_t len; +} PATHPART; + +typedef struct _pathname +{ + PATHPART part[ 6 ]; + +#define f_grist part[ 0 ] +#define f_root part[ 1 ] +#define f_dir part[ 2 ] +#define f_base part[ 3 ] +#define f_suffix part[ 4 ] +#define f_member part[ 5 ] +} PATHNAME; + + +void path_build( PATHNAME *, string * file ); +void path_parse( char const * file, PATHNAME * ); +void path_parent( PATHNAME * ); +int path_translate_to_os( char const *, string * file ); + +/* Given a path, returns an object containing an equivalent path in canonical + * format that can be used as a unique key for that path. Equivalent paths such + * as a/b, A\B, and a\B on NT all yield the same key. + */ +OBJECT * path_as_key( OBJECT * path ); + +/* Called as an optimization when we know we have a path that is already in its + * canonical/long/key form. Avoids the need for some subsequent path_as_key() + * call to do a potentially expensive path conversion requiring access to the + * actual underlying file system. + */ +void path_register_key( OBJECT * canonic_path ); + +/* Returns a static pointer to the system dependent path to the temporary + * directory. NOTE: Does *not* include a trailing path separator. + */ +string const * path_tmpdir( void ); + +/* Returns a new temporary name. */ +OBJECT * path_tmpnam( void ); + +/* Returns a new temporary path. */ +OBJECT * path_tmpfile( void ); + +/* Give the first argument to 'main', return a full path to our executable. + * Returns null in the unlikely case it cannot be determined. Caller is + * responsible for freeing the string. + * + * Implemented in jam.c + */ +char * executable_path( char const * argv0 ); + +void path_done( void ); + +namespace b2 +{ + namespace paths + { + inline bool is_rooted(const std::string &p) + { + #if NT + return + (p.size() >= 1 && (p[0] == '/' || p[0] == '\\')) || + (p.size() >= 3 && p[1] == ':' && (p[2] == '/' || p[2] == '\\')); + #else + return + (p.size() >= 1 && (p[0] == '/' || p[0] == '\\')); + #endif + } + inline bool is_relative(const std::string &p) + { + return + (p.size() >= 3 && ( + (p[0] == '.' && p[1] == '.' && (p[2] == '/' || p[2] == '\\')) || + (p[0] == '.' && (p[1] == '/' || p[1] == '\\')) + )); + } + std::string normalize(const std::string &p); + } +} + +#endif diff --git a/src/boost/tools/build/src/engine/pathunix.cpp b/src/boost/tools/build/src/engine/pathunix.cpp new file mode 100644 index 000000000..1e7d1e5af --- /dev/null +++ b/src/boost/tools/build/src/engine/pathunix.cpp @@ -0,0 +1,91 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. 
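/* Illustrative sketch, not part of the original patch: expected results of
 * the helpers declared above, assuming a POSIX build (the NT branch of
 * is_rooted() also accepts "x:/"-style roots). 'example_paths' is a
 * hypothetical name and <cassert> is assumed to be available.
 */
static void example_paths()
{
    assert( b2::paths::normalize( "a/b/../c//./d" ) == "a/c/d" );
    assert( b2::paths::normalize( "../x" ) == "../x" );  /* unrooted ".." is kept */
    assert( b2::paths::is_rooted( "/usr/bin" ) );
    assert( b2::paths::is_relative( "./build" ) );
    assert( !b2::paths::is_relative( "build" ) );  /* only "./" and "../" prefixes count */
}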
+ * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * pathunix.c - UNIX specific path manipulation support + */ + +#include "jam.h" +#ifdef USE_PATHUNIX + +#include "pathsys.h" + +#include +#include /* needed for getpid() */ + + +/* + * path_get_process_id_() + */ + +unsigned long path_get_process_id_( void ) +{ + return getpid(); +} + + +/* + * path_get_temp_path_() + */ + +void path_get_temp_path_( string * buffer ) +{ + char const * t = getenv( "TMPDIR" ); + string_append( buffer, t ? t : "/tmp" ); +} + + +/* + * path_translate_to_os_() + */ + +int path_translate_to_os_( char const * f, string * file ) +{ + int translated = 0; + + /* by default, pass on the original path */ + string_copy( file, f ); + + return translated; +} + + +/* + * path_register_key() + */ + +void path_register_key( OBJECT * path ) +{ +} + + +/* + * path_as_key() + */ + +OBJECT * path_as_key( OBJECT * path ) +{ + return object_copy( path ); +} + + +/* + * path_done() + */ + +void path_done( void ) +{ +} + +#endif // USE_PATHUNIX diff --git a/src/boost/tools/build/src/engine/pathvms.cpp b/src/boost/tools/build/src/engine/pathvms.cpp new file mode 100644 index 000000000..76a1241ab --- /dev/null +++ b/src/boost/tools/build/src/engine/pathvms.cpp @@ -0,0 +1,254 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Copyright 2015 Artur Shepilko. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + + +/* + * pathvms.c - VMS-specific path manipulation support + * + * This implementation is based on POSIX-style path manipulation. + * + * VMS CTRL directly supports both POSIX- and native VMS-style path expressions, + * with the POSIX-to-VMS path translation performed internally by the same + * set of functions. For the most part such processing is transparent, with + * few differences mainly related to file-versions (in POSIX mode only the recent + * version is visible). + * + * This should allow us to some extent re-use pathunix.c implementation. + * + * Thus in jam-files the path references can also remain POSIX/UNIX-like on all + * levels EXCEPT in actions scope, where the path references must be translated + * to the native VMS-style. This approach is somewhat similar to jam CYGWIN + * handling. + * + * + * External routines: + * path_register_key() + * path_as_key() + * path_done() + * + * External routines called only via routines in pathsys.c: + * path_get_process_id_() + * path_get_temp_path_() + * path_translate_to_os_() + */ + + +#include "jam.h" + +#ifdef OS_VMS + +#include "pathsys.h" + +#include +#include +#include /* needed for getpid() */ +#include /* needed for decc$to_vms() */ + + +/* + * path_get_process_id_() + */ + +unsigned long path_get_process_id_( void ) +{ + return getpid(); +} + + +/* + * path_get_temp_path_() + */ + +void path_get_temp_path_( string * buffer ) +{ + char const * t = getenv( "TMPDIR" ); + string_append( buffer, t ? t : "/tmp" ); +} + + +/* + * translate_path_posix2vms() + * + * POSIX-to-VMS file specification translation: + * + * Translation is performed with decc$to_vms() CTRL routine (default decc$features) + * Some limitations apply: + * -- ODS-2 compliant file specs only (no spaces, punctuation chars etc.) 
+ * + * -- wild-cards are not allowed + * In general decc$to_vms() can expand the wildcard for existing files, + * yet it cannot retain wild-cards in translated spec. Use GLOB for this. + * + * -- rooted path must refer to an existing/defined device or root-dir + * (e.g. /defconcealed/dir/file.ext or /existingrootdir/dir/file.ext ) + * + * -- POSIX dir/no-type-file path ambiguity (e.g. dir/newsubdir vs. dir/newfile + * is handled as follows: + * + * 1) first try as directory: + * -- if translated (may be a dir): means the file-path has no .type/suffix + * -- if not translated, then it may be a file (has .type) OR invalid spec + * 2) then try as file: + * -- if translated and also is a dir -- check if such file exists (stat) + * -- if not translated, but is a dir -- return as dir + * + * NOTE: on VMS it's possible to have both a file and a dir of the same name + * appear in the same directory. In such case _directory_ intent is assumed. + * + * It's preferable to avoid such naming ambiguity in this context, so + * append an empty .type to specify a no-type file (eg. "filename.") + * + */ + + +static string * m_vmsfilespec = NULL; + +/* + * copy_vmsfilespec() - decc$to_vms action routine for matched filenames + */ + +static int copy_vmsfilespec( char * f, int type ) +{ + assert ( NULL != m_vmsfilespec && "Must be bound to a valid object" ); + + string_copy( m_vmsfilespec, f ); + + /* 0:Exit on first match (1:Process all) */ + return 0; +} + + +static int translate_path_posix2vms( string * path ) +{ + int translated = 0; + + string as_dir[ 1 ]; + string as_file[ 1 ]; + int dir_count; + int file_count; + + unsigned char is_dir; + unsigned char is_file; + unsigned char is_ambiguous; + + string_new( as_dir ); + string_new( as_file ); + + + m_vmsfilespec = as_dir; + + /* MATCH 0:do not allow wildcards, 0:allow directories (2:dir only) */ + dir_count = decc$to_vms( path->value, copy_vmsfilespec, 0, 2 ); + + + m_vmsfilespec = as_file; + + /* MATCH 0:do not allow wildcards, 0:allow directories (2:dir only) */ + file_count = decc$to_vms( path->value, copy_vmsfilespec, 0, 0 ); + + m_vmsfilespec = NULL; + + + translated = ( file_count || dir_count ); + + if ( file_count && dir_count ) + { + struct stat statbuf; + + /* use as_file only when exists AND as_dir does not exist + * otherwise use as_dir + */ + if ( stat(as_dir->value, &statbuf ) < 0 + && stat(as_file->value, &statbuf ) > 0 + && ( statbuf.st_mode & S_IFREG ) ) + { + string_truncate( path, 0 ); + string_append( path, as_file->value ); + } + else + { + string_truncate( path, 0 ); + string_append( path, as_dir->value ); + } + } + else if ( file_count ) + { + string_truncate( path, 0 ); + string_append( path, as_file->value ); + } + else if ( dir_count ) + { + string_truncate( path, 0 ); + string_append( path, as_dir->value ); + } + else + { + /* error: unable to translate path to native format */ + translated = 0; + } + + string_free( as_dir ); + string_free( as_file ); + + return translated; +} + + +/* + * path_translate_to_os_() + */ + +int path_translate_to_os_( char const * f, string * file ) +{ + int translated = 0; + + /* by default, pass on the original path */ + string_copy( file, f ); + + translated = translate_path_posix2vms( file ); + + return translated; +} + + +/* + * path_register_key() + */ + +void path_register_key( OBJECT * path ) +{ +} + + +/* + * path_as_key() + */ + +OBJECT * path_as_key( OBJECT * path ) +{ + return object_copy( path ); +} + + +/* + * path_done() + */ + +void path_done( void ) +{ +} + +#endif + diff --git 
a/src/boost/tools/build/src/engine/regexp.cpp b/src/boost/tools/build/src/engine/regexp.cpp new file mode 100644 index 000000000..dacae6252 --- /dev/null +++ b/src/boost/tools/build/src/engine/regexp.cpp @@ -0,0 +1,1330 @@ +/* + * regcomp and regexec -- regsub and regerror are elsewhere + * + * Copyright (c) 1986 by University of Toronto. + * Written by Henry Spencer. Not derived from licensed software. + * + * Permission is granted to anyone to use this software for any + * purpose on any computer system, and to redistribute it freely, + * subject to the following restrictions: + * + * 1. The author is not responsible for the consequences of use of + * this software, no matter how awful, even if they arise + * from defects in it. + * + * 2. The origin of this software must not be misrepresented, either + * by explicit claim or by omission. + * + * 3. Altered versions must be plainly marked as such, and must not + * be misrepresented as being the original software. + *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore, + *** hoptoad!gnu, on 27 Dec 1986, to add \n as an alternative to | + *** to assist in implementing egrep. + *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore, + *** hoptoad!gnu, on 27 Dec 1986, to add \< and \> for word-matching + *** as in BSD grep and ex. + *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore, + *** hoptoad!gnu, on 28 Dec 1986, to optimize characters quoted with \. + *** THIS IS AN ALTERED VERSION. It was altered by James A. Woods, + *** ames!jaw, on 19 June 1987, to quash a regcomp() redundancy. + *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald + *** seiwald@vix.com, on 28 August 1993, for use in jam. Regmagic.h + *** was moved into regexp.h, and the include of regexp.h now uses "'s + *** to avoid conflicting with the system regexp.h. Const, bless its + *** soul, was removed so it can compile everywhere. The declaration + *** of strchr() was in conflict on AIX, so it was removed (as it is + *** happily defined in string.h). + *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald + *** seiwald@perforce.com, on 20 January 2000, to use function prototypes. + * + * Beware that some of this code is subtly aware of the way operator precedence + * is structured in regular expressions. Serious changes in regular-expression + * syntax might require a total rethink. + */ + + +#include "jam.h" +#include "regexp.h" +#include "output.h" + +#include +#include +#ifndef ultrix +# include +#endif +#include + + +/* + * The "internal use only" fields in regexp.h are present to pass info from + * compile to execute that permits the execute phase to run lots faster on + * simple cases. They are: + : + * regstart char that must begin a match; '\0' if none obvious. + * reganch is the match anchored (at beginning-of-line only)? + * regmust string (pointer into program) that match must include, or NULL. + * regmlen length of regmust string. + * + * Regstart and reganch permit very fast decisions on suitable starting points + * for a match, cutting down the work a lot. Regmust permits fast rejection of + * lines that cannot possibly match. The regmust tests are costly enough that + * regcomp() supplies a regmust only if the r.e. contains something potentially + * expensive (at present, the only such thing detected is * or + at the start of + * the r.e., which can involve a lot of backup). Regmlen is supplied because the + * test in regexec() needs it and regcomp() is computing it anyway. 
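/* Illustrative sketch, not part of the original patch: the regmust fast
 * rejection described above, restated as a standalone check - before running
 * the full matcher, skip any subject string that cannot contain the literal
 * every match must include. 'contains_literal' is a hypothetical name.
 */
static bool contains_literal( char const * subject, char const * must,
    size_t mlen )
{
    for ( char const * s = subject;
        ( s = strchr( s, must[ 0 ] ) ) != nullptr; ++s )
        if ( strncmp( s, must, mlen ) == 0 )
            return true;   /* the mandatory literal is present */
    return false;          /* cannot possibly match; skip regexec() */
}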
+ */ + +/* + * Structure for regexp "program". This is essentially a linear encoding of a + * nondeterministic finite-state machine (aka syntax charts or "railroad normal + * form" in parsing technology). Each node is an opcode plus a "next" pointer, + * possibly plus an operand. "Next" pointers of all nodes except BRANCH + * implement concatenation; a "next" pointer with a BRANCH on both ends of it is + * connecting two alternatives. [Here we have one of the subtle syntax + * dependencies: an individual BRANCH, as opposed to a collection of them, is + * never concatenated with anything because of operator precedence.] The operand + * of some types of node is a literal string; for others, it is a node leading + * into a sub-FSM. In particular, the operand of a BRANCH node is the first node + * of the branch. [NB this is *not* a tree structure: the tail of the branch + * connects to the thing following the set of BRANCHes.] The opcodes are: + */ + +/* definition number opnd? meaning */ +#define END 0 /* no End of program. */ +#define BOL 1 /* no Match "" at beginning of line. */ +#define EOL 2 /* no Match "" at end of line. */ +#define ANY 3 /* no Match any one character. */ +#define ANYOF 4 /* str Match any character in this string. */ +#define ANYBUT 5 /* str Match any character not in this string. */ +#define BRANCH 6 /* node Match this alternative, or the next... */ +#define BACK 7 /* no Match "", "next" ptr points backward. */ +#define EXACTLY 8 /* str Match this string. */ +#define NOTHING 9 /* no Match empty string. */ +#define STAR 10 /* node Match this (simple) thing 0 or more times. */ +#define PLUS 11 /* node Match this (simple) thing 1 or more times. */ +#define WORDA 12 /* no Match "" at wordchar, where prev is nonword */ +#define WORDZ 13 /* no Match "" at nonwordchar, where prev is word */ +#define OPEN 20 /* no Mark this point in input as start of #n. */ + /* OPEN+1 is number 1, etc. */ +#define CLOSE 30 /* no Analogous to OPEN. */ + + +/* + * Opcode notes: + * + * BRANCH The set of branches constituting a single choice are hooked + * together with their "next" pointers, since precedence prevents + * anything being concatenated to any individual branch. The + * "next" pointer of the last BRANCH in a choice points to the + * thing following the whole choice. This is also where the + * final "next" pointer of each individual branch points; each + * branch starts with the operand node of a BRANCH node. + * + * BACK Normal "next" pointers all implicitly point forward; BACK + * exists to make loop structures possible. + * + * STAR,PLUS '?', and complex '*' and '+', are implemented as circular + * BRANCH structures using BACK. Simple cases (one character + * per match) are implemented with STAR and PLUS for speed + * and to minimize recursive plunges. + * + * OPEN,CLOSE ...are numbered at compile time. + */ + +/* + * A node is one char of opcode followed by two chars of "next" pointer. + * "Next" pointers are stored as two 8-bit pieces, high order first. The + * value is a positive offset from the opcode of the node containing it. + * An operand, if any, simply follows the node. (Note that much of the + * code generation knows about this implicit relationship.) + * + * Using two bytes for the "next" pointer is vast overkill for most things, + * but allows patterns to get big without disasters. + */ +#define OP(p) (*(p)) +#define NEXT(p) (((*((p)+1)&0377)<<8) + (*((p)+2)&0377)) +#define OPERAND(p) ((p) + 3) + +/* + * See regmagic.h for one further detail of program structure. 
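/* Illustrative sketch, not part of the original patch: decoding one node
 * with the layout described above - one opcode byte, a two-byte big-endian
 * "next" offset, then the operand. The hand-built buffer below is what an
 * EXACTLY node for the literal "foo" could look like.
 */
static void example_node_layout()
{
    char node[] = { (char)EXACTLY, 0, 7, 'f', 'o', 'o', '\0' };
    int const op = OP( node );                      /* EXACTLY */
    int const next = NEXT( node );                  /* 7: offset to the next node */
    char const * const operand = OPERAND( node );   /* "foo" */
    (void)op; (void)next; (void)operand;
}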
+ */ + + +/* + * Utility definitions. + */ +#ifndef CHARBITS +#define UCHARAT(p) ((int32_t)*(const unsigned char *)(p)) +#else +#define UCHARAT(p) ((int32_t)*(p)&CHARBITS) +#endif + +#define FAIL(m) { regerror(m); return(NULL); } +#define ISMULT(c) ((c) == '*' || (c) == '+' || (c) == '?') + +/* + * Flags to be passed up and down. + */ +#define HASWIDTH 01 /* Known never to match null string. */ +#define SIMPLE 02 /* Simple enough to be STAR/PLUS operand. */ +#define SPSTART 04 /* Starts with * or +. */ +#define WORST 0 /* Worst case. */ + +/* + * Global work variables for regcomp(). + */ +static char *regparse; /* Input-scan pointer. */ +static int32_t regnpar; /* () count. */ +static char regdummy; +static char *regcode; /* Code-emit pointer; ®dummy = don't. */ +static int32_t regsize; /* Code size. */ + +/* + * Forward declarations for regcomp()'s friends. + */ +#ifndef STATIC +#define STATIC static +#endif +STATIC char *reg( int32_t paren, int32_t *flagp ); +STATIC char *regbranch( int32_t *flagp ); +STATIC char *regpiece( int32_t *flagp ); +STATIC char *regatom( int32_t *flagp ); +STATIC char *regnode( int32_t op ); +STATIC char *regnext( char *p ); +STATIC void regc( int32_t b ); +STATIC void reginsert( char op, char *opnd ); +STATIC void regtail( char *p, char *val ); +STATIC void regoptail( char *p, char *val ); +#ifdef STRCSPN +STATIC int32_t strcspn(); +#endif + +/* + - regcomp - compile a regular expression into internal code + * + * We can't allocate space until we know how big the compiled form will be, + * but we can't compile it (and thus know how big it is) until we've got a + * place to put the code. So we cheat: we compile it twice, once with code + * generation turned off and size counting turned on, and once "for real". + * This also means that we don't allocate space until we are sure that the + * thing really will compile successfully, and we never have to move the + * code and thus invalidate pointers into it. (Note that it has to be in + * one piece because free() must be able to free it all.) + * + * Beware that the optimization-preparation code in here knows about some + * of the structure of the compiled regexp. + */ +regexp * +regcomp( const char *exp ) +{ + regexp *r; + char *scan; + char *longest; + int32_t len; + int32_t flags; + + if (exp == NULL) + FAIL("NULL argument"); + + /* First pass: determine size, legality. */ +#ifdef notdef + if (exp[0] == '.' && exp[1] == '*') exp += 2; /* aid grep */ +#endif + regparse = (char *)exp; + regnpar = 1; + regsize = 0; + regcode = ®dummy; + regc(MAGIC); + if (reg(0, &flags) == NULL) + return(NULL); + + /* Small enough for pointer-storage convention? */ + if (regsize >= 32767L) /* Probably could be 65535L. */ + FAIL("regexp too big"); + + /* Allocate space. */ + r = (regexp *)BJAM_MALLOC(sizeof(regexp) + regsize); + if (r == NULL) + FAIL("out of space"); + + /* Second pass: emit code. */ + regparse = (char *)exp; + regnpar = 1; + regcode = r->program; + regc(MAGIC); + if (reg(0, &flags) == NULL) + return(NULL); + + /* Dig out information for optimizations. */ + r->regstart = '\0'; /* Worst-case defaults. */ + r->reganch = 0; + r->regmust = NULL; + r->regmlen = 0; + scan = r->program+1; /* First BRANCH. */ + if (OP(regnext(scan)) == END) { /* Only one top-level choice. */ + scan = OPERAND(scan); + + /* Starting-point info. 
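/* Illustrative sketch, not part of the original patch: the two-pass scheme
 * regcomp() uses, reduced to its core. The same emit routine either counts
 * bytes (the &regdummy dry run of pass one) or stores them (pass two), so
 * the program can be allocated at exactly the right size and never moves.
 * 'Emitter' is a hypothetical type used only for this illustration.
 */
struct Emitter
{
    char * out = nullptr;   /* null -> dry run, mirroring regcode == &regdummy */
    size_t size = 0;
    void emit( char b ) { if ( out ) *out++ = b; else ++size; }
};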
*/ + if (OP(scan) == EXACTLY) + r->regstart = *OPERAND(scan); + else if (OP(scan) == BOL) + r->reganch++; + + /* + * If there's something expensive in the r.e., find the + * longest literal string that must appear and make it the + * regmust. Resolve ties in favor of later strings, since + * the regstart check works with the beginning of the r.e. + * and avoiding duplication strengthens checking. Not a + * strong reason, but sufficient in the absence of others. + */ + if (flags&SPSTART) { + longest = NULL; + len = 0; + for (; scan != NULL; scan = regnext(scan)) + if (OP(scan) == EXACTLY && static_cast(strlen(OPERAND(scan))) >= len) { + longest = OPERAND(scan); + len = static_cast(strlen(OPERAND(scan))); + } + r->regmust = longest; + r->regmlen = len; + } + } + + return(r); +} + +/* + - reg - regular expression, i.e. main body or parenthesized thing + * + * Caller must absorb opening parenthesis. + * + * Combining parenthesis handling with the base level of regular expression + * is a trifle forced, but the need to tie the tails of the branches to what + * follows makes it hard to avoid. + */ +static char * +reg( + int32_t paren, /* Parenthesized? */ + int32_t *flagp ) +{ + char *ret; + char *br; + char *ender; + int32_t parno = 0; + int32_t flags; + + *flagp = HASWIDTH; /* Tentatively. */ + + /* Make an OPEN node, if parenthesized. */ + if (paren) { + if (regnpar >= NSUBEXP) + FAIL("too many ()"); + parno = regnpar; + regnpar++; + ret = regnode(OPEN+parno); + } else + ret = NULL; + + /* Pick up the branches, linking them together. */ + br = regbranch(&flags); + if (br == NULL) + return(NULL); + if (ret != NULL) + regtail(ret, br); /* OPEN -> first. */ + else + ret = br; + if (!(flags&HASWIDTH)) + *flagp &= ~HASWIDTH; + *flagp |= flags&SPSTART; + while (*regparse == '|' || *regparse == '\n') { + regparse++; + br = regbranch(&flags); + if (br == NULL) + return(NULL); + regtail(ret, br); /* BRANCH -> BRANCH. */ + if (!(flags&HASWIDTH)) + *flagp &= ~HASWIDTH; + *flagp |= flags&SPSTART; + } + + /* Make a closing node, and hook it on the end. */ + ender = regnode((paren) ? CLOSE+parno : END); + regtail(ret, ender); + + /* Hook the tails of the branches to the closing node. */ + for (br = ret; br != NULL; br = regnext(br)) + regoptail(br, ender); + + /* Check for proper termination. */ + if (paren && *regparse++ != ')') { + FAIL("unmatched ()"); + } else if (!paren && *regparse != '\0') { + if (*regparse == ')') { + FAIL("unmatched ()"); + } else + FAIL("junk on end"); /* "Can't happen". */ + /* NOTREACHED */ + } + + return(ret); +} + +/* + - regbranch - one alternative of an | operator + * + * Implements the concatenation operator. + */ +static char * +regbranch( int32_t *flagp ) +{ + char *ret; + char *chain; + char *latest; + int32_t flags; + + *flagp = WORST; /* Tentatively. */ + + ret = regnode(BRANCH); + chain = NULL; + while (*regparse != '\0' && *regparse != ')' && + *regparse != '\n' && *regparse != '|') { + latest = regpiece(&flags); + if (latest == NULL) + return(NULL); + *flagp |= flags&HASWIDTH; + if (chain == NULL) /* First piece. */ + *flagp |= flags&SPSTART; + else + regtail(chain, latest); + chain = latest; + } + if (chain == NULL) /* Loop ran zero times. */ + (void) regnode(NOTHING); + + return(ret); +} + +/* + - regpiece - something followed by possible [*+?] + * + * Note that the branching code sequences used for ? 
and the general cases + * of * and + are somewhat optimized: they use the same NOTHING node as + * both the endmarker for their branch list and the body of the last branch. + * It might seem that this node could be dispensed with entirely, but the + * endmarker role is not redundant. + */ +static char * +regpiece( int32_t *flagp ) +{ + char *ret; + char op; + char *next; + int32_t flags; + + ret = regatom(&flags); + if (ret == NULL) + return(NULL); + + op = *regparse; + if (!ISMULT(op)) { + *flagp = flags; + return(ret); + } + + if (!(flags&HASWIDTH) && op != '?') + FAIL("*+ operand could be empty"); + *flagp = (op != '+') ? (WORST|SPSTART) : (WORST|HASWIDTH); + + if (op == '*' && (flags&SIMPLE)) + reginsert(STAR, ret); + else if (op == '*') { + /* Emit x* as (x&|), where & means "self". */ + reginsert(BRANCH, ret); /* Either x */ + regoptail(ret, regnode(BACK)); /* and loop */ + regoptail(ret, ret); /* back */ + regtail(ret, regnode(BRANCH)); /* or */ + regtail(ret, regnode(NOTHING)); /* null. */ + } else if (op == '+' && (flags&SIMPLE)) + reginsert(PLUS, ret); + else if (op == '+') { + /* Emit x+ as x(&|), where & means "self". */ + next = regnode(BRANCH); /* Either */ + regtail(ret, next); + regtail(regnode(BACK), ret); /* loop back */ + regtail(next, regnode(BRANCH)); /* or */ + regtail(ret, regnode(NOTHING)); /* null. */ + } else if (op == '?') { + /* Emit x? as (x|) */ + reginsert(BRANCH, ret); /* Either x */ + regtail(ret, regnode(BRANCH)); /* or */ + next = regnode(NOTHING); /* null. */ + regtail(ret, next); + regoptail(ret, next); + } + regparse++; + if (ISMULT(*regparse)) + FAIL("nested *?+"); + + return(ret); +} + +/* + - regatom - the lowest level + * + * Optimization: gobbles an entire sequence of ordinary characters so that + * it can turn them into a single node, which is smaller to store and + * faster to run. Backslashed characters are exceptions, each becoming a + * separate node; the code is simpler that way and it's not worth fixing. + */ +static char * +regatom( int32_t *flagp ) +{ + char *ret; + int32_t flags; + + *flagp = WORST; /* Tentatively. */ + + switch (*regparse++) { + /* FIXME: these chars only have meaning at beg/end of pat? */ + case '^': + ret = regnode(BOL); + break; + case '$': + ret = regnode(EOL); + break; + case '.': + ret = regnode(ANY); + *flagp |= HASWIDTH|SIMPLE; + break; + case '[': { + int32_t classr; + int32_t classend; + + if (*regparse == '^') { /* Complement of range. */ + ret = regnode(ANYBUT); + regparse++; + } else + ret = regnode(ANYOF); + if (*regparse == ']' || *regparse == '-') + regc(*regparse++); + while (*regparse != '\0' && *regparse != ']') { + if (*regparse == '-') { + regparse++; + if (*regparse == ']' || *regparse == '\0') + regc('-'); + else { + classr = UCHARAT(regparse-2)+1; + classend = UCHARAT(regparse); + if (classr > classend+1) + FAIL("invalid [] range"); + for (; classr <= classend; classr++) + regc(classr); + regparse++; + } + } else + regc(*regparse++); + } + regc('\0'); + if (*regparse != ']') + FAIL("unmatched []"); + regparse++; + *flagp |= HASWIDTH|SIMPLE; + } + break; + case '(': + ret = reg(1, &flags); + if (ret == NULL) + return(NULL); + *flagp |= flags&(HASWIDTH|SPSTART); + break; + case '\0': + case '|': + case '\n': + case ')': + FAIL("internal urp"); /* Supposed to be caught earlier. 
*/ + break; + case '?': + case '+': + case '*': + FAIL("?+* follows nothing"); + break; + case '\\': + switch (*regparse++) { + case '\0': + FAIL("trailing \\"); + break; + case '<': + ret = regnode(WORDA); + break; + case '>': + ret = regnode(WORDZ); + break; + /* FIXME: Someday handle \1, \2, ... */ + default: + /* Handle general quoted chars in exact-match routine */ + goto de_fault; + } + break; + de_fault: + default: + /* + * Encode a string of characters to be matched exactly. + * + * This is a bit tricky due to quoted chars and due to + * '*', '+', and '?' taking the SINGLE char previous + * as their operand. + * + * On entry, the char at regparse[-1] is going to go + * into the string, no matter what it is. (It could be + * following a \ if we are entered from the '\' case.) + * + * Basic idea is to pick up a good char in ch and + * examine the next char. If it's *+? then we twiddle. + * If it's \ then we frozzle. If it's other magic char + * we push ch and terminate the string. If none of the + * above, we push ch on the string and go around again. + * + * regprev is used to remember where "the current char" + * starts in the string, if due to a *+? we need to back + * up and put the current char in a separate, 1-char, string. + * When regprev is NULL, ch is the only char in the + * string; this is used in *+? handling, and in setting + * flags |= SIMPLE at the end. + */ + { + char *regprev; + char ch; + + regparse--; /* Look at cur char */ + ret = regnode(EXACTLY); + for ( regprev = 0 ; ; ) { + ch = *regparse++; /* Get current char */ + switch (*regparse) { /* look at next one */ + + default: + regc(ch); /* Add cur to string */ + break; + + case '.': case '[': case '(': + case ')': case '|': case '\n': + case '$': case '^': + case '\0': + /* FIXME, $ and ^ should not always be magic */ + magic: + regc(ch); /* dump cur char */ + goto done; /* and we are done */ + + case '?': case '+': case '*': + if (!regprev) /* If just ch in str, */ + goto magic; /* use it */ + /* End mult-char string one early */ + regparse = regprev; /* Back up parse */ + goto done; + + case '\\': + regc(ch); /* Cur char OK */ + switch (regparse[1]){ /* Look after \ */ + case '\0': + case '<': + case '>': + /* FIXME: Someday handle \1, \2, ... */ + goto done; /* Not quoted */ + default: + /* Backup point is \, scan * point is after it. */ + regprev = regparse; + regparse++; + continue; /* NOT break; */ + } + } + regprev = regparse; /* Set backup point */ + } + done: + regc('\0'); + *flagp |= HASWIDTH; + if (!regprev) /* One char? */ + *flagp |= SIMPLE; + } + break; + } + + return(ret); +} + +/* + - regnode - emit a node + */ +static char * /* Location. */ +regnode( int32_t op ) +{ + char *ret; + char *ptr; + + ret = regcode; + if (ret == ®dummy) { + regsize += 3; + return(ret); + } + + ptr = ret; + *ptr++ = op; + *ptr++ = '\0'; /* Null "next" pointer. */ + *ptr++ = '\0'; + regcode = ptr; + + return(ret); +} + +/* + - regc - emit (if appropriate) a byte of code + */ +static void +regc( int32_t b ) +{ + if (regcode != ®dummy) + *regcode++ = b; + else + regsize++; +} + +/* + - reginsert - insert an operator in front of already-emitted operand + * + * Means relocating the operand. + */ +static void +reginsert( + char op, + char *opnd ) +{ + char *src; + char *dst; + char *place; + + if (regcode == ®dummy) { + regsize += 3; + return; + } + + src = regcode; + regcode += 3; + dst = regcode; + while (src > opnd) + *--dst = *--src; + + place = opnd; /* Op node, where operand used to be. 
*/ + *place++ = op; + *place++ = '\0'; + *place++ = '\0'; +} + +/* + - regtail - set the next-pointer at the end of a node chain + */ +static void +regtail( + char *p, + char *val ) +{ + char *scan; + char *temp; + size_t offset; + + if (p == ®dummy) + return; + + /* Find last node. */ + scan = p; + for (;;) { + temp = regnext(scan); + if (temp == NULL) + break; + scan = temp; + } + + if (OP(scan) == BACK) + offset = scan - val; + else + offset = val - scan; + *(scan+1) = (offset>>8)&0377; + *(scan+2) = offset&0377; +} + +/* + - regoptail - regtail on operand of first argument; nop if operandless + */ + +static void +regoptail( + char *p, + char *val ) +{ + /* "Operandless" and "op != BRANCH" are synonymous in practice. */ + if (p == NULL || p == ®dummy || OP(p) != BRANCH) + return; + regtail(OPERAND(p), val); +} + +/* + * regexec and friends + */ + +/* + * Global work variables for regexec(). + */ +static const char *reginput; /* String-input pointer. */ +static const char *regbol; /* Beginning of input, for ^ check. */ +static const char **regstartp; /* Pointer to startp array. */ +static const char **regendp; /* Ditto for endp. */ + +/* + * Forwards. + */ +STATIC int32_t regtry( regexp *prog, const char *string ); +STATIC int32_t regmatch( char *prog ); +STATIC int32_t regrepeat( char *p ); + +#ifdef DEBUG +int32_t regnarrate = 0; +void regdump(); +STATIC char *regprop(); +#endif + +/* + - regexec - match a regexp against a string + */ +int32_t +regexec( + regexp *prog, + const char *string ) +{ + char *s; + + /* Be paranoid... */ + if (prog == NULL || string == NULL) { + regerror("NULL parameter"); + return(0); + } + + /* Check validity of program. */ + if (UCHARAT(prog->program) != MAGIC) { + regerror("corrupted program"); + return(0); + } + + /* If there is a "must appear" string, look for it. */ + if ( prog->regmust != NULL ) + { + s = (char *)string; + while ( ( s = strchr( s, prog->regmust[ 0 ] ) ) != NULL ) + { + if ( !strncmp( s, prog->regmust, prog->regmlen ) ) + break; /* Found it. */ + ++s; + } + if ( s == NULL ) /* Not present. */ + return 0; + } + + /* Mark beginning of line for ^ . */ + regbol = (char *)string; + + /* Simplest case: anchored match need be tried only once. */ + if ( prog->reganch ) + return regtry( prog, string ); + + /* Messy cases: unanchored match. */ + s = (char *)string; + if (prog->regstart != '\0') + /* We know what char it must start with. */ + while ((s = strchr(s, prog->regstart)) != NULL) { + if (regtry(prog, s)) + return(1); + s++; + } + else + /* We do not -- general case. */ + do { + if ( regtry( prog, s ) ) + return( 1 ); + } while ( *s++ != '\0' ); + + /* Failure. */ + return 0; +} + + +/* + * regtry() - try match at specific point. + */ + +static int32_t /* 0 failure, 1 success */ +regtry( + regexp *prog, + const char *string ) +{ + int32_t i; + const char * * sp; + const char * * ep; + + reginput = string; + regstartp = prog->startp; + regendp = prog->endp; + + sp = prog->startp; + ep = prog->endp; + for ( i = NSUBEXP; i > 0; --i ) + { + *sp++ = NULL; + *ep++ = NULL; + } + if ( regmatch( prog->program + 1 ) ) + { + prog->startp[ 0 ] = string; + prog->endp[ 0 ] = reginput; + return 1; + } + else + return 0; +} + + +/* + * regmatch() - main matching routine. + * + * Conceptually the strategy is simple: check to see whether the current node + * matches, call self recursively to see whether the rest matches, and then act + * accordingly. 
In practice we make some effort to avoid recursion, in + * particular by going through "ordinary" nodes (that do not need to know + * whether the rest of the match failed) by a loop instead of by recursion. + */ + +static int32_t /* 0 failure, 1 success */ +regmatch( char * prog ) +{ + char * scan; /* Current node. */ + char * next; /* Next node. */ + + scan = prog; +#ifdef DEBUG + if (scan != NULL && regnarrate) + err_printf("%s(\n", regprop(scan)); +#endif + while (scan != NULL) { +#ifdef DEBUG + if (regnarrate) + err_printf("%s...\n", regprop(scan)); +#endif + next = regnext(scan); + + switch (OP(scan)) { + case BOL: + if (reginput != regbol) + return(0); + break; + case EOL: + if (*reginput != '\0') + return(0); + break; + case WORDA: + /* Must be looking at a letter, digit, or _ */ + if ((!isalnum(*reginput)) && *reginput != '_') + return(0); + /* Prev must be BOL or nonword */ + if (reginput > regbol && + (isalnum(reginput[-1]) || reginput[-1] == '_')) + return(0); + break; + case WORDZ: + /* Must be looking at non letter, digit, or _ */ + if (isalnum(*reginput) || *reginput == '_') + return(0); + /* We don't care what the previous char was */ + break; + case ANY: + if (*reginput == '\0') + return(0); + reginput++; + break; + case EXACTLY: { + size_t len; + char *opnd; + + opnd = OPERAND(scan); + /* Inline the first character, for speed. */ + if (*opnd != *reginput) + return(0); + len = strlen(opnd); + if (len > 1 && strncmp(opnd, reginput, len) != 0) + return(0); + reginput += len; + } + break; + case ANYOF: + if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) == NULL) + return(0); + reginput++; + break; + case ANYBUT: + if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) != NULL) + return(0); + reginput++; + break; + case NOTHING: + break; + case BACK: + break; + case OPEN+1: + case OPEN+2: + case OPEN+3: + case OPEN+4: + case OPEN+5: + case OPEN+6: + case OPEN+7: + case OPEN+8: + case OPEN+9: { + int32_t no; + const char *save; + + no = OP(scan) - OPEN; + save = reginput; + + if (regmatch(next)) { + /* + * Don't set startp if some later + * invocation of the same parentheses + * already has. + */ + if (regstartp[no] == NULL) + regstartp[no] = save; + return(1); + } else + return(0); + } + break; + case CLOSE+1: + case CLOSE+2: + case CLOSE+3: + case CLOSE+4: + case CLOSE+5: + case CLOSE+6: + case CLOSE+7: + case CLOSE+8: + case CLOSE+9: { + int32_t no; + const char *save; + + no = OP(scan) - CLOSE; + save = reginput; + + if (regmatch(next)) { + /* + * Don't set endp if some later + * invocation of the same parentheses + * already has. + */ + if (regendp[no] == NULL) + regendp[no] = save; + return(1); + } else + return(0); + } + break; + case BRANCH: { + const char *save; + + if (OP(next) != BRANCH) /* No choice. */ + next = OPERAND(scan); /* Avoid recursion. */ + else { + do { + save = reginput; + if (regmatch(OPERAND(scan))) + return(1); + reginput = save; + scan = regnext(scan); + } while (scan != NULL && OP(scan) == BRANCH); + return(0); + /* NOTREACHED */ + } + } + break; + case STAR: + case PLUS: { + char nextch; + int32_t no; + const char *save; + int32_t min; + + /* + * Lookahead to avoid useless match attempts + * when we know what character comes next. + */ + nextch = '\0'; + if (OP(next) == EXACTLY) + nextch = *OPERAND(next); + min = (OP(scan) == STAR) ? 0 : 1; + save = reginput; + no = regrepeat(OPERAND(scan)); + while (no >= min) { + /* If it could work, try it. 
*/ + if (nextch == '\0' || *reginput == nextch) + if (regmatch(next)) + return(1); + /* Couldn't or didn't -- back up. */ + no--; + reginput = save + no; + } + return(0); + } + break; + case END: + return(1); /* Success! */ + break; + default: + regerror("memory corruption"); + return(0); + break; + } + + scan = next; + } + + /* + * We get here only if there's trouble -- normally "case END" is + * the terminating point. + */ + regerror("corrupted pointers"); + return(0); +} + +/* + - regrepeat - repeatedly match something simple, report how many + */ +static int32_t +regrepeat( char *p ) +{ + int32_t count = 0; + const char *scan; + char *opnd; + + scan = reginput; + opnd = OPERAND(p); + switch (OP(p)) { + case ANY: + count = int32_t(strlen(scan)); + scan += count; + break; + case EXACTLY: + while (*opnd == *scan) { + count++; + scan++; + } + break; + case ANYOF: + while (*scan != '\0' && strchr(opnd, *scan) != NULL) { + count++; + scan++; + } + break; + case ANYBUT: + while (*scan != '\0' && strchr(opnd, *scan) == NULL) { + count++; + scan++; + } + break; + default: /* Oh dear. Called inappropriately. */ + regerror("internal foulup"); + count = 0; /* Best compromise. */ + break; + } + reginput = scan; + + return(count); +} + +/* + - regnext - dig the "next" pointer out of a node + */ +static char * +regnext( char *p ) +{ + int32_t offset; + + if (p == ®dummy) + return(NULL); + + offset = NEXT(p); + if (offset == 0) + return(NULL); + + if (OP(p) == BACK) + return(p-offset); + else + return(p+offset); +} + +#ifdef DEBUG + +STATIC char *regprop(); + +/* + - regdump - dump a regexp onto stdout in vaguely comprehensible form + */ +void +regdump( regexp *r ) +{ + char *s; + char op = EXACTLY; /* Arbitrary non-END op. */ + char *next; + + + s = r->program + 1; + while (op != END) { /* While that wasn't END last time... */ + op = OP(s); + out_printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */ + next = regnext(s); + if (next == NULL) /* Next ptr. */ + out_printf("(0)"); + else + out_printf("(%d)", (s-r->program)+(next-s)); + s += 3; + if (op == ANYOF || op == ANYBUT || op == EXACTLY) { + /* Literal string, where present. */ + while (*s != '\0') { + out_putc(*s); + s++; + } + s++; + } + out_putc('\n'); + } + + /* Header fields of interest. 
*/ + if (r->regstart != '\0') + out_printf("start `%c' ", r->regstart); + if (r->reganch) + out_printf("anchored "); + if (r->regmust != NULL) + out_printf("must have \"%s\"", r->regmust); + out_printf("\n"); +} + +/* + - regprop - printable representation of opcode + */ +static char * +regprop( char *op ) +{ + char *p; + static char buf[50]; + + (void) strcpy(buf, ":"); + + switch (OP(op)) { + case BOL: + p = "BOL"; + break; + case EOL: + p = "EOL"; + break; + case ANY: + p = "ANY"; + break; + case ANYOF: + p = "ANYOF"; + break; + case ANYBUT: + p = "ANYBUT"; + break; + case BRANCH: + p = "BRANCH"; + break; + case EXACTLY: + p = "EXACTLY"; + break; + case NOTHING: + p = "NOTHING"; + break; + case BACK: + p = "BACK"; + break; + case END: + p = "END"; + break; + case OPEN+1: + case OPEN+2: + case OPEN+3: + case OPEN+4: + case OPEN+5: + case OPEN+6: + case OPEN+7: + case OPEN+8: + case OPEN+9: + sprintf(buf+strlen(buf), "OPEN%d", OP(op)-OPEN); + p = NULL; + break; + case CLOSE+1: + case CLOSE+2: + case CLOSE+3: + case CLOSE+4: + case CLOSE+5: + case CLOSE+6: + case CLOSE+7: + case CLOSE+8: + case CLOSE+9: + sprintf(buf+strlen(buf), "CLOSE%d", OP(op)-CLOSE); + p = NULL; + break; + case STAR: + p = "STAR"; + break; + case PLUS: + p = "PLUS"; + break; + case WORDA: + p = "WORDA"; + break; + case WORDZ: + p = "WORDZ"; + break; + default: + regerror("corrupted opcode"); + break; + } + if (p != NULL) + (void) strcat(buf, p); + return(buf); +} +#endif + +/* + * The following is provided for those people who do not have strcspn() in + * their C libraries. They should get off their butts and do something + * about it; at least one public-domain implementation of those (highly + * useful) string routines has been published on Usenet. + */ +#ifdef STRCSPN +/* + * strcspn - find length of initial segment of s1 consisting entirely + * of characters not from s2 + */ + +static int32_t +strcspn( + char *s1, + char *s2 ) +{ + char *scan1; + char *scan2; + int32_t count; + + count = 0; + for (scan1 = s1; *scan1 != '\0'; scan1++) { + for (scan2 = s2; *scan2 != '\0';) /* ++ moved down. */ + if (*scan1 == *scan2++) + return(count); + count++; + } + return(count); +} +#endif diff --git a/src/boost/tools/build/src/engine/regexp.h b/src/boost/tools/build/src/engine/regexp.h new file mode 100644 index 000000000..bbac95eeb --- /dev/null +++ b/src/boost/tools/build/src/engine/regexp.h @@ -0,0 +1,36 @@ +/* + * Definitions etc. for regexp(3) routines. + * + * Caveat: this is V8 regexp(3) [actually, a reimplementation thereof], + * not the System V one. + */ +#ifndef REGEXP_DWA20011023_H +#define REGEXP_DWA20011023_H + +#include "config.h" + +#define NSUBEXP 10 +typedef struct regexp { + char const * startp[ NSUBEXP ]; + char const * endp[ NSUBEXP ]; + char regstart; /* Internal use only. */ + char reganch; /* Internal use only. */ + char * regmust; /* Internal use only. */ + int32_t regmlen; /* Internal use only. */ + char program[ 1 ]; /* Unwarranted chumminess with compiler. */ +} regexp; + + +regexp * regcomp( char const * exp ); +int32_t regexec( regexp * prog, char const * string ); +void regerror( char const * s ); + + +/* + * The first byte of the regexp internal "program" is actually this magic + * number; the start node begins in the second byte. 
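+ *
+ * Purely illustrative, hypothetical use of this interface; the pattern and
+ * subject string are made up, error handling is omitted, and releasing the
+ * compiled block with BJAM_FREE() assumes the engine's usual BJAM_MALLOC()
+ * allocation in regcomp():
+ *
+ *     regexp * re = regcomp( "b.*d" );
+ *     if ( re != NULL && regexec( re, "abcde" ) )
+ *     {
+ *         // startp[ 0 ] / endp[ 0 ] bracket the overall match ("bcd" here),
+ *         // startp[ 1..9 ] / endp[ 1..9 ] bracket the () subexpressions.
+ *     }
+ *     BJAM_FREE( re );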
+ */ +#define MAGIC 0234 + +#endif + diff --git a/src/boost/tools/build/src/engine/rules.cpp b/src/boost/tools/build/src/engine/rules.cpp new file mode 100644 index 000000000..a4e2ad107 --- /dev/null +++ b/src/boost/tools/build/src/engine/rules.cpp @@ -0,0 +1,735 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * rules.c - access to RULEs, TARGETs, and ACTIONs + * + * External routines: + * bindrule() - return pointer to RULE, creating it if necessary. + * bindtarget() - return pointer to TARGET, creating it if necessary. + * touch_target() - mark a target to simulate being new. + * targetlist() - turn list of target names into a TARGET chain. + * targetentry() - add a TARGET to a chain of TARGETS. + * actionlist() - append to an ACTION chain. + * addsettings() - add a deferred "set" command to a target. + * pushsettings() - set all target specific variables. + * popsettings() - reset target specific variables to their pre-push values. + * freesettings() - delete a settings list. + * rules_done() - free RULE and TARGET tables. + */ + +#include "jam.h" +#include "rules.h" + +#include "hash.h" +#include "lists.h" +#include "object.h" +#include "output.h" +#include "parse.h" +#include "pathsys.h" +#include "search.h" +#include "variable.h" + + +static void set_rule_actions( rule_ptr, rule_actions_ptr ); +static void set_rule_body ( rule_ptr, function_ptr ); + +static struct hash * targethash = 0; + + +/* + * get_target_includes() - lazy creates a target's internal includes node + * + * The newly created node is not entered into the hash table as there should + * never be a need to bind them directly from a target names. If you want to + * access an internal includes node by name, first access the actual target and + * then read the internal includes node from there. + */ + +static target_ptr get_target_includes( target_ptr const t ) +{ + if ( !t->includes ) + { + target_ptr const i = b2::jam::make_ptr<_target>(); + i->name = object_copy( t->name ); + i->boundname = object_copy( i->name ); + i->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL; + t->includes = i; + } + return t->includes; +} + + +/* + * target_include() - adds a target to the given targe's 'included' list + * target_include_many() - adds targets to the given target's 'included' list + * + * Included targets are modeled as dependencies of the including target's + * internal include node. + */ + +void target_include( target_ptr const including, target_ptr const included ) +{ + target_ptr const internal = get_target_includes( including ); + targetentry( internal->depends, included ); +} + +void target_include_many( target_ptr const including, list_ptr const included_names + ) +{ + target_ptr const internal = get_target_includes( including ); + targetlist( internal->depends, included_names ); +} + + +/* + * enter_rule() - return pointer to RULE, creating it if necessary in + * target_module. 
+ */ + +static rule_ptr enter_rule( object_ptr rulename, module_ptr target_module ) +{ + int found; + rule_ptr const r = (rule_ptr)hash_insert( demand_rules( target_module ), + rulename, &found ); + if ( !found ) + { + r->name = object_copy( rulename ); + r->procedure = 0; + r->module = 0; + r->actions = 0; + r->exported = 0; + r->module = target_module; + } + return r; +} + + +/* + * define_rule() - return pointer to RULE, creating it if necessary in + * target_module. Prepare it to accept a body or action originating in + * src_module. + */ + +static rule_ptr define_rule( module_ptr src_module, object_ptr rulename, + module_ptr target_module ) +{ + rule_ptr const r = enter_rule( rulename, target_module ); + if ( r->module != src_module ) + { + /* If the rule was imported from elsewhere, clear it now. */ + set_rule_body( r, 0 ); + set_rule_actions( r, 0 ); + /* r will be executed in the source module. */ + r->module = src_module; + } + return r; +} + + +void rule_free( rule_ptr r ) +{ + object_free( r->name ); + r->name = 0; + if ( r->procedure ) + function_free( r->procedure ); + r->procedure = 0; + if ( r->actions ) + actions_free( r->actions ); + r->actions = 0; +} + + +/* + * bindtarget() - return pointer to TARGET, creating it if necessary. + */ + +target_ptr bindtarget( object_ptr const target_name ) +{ + int found; + target_ptr t; + + if ( !targethash ) + targethash = hashinit( sizeof( TARGET ), "targets" ); + + t = (target_ptr)hash_insert( targethash, target_name, &found ); + if ( !found ) + { + b2::jam::ctor_ptr<_target>(t); + t->name = object_copy( target_name ); + t->boundname = object_copy( t->name ); /* default for T_FLAG_NOTFILE */ + } + + return t; +} + + +static void bind_explicitly_located_target( target_ptr t, void * ) +{ + if ( !( t->flags & T_FLAG_NOTFILE ) ) + { + /* Check if there is a setting for LOCATE. */ + settings_ptr s = t->settings; + for ( ; s ; s = s->next ) + { + if ( object_equal( s->symbol, constant_LOCATE ) && ! list_empty( s->value ) ) + { + set_explicit_binding( t->name, list_front( s->value ) ); + break; + } + } + } +} + + +void bind_explicitly_located_targets() +{ + if ( targethash ) + hash_enumerate( targethash, bind_explicitly_located_target ); +} + + +/* + * touch_target() - mark a target to simulate being new. + */ + +void touch_target( object_ptr const t ) +{ + bindtarget( t )->flags |= T_FLAG_TOUCHED; +} + + +/* + * target_scc() - returns the root of a strongly connected component that this + * target is a part of. + */ + +target_ptr target_scc( target_ptr t ) +{ + target_ptr result = t; + while ( result->scc_root ) + result = result->scc_root; + while ( t->scc_root ) + { + target_ptr const tmp = t->scc_root; + t->scc_root = result; + t = tmp; + } + return result; +} + + +/* + * targetlist() - turn list of target names into a TARGET chain. + * + * Inputs: + * chain existing TARGETS to append to + * targets list of target names + */ + +void targetlist( targets_uptr& chain, list_ptr target_names ) +{ + LISTITER iter = list_begin( target_names ); + LISTITER const end = list_end( target_names ); + for ( ; iter != end; iter = list_next( iter ) ) + targetentry( chain, bindtarget( list_item( iter ) ) ); +} + + +/* + * targetentry() - add a TARGET to a chain of TARGETS. 
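+ *
+ * The head node's 'tail' field caches the end of the chain (it is valid only
+ * on the head node), which keeps each append O(1).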
+ * + * Inputs: + * chain existing TARGETS to append to + * target new target to append + */ + +void targetentry( targets_uptr& chain, target_ptr target ) +{ + auto c = b2::jam::make_unique_jptr(); + c->target = target; + + targets_ptr tail = c.get(); + if ( !chain ) chain.reset(c.release()); + else chain->tail->next.reset(c.release()); + chain->tail = tail; +} + + +/* + * targetchain() - append two TARGET chains. + * + * Inputs: + * chain existing TARGETS to append to + * target new target to append + */ + +targets_uptr targetchain( targets_uptr chain, targets_uptr targets ) +{ + if ( !targets ) return chain; + if ( !chain ) return targets; + + targets_ptr tail = targets->tail; + chain->tail->next = std::move(targets); + chain->tail = tail; + return chain; +} + +/* + * targets_pop() - removes the first TARGET from the chain. + */ + +targets_uptr targets_pop(targets_uptr chain) +{ + targets_uptr result; + if ( chain && chain->next ) + { + chain->next->tail = chain->tail; + result = std::move( chain->next ); + } + return result; +} + +/* + * action_free - decrement the ACTIONs reference count and (maybe) free it. + */ + +void action_free( action_ptr action ) +{ + if ( --action->refs == 0 ) + { + b2::jam::free_ptr(action); + } +} + + +/* + * actionlist() - append to an ACTION chain. + */ + +actions_ptr actionlist( actions_ptr chain, action_ptr action ) +{ + actions_ptr const actions = (actions_ptr)BJAM_MALLOC( sizeof( ACTIONS ) ); + actions->action = action; + ++action->refs; + if ( !chain ) chain = actions; + else chain->tail->next = actions; + chain->tail = actions; + actions->next = 0; + return chain; +} + +static settings_ptr settings_freelist; + + +/* + * addsettings() - add a deferred "set" command to a target. + * + * Adds a variable setting (varname=list) onto a chain of settings for a + * particular target. 'flag' controls the relationship between new and old + * values in the same way as in var_set() function (see variable.c). Returns the + * head of the settings chain. + */ + +settings_ptr addsettings( settings_ptr head, int flag, object_ptr symbol, + list_ptr value ) +{ + settings_ptr v; + + /* Look for previous settings. */ + for ( v = head; v; v = v->next ) + if ( object_equal( v->symbol, symbol ) ) + break; + + /* If not previously set, alloc a new. */ + /* If appending, do so. */ + /* Else free old and set new. */ + if ( !v ) + { + v = settings_freelist; + if ( v ) + settings_freelist = v->next; + else + v = (settings_ptr)BJAM_MALLOC( sizeof( *v ) ); + + v->symbol = object_copy( symbol ); + v->value = value; + v->next = head; + head = v; + } + else if ( flag == VAR_APPEND ) + { + v->value = list_append( v->value, value ); + } + else if ( flag != VAR_DEFAULT ) + { + list_free( v->value ); + v->value = value; + } + else + list_free( value ); + + /* Return (new) head of list. */ + return head; +} + + +/* + * pushsettings() - set all target specific variables. + */ + +void pushsettings( module_ptr module, settings_ptr v ) +{ + for ( ; v; v = v->next ) + v->value = var_swap( module, v->symbol, v->value ); +} + + +/* + * popsettings() - reset target specific variables to their pre-push values. + */ + +void popsettings( module_ptr module, settings_ptr v ) +{ + pushsettings( module, v ); /* just swap again */ +} + + +/* + * copysettings() - duplicate a settings list, returning the new copy. 
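+ * Each symbol is copied and each value list is duplicated via list_copy(), so
+ * the copy can be modified and freed independently of the original.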
+ */ + +settings_ptr copysettings( settings_ptr head ) +{ + settings_ptr copy = 0; + settings_ptr v; + for ( v = head; v; v = v->next ) + copy = addsettings( copy, VAR_SET, v->symbol, list_copy( v->value ) ); + return copy; +} + + +/* + * freeactions() - delete an action list. + */ + +void freeactions( actions_ptr chain ) +{ + while ( chain ) + { + actions_ptr const n = chain->next; + action_free( chain->action ); + BJAM_FREE( chain ); + chain = n; + } +} + + +/* + * freesettings() - delete a settings list. + */ + +void freesettings( settings_ptr v ) +{ + while ( v ) + { + settings_ptr const n = v->next; + object_free( v->symbol ); + list_free( v->value ); + v->next = settings_freelist; + settings_freelist = v; + v = n; + } +} + + +static void freetarget( target_ptr const t, void * ) +{ + if ( t->name ) object_free ( t->name ); + if ( t->boundname ) object_free ( t->boundname ); + if ( t->settings ) freesettings( t->settings ); + if ( t->depends ) t->depends.reset(); + if ( t->dependants ) t->dependants.reset(); + if ( t->parents ) t->parents.reset(); + if ( t->actions ) freeactions ( t->actions ); + if ( t->includes ) + { + freetarget( t->includes, (void *)0 ); + BJAM_FREE( t->includes ); + } + t->~_target(); +} + + +/* + * rules_done() - free RULE and TARGET tables. + */ + +void rules_done() +{ + if ( targethash ) + { + hash_enumerate( targethash, freetarget ); + hashdone( targethash ); + } + while ( settings_freelist ) + { + settings_ptr const n = settings_freelist->next; + BJAM_FREE( settings_freelist ); + settings_freelist = n; + } +} + + +/* + * actions_refer() - add a new reference to the given actions. + */ + +void actions_refer( rule_actions_ptr a ) +{ + ++a->reference_count; +} + + +/* + * actions_free() - release a reference to given actions. + */ + +void actions_free( rule_actions_ptr a ) +{ + if ( --a->reference_count <= 0 ) + { + function_free( a->command ); + list_free( a->bindlist ); + BJAM_FREE( a ); + } +} + + +/* + * set_rule_body() - set the argument list and procedure of the given rule. + */ + +static void set_rule_body( rule_ptr rule, function_ptr procedure ) +{ + if ( procedure ) + function_refer( procedure ); + if ( rule->procedure ) + function_free( rule->procedure ); + rule->procedure = procedure; +} + + +/* + * global_name() - given a rule, return the name for a corresponding rule in the + * global module. + */ + +static object_ptr global_rule_name( rule_ptr r ) +{ + if ( r->module == root_module() ) + return object_copy( r->name ); + + { + char name[ 4096 ] = ""; + if ( r->module->name ) + { + strncat( name, object_str( r->module->name ), sizeof( name ) - 1 ); + strncat( name, ".", sizeof( name ) - 1 ); + } + strncat( name, object_str( r->name ), sizeof( name ) - 1 ); + return object_new( name ); + } +} + + +/* + * global_rule() - given a rule, produce a corresponding entry in the global + * module. + */ + +static rule_ptr global_rule( rule_ptr r ) +{ + if ( r->module == root_module() ) + return r; + + { + object_ptr const name = global_rule_name( r ); + rule_ptr const result = define_rule( r->module, name, root_module() ); + object_free( name ); + return result; + } +} + + +/* + * new_rule_body() - make a new rule named rulename in the given module, with + * the given argument list and procedure. If exported is true, the rule is + * exported to the global module as modulename.rulename. 
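+ * For example, an exported rule "link" defined in module "gcc" becomes
+ * visible globally as "gcc.link".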
+ */ + +rule_ptr new_rule_body( module_ptr m, object_ptr rulename, function_ptr procedure, + int exported ) +{ + rule_ptr const local = define_rule( m, rulename, m ); + local->exported = exported; + set_rule_body( local, procedure ); + + /* Mark the procedure with the global rule name, regardless of whether the + * rule is exported. That gives us something reasonably identifiable that we + * can use, e.g. in profiling output. Only do this once, since this could be + * called multiple times with the same procedure. + */ + if ( !function_rulename( procedure ) ) + function_set_rulename( procedure, global_rule_name( local ) ); + + return local; +} + + +static void set_rule_actions( rule_ptr rule, rule_actions_ptr actions ) +{ + if ( actions ) + actions_refer( actions ); + if ( rule->actions ) + actions_free( rule->actions ); + rule->actions = actions; +} + + +static rule_actions_ptr actions_new( function_ptr command, list_ptr bindlist, + int flags ) +{ + rule_actions_ptr const result = (rule_actions_ptr)BJAM_MALLOC( sizeof( + rule_actions ) ); + function_refer( command ); + result->command = command; + result->bindlist = bindlist; + result->flags = flags; + result->reference_count = 0; + return result; +} + + +rule_ptr new_rule_actions( module_ptr m, object_ptr rulename, function_ptr command, + list_ptr bindlist, int flags ) +{ + rule_ptr const local = define_rule( m, rulename, m ); + rule_ptr const global = global_rule( local ); + set_rule_actions( local, actions_new( command, bindlist, flags ) ); + set_rule_actions( global, local->actions ); + return local; +} + + +/* + * Looks for a rule in the specified module, and returns it, if found. First + * checks if the rule is present in the module's rule table. Second, if the + * rule's name is in the form name1.name2 and name1 is in the list of imported + * modules, look in module 'name1' for rule 'name2'. + */ + +rule_ptr lookup_rule( object_ptr rulename, module_ptr m, int local_only ) +{ + rule_ptr r; + rule_ptr result = 0; + module_ptr original_module = m; + + if ( m->class_module ) + m = m->class_module; + + if ( m->rules && ( r = (rule_ptr)hash_find( m->rules, rulename ) ) ) + result = r; + else if ( !local_only && m->imported_modules ) + { + /* Try splitting the name into module and rule. */ + const char * p = strchr( object_str( rulename ), '.' ) ; + if ( p ) + { + /* Now, r->name keeps the module name, and p + 1 keeps the rule + * name. + */ + object_ptr rule_part = object_new( p + 1 ); + object_ptr module_part; + { + string buf[ 1 ]; + string_new( buf ); + string_append_range( buf, object_str( rulename ), p ); + module_part = object_new( buf->value ); + string_free( buf ); + } + if ( hash_find( m->imported_modules, module_part ) ) + result = lookup_rule( rule_part, bindmodule( module_part ), 1 ); + object_free( module_part ); + object_free( rule_part ); + } + } + + if ( result ) + { + if ( local_only && !result->exported ) + result = 0; + else if ( original_module != m ) + { + /* Lookup started in class module. We have found a rule in class + * module, which is marked for execution in that module, or in some + * instance. Mark it for execution in the instance where we started + * the lookup. 
+ */ + int const execute_in_class = result->module == m; + int const execute_in_some_instance = + result->module->class_module == m; + if ( execute_in_class || execute_in_some_instance ) + result->module = original_module; + } + } + + return result; +} + + +rule_ptr bindrule( object_ptr rulename, module_ptr m ) +{ + rule_ptr result = lookup_rule( rulename, m, 0 ); + if ( !result ) + result = lookup_rule( rulename, root_module(), 0 ); + /* We have only one caller, 'evaluate_rule', which will complain about + * calling an undefined rule. We could issue the error here, but we do not + * have the necessary information, such as frame. + */ + if ( !result ) + result = enter_rule( rulename, m ); + return result; +} + + +rule_ptr import_rule( rule_ptr source, module_ptr m, object_ptr name ) +{ + rule_ptr const dest = define_rule( source->module, name, m ); + set_rule_body( dest, source->procedure ); + set_rule_actions( dest, source->actions ); + return dest; +} + + +void rule_localize( rule_ptr rule, module_ptr m ) +{ + rule->module = m; + if ( rule->procedure ) + { + function_ptr procedure = function_unbind_variables( rule->procedure ); + function_refer( procedure ); + function_free( rule->procedure ); + rule->procedure = procedure; + } +} diff --git a/src/boost/tools/build/src/engine/rules.h b/src/boost/tools/build/src/engine/rules.h new file mode 100644 index 000000000..41e84c011 --- /dev/null +++ b/src/boost/tools/build/src/engine/rules.h @@ -0,0 +1,294 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2022 RenĂ© Ferdinand Rivera Morell + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * rules.h - targets, rules, and related information + * + * This file describes the structures holding the targets, rules, and related + * information accumulated by interpreting the statements of the jam files. + * + * The following are defined: + * + * RULE - a generic jam rule, the product of RULE and ACTIONS. + * ACTIONS - a chain of ACTIONs. + * ACTION - a RULE instance with targets and sources. + * SETTINGS - variables to set when executing a TARGET's ACTIONS. + * TARGETS - a chain of TARGETs. + * TARGET - an entity (e.g. a file) that can be built. 
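+ *
+ * TARGETS chains are owned through the targets_uptr smart-pointer alias
+ * defined below (see the _targets definition).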
+ */ + +#ifndef RULES_DWA_20011020_H +#define RULES_DWA_20011020_H + +#include "config.h" +#include "function.h" +#include "mem.h" +#include "modules.h" +#include "timestamp.h" +#include + +typedef struct _rule RULE; +typedef struct _target TARGET; +typedef struct _targets TARGETS; +typedef struct _action ACTION; +typedef struct _actions ACTIONS; +typedef struct _settings SETTINGS; + +typedef RULE* rule_ptr; +typedef TARGET* target_ptr; +typedef TARGETS* targets_ptr; +typedef ACTION* action_ptr; +typedef ACTIONS* actions_ptr; +typedef SETTINGS* settings_ptr; + +typedef RULE& rule_ref; +typedef TARGET& target_ref; +typedef TARGETS& targets_ref; +typedef ACTION& action_ref; +typedef ACTIONS& actions_ref; +typedef SETTINGS& settings_ref; + +using rule_uptr = b2::jam::unique_jptr<_rule>; +using target_uptr = b2::jam::unique_jptr<_target>; +using targets_uptr = b2::jam::unique_jptr<_targets>; +using action_uptr = b2::jam::unique_jptr<_action>; +using actions_uptr = b2::jam::unique_jptr<_actions>; +using settings_uptr = b2::jam::unique_jptr<_settings>; + +/* RULE - a generic jam rule, the product of RULE and ACTIONS. */ + +/* Build actions corresponding to a rule. */ +struct rule_actions { + int reference_count; + function_ptr command; /* command string from ACTIONS */ + list_ptr bindlist; + int flags; /* modifiers on ACTIONS */ +}; + +#define RULE_NEWSRCS 0x01 /* $(>) is updated sources only */ +#define RULE_TOGETHER 0x02 /* combine actions on single target */ +#define RULE_IGNORE 0x04 /* ignore return status of executes */ +#define RULE_QUIETLY 0x08 /* do not mention it unless verbose */ +#define RULE_PIECEMEAL 0x10 /* split exec so each $(>) is small */ +#define RULE_EXISTING 0x20 /* $(>) is pre-existing sources only */ + +typedef struct rule_actions* rule_actions_ptr; + +struct _rule { + object_ptr name; + function_ptr procedure; + rule_actions_ptr actions; /* build actions, or NULL for no actions */ + module_ptr module; /* module in which this rule is executed */ + int exported; /* nonzero if this rule is supposed to appear in + * the global module and be automatically + * imported into other modules + */ +}; + +/* ACTIONS - a chain of ACTIONs. */ +struct _actions { + actions_ptr next; + actions_ptr tail; /* valid only for head */ + action_ptr action; +}; + +/* ACTION - a RULE instance with targets and sources. */ +struct _action { + rule_ptr rule; + targets_uptr targets; + targets_uptr sources; /* aka $(>) */ + char running; /* has been started */ +#define A_INIT 0 +#define A_RUNNING_NOEXEC 1 +#define A_RUNNING 2 + int refs; + + /* WARNING: These variables are used to pass state required by make1cmds and + * are not valid anywhere else. + */ + void* first_cmd; /* Pointer to the first CMD created by this action */ + void* last_cmd; /* Pointer to the last CMD created by this action */ +}; + +/* SETTINGS - variables to set when executing a TARGET's ACTIONS. */ +struct _settings { + settings_ptr next; + object_ptr symbol; /* symbol name for var_set() */ + list_ptr value; /* symbol value for var_set() */ +}; + +/* TARGETS - a chain of TARGETs. */ +struct _targets { + targets_uptr next = nullptr; + targets_ptr tail = nullptr; /* valid only for head */ + target_ptr target = nullptr; + + ~_targets() + { + targets_uptr sink = std::move(next); + while ( sink ) sink = std::move(sink->next); + } +}; + +/* TARGET - an entity (e.g. a file) that can be built. 
*/ +struct _target { + object_ptr name; + object_ptr boundname; /* if search() relocates target */ + actions_ptr actions; /* rules to execute, if any */ + settings_ptr settings; /* variables to define */ + + targets_uptr depends; /* dependencies */ + targets_uptr dependants; /* the inverse of dependencies */ + targets_uptr rebuilds; /* targets that should be force-rebuilt + * whenever this one is + */ + target_ptr includes; /* internal includes node */ + + timestamp time; /* update time */ + timestamp leaf; /* update time of leaf sources */ + + short flags; /* status info */ + +#define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */ +#define T_FLAG_NOCARE 0x0002 /* NOCARE applied */ +#define T_FLAG_NOTFILE 0x0004 /* NOTFILE applied */ +#define T_FLAG_TOUCHED 0x0008 /* ALWAYS applied or -t target */ +#define T_FLAG_LEAVES 0x0010 /* LEAVES applied */ +#define T_FLAG_NOUPDATE 0x0020 /* NOUPDATE applied */ +#define T_FLAG_VISITED 0x0040 /* CWM: Used in debugging */ + +/* This flag has been added to support a new built-in rule named "RMBAD". It is + * used to force removal of outdated targets whose dependencies fail to build. + */ +#define T_FLAG_RMOLD 0x0080 /* RMBAD applied */ + +/* This flag was added to support a new built-in rule named "FAIL_EXPECTED" used + * to indicate that the result of running a given action should be inverted, + * i.e. ok <=> fail. Useful for launching certain test runs from a Jamfile. + */ +#define T_FLAG_FAIL_EXPECTED 0x0100 /* FAIL_EXPECTED applied */ + +#define T_FLAG_INTERNAL 0x0200 /* internal INCLUDES node */ + +/* Indicates that the target must be a file. Prevents matching non-files, like + * directories, when a target is searched. + */ +#define T_FLAG_ISFILE 0x0400 + +#define T_FLAG_PRECIOUS 0x0800 + + char binding; /* how target relates to a real file or + * folder + */ + +#define T_BIND_UNBOUND 0 /* a disembodied name */ +#define T_BIND_MISSING 1 /* could not find real file */ +#define T_BIND_PARENTS 2 /* using parent's timestamp */ +#define T_BIND_EXISTS 3 /* real file, timestamp valid */ + + char fate; /* make0()'s diagnosis */ + +#define T_FATE_INIT 0 /* nothing done to target */ +#define T_FATE_MAKING 1 /* make0(target) on stack */ + +#define T_FATE_STABLE 2 /* target did not need updating */ +#define T_FATE_NEWER 3 /* target newer than parent */ + +#define T_FATE_SPOIL 4 /* >= SPOIL rebuilds parents */ +#define T_FATE_ISTMP 4 /* unneeded temp target oddly present */ + +#define T_FATE_BUILD 5 /* >= BUILD rebuilds target */ +#define T_FATE_TOUCHED 5 /* manually touched with -t */ +#define T_FATE_REBUILD 6 +#define T_FATE_MISSING 7 /* is missing, needs updating */ +#define T_FATE_NEEDTMP 8 /* missing temp that must be rebuild */ +#define T_FATE_OUTDATED 9 /* is out of date, needs updating */ +#define T_FATE_UPDATE 10 /* deps updated, needs updating */ + +#define T_FATE_BROKEN 11 /* >= BROKEN ruins parents */ +#define T_FATE_CANTFIND 11 /* no rules to make missing target */ +#define T_FATE_CANTMAKE 12 /* can not find dependencies */ + + char progress; /* tracks make1() progress */ + +#define T_MAKE_INIT 0 /* make1(target) not yet called */ +#define T_MAKE_ONSTACK 1 /* make1(target) on stack */ +#define T_MAKE_ACTIVE 2 /* make1(target) in make1b() */ +#define T_MAKE_RUNNING 3 /* make1(target) running commands */ +#define T_MAKE_DONE 4 /* make1(target) done */ +#define T_MAKE_NOEXEC_DONE 5 /* make1(target) done with -n in effect */ + +#ifdef OPT_SEMAPHORE +#define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */ +#endif + + char status; /* exec_cmd() 
result */ + +#ifdef OPT_SEMAPHORE + target_ptr semaphore; /* used in serialization */ +#endif + + int asynccnt; /* child deps outstanding */ + targets_uptr parents; /* used by make1() for completion */ + target_ptr scc_root; /* used by make to resolve cyclic includes + */ + target_ptr rescanning; /* used by make0 to mark visited targets + * when rescanning + */ + int depth; /* The depth of the target in the make0 + * stack. + */ + char* cmds; /* type-punned command list */ + + char const* failed; +}; + +/* Action related functions. */ +void action_free(action_ptr); +actions_ptr actionlist(actions_ptr, action_ptr); +void freeactions(actions_ptr); +settings_ptr addsettings(settings_ptr, int flag, object_ptr symbol, list_ptr value); +void pushsettings(module_ptr, settings_ptr); +void popsettings(module_ptr, settings_ptr); +settings_ptr copysettings(settings_ptr); +void freesettings(settings_ptr); +void actions_refer(rule_actions_ptr); +void actions_free(rule_actions_ptr); + +/* Rule related functions. */ +rule_ptr bindrule(object_ptr rulename, module_ptr); +rule_ptr import_rule(rule_ptr source, module_ptr, object_ptr name); +void rule_localize(rule_ptr rule, module_ptr module); +rule_ptr new_rule_body(module_ptr, object_ptr rulename, function_ptr func, int exprt); +rule_ptr new_rule_actions(module_ptr, object_ptr rulename, function_ptr command, list_ptr bindlist, int flags); +void rule_free(rule_ptr); + +/* Target related functions. */ +void bind_explicitly_located_targets(); +target_ptr bindtarget(object_ptr const); +targets_uptr targetchain(targets_uptr, targets_uptr); +void targetentry(targets_uptr&, target_ptr); +void target_include(target_ptr const including, + target_ptr const included); +void target_include_many(target_ptr const including, + list_ptr const included_names); +void targetlist(targets_uptr&, list_ptr target_names); +void touch_target(object_ptr const); +void clear_includes(target_ptr); +target_ptr target_scc(target_ptr); +targets_uptr targets_pop(targets_uptr); + +/* Final module cleanup. */ +void rules_done(); + +#endif diff --git a/src/boost/tools/build/src/engine/scan.cpp b/src/boost/tools/build/src/engine/scan.cpp new file mode 100644 index 000000000..67948a095 --- /dev/null +++ b/src/boost/tools/build/src/engine/scan.cpp @@ -0,0 +1,738 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * scan.c - the jam yacc scanner + * + */ + +#include "jam.h" +#include "scan.h" +#include "output.h" + +#include "constants.h" +#include "jamgram.hpp" + + +struct keyword +{ + const char * word; + int type; +} keywords[] = +{ +#include "jamgramtab.h" + { 0, 0 } +}; + +typedef struct include include; +struct include +{ + include * next; /* next serial include file */ + char * string; /* pointer into current line */ + char * * strings; /* for yyfparse() -- text to parse */ + LISTITER pos; /* for yysparse() -- text to parse */ + LIST * list; /* for yysparse() -- text to parse */ + FILE * file; /* for yyfparse() -- file being read */ + OBJECT * fname; /* for yyfparse() -- file name */ + int line; /* line counter for error messages */ + char buf[ 512 ]; /* for yyfparse() -- line buffer */ +}; + +static include * incp = 0; /* current file; head of chain */ + +static int scanmode = SCAN_NORMAL; +static int anyerrors = 0; + + +static char * symdump( YYSTYPE * ); + +#define BIGGEST_TOKEN 10240 /* no single token can be larger */ + + +/* + * Set parser mode: normal, string, or keyword. 
+ */ + +int yymode( int n ) +{ + int result = scanmode; + scanmode = n; + return result; +} + + +void yyerror( char const * s ) +{ + /* We use yylval instead of incp to access the error location information as + * the incp pointer will already be reset to 0 in case the error occurred at + * EOF. + * + * The two may differ only if ran into an unexpected EOF or we get an error + * while reading a lexical token spanning multiple lines, e.g. a multi-line + * string literal or action body, in which case yylval location information + * will hold the information about where the token started while incp will + * hold the information about where reading it broke. + */ + out_printf( "%s:%d: %s at %s\n", object_str( yylval.file ), yylval.line, s, + symdump( &yylval ) ); + ++anyerrors; +} + + +int yyanyerrors() +{ + return anyerrors != 0; +} + + +void yyfparse( OBJECT * s ) +{ + include * i = (include *)BJAM_MALLOC( sizeof( *i ) ); + + /* Push this onto the incp chain. */ + i->string = (char*)""; + i->strings = 0; + i->file = 0; + i->fname = object_copy( s ); + i->line = 0; + i->next = incp; + incp = i; +} + + +void yysparse( OBJECT * name, const char * * lines ) +{ + yyfparse( name ); + incp->strings = (char * *)lines; +} + + +/* + * yyfdone() - cleanup after we're done parsing a file. + */ +void yyfdone( void ) +{ + include * const i = incp; + incp = i->next; + + /* Close file, free name. */ + if(i->file && (i->file != stdin)) + fclose(i->file); + object_free(i->fname); + BJAM_FREE((char *)i); +} + + +/* + * yyline() - read new line and return first character. + * + * Fabricates a continuous stream of characters across include files, returning + * EOF at the bitter end. + */ + +int yyline() +{ + include * const i = incp; + + if ( !incp ) + return EOF; + + /* Once we start reading from the input stream, we reset the include + * insertion point so that the next include file becomes the head of the + * list. + */ + + /* If there is more data in this line, return it. */ + if ( *i->string ) + return *i->string++; + + /* If we are reading from an internal string list, go to the next string. */ + if ( i->strings ) + { + if ( *i->strings ) + { + ++i->line; + i->string = *(i->strings++); + return *i->string++; + } + } + else + { + /* If necessary, open the file. */ + if ( !i->file ) + { + FILE * f = stdin; + if ( strcmp( object_str( i->fname ), "-" ) && !( f = fopen( object_str( i->fname ), "r" ) ) ) + errno_puts( object_str( i->fname ) ); + i->file = f; + } + + /* If there is another line in this file, start it. */ + if ( i->file && fgets( i->buf, sizeof( i->buf ), i->file ) ) + { + ++i->line; + i->string = i->buf; + return *i->string++; + } + } + + /* This include is done. Return EOF so yyparse() returns to + * parse_file(). + */ + + return EOF; +} + +/* This allows us to get an extra character of lookahead. + * There are a few places where we need to look ahead two + * characters and yyprev only guarantees a single character + * of putback. + */ +int yypeek() +{ + if ( *incp->string ) + { + return *incp->string; + } + else if ( incp->strings ) + { + if ( *incp->strings ) + return **incp->strings; + } + else if ( incp->file ) + { + /* Don't bother opening the file. yypeek is + * only used in special cases and never at the + * beginning of a file. + */ + int ch = fgetc( incp->file ); + if ( ch != EOF ) + ungetc( ch, incp->file ); + return ch; + } + return EOF; +} + +/* + * yylex() - set yylval to current token; return its type. 
+ * + * Macros to move things along: + * + * yychar() - return and advance character; invalid after EOF. + * yyprev() - back up one character; invalid before yychar(). + * + * yychar() returns a continuous stream of characters, until it hits the EOF of + * the current include file. + */ + +#define yychar() ( *incp->string ? *incp->string++ : yyline() ) +#define yyprev() ( incp->string-- ) + +static int use_new_scanner = 0; + +#define yystartkeyword() if(use_new_scanner) break; else token_warning() +#define yyendkeyword() if(use_new_scanner) break; else if ( 1 ) { expect_whitespace = 1; continue; } else (void)0 + +void do_token_warning() +{ + out_printf( "%s:%d: %s %s\n", object_str( yylval.file ), yylval.line, "Unescaped special character in", + symdump( &yylval ) ); +} + +#define token_warning() has_token_warning = 1 + +int yylex() +{ + int c; + char buf[ BIGGEST_TOKEN ]; + char * b = buf; + + if ( !incp ) + goto eof; + + /* Get first character (whitespace or of token). */ + c = yychar(); + + if ( scanmode == SCAN_STRING ) + { + /* If scanning for a string (action's {}'s), look for the closing brace. + * We handle matching braces, if they match. + */ + + int nest = 1; + + while ( ( c != EOF ) && ( b < buf + sizeof( buf ) ) ) + { + if ( c == '{' ) + ++nest; + + if ( ( c == '}' ) && !--nest ) + break; + + *b++ = c; + + c = yychar(); + + /* Turn trailing "\r\n" sequences into plain "\n" for Cygwin. */ + if ( ( c == '\n' ) && ( b[ -1 ] == '\r' ) ) + --b; + } + + /* We ate the ending brace -- regurgitate it. */ + if ( c != EOF ) + yyprev(); + + /* Check for obvious errors. */ + if ( b == buf + sizeof( buf ) ) + { + yyerror( "action block too big" ); + goto eof; + } + + if ( nest ) + { + yyerror( "unmatched {} in action block" ); + goto eof; + } + + *b = 0; + yylval.type = STRING; + yylval.string = object_new( buf ); + yylval.file = incp->fname; + yylval.line = incp->line; + } + else + { + char * b = buf; + struct keyword * k; + int inquote = 0; + int notkeyword; + int hastoken = 0; + int hasquote = 0; + int ingrist = 0; + int invarexpand = 0; + int expect_whitespace = 0; + int has_token_warning = 0; + + /* Eat white space. */ + for ( ; ; ) + { + /* Skip past white space. */ + while ( ( c != EOF ) && isspace( c ) ) + c = yychar(); + + /* Not a comment? */ + if ( c != '#' ) + break; + + c = yychar(); + if ( ( c != EOF ) && c == '|' ) + { + /* Swallow up block comment. */ + int c0 = yychar(); + int c1 = yychar(); + while ( ! ( c0 == '|' && c1 == '#' ) && ( c0 != EOF && c1 != EOF ) ) + { + c0 = c1; + c1 = yychar(); + } + c = yychar(); + } + else + { + /* Swallow up comment line. */ + while ( ( c != EOF ) && ( c != '\n' ) ) c = yychar(); + } + } + + /* c now points to the first character of a token. */ + if ( c == EOF ) + goto eof; + + yylval.file = incp->fname; + yylval.line = incp->line; + + /* While scanning the word, disqualify it for (expensive) keyword lookup + * when we can: $anything, "anything", \anything + */ + notkeyword = c == '$'; + + /* Look for white space to delimit word. "'s get stripped but preserve + * white space. \ protects next character. + */ + while + ( + ( c != EOF ) && + ( b < buf + sizeof( buf ) ) && + ( inquote || invarexpand || !isspace( c ) ) + ) + { + if ( expect_whitespace || ( isspace( c ) && ! inquote ) ) + { + token_warning(); + expect_whitespace = 0; + } + if ( !inquote && !invarexpand ) + { + if ( scanmode == SCAN_COND || scanmode == SCAN_CONDB ) + { + if ( hastoken && ( c == '=' || c == '<' || c == '>' || c == '!' 
|| c == '(' || c == ')' || c == '&' || c == '|' ) ) + { + /* Don't treat > as special if we started with a grist. */ + if ( ! ( scanmode == SCAN_CONDB && ingrist == 1 && c == '>' ) ) + { + yystartkeyword(); + } + } + else if ( c == '=' || c == '(' || c == ')' ) + { + *b++ = c; + c = yychar(); + yyendkeyword(); + } + else if ( c == '!' || ( scanmode == SCAN_COND && ( c == '<' || c == '>' ) ) ) + { + *b++ = c; + if ( ( c = yychar() ) == '=' ) + { + *b++ = c; + c = yychar(); + } + yyendkeyword(); + } + else if ( c == '&' || c == '|' ) + { + *b++ = c; + if ( yychar() == c ) + { + *b++ = c; + c = yychar(); + } + yyendkeyword(); + } + } + else if ( scanmode == SCAN_PARAMS ) + { + if ( c == '*' || c == '+' || c == '?' || c == '(' || c == ')' ) + { + if ( !hastoken ) + { + *b++ = c; + c = yychar(); + yyendkeyword(); + } + else + { + yystartkeyword(); + } + } + } + else if ( scanmode == SCAN_XASSIGN && ! hastoken ) + { + if ( c == '=' ) + { + *b++ = c; + c = yychar(); + yyendkeyword(); + } + else if ( c == '+' || c == '?' ) + { + if ( yypeek() == '=' ) + { + *b++ = c; + *b++ = yychar(); + c = yychar(); + yyendkeyword(); + } + } + } + else if ( scanmode == SCAN_NORMAL || scanmode == SCAN_ASSIGN ) + { + if ( c == '=' ) + { + if ( !hastoken ) + { + *b++ = c; + c = yychar(); + yyendkeyword(); + } + else + { + yystartkeyword(); + } + } + else if ( c == '+' || c == '?' ) + { + if ( yypeek() == '=' ) + { + if ( hastoken ) + { + yystartkeyword(); + } + else + { + *b++ = c; + *b++ = yychar(); + c = yychar(); + yyendkeyword(); + } + } + } + } + if ( scanmode != SCAN_CASE && ( c == ';' || c == '{' || c == '}' || + ( scanmode != SCAN_PARAMS && ( c == '[' || c == ']' ) ) ) ) + { + if ( ! hastoken ) + { + *b++ = c; + c = yychar(); + yyendkeyword(); + } + else + { + yystartkeyword(); + } + } + else if ( c == ':' ) + { + if ( ! hastoken ) + { + *b++ = c; + c = yychar(); + yyendkeyword(); + break; + } + else if ( hasquote ) + { + /* Special rules for ':' do not apply after we quote anything. */ + yystartkeyword(); + } + else if ( ingrist == 0 ) + { + int next = yychar(); + int is_win_path = 0; + int is_conditional = 0; + if ( next == '\\' ) + { + if( yypeek() == '\\' ) + { + is_win_path = 1; + } + } + else if ( next == '/' ) + { + is_win_path = 1; + } + yyprev(); + if ( is_win_path ) + { + /* Accept windows paths iff they are at the start or immediately follow a grist. 
*/ + if ( b > buf && isalpha( b[ -1 ] ) && ( b == buf + 1 || b[ -2 ] == '>' ) ) + { + is_win_path = 1; + } + else + { + is_win_path = 0; + } + } + if ( next == '<' ) + { + /* Accept conditionals only for tokens that start with "<" or "!<" */ + if ( ( (b > buf) && (buf[ 0 ] == '<') ) || + ( (b > (buf + 1)) && (buf[ 0 ] == '!') && (buf[ 1 ] == '<') )) + { + is_conditional = 1; + } + } + if ( !is_conditional && !is_win_path ) + { + yystartkeyword(); + } + } + } + } + hastoken = 1; + if ( c == '"' ) + { + /* begin or end " */ + inquote = !inquote; + hasquote = 1; + notkeyword = 1; + } + else if ( c != '\\' ) + { + if ( !invarexpand && c == '<' ) + { + if ( ingrist == 0 ) ingrist = 1; + else ingrist = -1; + } + else if ( !invarexpand && c == '>' ) + { + if ( ingrist == 1 ) ingrist = 0; + else ingrist = -1; + } + else if ( c == '$' ) + { + if ( ( c = yychar() ) == EOF ) + { + *b++ = '$'; + break; + } + else if ( c == '(' ) + { + /* inside $(), we only care about quotes */ + *b++ = '$'; + c = '('; + ++invarexpand; + } + else + { + c = '$'; + yyprev(); + } + } + else if ( c == '@' ) + { + if ( ( c = yychar() ) == EOF ) + { + *b++ = '@'; + break; + } + else if ( c == '(' ) + { + /* inside @(), we only care about quotes */ + *b++ = '@'; + c = '('; + ++invarexpand; + } + else + { + c = '@'; + yyprev(); + } + } + else if ( invarexpand && c == '(' ) + { + ++invarexpand; + } + else if ( invarexpand && c == ')' ) + { + --invarexpand; + } + /* normal char */ + *b++ = c; + } + else if ( ( c = yychar() ) != EOF ) + { + /* \c */ + if (c == 'n') + c = '\n'; + else if (c == 'r') + c = '\r'; + else if (c == 't') + c = '\t'; + *b++ = c; + notkeyword = 1; + } + else + { + /* \EOF */ + break; + } + + c = yychar(); + } + + /* Automatically switch modes after reading the token. */ + if ( scanmode == SCAN_CONDB ) + scanmode = SCAN_COND; + + /* Check obvious errors. */ + if ( b == buf + sizeof( buf ) ) + { + yyerror( "string too big" ); + goto eof; + } + + if ( inquote ) + { + yyerror( "unmatched \" in string" ); + goto eof; + } + + /* We looked ahead a character - back up. */ + if ( c != EOF ) + yyprev(); + + /* Scan token table. Do not scan if it is obviously not a keyword or if + * it is an alphabetic when were looking for punctuation. + */ + + *b = 0; + yylval.type = ARG; + + if ( !notkeyword && !( isalpha( *buf ) && ( scanmode == SCAN_PUNCT || scanmode == SCAN_PARAMS || scanmode == SCAN_ASSIGN ) ) ) + for ( k = keywords; k->word; ++k ) + if ( ( *buf == *k->word ) && !strcmp( k->word, buf ) ) + { + yylval.type = k->type; + yylval.keyword = k->word; /* used by symdump */ + break; + } + + if ( yylval.type == ARG ) + yylval.string = object_new( buf ); + + if ( scanmode == SCAN_NORMAL && yylval.type == ARG ) + scanmode = SCAN_XASSIGN; + + if ( has_token_warning ) + do_token_warning(); + } + + if ( DEBUG_SCAN ) + out_printf( "scan %s\n", symdump( &yylval ) ); + + return yylval.type; + +eof: + /* We do not reset yylval.file & yylval.line here so unexpected EOF error + * messages would include correct error location information. 
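+ * (The last read location also remains available to callers through
+ * yyinput_last_read_token().)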
+ */ + yylval.type = EOF; + return yylval.type; +} + + +static char * symdump( YYSTYPE * s ) +{ + static char buf[ BIGGEST_TOKEN + 20 ]; + switch ( s->type ) + { + case EOF : sprintf( buf, "EOF" ); break; + case 0 : sprintf( buf, "unknown symbol %s", object_str( s->string ) ); break; + case ARG : sprintf( buf, "argument %s" , object_str( s->string ) ); break; + case STRING: sprintf( buf, "string \"%s\"" , object_str( s->string ) ); break; + default : sprintf( buf, "keyword %s" , s->keyword ); break; + } + return buf; +} + + +/* + * Get information about the current file and line, for those epsilon + * transitions that produce a parse. + */ + +void yyinput_last_read_token( OBJECT * * name, int * line ) +{ + /* TODO: Consider whether and when we might want to report where the last + * read token ended, e.g. EOF errors inside string literals. + */ + *name = yylval.file; + *line = yylval.line; +} diff --git a/src/boost/tools/build/src/engine/scan.h b/src/boost/tools/build/src/engine/scan.h new file mode 100644 index 000000000..2ad736ad6 --- /dev/null +++ b/src/boost/tools/build/src/engine/scan.h @@ -0,0 +1,71 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * scan.h - the jam yacc scanner + * + * External functions: + * yyerror( char *s ) - print a parsing error message. + * yyfparse( char *s ) - scan include file s. + * yylex() - parse the next token, returning its type. + * yymode() - adjust lexicon of scanner. + * yyparse() - declaration for yacc parser. + * yyanyerrors() - indicate if any parsing errors occurred. + * + * The yymode() function is for the parser to adjust the lexicon of the scanner. + * Aside from normal keyword scanning, there is a mode to handle action strings + * (look only for the closing }) and a mode to ignore most keywords when looking + * for a punctuation keyword. This allows non-punctuation keywords to be used in + * lists without quoting. + */ + +#include "config.h" +#include "lists.h" +#include "object.h" +#include "parse.h" + + +/* + * YYSTYPE - value of a lexical token + */ + +#define YYSTYPE YYSYMBOL + +typedef struct _YYSTYPE +{ + int type; + OBJECT * string; + PARSE * parse; + LIST * list; + int number; + OBJECT * file; + int line; + char const * keyword; +} YYSTYPE; + +extern YYSTYPE yylval; + +int yymode( int n ); +void yyerror( char const * s ); +int yyanyerrors(); +void yyfparse( OBJECT * s ); +void yyfdone( void ); +void yysparse( OBJECT * name, const char * * lines ); +int yyline(); +int yylex(); +int yyparse(); +void yyinput_last_read_token( OBJECT * * name, int * line ); + +#define SCAN_NORMAL 0 /* normal parsing */ +#define SCAN_STRING 1 /* look only for matching } */ +#define SCAN_PUNCT 2 /* only punctuation keywords */ +#define SCAN_COND 3 /* look for operators that can appear in conditions. */ +#define SCAN_PARAMS 4 /* The parameters of a rule "()*?+" */ +#define SCAN_CALL 5 /* Inside a rule call. [].*/ +#define SCAN_CASE 6 /* A case statement. We only recognize ':' as special. */ +#define SCAN_CONDB 7 /* The beginning of a condition (ignores leading comparison operators, so that if in $(y) works.)*/ +#define SCAN_ASSIGN 8 /* The list may be terminated by an assignment operator. */ +#define SCAN_XASSIGN 9 /* The next token might be an assignment, but to token afterwards cannot. 
*/ diff --git a/src/boost/tools/build/src/engine/search.cpp b/src/boost/tools/build/src/engine/search.cpp new file mode 100644 index 000000000..8b916e102 --- /dev/null +++ b/src/boost/tools/build/src/engine/search.cpp @@ -0,0 +1,275 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "jam.h" +#include "search.h" + +#include "compile.h" +#include "filesys.h" +#include "hash.h" +#include "lists.h" +#include "object.h" +#include "pathsys.h" +#include "jam_strings.h" +#include "timestamp.h" +#include "variable.h" +#include "output.h" + +#include + + +typedef struct _binding +{ + OBJECT * binding; + OBJECT * target; +} BINDING; + +static struct hash * explicit_bindings = 0; + + +void call_bind_rule( OBJECT * target_, OBJECT * boundname_ ) +{ + LIST * const bind_rule = var_get( root_module(), constant_BINDRULE ); + if ( !list_empty( bind_rule ) ) + { + OBJECT * target = object_copy( target_ ); + OBJECT * boundname = object_copy( boundname_ ); + if ( boundname && target ) + { + /* Prepare the argument list. */ + FRAME frame[ 1 ]; + frame_init( frame ); + + /* First argument is the target name. */ + lol_add( frame->args, list_new( target ) ); + + lol_add( frame->args, list_new( boundname ) ); + if ( lol_get( frame->args, 1 ) ) + { + OBJECT * rulename = list_front( bind_rule ); + list_free( evaluate_rule( bindrule( rulename, root_module() ), rulename, frame ) ); + } + + /* Clean up */ + frame_free( frame ); + } + else + { + if ( boundname ) + object_free( boundname ); + if ( target ) + object_free( target ); + } + } +} + +/* Records the binding of a target with an explicit LOCATE. */ +void set_explicit_binding( OBJECT * target, OBJECT * locate ) +{ + OBJECT * boundname; + OBJECT * key; + PATHNAME f[ 1 ]; + string buf[ 1 ]; + int found; + BINDING * ba; + + if ( !explicit_bindings ) + explicit_bindings = hashinit( sizeof( BINDING ), "explicitly specified " + "locations" ); + + string_new( buf ); + + /* Parse the filename. */ + path_parse( object_str( target ), f ); + + /* Ignore the grist. */ + f->f_grist.ptr = 0; + f->f_grist.len = 0; + + /* Root the target path at the given location. */ + f->f_root.ptr = object_str( locate ); + f->f_root.len = int32_t(strlen( object_str( locate ) )); + + path_build( f, buf ); + boundname = object_new( buf->value ); + if ( DEBUG_SEARCH ) + out_printf( "explicit locate %s: %s\n", object_str( target ), buf->value ); + string_free( buf ); + key = path_as_key( boundname ); + object_free( boundname ); + + ba = (BINDING *)hash_insert( explicit_bindings, key, &found ); + if ( !found ) + { + ba->binding = key; + ba->target = target; + } + else + object_free( key ); +} + +/* + * search.c - find a target along $(SEARCH) or $(LOCATE). + * + * First, check if LOCATE is set. If so, use it to determine the location of + * target and return, regardless of whether anything exists at that location. + * + * Second, examine all directories in SEARCH. If the file exists there or there + * is another target with the same name already placed at this location via the + * LOCATE setting, stop and return the location. In case of a previous target, + * return its name via the 'another_target' argument. 
+ * + * This behaviour allows handling dependencies on generated files. + * + * If caller does not expect that the target is generated, 0 can be passed as + * 'another_target'. + */ + +OBJECT * search( OBJECT * target, timestamp * const time, + OBJECT * * another_target, int const file ) +{ + PATHNAME f[ 1 ]; + LIST * varlist; + string buf[ 1 ]; + int found = 0; + OBJECT * boundname = 0; + + if ( another_target ) + *another_target = 0; + + if ( !explicit_bindings ) + explicit_bindings = hashinit( sizeof( BINDING ), "explicitly specified " + "locations" ); + + string_new( buf ); + + /* Parse the filename. */ + path_parse( object_str( target ), f ); + + f->f_grist.ptr = 0; + f->f_grist.len = 0; + + varlist = var_get( root_module(), constant_LOCATE ); + if ( !list_empty( varlist ) ) + { + OBJECT * key; + f->f_root.ptr = object_str( list_front( varlist ) ); + f->f_root.len = int32_t(strlen( object_str( list_front( varlist ) ) )); + + path_build( f, buf ); + + if ( DEBUG_SEARCH ) + out_printf( "locate %s: %s\n", object_str( target ), buf->value ); + + key = object_new( buf->value ); + timestamp_from_path( time, key ); + object_free( key ); + found = 1; + } + else if ( varlist = var_get( root_module(), constant_SEARCH ), + !list_empty( varlist ) ) + { + LISTITER iter = list_begin( varlist ); + LISTITER const end = list_end( varlist ); + for ( ; iter != end; iter = list_next( iter ) ) + { + BINDING * ba; + file_info_t * ff; + OBJECT * key; + OBJECT * test_path; + + f->f_root.ptr = object_str( list_item( iter ) ); + f->f_root.len = int32_t(strlen( object_str( list_item( iter ) ) )); + + string_truncate( buf, 0 ); + path_build( f, buf ); + + if ( DEBUG_SEARCH ) + out_printf( "search %s: %s\n", object_str( target ), buf->value ); + + test_path = object_new( buf->value ); + key = path_as_key( test_path ); + object_free( test_path ); + ff = file_query( key ); + timestamp_from_path( time, key ); + + if ( ( ba = (BINDING *)hash_find( explicit_bindings, key ) ) ) + { + if ( DEBUG_SEARCH ) + out_printf(" search %s: found explicitly located target %s\n", + object_str( target ), object_str( ba->target ) ); + if ( another_target ) + *another_target = ba->target; + found = 1; + object_free( key ); + break; + } + else if ( ff ) + { + if ( !file || ff->is_file ) + { + found = 1; + object_free( key ); + break; + } + } + object_free( key ); + } + } + + if ( !found ) + { + /* Look for the obvious. */ + /* This is a questionable move. Should we look in the obvious place if + * SEARCH is set? + */ + OBJECT * key; + + f->f_root.ptr = 0; + f->f_root.len = 0; + + string_truncate( buf, 0 ); + path_build( f, buf ); + + if ( DEBUG_SEARCH ) + out_printf( "search %s: %s\n", object_str( target ), buf->value ); + + key = object_new( buf->value ); + timestamp_from_path( time, key ); + object_free( key ); + } + + boundname = object_new( buf->value ); + string_free( buf ); + + /* Prepare a call to BINDRULE if the variable is set. 
*/ + call_bind_rule( target, boundname ); + + return boundname; +} + + +static void free_binding( void * xbinding, void * data ) +{ + object_free( ( (BINDING *)xbinding )->binding ); +} + + +void search_done( void ) +{ + if ( explicit_bindings ) + { + hashenumerate( explicit_bindings, free_binding, 0 ); + hashdone( explicit_bindings ); + } +} diff --git a/src/boost/tools/build/src/engine/search.h b/src/boost/tools/build/src/engine/search.h new file mode 100644 index 000000000..80d69fa79 --- /dev/null +++ b/src/boost/tools/build/src/engine/search.h @@ -0,0 +1,23 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * search.h - find a target along $(SEARCH) or $(LOCATE) + */ + +#ifndef SEARCH_SW20111118_H +#define SEARCH_SW20111118_H + +#include "config.h" +#include "object.h" +#include "timestamp.h" + +void set_explicit_binding( OBJECT * target, OBJECT * locate ); +OBJECT * search( OBJECT * target, timestamp * const time, + OBJECT * * another_target, int const file ); +void search_done( void ); + +#endif diff --git a/src/boost/tools/build/src/engine/startup.cpp b/src/boost/tools/build/src/engine/startup.cpp new file mode 100644 index 000000000..6e9414ba2 --- /dev/null +++ b/src/boost/tools/build/src/engine/startup.cpp @@ -0,0 +1,270 @@ +/* +Copyright 2020 RenĂ© Ferdinand Rivera Morell +Distributed under the Boost Software License, Version 1.0. +(See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#include "startup.h" +#include "rules.h" +#include "frames.h" +#include "object.h" +#include "pathsys.h" +#include "cwd.h" +#include "filesys.h" +#include "output.h" +#include "variable.h" + +#include +#include +#include + +namespace +{ + void bind_builtin( + char const *name_, LIST *(*f)(FRAME *, int flags), + int flags, char const **args) + { + FUNCTION *func; + OBJECT *name = object_new(name_); + func = function_builtin(f, flags, args); + new_rule_body(root_module(), name, func, 1); + function_free(func); + object_free(name); + } +} // namespace + +void b2::startup::load_builtins() +{ + { + char const *args[] = {"dir", "?", 0}; + bind_builtin("boost-build", builtin_boost_build, 0, args); + } +} + +LIST *b2::startup::builtin_boost_build(FRAME *frame, int flags) +{ + b2::jam::list dir_arg{lol_get(frame->args, 0)}; + std::string dir; + if (!dir_arg.empty()) dir = b2::jam::object(*dir_arg.begin()); + + b2::jam::variable dot_bootstrap_file{".bootstrap-file"}; + if (dot_bootstrap_file) + { + err_printf( + "Error: Illegal attempt to re-bootstrap the build system by invoking\n" + "\n" + " 'boost-build '%s' ;\n" + "\n" + "Please consult the documentation at " + "'https://www.bfgroup.xyz/b2/'.\n\n", + dir.c_str()); + return L0; + } + + // # Add the given directory to the path so we can find the build system. If + // # dir is empty, has no effect. 
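+ // For illustration only (hypothetical paths): if /work/proj/boost-build.jam
+ // contains 'boost-build tools/build/src/kernel ;', then dir is not rooted, so
+ // the code below computes
+ //   boost_build_dir = normalize("/work/proj/boost-build.jam/../tools/build/src/kernel")
+ //                   = "/work/proj/tools/build/src/kernel"
+ // and that directory is searched first for bootstrap.jam, ahead of BOOST_BUILD_PATH.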
+ b2::jam::variable dot_boost_build_file{".boost-build-file"}; + b2::jam::list dot_boost_build_file_val{static_cast<b2::jam::list>(dot_boost_build_file)}; + std::string boost_build_jam = b2::jam::object{*dot_boost_build_file_val.begin()}; + std::string boost_build_dir; + if (b2::paths::is_rooted(dir)) + boost_build_dir = dir; + else + boost_build_dir = b2::paths::normalize( + std::string{boost_build_jam}+"/../"+dir); + b2::jam::list search_path{b2::jam::object{boost_build_dir}}; + b2::jam::variable BOOST_BUILD_PATH{"BOOST_BUILD_PATH"}; + search_path.append(BOOST_BUILD_PATH); + + // We set the global, and env, BOOST_BUILD_PATH so that the loading of the + // build system finds the initial set of modules needed for starting it up. + BOOST_BUILD_PATH = search_path; + + // The code that loads the rest of B2, in particular the site-config.jam + // and user-config.jam configuration files, uses os.environ, so we need to + // update the value there. + b2::jam::variable dot_ENVIRON__BOOST_BUILD_PATH{".ENVIRON", "BOOST_BUILD_PATH"}; + dot_ENVIRON__BOOST_BUILD_PATH = search_path; + + // # Try to find the build system bootstrap file 'bootstrap.jam'. + std::string bootstrap_file; + for (auto path: search_path) + { + std::string file = b2::jam::object{path}; + file = b2::paths::normalize(file+"/bootstrap.jam"); + if (b2::filesys::is_file(file)) + { + bootstrap_file = file; + break; + } + } + + // # There is no bootstrap.jam we can find, exit with an error. + if (bootstrap_file.empty()) + { + err_printf( + "Unable to load B2: could not find build system.\n" + "-----------------------------------------------\n" + "%s attempted to load the build system by invoking\n" + "\n" + " 'boost-build %s ;'\n" + "\n" + "but we were unable to find 'bootstrap.jam' in the specified directory " + "or in BOOST_BUILD_PATH:\n", + boost_build_jam.c_str(), dir.c_str()); + for (auto path: search_path) + { + std::string file = b2::jam::object{path}; + err_printf(" %s\n", file.c_str()); + } + err_puts( + "Please consult the documentation at " + "'https://www.bfgroup.xyz/b2/'.\n\n"); + return L0; + } + + // Set the .bootstrap-file var as it's used by the build system to refer to + // the rest of the build system files. + dot_bootstrap_file = b2::jam::list{b2::jam::object{bootstrap_file}}; + + // Show where we found it, if asked. + b2::jam::variable dot_OPTION__debug_configuration{".OPTION", "debug-configration"}; + if (dot_OPTION__debug_configuration) + { + out_printf("notice: loading B2 from %s\n", bootstrap_file.c_str()); + } + + // # Load the build system, now that we know where to start from. + parse_file(b2::jam::object{bootstrap_file}, frame); + + return L0; +} + +extern char const *saved_argv0; + +bool b2::startup::bootstrap(FRAME *frame) +{ + b2::jam::list ARGV = b2::jam::variable{"ARGV"}; + b2::jam::object opt_debug_configuration{"--debug-configuration"}; + b2::jam::variable dot_OPTION__debug_configuration{".OPTION", "debug-configration"}; + for (auto arg: ARGV) + { + if (opt_debug_configuration == arg) + { + dot_OPTION__debug_configuration = b2::jam::list{b2::jam::object{"true"}}; + break; + } + } + + char *b2_exe_path_pchar = executable_path(saved_argv0); + const std::string b2_exe_path{b2_exe_path_pchar}; + if (b2_exe_path_pchar) + { + std::free(b2_exe_path_pchar); + } + const std::string boost_build_jam{"boost-build.jam"}; + std::string b2_file_path; + + // Attempt to find the `boost-build.jam` boot file in the work directory tree.
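+ // (The directory-tree walk below is only the first step; the full search
+ //  order for boost-build.jam is:
+ //    1. the current directory and each of its parents, up to the root;
+ //    2. .b2/kernel/ in the directory containing the b2 executable (portable install);
+ //    3. ../share/boost-build/src/kernel/ relative to that same directory;
+ //    4. each directory in BOOST_BUILD_PATH, then in BOOST_ROOT.
+ //  The first file found wins; if none is found, bootstrap reports an error
+ //  and returns false.)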
+ if (b2_file_path.empty()) + { + std::string work_dir{b2::paths::normalize(b2::cwd_str()) + "/"}; + while (b2_file_path.empty() && !work_dir.empty()) + { + if (b2::filesys::is_file(work_dir + boost_build_jam)) + b2_file_path = work_dir + boost_build_jam; + else if (work_dir.length() == 1 && work_dir[0] == '/') + work_dir.clear(); + else + { + auto parent_pos = work_dir.rfind('/', work_dir.length() - 2); + if (parent_pos != std::string::npos) + work_dir.erase(parent_pos + 1); + else + work_dir.clear(); + } + } + } + + // Check relative to the executable for portable install location. + if (b2_file_path.empty()) + { + const std::string path{ + b2::paths::normalize( + b2_exe_path + "/../.b2/kernel/" + boost_build_jam)}; + if (b2::filesys::is_file(path)) + b2_file_path = path; + } + + // Check relative to the executable for portable install location. + if (b2_file_path.empty()) + { + const std::string path{ + b2::paths::normalize( + b2_exe_path + "/../../share/boost-build/src/kernel/" + boost_build_jam)}; + if (b2::filesys::is_file(path)) + b2_file_path = path; + } + + // Check the BOOST_BUILD_PATH (and BOOST_ROOT) paths. + if (b2_file_path.empty()) + { + b2::jam::list BOOST_BUILD_PATH = b2::jam::variable{"BOOST_BUILD_PATH"}; + // For back-compat with Boost we also search in the BOOST_ROOT location. + BOOST_BUILD_PATH.append(b2::jam::list(b2::jam::variable{"BOOST_ROOT"})); + for (auto search_path: BOOST_BUILD_PATH) + { + std::string path = b2::jam::object{search_path}; + path = b2::paths::normalize(path+"/"+boost_build_jam); + if (b2::filesys::is_file(path)) + { + b2_file_path = path; + break; + } + } + } + + // Indicate a load failure when we can't find the build file. + if (b2_file_path.empty()) + { + const char * not_found_error = + "Unable to load B2: could not find 'boost-build.jam'\n" + "---------------------------------------------------\n" + "Attempted search from '%s' up to the root " + "at '%s'\n" + "Please consult the documentation at " + "'https://www.bfgroup.xyz/b2/'.\n\n"; + err_printf(not_found_error, b2::cwd_str().c_str(), b2_exe_path.c_str()); + return false; + } + + // Show where we found it, if asked. + if (dot_OPTION__debug_configuration) + { + out_printf("notice: found boost-build.jam at %s\n", b2_file_path.c_str()); + } + + // Load the build system bootstrap file we found. But check we did that. + b2::jam::variable dot_boost_build_file{".boost-build-file"}; + dot_boost_build_file = b2_file_path; + b2::jam::object b2_file_path_sym{b2_file_path}; + parse_file(b2_file_path_sym, frame); + b2::jam::list dot_dot_bootstrap_file_val = b2::jam::variable{".bootstrap-file"}; + if (dot_dot_bootstrap_file_val.empty()) + { + err_printf( + "Unable to load B2\n" + "-----------------\n" + "'%s' was found by searching from %s up to the root.\n" + "\n" + "However, it failed to call the 'boost-build' rule to indicate " + "the location of the build system.\n" + "\n" + "Please consult the documentation at " + "'https://www.bfgroup.xyz/b2/'.\n\n", + b2_file_path.c_str(), b2::cwd_str().c_str()); + return false; + } + + return true; +} diff --git a/src/boost/tools/build/src/engine/startup.h b/src/boost/tools/build/src/engine/startup.h new file mode 100644 index 000000000..01c7d43ee --- /dev/null +++ b/src/boost/tools/build/src/engine/startup.h @@ -0,0 +1,46 @@ +/* +Copyright 2020-2022 RenĂ© Ferdinand Rivera Morell +Distributed under the Boost Software License, Version 1.0. 
+(See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#ifndef B2_STARTUP_H +#define B2_STARTUP_H + +#include "config.h" +#include "frames.h" +#include + +namespace b2 { +namespace startup { + void load_builtins(); + LIST* builtin_boost_build(FRAME* frame, int flags); + bool bootstrap(FRAME* frame); +} + +enum class exit_result : int { +#ifdef EXIT_SUCCESS + success = EXIT_SUCCESS, +#else + success = 0, +#endif +#ifdef EXIT_FAILURE + failure = EXIT_FAILURE +#else + failure = 1 +#endif +}; + +inline void clean_exit(int exit_code) +{ + throw(exit_result) exit_code; +} + +inline void clean_exit(exit_result exit_code) +{ + throw exit_code; +} + +} // namespace b2 + +#endif diff --git a/src/boost/tools/build/src/engine/subst.cpp b/src/boost/tools/build/src/engine/subst.cpp new file mode 100644 index 000000000..a5fcee08c --- /dev/null +++ b/src/boost/tools/build/src/engine/subst.cpp @@ -0,0 +1,116 @@ +#include "jam.h" +#include "subst.h" + +#include "builtins.h" +#include "frames.h" +#include "hash.h" +#include "lists.h" + +#include + + +typedef struct regex_entry +{ + OBJECT * pattern; + regexp * regex; +} regex_entry; + +static struct hash * regex_hash; + + +regexp * regex_compile( OBJECT * pattern ) +{ + int found; + regex_entry * e ; + + if ( !regex_hash ) + regex_hash = hashinit( sizeof( regex_entry ), "regex" ); + + e = (regex_entry *)hash_insert( regex_hash, pattern, &found ); + if ( !found ) + { + e->pattern = object_copy( pattern ); + e->regex = regcomp( (char *)pattern ); + } + + return e->regex; +} + + +LIST * builtin_subst( FRAME * frame, int flags ) +{ + LIST * result = L0; + LIST * const arg1 = lol_get( frame->args, 0 ); + LISTITER iter = list_begin( arg1 ); + LISTITER const end = list_end( arg1 ); + + if ( iter != end && list_next( iter ) != end && list_next( list_next( iter ) + ) != end ) + { + char const * const source = object_str( list_item( iter ) ); + OBJECT * const pattern = list_item( list_next( iter ) ); + regexp * const repat = regex_compile( pattern ); + + if ( regexec( repat, (char *)source) ) + { + LISTITER subst = list_next( iter ); + + while ( ( subst = list_next( subst ) ) != end ) + { +#define BUFLEN 4096 + char buf[ BUFLEN + 1 ]; + char const * in = object_str( list_item( subst ) ); + char * out = buf; + + for ( ; *in && out < buf + BUFLEN; ++in ) + { + if ( *in == '\\' || *in == '$' ) + { + ++in; + if ( *in == 0 ) + break; + if ( *in >= '0' && *in <= '9' ) + { + unsigned int const n = *in - '0'; + size_t const srclen = repat->endp[ n ] - + repat->startp[ n ]; + size_t const remaining = buf + BUFLEN - out; + size_t const len = srclen < remaining + ? srclen + : remaining; + memcpy( out, repat->startp[ n ], len ); + out += len; + continue; + } + /* fall through and copy the next character */ + } + *out++ = *in; + } + *out = 0; + + result = list_push_back( result, object_new( buf ) ); +#undef BUFLEN + } + } + } + + return result; +} + + +static void free_regex( void * xregex, void * data ) +{ + regex_entry * const regex = (regex_entry *)xregex; + object_free( regex->pattern ); + BJAM_FREE( regex->regex ); +} + + +void regex_done() +{ + if ( regex_hash ) + { + hashenumerate( regex_hash, free_regex, (void *)0 ); + hashdone( regex_hash ); + } +} diff --git a/src/boost/tools/build/src/engine/subst.h b/src/boost/tools/build/src/engine/subst.h new file mode 100644 index 000000000..e5281b445 --- /dev/null +++ b/src/boost/tools/build/src/engine/subst.h @@ -0,0 +1,15 @@ +/* Copyright 2001-2004 David Abrahams. 
+ * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef SUBST_JG20120722_H +#define SUBST_JG20120722_H + +#include "config.h" +#include "object.h" +#include "regexp.h" + +regexp * regex_compile( OBJECT * pattern ); + +#endif diff --git a/src/boost/tools/build/src/engine/sysinfo.cpp b/src/boost/tools/build/src/engine/sysinfo.cpp new file mode 100644 index 000000000..4aa4183da --- /dev/null +++ b/src/boost/tools/build/src/engine/sysinfo.cpp @@ -0,0 +1,168 @@ +/* Copyright 2019 Rene Rivera + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#include "sysinfo.h" +#include "jam.h" +#include "output.h" + +#include + +#if defined(OS_MACOSX) +#include +#include +#endif + +#if !defined(OS_NT) +#include +#else +#include +#endif + +#if defined(OS_LINUX) || defined(__GLIBC__) +// Need to define this in case it's not as that's the only way to get the +// sched_* APIs. +#ifndef _GNU_SOURCE +#define _GNU_SOURCE +#endif +#include +#endif + + +b2::system_info::system_info() +{ +} + +namespace +{ + unsigned int macosx_physicalcpu() + { + #if defined(OS_MACOSX) + int out_hw_ncpu = 0; + size_t len_hw_ncpu = sizeof(out_hw_ncpu); + int result = ::sysctlbyname( + "hw.physicalcpu", &out_hw_ncpu, &len_hw_ncpu, nullptr, 0); + if (result == 0) return out_hw_ncpu; + #endif + return 0; + } + + unsigned int macosx_logicalcpu() + { + #if defined(OS_MACOSX) + int out_hw_ncpu = 0; + size_t len_hw_ncpu = sizeof(out_hw_ncpu); + int result = ::sysctlbyname( + "hw.logicalcpu", &out_hw_ncpu, &len_hw_ncpu, nullptr, 0); + if (result == 0) return out_hw_ncpu; + #endif + return 0; + } + + unsigned int sched_affinity_cpu_count() + { + #if defined(CPU_COUNT_S) + ::cpu_set_t cpu_set; + if (::sched_getaffinity(0, sizeof(cpu_set_t), &cpu_set) == 0) + { + return CPU_COUNT_S(sizeof(cpu_set_t), &cpu_set); + } + #endif + return 0; + } + + unsigned int sysconf_nprocs_configured() + { + #if defined(_SC_NPROCESSORS_ONLN) + return ::sysconf(_SC_NPROCESSORS_CONF); + #else + return 0; + #endif + } + + unsigned int sysconf_nprocs_online() + { + #if defined(_SC_NPROCESSORS_ONLN) + return ::sysconf(_SC_NPROCESSORS_ONLN); + #else + return 0; + #endif + } + + unsigned int std_thread_hardware_concurrency() + { + // [2020-08-19] Mingw-w64 (e.g. i686-w64-mingw-32-g++ from Cygwin, + // g++-mingw-w64-i686-win32) does not have std::thread etc. But we + // should still allow building the engine with this (important) toolset: + // - It is free, lightweight, standards-conforming. + // - It might be the only C++11 toolset for Windows XP. + // (Please see http://www.crouchingtigerhiddenfruitbat.org/Cygwin/timemachine.html !) + // - It is powerful enough even without std::thread etc. For example, it + // can build-and-use Boost.Thread. + // - The only thing currently used from std::thread is this call to + // hardware_concurrency ! + #if ! 
defined (_WIN32) + return std::thread::hardware_concurrency(); + #else + return 0; + #endif + } + + unsigned int win32_logicalcpu() + { + #if defined (_WIN32) + SYSTEM_INFO si; + GetSystemInfo (&si); + return si.dwNumberOfProcessors; + #else + return 0; + #endif + } +} + +unsigned int b2::system_info::cpu_core_count() +{ + if (cpu_core_count_ == 0) + { + cpu_thread_count_ = macosx_physicalcpu(); + } + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = sysconf_nprocs_configured(); + } + if (cpu_core_count_ <= 0) + { + cpu_core_count_ = 1; + } + return cpu_core_count_; +} + +unsigned int b2::system_info::cpu_thread_count() +{ + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = macosx_logicalcpu(); + } + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = sched_affinity_cpu_count(); + } + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = sysconf_nprocs_online(); + } + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = std_thread_hardware_concurrency(); + } + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = win32_logicalcpu(); + } + if (cpu_thread_count_ == 0) + { + cpu_thread_count_ = cpu_core_count(); + } + return cpu_thread_count_; +} diff --git a/src/boost/tools/build/src/engine/sysinfo.h b/src/boost/tools/build/src/engine/sysinfo.h new file mode 100644 index 000000000..897a53a65 --- /dev/null +++ b/src/boost/tools/build/src/engine/sysinfo.h @@ -0,0 +1,46 @@ +/* Copyright 2019 Rene Rivera + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +#ifndef B2_SYSINFO_H +#define B2_SYSINFO_H + +# include "config.h" + +namespace b2 +{ + /* + Provides information about the system, hardware and software, we are + running in. + */ + class system_info + { + public: + + system_info(); + + /* + Returns the number of physical CPU cores if available. Otherwise + returns 1. + + Currently implemented for: OS_MACOSX. + */ + unsigned int cpu_core_count(); + + /* + Returns the number of logical CPU threads is available. Otherwise + returns `spu_core_count()`. + + Currently implemented for: OS_MACOSX. + */ + unsigned int cpu_thread_count(); + + private: + + unsigned int cpu_core_count_ = 0; + unsigned int cpu_thread_count_ = 0; + }; +} + +#endif diff --git a/src/boost/tools/build/src/engine/timestamp.cpp b/src/boost/tools/build/src/engine/timestamp.cpp new file mode 100644 index 000000000..4603ba99e --- /dev/null +++ b/src/boost/tools/build/src/engine/timestamp.cpp @@ -0,0 +1,231 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2020 Nikita Kniazev. + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE.txt or + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * timestamp.c - get the timestamp of a file or archive member + * + * External routines: + * timestamp_from_path() - return timestamp for a path, if present + * timestamp_done() - free timestamp tables + * + * Internal routines: + * free_timestamps() - worker function for freeing timestamp table contents + */ + +#include "jam.h" +#include "timestamp.h" + +#include "filesys.h" +#include "hash.h" +#include "object.h" +#include "pathsys.h" +#include "jam_strings.h" +#include "output.h" + + +/* + * BINDING - all known files + */ + +typedef struct _binding +{ + OBJECT * name; + short flags; + +#define BIND_SCANNED 0x01 /* if directory or arch, has been scanned */ + + short progress; + +#define BIND_INIT 0 /* never seen */ +#define BIND_NOENTRY 1 /* timestamp requested but file never found */ +#define BIND_SPOTTED 2 /* file found but not timed yet */ +#define BIND_MISSING 3 /* file found but can not get timestamp */ +#define BIND_FOUND 4 /* file found and time stamped */ + + /* update time - cleared if the there is nothing to bind */ + timestamp time; +} BINDING; + +static struct hash * bindhash = 0; + + +#ifdef OS_NT +/* + * timestamp_from_filetime() - Windows FILETIME --> timestamp conversion + * + * Lifted shamelessly from the CPython implementation. + */ + +void timestamp_from_filetime( timestamp * const t, FILETIME const * const ft ) +{ + /* Seconds between 1.1.1601 and 1.1.1970 */ + static __int64 const secs_between_epochs = 11644473600; + + /* We can not simply cast and dereference a FILETIME, since it might not be + * aligned properly. __int64 type variables are expected to be aligned to an + * 8 byte boundary while FILETIME structures may be aligned to any 4 byte + * boundary. Using an incorrectly aligned __int64 variable may cause a + * performance penalty on some platforms or even exceptions on others + * (documented on MSDN). + */ + __int64 in; + memcpy( &in, ft, sizeof( in ) ); + + /* FILETIME resolution: 100ns. */ + timestamp_init( t, (time_t)( ( in / 10000000 ) - secs_between_epochs ), + (int)( in % 10000000 ) * 100 ); +} +#endif /* OS_NT */ + + +void timestamp_clear( timestamp * const time ) +{ + time->secs = time->nsecs = 0; +} + + +int timestamp_cmp( timestamp const * const lhs, timestamp const * const rhs ) +{ + return int( + lhs->secs == rhs->secs + ? lhs->nsecs - rhs->nsecs + : lhs->secs - rhs->secs ); +} + + +void timestamp_copy( timestamp * const target, timestamp const * const source ) +{ + target->secs = source->secs; + target->nsecs = source->nsecs; +} + + +void timestamp_current( timestamp * const t ) +{ +#ifdef OS_NT + /* GetSystemTimeAsFileTime()'s resolution seems to be about 15 ms on Windows + * XP and under a millisecond on Windows 7. + */ + FILETIME ft; + GetSystemTimeAsFileTime( &ft ); + timestamp_from_filetime( t, &ft ); +#elif defined(_POSIX_TIMERS) && defined(CLOCK_REALTIME) && \ + (!defined(__GLIBC__) || (__GLIBC__ > 2) || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 17)) + /* Some older versions of XCode define _POSIX_TIMERS, but don't actually + * have clock_gettime. Check CLOCK_REALTIME as well. Prior to glibc 2.17, + * clock_gettime requires -lrt. This is a non-critical feature, so + * we just disable it to keep bootstrapping simple. 
+ */ + struct timespec ts; + clock_gettime( CLOCK_REALTIME, &ts ); + timestamp_init( t, ts.tv_sec, ts.tv_nsec ); +#else /* OS_NT */ + timestamp_init( t, time( 0 ), 0 ); +#endif /* OS_NT */ +} + + +int timestamp_empty( timestamp const * const time ) +{ + return !time->secs && !time->nsecs; +} + + +/* + * timestamp_from_path() - return timestamp for a path, if present + */ + +void timestamp_from_path( timestamp * const time, OBJECT * const path ) +{ + PROFILE_ENTER( timestamp ); + + if ( file_time( path, time ) < 0 ) + timestamp_clear( time ); + + PROFILE_EXIT( timestamp ); +} + + +void timestamp_init( timestamp * const time, time_t const secs, int const nsecs + ) +{ + time->secs = secs; + time->nsecs = nsecs; +} + + +void timestamp_max( timestamp * const max, timestamp const * const lhs, + timestamp const * const rhs ) +{ + if ( timestamp_cmp( lhs, rhs ) > 0 ) + timestamp_copy( max, lhs ); + else + timestamp_copy( max, rhs ); +} + + +static char const * timestamp_formatstr( timestamp const * const time, + char const * const format ) +{ + static char result1[ 500 ]; + static char result2[ 500 ]; + strftime( result1, sizeof( result1 ) / sizeof( *result1 ), format, gmtime( + &time->secs ) ); + sprintf( result2, result1, time->nsecs ); + return result2; +} + + +char const * timestamp_str( timestamp const * const time ) +{ + return timestamp_formatstr( time, "%Y-%m-%d %H:%M:%S.%%09d +0000" ); +} + + +char const * timestamp_timestr( timestamp const * const time ) +{ + return timestamp_formatstr( time, "%H:%M:%S.%%09d" ); +} + + +/* + * free_timestamps() - worker function for freeing timestamp table contents + */ + +static void free_timestamps( void * xbinding, void * data ) +{ + object_free( ( (BINDING *)xbinding )->name ); +} + + +/* + * timestamp_done() - free timestamp tables + */ + +void timestamp_done() +{ + if ( bindhash ) + { + hashenumerate( bindhash, free_timestamps, 0 ); + hashdone( bindhash ); + } +} + +/* + * timestamp_delta_seconds() - seconds from time a to b. + */ +double timestamp_delta_seconds( timestamp const * const a , timestamp const * const b ) +{ + return difftime(b->secs, a->secs) + (b->nsecs - a->nsecs) * 1.0E-9; +} diff --git a/src/boost/tools/build/src/engine/timestamp.h b/src/boost/tools/build/src/engine/timestamp.h new file mode 100644 index 000000000..2a05d2b1c --- /dev/null +++ b/src/boost/tools/build/src/engine/timestamp.h @@ -0,0 +1,48 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. 
+ */ + +/* + * timestamp.h - get the timestamp of a file or archive member + */ + +#ifndef TIMESTAMP_H_SW_2011_11_18 +#define TIMESTAMP_H_SW_2011_11_18 + +#include "config.h" +#include "object.h" + +#ifdef OS_NT +# include +#endif + +#include + +typedef struct timestamp +{ + time_t secs; + int nsecs; +} timestamp; + +void timestamp_clear( timestamp * const ); +int timestamp_cmp( timestamp const * const lhs, timestamp const * const rhs ); +void timestamp_copy( timestamp * const target, timestamp const * const source ); +void timestamp_current( timestamp * const ); +int timestamp_empty( timestamp const * const ); +void timestamp_from_path( timestamp * const, OBJECT * const path ); +void timestamp_init( timestamp * const, time_t const secs, int const nsecs ); +void timestamp_max( timestamp * const max, timestamp const * const lhs, + timestamp const * const rhs ); +char const * timestamp_str( timestamp const * const ); +char const * timestamp_timestr( timestamp const * const ); + +#ifdef OS_NT +void timestamp_from_filetime( timestamp * const, FILETIME const * const ); +#endif + +void timestamp_done(); +double timestamp_delta_seconds( timestamp const * const, timestamp const * const ); + +#endif diff --git a/src/boost/tools/build/src/engine/variable.cpp b/src/boost/tools/build/src/engine/variable.cpp new file mode 100644 index 000000000..3408b102d --- /dev/null +++ b/src/boost/tools/build/src/engine/variable.cpp @@ -0,0 +1,393 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Reece H. Dunn. + * Copyright 2005 Rene Rivera. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE.txt or copy at + * https://www.bfgroup.xyz/b2/LICENSE.txt) + */ + +/* + * variable.c - handle Jam multi-element variables. + * + * External routines: + * + * var_defines() - load a bunch of variable=value settings + * var_get() - get value of a user defined symbol + * var_set() - set a variable in jam's user defined symbol table. + * var_swap() - swap a variable's value with the given one + * var_done() - free variable tables + * + * Internal routines: + * + * var_enter() - make new var symbol table entry, returning var ptr + * var_dump() - dump a variable to stdout + */ + +#include "jam.h" +#include "variable.h" + +#include "filesys.h" +#include "hash.h" +#include "modules.h" +#include "parse.h" +#include "pathsys.h" +#include "jam_strings.h" +#include "output.h" + +#include +#include + + +/* + * VARIABLE - a user defined multi-value variable + */ + +typedef struct _variable VARIABLE ; + +struct _variable +{ + OBJECT * symbol; + LIST * value; +}; + +static LIST * * var_enter( struct module_t *, OBJECT * symbol ); +static void var_dump( OBJECT * symbol, LIST * value, const char * what ); + + +/* + * var_defines() - load a bunch of variable=value settings + * + * If preprocess is false, take the value verbatim. + * + * Otherwise, if the variable value is enclosed in quotes, strip the quotes. + * Otherwise, if variable name ends in PATH, split value at :'s. + * Otherwise, split the value at blanks. + */ + +void var_defines( struct module_t * module, const char * const * e, int preprocess ) +{ + string buf[ 1 ]; + + string_new( buf ); + + for ( ; *e; ++e ) + { + const char * val; + + if ( ( val = strchr( *e, '=' ) ) +#if defined( OS_MAC ) + /* On the mac (MPW), the var=val is actually var\0val */ + /* Think different. 
*/ + || ( val = *e + strlen( *e ) ) +#endif + ) + { + LIST * l = L0; + int32_t const len = int32_t(strlen( val + 1 )); + int const quoted = ( val[ 1 ] == '"' ) && ( val[ len ] == '"' ) && + ( len > 1 ); + + if ( quoted && preprocess ) + { + string_append_range( buf, val + 2, val + len ); + l = list_push_back( l, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + else + { + const char * p; + const char * pp; + char split = +#if defined( OPT_NO_EXTERNAL_VARIABLE_SPLIT ) + '\0' +#elif defined( OS_MAC ) + ',' +#else + ' ' +#endif + ; + + /* Split *PATH at :'s, not spaces. */ + if ( val - 4 >= *e ) + { + if ( !strncmp( val - 4, "PATH", 4 ) || + !strncmp( val - 4, "Path", 4 ) || + !strncmp( val - 4, "path", 4 ) ) + split = SPLITPATH; + } + + /* Do the split. */ + for + ( + pp = val + 1; + preprocess && ( ( p = strchr( pp, split ) ) != 0 ); + pp = p + 1 + ) + { + string_append_range( buf, pp, p ); + l = list_push_back( l, object_new( buf->value ) ); + string_truncate( buf, 0 ); + } + + l = list_push_back( l, object_new( pp ) ); + } + + /* Get name. */ + string_append_range( buf, *e, val ); + { + OBJECT * const varname = object_new( buf->value ); + var_set( module, varname, l, VAR_SET ); + object_free( varname ); + } + string_truncate( buf, 0 ); + } + } + string_free( buf ); +} + + +/* Last returned variable value saved so we may clear it in var_done(). */ +static LIST * saved_var = L0; + + +/* + * var_get() - get value of a user defined symbol + * + * Returns NULL if symbol unset. + */ + +LIST * var_get( struct module_t * module, OBJECT * symbol ) +{ + LIST * result = L0; +#ifdef OPT_AT_FILES + /* Some "fixed" variables... */ + if ( object_equal( symbol, constant_TMPDIR ) ) + { + list_free( saved_var ); + result = saved_var = list_new( object_new( path_tmpdir()->value ) ); + } + else if ( object_equal( symbol, constant_TMPNAME ) ) + { + list_free( saved_var ); + result = saved_var = list_new( path_tmpnam() ); + } + else if ( object_equal( symbol, constant_TMPFILE ) ) + { + list_free( saved_var ); + result = saved_var = list_new( path_tmpfile() ); + } + else if ( object_equal( symbol, constant_STDOUT ) ) + { + list_free( saved_var ); + result = saved_var = list_new( object_copy( constant_STDOUT ) ); + } + else if ( object_equal( symbol, constant_STDERR ) ) + { + list_free( saved_var ); + result = saved_var = list_new( object_copy( constant_STDERR ) ); + } + else +#endif + { + VARIABLE * v; + int n; + + if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 ) + { + if ( DEBUG_VARGET ) + var_dump( symbol, module->fixed_variables[ n ], "get" ); + result = module->fixed_variables[ n ]; + } + else if ( module->variables && ( v = (VARIABLE *)hash_find( + module->variables, symbol ) ) ) + { + if ( DEBUG_VARGET ) + var_dump( v->symbol, v->value, "get" ); + result = v->value; + } + +#ifdef OS_VMS + else if ( ( module->name && object_equal( module->name, constant_ENVIRON ) ) + || root_module() == module ) + { + /* On VMS, when a variable from root or ENVIRON module is not found, + * explicitly request it from the process. + * By design, process variables (and logicals) are not made available + * to C main(), and thus will not get loaded in bulk to root/ENVRON. + * So we get around it by getting any such variable on first request. 
+ */ + const char * val = getenv( object_str( symbol ) ); + + if ( val ) + { + struct module_t * environ_module = module; + char * environ[ 2 ] = { 0 }; /* NULL-terminated */ + string buf[ 1 ]; + + if ( root_module() == module ) + { + environ_module = bindmodule( constant_ENVIRON ); + } + + string_copy( buf, object_str( symbol ) ); + string_append( buf, "=" ); + string_append( buf, val ); + + environ[ 0 ] = buf->value; + + /* Load variable to global module, with splitting, for backward + * compatibility. Then to .ENVIRON, without splitting. + */ + var_defines( root_module(), environ, 1 ); + var_defines( environ_module, environ, 0 ); + string_free( buf ); + + if ( module->variables && ( v = (VARIABLE *)hash_find( + module->variables, symbol ) ) ) + { + if ( DEBUG_VARGET ) + var_dump( v->symbol, v->value, "get" ); + result = v->value; + } + } + } +#endif + } + return result; +} + + +LIST * var_get_and_clear_raw( module_t * module, OBJECT * symbol ) +{ + LIST * result = L0; + VARIABLE * v; + + if ( module->variables && ( v = (VARIABLE *)hash_find( module->variables, + symbol ) ) ) + { + result = v->value; + v->value = L0; + } + + return result; +} + + +/* + * var_set() - set a variable in Jam's user defined symbol table + * + * 'flag' controls the relationship between new and old values of the variable: + * SET replaces the old with the new; APPEND appends the new to the old; DEFAULT + * only uses the new if the variable was previously unset. + * + * Copies symbol. Takes ownership of value. + */ + +void var_set( struct module_t * module, OBJECT * symbol, LIST * value, int flag + ) +{ + LIST * * v = var_enter( module, symbol ); + + if ( DEBUG_VARSET ) + var_dump( symbol, value, "set" ); + + switch ( flag ) + { + case VAR_SET: /* Replace value */ + list_free( *v ); + *v = value; + break; + + case VAR_APPEND: /* Append value */ + *v = list_append( *v, value ); + break; + + case VAR_DEFAULT: /* Set only if unset */ + if ( list_empty( *v ) ) + *v = value; + else + list_free( value ); + break; + } +} + + +/* + * var_swap() - swap a variable's value with the given one + */ + +LIST * var_swap( struct module_t * module, OBJECT * symbol, LIST * value ) +{ + LIST * * v = var_enter( module, symbol ); + LIST * oldvalue = *v; + if ( DEBUG_VARSET ) + var_dump( symbol, value, "set" ); + *v = value; + return oldvalue; +} + + +/* + * var_enter() - make new var symbol table entry, returning var ptr + */ + +static LIST * * var_enter( struct module_t * module, OBJECT * symbol ) +{ + int found; + VARIABLE * v; + int n; + + if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 ) + return &module->fixed_variables[ n ]; + + if ( !module->variables ) + module->variables = hashinit( sizeof( VARIABLE ), "variables" ); + + v = (VARIABLE *)hash_insert( module->variables, symbol, &found ); + if ( !found ) + { + v->symbol = object_copy( symbol ); + v->value = L0; + } + + return &v->value; +} + + +/* + * var_dump() - dump a variable to stdout + */ + +static void var_dump( OBJECT * symbol, LIST * value, const char * what ) +{ + out_printf( "%s %s = ", what, object_str( symbol ) ); + list_print( value ); + out_printf( "\n" ); +} + + +/* + * var_done() - free variable tables + */ + +static void delete_var_( void * xvar, void * data ) +{ + VARIABLE * const v = (VARIABLE *)xvar; + object_free( v->symbol ); + list_free( v->value ); +} + +void var_done( struct module_t * module ) +{ + list_free( saved_var ); + saved_var = L0; + hashenumerate( module->variables, delete_var_, 0 ); + hash_free( module->variables ); +} diff --git 
a/src/boost/tools/build/src/engine/variable.h b/src/boost/tools/build/src/engine/variable.h new file mode 100644 index 000000000..97e906300 --- /dev/null +++ b/src/boost/tools/build/src/engine/variable.h @@ -0,0 +1,111 @@ +/* + * Copyright 1993, 2000 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* + * variable.h - handle jam multi-element variables + */ + +#ifndef VARIABLE_SW20111119_H +#define VARIABLE_SW20111119_H + +#include "config.h" +#include "lists.h" +#include "object.h" +#include "modules.h" + +#include + + +struct module_t; + +void var_defines( struct module_t *, const char * const * e, int preprocess ); +LIST * var_get( struct module_t *, OBJECT * symbol ); +void var_set( struct module_t *, OBJECT * symbol, LIST * value, int flag ); +LIST * var_swap( struct module_t *, OBJECT * symbol, LIST * value ); +void var_done( struct module_t * ); + +/* + * Defines for var_set(). + */ + +#define VAR_SET 0 /* override previous value */ +#define VAR_APPEND 1 /* append to previous value */ +#define VAR_DEFAULT 2 /* set only if no previous value */ + +namespace b2 { namespace jam { + struct variable + { + inline variable(const variable &v) + : var_module(v.var_module), var_symbol(object_copy(v.var_symbol)) {} + inline variable(struct module_t *m, const char *v) + : var_module(m), var_symbol(object_new(v)) {} + inline variable(const char *m, const char *v) + { + OBJECT * module_sym = object_new(m); + var_module = bindmodule(module_sym); + var_symbol = object_new(v); + object_free(module_sym); + } + inline variable(const std::string &m, const std::string &v) + : variable(m.c_str(), v.c_str()) {} + inline explicit variable(const char *v) : variable(root_module(), v) {} + inline explicit variable(const std::string &v) : variable(v.c_str()) {} + inline ~variable() + { + if (var_symbol) object_free(var_symbol); + } + + inline operator list() const { return list{var_get(var_module, var_symbol)}; } + + inline variable & operator=(list && v) + { + var_set(var_module, var_symbol, v.release(), VAR_SET); + return *this; + } + inline variable & operator=(const list & v) { return *this = list{v}; } + inline variable & operator=(const char *v) { return *this = list{object{v}}; } + inline variable & operator=(const std::string &v) { return *this = list{object{v}}; } + + inline variable & operator+=(list & v) + { + var_set(var_module, var_symbol, v.release(), VAR_APPEND); + return *this; + } + inline variable & operator+=(list && v) + { + var_set(var_module, var_symbol, v.release(), VAR_APPEND); + return *this; + } + inline variable & operator+=(const char *v) { return *this += list{object{v}}; } + inline variable & operator+=(const std::string &v) { return *this += list{object{v}}; } + + inline variable & operator|=(list & v) + { + var_set(var_module, var_symbol, v.release(), VAR_DEFAULT); + return *this; + } + inline variable & operator|=(list && v) + { + var_set(var_module, var_symbol, v.release(), VAR_DEFAULT); + return *this; + } + inline variable & operator|=(const char *v) { return *this |= list{object{v}}; } + inline variable & operator|=(const std::string &v) { return *this |= list{object{v}}; } + + inline operator bool() const + { + LIST * l = var_get(var_module, var_symbol); + return (!list_empty(l)) && (list_length(l) > 0); + } + + private: + + struct module_t * var_module = nullptr; + OBJECT * var_symbol = nullptr; + }; +}} + +#endif diff --git a/src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd 
b/src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd new file mode 100644 index 000000000..cc758e82e --- /dev/null +++ b/src/boost/tools/build/src/engine/vswhere_usability_wrapper.cmd @@ -0,0 +1,73 @@ +:: Copyright 2020 - Rene Ferdinand Rivera Morell +:: Copyright 2017 - Refael Ackermann +:: Copyright 2019 - Thomas Kent +:: Distributed under MIT style license +:: See accompanying file LICENSE at https://github.com/node4good/windows-autoconf +:: Forked from version: 1.15.4 + +@if not defined DEBUG_HELPER @ECHO OFF +setlocal +set "InstallerPath=%ProgramFiles(x86)%\Microsoft Visual Studio\Installer" +if not exist "%InstallerPath%" set "InstallerPath=%ProgramFiles%\Microsoft Visual Studio\Installer" +if not exist "%InstallerPath%" goto :no-vswhere +:: Manipulate %Path% for easier " handeling +set Path=%Path%;%InstallerPath% +where vswhere 2> nul > nul +if errorlevel 1 goto :no-vswhere +set VSWHERE_REQ=-requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 +set VSWHERE_PRP=-property installationPath + +REM Visual Studio Unknown Version, Beyond 2022 +set VSWHERE_LMT=-version "[18.0,19.0)" +set VSWHERE_PRERELEASE=-prerelease +SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT% %VSWHERE_PRERELEASE% +for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do ( + endlocal + echo Found with vswhere %%i + @rem comment out setting VCINSTALLDIR for Boost.build + @rem set "VCINSTALLDIR=%%i\VC\" + set "VSUNKCOMNTOOLS=%%i\Common7\Tools\" + exit /B 0 +) + +REM Visual Studio 2022 +set VSWHERE_LMT=-version "[17.0,18.0)" +set VSWHERE_PRERELEASE=-prerelease +SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT% %VSWHERE_PRERELEASE% +for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do ( + endlocal + echo Found with vswhere %%i + @rem comment out setting VCINSTALLDIR for Boost.build + @rem set "VCINSTALLDIR=%%i\VC\" + set "VS170COMNTOOLS=%%i\Common7\Tools\" + exit /B 0 +) + +REM Visual Studio 2019 (16.X, toolset 14.2) +set VSWHERE_LMT=-version "[16.0,17.0)" +SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT% %VSWHERE_PRERELEASE% +for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do ( + endlocal + echo Found with vswhere %%i + @rem comment out setting VCINSTALLDIR for Boost.build + @rem set "VCINSTALLDIR=%%i\VC\" + set "VS160COMNTOOLS=%%i\Common7\Tools\" + exit /B 0 +) + +REM Visual Studio 2017 (15.X, toolset 14.1) +set VSWHERE_LMT=-version "[15.0,16.0)" +SET VSWHERE_ARGS=-latest -products * %VSWHERE_REQ% %VSWHERE_PRP% %VSWHERE_LMT% +for /f "usebackq tokens=*" %%i in (`vswhere %VSWHERE_ARGS%`) do ( + endlocal + echo Found with vswhere %%i + @rem comment out setting VCINSTALLDIR for Boost.build + @rem set "VCINSTALLDIR=%%i\VC\" + set "VS150COMNTOOLS=%%i\Common7\Tools\" + exit /B 0 +) + +:no-vswhere +endlocal +echo could not find "vswhere" +exit /B 1 diff --git a/src/boost/tools/build/src/engine/w32_getreg.cpp b/src/boost/tools/build/src/engine/w32_getreg.cpp new file mode 100644 index 000000000..4cbc9dd01 --- /dev/null +++ b/src/boost/tools/build/src/engine/w32_getreg.cpp @@ -0,0 +1,201 @@ +/* +Copyright Paul Lin 2003. Copyright 2006 Bojan Resnik. +Distributed under the Boost Software License, Version 1.0. 
(See accompanying +file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +# include "jam.h" + +# if defined( OS_NT ) || defined( OS_CYGWIN ) + +# include "lists.h" +# include "object.h" +# include "parse.h" +# include "frames.h" +# include "jam_strings.h" + +# define WIN32_LEAN_AND_MEAN +# include + +# define MAX_REGISTRY_DATA_LENGTH 4096 +# define MAX_REGISTRY_KEYNAME_LENGTH 256 +# define MAX_REGISTRY_VALUENAME_LENGTH 16384 + +typedef struct +{ + LPCSTR name; + HKEY value; +} KeyMap; + +static const KeyMap dlRootKeys[] = { + { "HKLM", HKEY_LOCAL_MACHINE }, + { "HKCU", HKEY_CURRENT_USER }, + { "HKCR", HKEY_CLASSES_ROOT }, + { "HKEY_LOCAL_MACHINE", HKEY_LOCAL_MACHINE }, + { "HKEY_CURRENT_USER", HKEY_CURRENT_USER }, + { "HKEY_CLASSES_ROOT", HKEY_CLASSES_ROOT }, + { 0, 0 } +}; + +static HKEY get_key(char const** path) +{ + const KeyMap *p; + + for (p = dlRootKeys; p->name; ++p) + { + size_t n = strlen(p->name); + if (!strncmp(*path,p->name,n)) + { + if ((*path)[n] == '\\' || (*path)[n] == 0) + { + *path += n + 1; + break; + } + } + } + + return p->value; +} + +LIST * builtin_system_registry( FRAME * frame, int flags ) +{ + char const* path = object_str( list_front( lol_get(frame->args, 0) ) ); + LIST* result = L0; + HKEY key = get_key(&path); + + if ( + key != 0 + && ERROR_SUCCESS == RegOpenKeyExA(key, path, 0, KEY_QUERY_VALUE, &key) + ) + { + DWORD type; + BYTE data[MAX_REGISTRY_DATA_LENGTH]; + DWORD len = sizeof(data); + LIST * const field = lol_get(frame->args, 1); + + if ( ERROR_SUCCESS == + RegQueryValueExA(key, field ? object_str( list_front( field ) ) : 0, 0, &type, data, &len) ) + { + switch (type) + { + + case REG_EXPAND_SZ: + { + DWORD len; + string expanded[1]; + string_new(expanded); + + while ( + (len = ExpandEnvironmentStringsA( + (LPCSTR)data, expanded->value, (DWORD)expanded->capacity)) + > DWORD(expanded->capacity) + ) + string_reserve(expanded, len); + + expanded->size = len - 1; + + result = list_push_back( result, object_new(expanded->value) ); + string_free( expanded ); + } + break; + + case REG_MULTI_SZ: + { + char* s; + + for (s = (char*)data; *s; s += strlen(s) + 1) + result = list_push_back( result, object_new(s) ); + + } + break; + + case REG_DWORD: + { + char buf[100]; + sprintf( buf, "%lu", *(PDWORD)data ); + result = list_push_back( result, object_new(buf) ); + } + break; + + case REG_SZ: + result = list_push_back( result, object_new( (const char *)data ) ); + break; + } + } + RegCloseKey(key); + } + return result; +} + +static LIST* get_subkey_names(HKEY key, char const* path) +{ + LIST* result = 0; + + if ( ERROR_SUCCESS == + RegOpenKeyExA(key, path, 0, KEY_ENUMERATE_SUB_KEYS, &key) + ) + { + char name[MAX_REGISTRY_KEYNAME_LENGTH]; + DWORD name_size = sizeof(name); + DWORD index; + FILETIME last_write_time; + + for ( index = 0; + ERROR_SUCCESS == RegEnumKeyExA( + key, index, name, &name_size, 0, 0, 0, &last_write_time); + ++index, + name_size = sizeof(name) + ) + { + name[name_size] = 0; + result = list_append(result, list_new(object_new(name))); + } + + RegCloseKey(key); + } + + return result; +} + +static LIST* get_value_names(HKEY key, char const* path) +{ + LIST* result = 0; + + if ( ERROR_SUCCESS == RegOpenKeyExA(key, path, 0, KEY_QUERY_VALUE, &key) ) + { + char name[MAX_REGISTRY_VALUENAME_LENGTH]; + DWORD name_size = sizeof(name); + DWORD index; + + for ( index = 0; + ERROR_SUCCESS == RegEnumValueA( + key, index, name, &name_size, 0, 0, 0, 0); + ++index, + name_size = sizeof(name) + ) + { + name[name_size] = 0; + result = 
list_append(result, list_new(object_new(name))); + } + + RegCloseKey(key); + } + + return result; +} + +LIST * builtin_system_registry_names( FRAME * frame, int flags ) +{ + char const* path = object_str( list_front( lol_get(frame->args, 0) ) ); + char const* result_type = object_str( list_front( lol_get(frame->args, 1) ) ); + + HKEY key = get_key(&path); + + if ( !strcmp(result_type, "subkeys") ) + return get_subkey_names(key, path); + if ( !strcmp(result_type, "values") ) + return get_value_names(key, path); + return 0; +} + +# endif diff --git a/src/boost/tools/build/src/engine/yyacc.cpp b/src/boost/tools/build/src/engine/yyacc.cpp new file mode 100644 index 000000000..14213d6c7 --- /dev/null +++ b/src/boost/tools/build/src/engine/yyacc.cpp @@ -0,0 +1,219 @@ +/* Copyright 2002, 2020 Rene Rivera. +** Distributed under the Boost Software License, Version 1.0. +** (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) +*/ + +#include +#include +#include +#include +#include +#include + +/* +# yyacc - yacc wrapper +# +# Allows tokens to be written as `literal` and then automatically +# substituted with #defined tokens. +# +# Usage: +# yyacc file.y filetab.h file.yy +# +# inputs: +# file.yy yacc grammar with ` literals +# +# outputs: +# file.y yacc grammar +# filetab.h array of string <-> token mappings +# +# 3-13-93 +# Documented and p moved in sed command (for some reason, +# s/x/y/p doesn't work). +# 10-12-93 +# Take basename as second argument. +# 12-31-96 +# reversed order of args to be compatible with GenFile rule +# 11-20-2002 +# Reimplemented as a C program for portability. (Rene Rivera) +# 05-xx-2020 +# Reimplement yet again, in C++. (Rene Rivera) +*/ + +static const std::string usage[] = { + "yyacc " + }; + +void print_usage() +{ + for (auto u: usage) + { + std::printf("%s\n", u.c_str()); + } +} + +std::string tokenize_string(std::string s) +{ + std::string result = s; + if (s == ":") result = "_colon"; + else if (s == "!") result = "_bang"; + else if (s == "!=") result = "_bang_equals"; + else if (s == "&&") result = "_amperamper"; + else if (s == "&") result = "_amper"; + else if (s == "+") result = "_plus"; + else if (s == "+=") result = "_plus_equals"; + else if (s == "||") result = "_barbar"; + else if (s == "|") result = "_bar"; + else if (s == ";") result = "_semic"; + else if (s == "-") result = "_minus"; + else if (s == "<") result = "_langle"; + else if (s == "<=") result = "_langle_equals"; + else if (s == ">") result = "_rangle"; + else if (s == ">=") result = "_rangle_equals"; + else if (s == ".") result = "_period"; + else if (s == "?") result = "_question"; + else if (s == "?=") result = "_question_equals"; + else if (s == "=") result = "_equals"; + else if (s == ",") result = "_comma"; + else if (s == "[") result = "_lbracket"; + else if (s == "]") result = "_rbracket"; + else if (s == "{") result = "_lbrace"; + else if (s == "}") result = "_rbrace"; + else if (s == "(") result = "_lparen"; + else if (s == ")") result = "_rparen"; + + std::transform( + result.begin(), result.end(), result.begin(), + [](unsigned char c){ return std::toupper(c); }); + return result+"_t"; +} + +struct literal +{ + std::string string; + std::string token; + + bool operator<(const literal & x) const + { + return this->string < x.string; + } +}; + +int main(int argc, char ** argv) +{ + int result = 0; + if (argc != 4) + { + print_usage(); + result = 1; + } + else + { + FILE * token_output_f = 0; + FILE * grammar_output_f = 0; + FILE * grammar_source_f = 0; + + 
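+ /* For reference: argv[1] is the grammar output (file.y), argv[2] is the
+  * token table output (filetab.h), and argv[3] is the grammar source
+  * (file.yy), matching the reversed-argument note in the header comment. */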
grammar_source_f = fopen(argv[3],"r"); + if (grammar_source_f == 0) { result = 1; } + if (result == 0) + { + std::set literals; + char l[2048]; + while (1) + { + if (fgets(l,2048,grammar_source_f) != 0) + { + char * c = l; + while (1) + { + char * c1 = std::strchr(c,'`'); + if (c1 != 0) + { + char * c2 = std::strchr(c1+1,'`'); + if (c2 != 0) + { + auto l = std::string(c1+1,c2-c1-1); + literals.insert({ l, tokenize_string(l) }); + c = c2+1; + } + else + break; + } + else + break; + } + } + else + { + break; + } + } + token_output_f = std::fopen(argv[2],"w"); + if (token_output_f != 0) + { + for (const literal & l: literals) + { + std::fprintf(token_output_f," { \"%s\", %s },\n",l.string.c_str(), l.token.c_str()); + } + std::fclose(token_output_f); + } + else + result = 1; + if (result == 0) + { + grammar_output_f = std::fopen(argv[1],"w"); + if (grammar_output_f != 0) + { + for (const literal & l: literals) + { + fprintf(grammar_output_f,"%%token %s\n",l.token.c_str()); + } + rewind(grammar_source_f); + while (1) + { + if (fgets(l,2048,grammar_source_f) != 0) + { + char * c = l; + while (1) + { + char * c1 = strchr(c,'`'); + if (c1 != 0) + { + char * c2 = strchr(c1+1,'`'); + if (c2 != 0) + { + auto replacement = literals.find({std::string(c1+1,c2-c1-1), ""}); + *c1 = 0; + std::fprintf(grammar_output_f,"%s%s",c,replacement->token.c_str()); + c = c2+1; + } + else + { + std::fprintf(grammar_output_f,"%s",c); + break; + } + } + else + { + std::fprintf(grammar_output_f,"%s",c); + break; + } + } + } + else + { + break; + } + } + std::fclose(grammar_output_f); + } + else + result = 1; + } + } + if (result != 0) + { + perror("yyacc"); + } + } + return result; +} diff --git a/src/boost/tools/build/src/exceptions.py b/src/boost/tools/build/src/exceptions.py new file mode 100644 index 000000000..de5ab7aff --- /dev/null +++ b/src/boost/tools/build/src/exceptions.py @@ -0,0 +1,55 @@ +# Copyright Pedro Ferreira 2005. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + + +class BaseBoostBuildException(Exception): + """A base Exception class for all other Boost.Build exceptions to inherit from.""" + + +class UserError(BaseBoostBuildException): + pass + + +class FeatureConflict(BaseBoostBuildException): + pass + + +class InvalidSource(BaseBoostBuildException): + pass + + +class InvalidFeature(BaseBoostBuildException): + pass + + +class InvalidProperty(BaseBoostBuildException): + pass + + +class InvalidValue(BaseBoostBuildException): + pass + + +class InvalidAttribute(BaseBoostBuildException): + pass + + +class AlreadyDefined(BaseBoostBuildException): + pass + + +class IllegalOperation(BaseBoostBuildException): + pass + + +class Recursion(BaseBoostBuildException): + pass + + +class NoBestMatchingAlternative(BaseBoostBuildException): + pass + + +class NoAction(BaseBoostBuildException): + pass diff --git a/src/boost/tools/build/src/kernel/boost-build.jam b/src/boost/tools/build/src/kernel/boost-build.jam new file mode 100644 index 000000000..cb6fece7d --- /dev/null +++ b/src/boost/tools/build/src/kernel/boost-build.jam @@ -0,0 +1,5 @@ +# Copyright 2003 Dave Abrahams +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +boost-build . 
; diff --git a/src/boost/tools/build/src/kernel/bootstrap.jam b/src/boost/tools/build/src/kernel/bootstrap.jam new file mode 100644 index 000000000..ee8f20390 --- /dev/null +++ b/src/boost/tools/build/src/kernel/bootstrap.jam @@ -0,0 +1,265 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2003, 2005, 2006 Rene Rivera +# Copyright 2003, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# First of all, check the jam version. +if $(JAM_VERSION:J="") < 030112 +{ + ECHO "error: Boost.Jam version 3.1.12 or later required" ; + EXIT ; +} + +local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ; +for local r in $(required-rules) +{ + if ! $(r) in [ RULENAMES ] + { + ECHO "error: builtin rule '$(r)' is not present" ; + ECHO "error: your version of bjam is likely out of date" ; + ECHO "error: please get a fresh version from SVN." ; + EXIT ; + } +} + +local native = regex transform 2 ; +while $(native) +{ + if ! [ HAS_NATIVE_RULE $(native[1]) : $(native[2]) : $(native[3]) ] + { + ECHO "error: missing native rule '$(native[1]).$(native[2])'" ; + ECHO "error: or interface version of that rule is too low" ; + ECHO "error: your version of bjam is likely out of date" ; + ECHO "error: please get a fresh version from SVN." ; + EXIT ; + } + native = $(native[4-]) ; +} + + +# Check that the builtin .ENVIRON module is present. We do not have a builtin to +# check that a module is present, so we assume that the PATH environment +# variable is always set and verify that the .ENVIRON module has a non-empty +# value of that variable. +module .ENVIRON +{ + local p = $(PATH) $(Path) $(path) ; + if ! $(p) + { + ECHO "error: no builtin module .ENVIRON is found" ; + ECHO "error: your version of bjam is likely out of date" ; + ECHO "error: please get a fresh version from SVN." ; + EXIT ; + } +} + +# Check that @() functionality is present. Similarly to modules, we do not have +# a way to test this directly. Instead we check that $(TMPNAME) functionality is +# present which was added at roughly the same time (more precisely, it was added +# just before). +{ + if ! $(TMPNAME) + { + ECHO "error: no @() functionality found" ; + ECHO "error: your version of b2 is likely out of date" ; + ECHO "error: please get a fresh version from SVN." ; + EXIT ; + } +} + +# Make sure that \n escape is available. +if "\n" = "n" +{ + if $(OS) = CYGWIN + { + ECHO "warning: escape sequences are not supported" ; + ECHO "warning: this will cause major misbehaviour on cygwin" ; + ECHO "warning: your version of b2 is likely out of date" ; + ECHO "warning: please get a fresh version from SVN." ; + } +} + + +# Bootstrap the module system. Then bring the import rule into the global module. +# +SEARCH on modules.jam = $(.bootstrap-file:D) ; +module modules { include modules.jam ; } +IMPORT modules : import : : import ; + +{ + # Add module subdirectories to the BOOST_BUILD_PATH, which allows us to make + # incremental refactoring steps by moving modules to appropriate + # subdirectories, thereby achieving some physical separation of different + # layers without changing all of our code to specify subdirectories in + # import statements or use an extra level of qualification on imported + # names. + + local subdirs = + kernel # only the most-intrinsic modules: modules, errors + util # low-level substrate: string/number handling, etc. 
+ build # essential elements of the build system architecture + tools # toolsets for handling specific build jobs and targets. + contrib # user contributed (unreviewed) modules + . # build-system.jam lives here + ; + local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ; + BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ; + + modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ; + + modules.poke : EXTRA_PYTHONPATH : $(whereami) ; +} + +# Reload the modules, to clean up things. The modules module can tolerate being +# imported twice. +# +import modules ; + +# Process option plugins first to allow them to prevent loading the rest of the +# build system. +# +import option ; +local dont-build = [ option.process ] ; + +# Should we skip building, i.e. loading the build system, according to the +# options processed? +# +if ! $(dont-build) +{ + if ! --python in $(ARGV) + { + # Allow users to override the build system file from the command-line + # (mostly for testing). + local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ; + build-system ?= build-system ; + + # Use last element in case of multiple command-line options. + import $(build-system[-1]) ; + } + else + { + + # Define additional interface exposed to Python code. Python code will + # also have access to select bjam builtins in the 'bjam' module, but + # some things are easier to define outside C. + module python_interface + { + rule load ( module-name : location ) + { + USER_MODULE $(module-name) ; + # Make all rules in the loaded module available in the global + # namespace, so that we do not have to bother specifying the + # "correct" module when calling from Python. + module $(module-name) + { + __name__ = $(1) ; + include $(2) ; + local rules = [ RULENAMES $(1) ] ; + IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ; + } + } + + rule peek ( module-name ? : variables + ) + { + module $(<) + { + return $($(>)) ; + } + } + + rule set-variable ( module-name : name : value * ) + { + module $(<) + { + $(>) = $(3) ; + } + } + + rule set-top-level-targets ( targets * ) + { + DEPENDS all : $(targets) ; + } + + rule call-in-module ( m : rulename : * ) + { + module $(m) + { + return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) + : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) + : $(17) : $(18) : $(19) ] ; + } + } + + + rule set-update-action ( action : targets * : sources * : + properties * ) + { + $(action) $(targets) : $(sources) : $(properties) ; + } + + rule set-update-action-in-module ( m : action : targets * : + sources * : properties * ) + { + module $(m) + { + $(2) $(3) : $(4) : $(5) ; + } + } + + rule set-target-variable ( targets + : variable : value * : append ? + ) + { + if $(append) + { + $(variable) on $(targets) += $(value) ; + } + else + { + $(variable) on $(targets) = $(value) ; + } + } + + rule get-target-variable ( targets + : variable ) + { + return [ on $(targets) return $($(variable)) ] ; + } + + rule import-rules-from-parent ( parent-module : this-module : + user-rules * ) + { + IMPORT $(parent-module) : $(user-rules) : $(this-module) : + $(user-rules) ; + EXPORT $(this-module) : $(user-rules) ; + } + + rule mark-included ( targets * : includes * ) + { + NOCARE $(includes) ; + INCLUDES $(targets) : $(includes) ; + ISFILE $(includes) ; + } + } + + PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ; + modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ; + + module PyBB + { + local ok = [ bootstrap $(root) ] ; + if ! 
$(ok) + { + EXIT ; + } + } + + + #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ; + + #module PyBB + #{ + # main ; + #} + } +} diff --git a/src/boost/tools/build/src/kernel/bootstrap.py b/src/boost/tools/build/src/kernel/bootstrap.py new file mode 100644 index 000000000..1215ae795 --- /dev/null +++ b/src/boost/tools/build/src/kernel/bootstrap.py @@ -0,0 +1,25 @@ +# Copyright 2009 Vladimir Prus +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import imp +import sys + +def bootstrap(root_path): + """Performs python-side bootstrapping of Boost.Build/Python. + + This function arranges for 'b2.whatever' package names to work, while also + allowing to put python files alongside corresponding jam modules. + """ + + m = imp.new_module("b2") + # Note that: + # 1. If __path__ is not list of strings, nothing will work + # 2. root_path is already list of strings. + m.__path__ = root_path + sys.modules["b2"] = m + + import b2.build_system + return b2.build_system.main() + diff --git a/src/boost/tools/build/src/kernel/class.jam b/src/boost/tools/build/src/kernel/class.jam new file mode 100644 index 000000000..ea84fe1d5 --- /dev/null +++ b/src/boost/tools/build/src/kernel/class.jam @@ -0,0 +1,420 @@ +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2002, 2005 Rene Rivera +# Copyright 2002, 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Polymorphic class system built on top of core Jam facilities. +# +# Classes are defined by 'class' keywords: +# +# class myclass +# { +# rule __init__ ( arg1 ) # constructor +# { +# self.attribute = $(arg1) ; +# } +# +# rule method1 ( ) # method +# { +# return [ method2 ] ; +# } +# +# rule method2 ( ) # method +# { +# return $(self.attribute) ; +# } +# } +# +# The __init__ rule is the constructor, and sets member variables. +# +# New instances are created by invoking [ new ]: +# +# local x = [ new myclass foo ] ; # x is a new myclass object +# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo" +# +# Derived class are created by mentioning base classes in the declaration:: +# +# class derived : myclass +# { +# rule __init__ ( arg ) +# { +# myclass.__init__ $(arg) ; # call base __init__ +# +# } +# +# rule method2 ( ) # method override +# { +# return $(self.attribute)XXX ; +# } +# } +# +# All methods operate virtually, replacing behavior in the base classes. For +# example:: +# +# local y = [ new derived foo ] ; # y is a new derived object +# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "foo" +# +# Each class instance is its own core Jam module. All instance attributes and +# methods are accessible without additional qualification from within the class +# instance. All rules imported in class declaration, or visible in base classes +# are also visible. Base methods are available in qualified form: +# base-name.method-name. By convention, attribute names are prefixed with +# "self.". 
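# As a compact recap of the conventions above, here is a minimal illustrative
# sketch (the class name 'point' is hypothetical and is not defined elsewhere;
# it assumes 'new' and 'assert' have been imported as in the examples above):
#
#   class point
#   {
#       rule __init__ ( x y )
#       {
#           self.x = $(x) ;   # instance attributes carry the "self." prefix
#           self.y = $(y) ;
#       }
#
#       rule coords ( )
#       {
#           return $(self.x) $(self.y) ;
#       }
#   }
#
#   local p = [ new point 1 2 ] ;           # p names a fresh instance
#   assert.result 1 2 : [ $(p).coords ] ;   # methods are invoked as $(p).rule
#   assert.result 1 : [ modules.peek $(p) : self.x ] ;  # each instance is its own module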
+ +import modules ; +import numbers ; + + +rule xinit ( instance : class ) +{ + module $(instance) + { + __class__ = $(2) ; + __name__ = $(1) ; + } +} + + +rule new ( class args * : * ) +{ + .next-instance ?= 1 ; + local id = object($(class))@$(.next-instance) ; + + INSTANCE $(id) : class@$(class) ; + xinit $(id) : $(class) ; + IMPORT_MODULE $(id) ; + $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : + $(18) : $(19) ; + + # Bump the next unique object name. + .next-instance = [ numbers.increment $(.next-instance) ] ; + + # Return the name of the new instance. + return $(id) ; +} + + +rule bases ( class ) +{ + module class@$(class) + { + return $(__bases__) ; + } +} + + +rule is-derived ( class : bases + ) +{ + local stack = $(class) ; + local visited found ; + while ! $(found) && $(stack) + { + local top = $(stack[1]) ; + stack = $(stack[2-]) ; + if ! ( $(top) in $(visited) ) + { + visited += $(top) ; + stack += [ bases $(top) ] ; + + if $(bases) in $(visited) + { + found = true ; + } + } + } + return $(found) ; +} + + +# Returns true if the 'value' is a class instance. +# +rule is-instance ( value ) +{ + return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ; +} + + +# Check if the given value is of the given type. +# +rule is-a ( + instance # The value to check. + : type # The type to test for. +) +{ + if [ is-instance $(instance) ] + { + return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ; + } +} + + +local rule typecheck ( x ) +{ + local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ; + if ! [ is-a $(x) : $(class-name) ] + { + return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ; + } +} + + +rule __test__ ( ) +{ + import assert ; + import "class" : new ; + import errors : try catch ; + + # This will be the construction function for a class called 'myclass'. + # + class myclass + { + import assert ; + + rule __init__ ( x_ * : y_ * ) + { + # Set some instance variables. + x = $(x_) ; + y = $(y_) ; + foo += 10 ; + } + + rule set-x ( newx * ) + { + x = $(newx) ; + } + + rule get-x ( ) + { + return $(x) ; + } + + rule set-y ( newy * ) + { + y = $(newy) ; + } + + rule get-y ( ) + { + return $(y) ; + } + + rule f ( ) + { + return [ g $(x) ] ; + } + + rule g ( args * ) + { + if $(x) in $(y) + { + return $(x) ; + } + else if $(y) in $(x) + { + return $(y) ; + } + else + { + return ; + } + } + + rule get-class ( ) + { + return $(__class__) ; + } + + rule get-instance ( ) + { + return $(__name__) ; + } + + rule invariant ( ) + { + assert.equal 1 : 1 ; + } + + rule get-foo ( ) + { + return $(foo) ; + } + } # class myclass ; + + class derived1 : myclass + { + rule __init__ ( z_ ) + { + myclass.__init__ $(z_) : X ; + z = $(z_) ; + } + + # Override g. + # + rule g ( args * ) + { + return derived1.g ; + } + + rule h ( ) + { + return derived1.h ; + } + + rule get-z ( ) + { + return $(z) ; + } + + # Check that 'assert.equal' visible in base class is visible here. + # + rule invariant2 ( ) + { + assert.equal 2 : 2 ; + } + + # Check that 'assert.variable-not-empty' visible in base class is + # visible here. + # + rule invariant3 ( ) + { + local v = 10 ; + assert.variable-not-empty v ; + } + } # class derived1 : myclass ; + + class derived2 : myclass + { + rule __init__ ( ) + { + myclass.__init__ 1 : 2 ; + } + + # Override g. 
+ # + rule g ( args * ) + { + return derived2.g ; + } + + # Test the ability to call base class functions with qualification. + # + rule get-x ( ) + { + return [ myclass.get-x ] ; + } + } # class derived2 : myclass ; + + class derived2a : derived2 + { + rule __init__ + { + derived2.__init__ ; + } + } # class derived2a : derived2 ; + + local rule expect_derived2 ( [derived2] x ) { } + + local a = [ new myclass 3 4 5 : 4 5 ] ; + local b = [ new derived1 4 ] ; + local b2 = [ new derived1 4 ] ; + local c = [ new derived2 ] ; + local d = [ new derived2 ] ; + local e = [ new derived2a ] ; + + expect_derived2 $(d) ; + expect_derived2 $(e) ; + + # Argument checking is set up to call exit(1) directly on failure, and we + # can not hijack that with try, so we should better not do this test by + # default. We could fix this by having errors look up and invoke the EXIT + # rule instead; EXIT can be hijacked (;-) + if --fail-typecheck in [ modules.peek : ARGV ] + { + try ; + { + expect_derived2 $(a) ; + } + catch + "Expected an instance of derived2 but got" instead + ; + } + + #try ; + #{ + # new bad_subclass ; + #} + #catch + # bad_subclass.bad_subclass failed to call base class constructor + # myclass.__init__ + # ; + + #try ; + #{ + # class bad_subclass ; + #} + #catch bad_subclass has already been declared ; + + assert.result 3 4 5 : $(a).get-x ; + assert.result 4 5 : $(a).get-y ; + assert.result 4 : $(b).get-x ; + assert.result X : $(b).get-y ; + assert.result 4 : $(b).get-z ; + assert.result 1 : $(c).get-x ; + assert.result 2 : $(c).get-y ; + assert.result 4 5 : $(a).f ; + assert.result derived1.g : $(b).f ; + assert.result derived2.g : $(c).f ; + assert.result derived2.g : $(d).f ; + + assert.result 10 : $(b).get-foo ; + + $(a).invariant ; + $(b).invariant2 ; + $(b).invariant3 ; + + # Check that the __class__ attribute is getting properly set. + assert.result myclass : $(a).get-class ; + assert.result derived1 : $(b).get-class ; + assert.result $(a) : $(a).get-instance ; + + $(a).set-x a.x ; + $(b).set-x b.x ; + $(c).set-x c.x ; + $(d).set-x d.x ; + assert.result a.x : $(a).get-x ; + assert.result b.x : $(b).get-x ; + assert.result c.x : $(c).get-x ; + assert.result d.x : $(d).get-x ; + + class derived3 : derived1 derived2 + { + rule __init__ ( ) + { + } + } + + assert.result : bases myclass ; + assert.result myclass : bases derived1 ; + assert.result myclass : bases derived2 ; + assert.result derived1 derived2 : bases derived3 ; + + assert.true is-derived derived1 : myclass ; + assert.true is-derived derived2 : myclass ; + assert.true is-derived derived3 : derived1 ; + assert.true is-derived derived3 : derived2 ; + assert.true is-derived derived3 : derived1 derived2 myclass ; + assert.true is-derived derived3 : myclass ; + + assert.false is-derived myclass : derived1 ; + + assert.true is-instance $(a) ; + assert.false is-instance bar ; + + assert.true is-a $(a) : myclass ; + assert.true is-a $(c) : derived2 ; + assert.true is-a $(d) : myclass ; + assert.false is-a literal : myclass ; +} diff --git a/src/boost/tools/build/src/kernel/errors.jam b/src/boost/tools/build/src/kernel/errors.jam new file mode 100644 index 000000000..5b01d6f17 --- /dev/null +++ b/src/boost/tools/build/src/kernel/errors.jam @@ -0,0 +1,287 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2004 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Print a stack backtrace leading to this rule's caller. 
Each argument +# represents a line of output to be printed after the first line of the +# backtrace. +# +rule backtrace ( skip-frames prefix messages * : * ) +{ + local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ; + local drop-elements = $(frame-skips[$(skip-frames)]) ; + if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 ) + { + ECHO "warning: backtrace doesn't support skipping $(skip-frames) " + "frames; using 1 instead." ; + drop-elements = 5 ; + } + + local args = $(.args) ; + if $(.user-modules-only) + { + local bt = [ nearest-user-location ] ; + if $(bt) + { + ECHO $(prefix) at $(bt) ; + } + for local n in $(args) + { + if $($(n))-is-defined + { + ECHO $(prefix) $($(n)) ; + } + } + } + else + { + # Get the whole backtrace, then drop the initial quadruples + # corresponding to the frames that must be skipped. + local bt = [ BACKTRACE ] ; + bt = $(bt[$(drop-elements)-]) ; + + while $(bt) + { + local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ; + ECHO "$(bt[1]):$(bt[2]):" "in" $(bt[4]) "from module" $(m) ; + + # The first time through, print each argument on a separate line. + for local n in $(args) + { + if $($(n))-is-defined + { + ECHO $(prefix) $($(n)) ; + } + } + args = ; # Kill args so that this never happens again. + + # Move on to the next quadruple. + bt = $(bt[5-]) ; + } + } +} + +.args ?= messages 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ; +.disabled ?= ; +.last-error-$(.args) ?= ; + + +# try-catch -- +# +# This is not really an exception-handling mechanism, but it does allow us to +# perform some error-checking on our error-checking. Errors are suppressed after +# a try, and the first one is recorded. Use catch to check that the error +# message matched expectations. + +# Begin looking for error messages. +# +rule try ( ) +{ + .disabled += true ; + .last-error-$(.args) = ; +} + + +# Stop looking for error messages; generate an error if an argument of messages +# is not found in the corresponding argument in the error call. +# +rule catch ( messages * : * ) +{ + .disabled = $(.disabled[2-]) ; # Pop the stack. + + import sequence ; + + if ! $(.last-error-$(.args))-is-defined + { + error-skip-frames 3 expected an error, but none occurred ; + } + else + { + for local n in $(.args) + { + if ! $($(n)) in $(.last-error-$(n)) + { + local v = [ sequence.join $($(n)) : " " ] ; + v ?= "" ; + local joined = [ sequence.join $(.last-error-$(n)) : " " ] ; + + .last-error-$(.args) = ; + error-skip-frames 3 expected \"$(v)\" in argument $(n) of error + : got \"$(joined)\" instead ; + } + } + } +} + + +rule error-skip-frames ( skip-frames messages * : * ) +{ + if ! $(.disabled) + { + backtrace $(skip-frames) "error:" $(messages) : $(2) : $(3) : $(4) : $(5) + : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) + : $(15) : $(16) : $(17) : $(18) : $(19) ; + EXIT ; + } + else if ! $(.last-error-$(.args)) + { + for local n in $(.args) + { + # Add an extra empty string so that we always have something in the + # event of an error. + .last-error-$(n) = $($(n)) "" ; + } + } +} + +if --no-error-backtrace in [ modules.peek : ARGV ] +{ + .no-error-backtrace = true ; +} + + +# Print an error message with a stack backtrace and exit. +# +rule error ( messages * : * ) +{ + if $(.no-error-backtrace) + { + local first-printed ; + # Print each argument on a separate line. + for local n in $(.args) + { + if $($(n))-is-defined + { + if ! 
$(first-printed) + { + ECHO "error:" $($(n)) ; + first-printed = true ; + } + else + { + ECHO $($(n)) ; + } + } + } + EXIT ; + } + else + { + error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : + $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) + : $(17) : $(18) : $(19) ; + } +} + + +# Same as 'error', but the generated backtrace will include only user files. +# +rule user-error ( messages * : * ) +{ + .user-modules-only = 1 ; + error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : + $(18) : $(19) ; +} + + +# Print a warning message with a stack backtrace and exit. +# +rule warning +{ + backtrace 2 "warning:" $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : + $(18) : $(19) ; +} + + +# Convert an arbitrary argument list into a list with ":" separators and quoted +# elements representing the same information. This is mostly useful for +# formatting descriptions of arguments with which a rule was called when +# reporting an error. +# +rule lol->list ( * ) +{ + local result ; + local remaining = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ; + while $($(remaining)) + { + local n = $(remaining[1]) ; + remaining = $(remaining[2-]) ; + + if $(n) != 1 + { + result += ":" ; + } + result += \"$($(n))\" ; + } + return $(result) ; +} + + +# Return the file:line for the nearest entry in backtrace which correspond to a +# user module. +# +rule nearest-user-location ( ) +{ + local bt = [ BACKTRACE ] ; + + local result ; + while $(bt) && ! $(result) + { + local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ; + local user-modules = "([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-config.jam|project-root.jam)" ; + + if [ MATCH $(user-modules) : $(bt[1]:D=) ] + { + result = "$(bt[1]):$(bt[2])" ; + } + bt = $(bt[5-]) ; + } + return $(result) ; +} + + +# If optimized rule is available in Jam, use it. +if NEAREST_USER_LOCATION in [ RULENAMES ] +{ + rule nearest-user-location ( ) + { + local r = [ NEAREST_USER_LOCATION ] ; + return "$(r[1]):$(r[2])" ; + } +} + + +rule __test__ ( ) +{ + # Show that we can correctly catch an expected error. + try ; + { + error an error occurred : somewhere ; + } + catch an error occurred : somewhere ; + + # Show that unexpected errors generate real errors. + try ; + { + try ; + { + error an error occurred : somewhere ; + } + catch an error occurred : nowhere ; + } + catch expected \"nowhere\" in argument 2 ; + + # Show that not catching an error where one was expected is an error. + try ; + { + try ; + { + } + catch ; + } + catch expected an error, but none occurred ; +} diff --git a/src/boost/tools/build/src/kernel/modules.jam b/src/boost/tools/build/src/kernel/modules.jam new file mode 100644 index 000000000..f7fb2c0ba --- /dev/null +++ b/src/boost/tools/build/src/kernel/modules.jam @@ -0,0 +1,364 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2003, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Essentially an include guard; ensures that no module is loaded multiple times. +.loaded ?= ; + +# A list of modules currently being loaded for error reporting of circular +# dependencies. +.loading ?= ; + +# A list of modules needing to be tested using their __test__ rule. 
+.untested ?= ; + +# A list of modules which have been tested using their __test__ rule. +.tested ?= ; + + +# Runs internal Boost Build unit tests for the specified module. The module's +# __test__ rule is executed in its own module to eliminate any inadvertent +# effects of testing module dependencies (such as assert) on the module itself. +# +local rule run-module-test ( m ) +{ + local tested-modules = [ modules.peek modules : .tested ] ; + + if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations. + && ( ( --debug in $(argv) ) || ( "--debug-module=$(m)" in $(argv) ) ) + { + modules.poke modules : .tested : $(tested-modules) $(m) ; + + if ! ( __test__ in [ RULENAMES $(m) ] ) + { + local argv = [ peek : ARGV ] ; + if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) ) + { + ECHO "warning:" no __test__ rule defined in module $(m) ; + } + } + else + { + if ! ( --quiet in $(argv) ) + { + ECHO testing module $(m)... ; + } + + local test-module = __test-$(m)__ ; + IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) + ] ; + IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ; + module $(test-module) + { + __test__ ; + } + } + } +} + + +# Return the binding of the given module. +# +rule binding ( module ) +{ + return $($(module).__binding__) ; +} + + +# Sets the module-local value of a variable. This is the most reliable way to +# set a module-local variable in a different module; it eliminates issues of +# name shadowing due to dynamic scoping. +# +rule poke ( module-name ? : variables + : value * ) +{ + module $(<) + { + $(>) = $(3) ; + } +} + + +# Returns the module-local value of a variable. This is the most reliable way to +# examine a module-local variable in a different module; it eliminates issues of +# name shadowing due to dynamic scoping. +# +rule peek ( module-name ? : variables + ) +{ + module $(<) + { + return $($(>)) ; + } +} + + +# Call the given rule locally in the given module. Use this for rules accepting +# rule names as arguments, so that the passed rule may be invoked in the context +# of the rule's caller (for example, if the rule accesses module globals or is a +# local rule). Note that rules called this way may accept at most 18 parameters. +# +rule call-in ( module-name ? : rule-name args * : * ) +{ + module $(module-name) + { + return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : + $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) ] ; + } +} + + +# Given a possibly qualified rule name and arguments, remove any initial module +# qualification from the rule and invoke it in that module. If there is no +# module qualification, the rule is invoked in the global module. Note that +# rules called this way may accept at most 18 parameters. +# +rule call-locally ( qualified-rule-name args * : * ) +{ + local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ; + local rule-name = $(module-rule[2]) ; + rule-name ?= $(qualified-rule-name) ; + # We pass only 18 parameters here since Boost Jam allows at most 19 rule + # parameter positions and the call-in rule already uses up the initial + # position for the module name. + return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) : + $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) + $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] ; +} + + +# Load the indicated module if it is not already loaded. +# +rule load ( + module-name # Name of module to load. Rules will be defined in this + # module. + : filename ? 
# (partial) path to file; Defaults to $(module-name).jam. + : search * # Directories in which to search for filename. Defaults to + # $(BOOST_BUILD_PATH). +) +{ + # Avoid loading modules twice. + if ! ( $(module-name) in $(.loaded) ) + { + filename ?= $(module-name).jam ; + + # Mark the module loaded so we do not try to load it recursively. + .loaded += $(module-name:B) ; + + # Suppress tests if any module loads are already in progress. + local suppress-test = $(.loading[1]) ; + + # Push this module on the loading stack. + .loading += $(module-name) ; + + # Remember that it is untested. + .untested += $(module-name) ; + + # Insert the new module's __name__ and __file__ globals. + poke $(module-name) : __name__ : $(module-name) ; + poke $(module-name) : __file__ : $(filename) ; + + module $(module-name) + { + # Add some grist so that the module will have a unique target name. + local module-target = $(__file__:G=module@) ; + + local search = $(3) ; + search ?= [ modules.peek : BOOST_BUILD_PATH ] ; + SEARCH on $(module-target) = $(search) ; + BINDRULE on $(module-target) = modules.record-binding ; + + include $(module-target) ; + + # Allow the module to see its own names with full qualification. + local rules = [ RULENAMES $(__name__) ] ; + IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ; + } + + if $(module-name) != modules && ! [ binding $(module-name) ] + { + import errors ; + errors.error "Could not find module" $(module-name) in $(search) ; + } + + # Pop the loading stack. Must happen before testing or we will run into + # a circular loading dependency. + .loading = $(.loading[1--2]) ; + + # Run any pending tests if this is an outer load. + if ! $(suppress-test) + { + local argv = [ peek : ARGV ] ; + for local m in $(.untested) + { + run-module-test $(m) ; + } + .untested = ; + } + } + else if $(module-name) in $(.loading) + { + import errors ; + errors.error loading \"$(module-name)\" + : circular module loading "dependency:" + : $(.loading)" ->" $(module-name) ; + } +} + + +# This helper is used by load (above) to record the binding (path) of each +# loaded module. +# +rule record-binding ( module-target : binding ) +{ + $(.loading[-1]).__binding__ = $(binding) ; +} + + +# Transform each path in the list, with all backslashes converted to forward +# slashes and all detectable redundancy removed. Something like this is probably +# needed in path.jam, but I am not sure of that, I do not understand it, and I +# am not ready to move all of path.jam into the kernel. +# +local rule normalize-raw-paths ( paths * ) +{ + local result ; + for p in $(paths:T) + { + result += [ NORMALIZE_PATH $(p) ] ; + } + return $(result) ; +} + + +.cwd = [ PWD ] ; + + +# Load the indicated module and import rule names into the current module. Any +# members of rules-opt will be available without qualification in the caller's +# module. Any members of rename-opt will be taken as the names of the rules in +# the caller's module, in place of the names they have in the imported module. +# If rules-opt = '*', all rules from the indicated module are imported into the +# caller's module. If rename-opt is supplied, it must have the same number of +# elements as rules-opt. +# +rule import ( module-names + : rules-opt * : rename-opt * ) +{ + if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt) + { + import errors ; + errors.error "Rule aliasing is only available for explicit imports." 
; + } + + if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) ) + { + import errors ; + errors.error "When loading multiple modules, no specific rules or" + "renaming is allowed" ; + } + + local caller = [ CALLER_MODULE ] ; + + # Import each specified module + for local m in $(module-names) + { + local module-name = $(m:B) ; + if ! $(module-name) in $(.loaded) + { + # If the importing module is not already in the BOOST_BUILD_PATH, + # prepend it to the path. We do not want to invert the search order + # of modules that are already there. + + local caller-location ; + if $(caller) + { + caller-location = [ binding $(caller) ] ; + caller-location = $(caller-location:D) ; + caller-location = [ normalize-raw-paths + $(caller-location:R=$(.cwd)) ] ; + } + + local search = [ peek : BOOST_BUILD_PATH ] ; + search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ; + + if $(caller-location) && ! $(caller-location) in $(search) + { + search = $(caller-location) $(search) ; + } + + if $(m:D) + { + search = $(caller-location)/$(m:D) $(search)/$(m:D) $(search) ; + } + + load $(module-name) : : $(search) ; + } + + IMPORT_MODULE $(module-name) : $(caller) ; + + if $(rules-opt) + { + local source-names ; + if $(rules-opt) = * + { + local all-rules = [ RULENAMES $(module-name) ] ; + source-names = $(all-rules) ; + } + else + { + source-names = $(rules-opt) ; + } + local target-names = $(rename-opt) ; + target-names ?= $(source-names) ; + IMPORT $(module-name) : $(source-names) : $(caller) : $(target-names) ; + } + } +} + + +# Define exported copies in $(target-module) of all rules exported from +# $(source-module). Also make them available in the global module with +# qualification, so that it is just as though the rules were defined originally +# in $(target-module). +# +rule clone-rules ( source-module target-module ) +{ + local r = [ RULENAMES $(source-module) ] ; + IMPORT $(source-module) : $(r) : $(target-module) : $(r) : LOCALIZE ; + EXPORT $(target-module) : $(r) ; + IMPORT $(target-module) : $(r) : : $(target-module).$(r) ; +} + + +# These rules need to be available in all modules to implement module loading +# itself and other fundamental operations. +local globalize = peek poke record-binding ; +IMPORT modules : $(globalize) : : modules.$(globalize) ; + + +rule __test__ ( ) +{ + import assert ; + import modules : normalize-raw-paths ; + + module modules.__test__ + { + foo = bar ; + } + + assert.result bar : peek modules.__test__ : foo ; + + poke modules.__test__ : foo : bar baz ; + assert.result bar baz : peek modules.__test__ : foo ; + + assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ; + assert.result . : normalize-raw-paths . ; + assert.result .. : normalize-raw-paths .. ; + assert.result ../.. : normalize-raw-paths ../.. ; + assert.result .. : normalize-raw-paths ./.. ; + assert.result / / : normalize-raw-paths / \\ ; + assert.result a : normalize-raw-paths a ; + assert.result a : normalize-raw-paths a/ ; + assert.result /a : normalize-raw-paths /a/ ; + assert.result / : normalize-raw-paths /a/.. ; +} diff --git a/src/boost/tools/build/src/manager.py b/src/boost/tools/build/src/manager.py new file mode 100644 index 000000000..6f4508567 --- /dev/null +++ b/src/boost/tools/build/src/manager.py @@ -0,0 +1,110 @@ +# Copyright Pedro Ferreira 2005. Distributed under the Boost +# Software License, Version 1.0. 
(See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import bjam + +# To simplify implementation of tools level, we'll +# have a global variable keeping the current manager. +the_manager = None +def get_manager(): + return the_manager + +class Manager: + """ This class is a facade to the Boost.Build system. + It serves as the root to access all data structures in use. + """ + + def __init__ (self, engine, global_build_dir): + """ Constructor. + engine: the build engine that will actually construct the targets. + """ + from build.virtual_target import VirtualTargetRegistry + from build.targets import TargetRegistry + from build.project import ProjectRegistry + from build.scanner import ScannerRegistry + from build.errors import Errors + from b2.util.logger import NullLogger + from build import build_request, property_set, feature + + self.engine_ = engine + self.virtual_targets_ = VirtualTargetRegistry (self) + self.projects_ = ProjectRegistry (self, global_build_dir) + self.targets_ = TargetRegistry () + self.logger_ = NullLogger () + self.scanners_ = ScannerRegistry (self) + self.argv_ = bjam.variable("ARGV") + self.boost_build_path_ = bjam.variable("BOOST_BUILD_PATH") + self.errors_ = Errors() + self.command_line_free_features_ = property_set.empty() + + global the_manager + the_manager = self + + def scanners (self): + return self.scanners_ + + def engine (self): + return self.engine_ + + def virtual_targets (self): + return self.virtual_targets_ + + def targets (self): + return self.targets_ + + def projects (self): + return self.projects_ + + def argv (self): + return self.argv_ + + def logger (self): + return self.logger_ + + def set_logger (self, logger): + self.logger_ = logger + + def errors (self): + return self.errors_ + + def getenv(self, name): + return bjam.variable(name) + + def boost_build_path(self): + return self.boost_build_path_ + + def command_line_free_features(self): + return self.command_line_free_features_ + + def set_command_line_free_features(self, v): + self.command_line_free_features_ = v + + def construct (self, properties = [], targets = []): + """ Constructs the dependency graph. + properties: the build properties. + targets: the targets to consider. If none is specified, uses all. + """ + if not targets: + for name, project in self.projects ().projects (): + targets.append (project.target ()) + + property_groups = build_request.expand_no_defaults (properties) + + virtual_targets = [] + build_prop_sets = [] + for p in property_groups: + build_prop_sets.append (property_set.create (feature.split (p))) + + if not build_prop_sets: + build_prop_sets = [property_set.empty ()] + + for build_properties in build_prop_sets: + for target in targets: + result = target.generate (build_properties) + virtual_targets.extend (result.targets ()) + + actual_targets = [] + for virtual_target in virtual_targets: + actual_targets.extend (virtual_target.actualize ()) + diff --git a/src/boost/tools/build/src/options/help.jam b/src/boost/tools/build/src/options/help.jam new file mode 100644 index 000000000..9683d1a7c --- /dev/null +++ b/src/boost/tools/build/src/options/help.jam @@ -0,0 +1,222 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2003, 2006 Rene Rivera +# Copyright 2003, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module is the plug-in handler for the --help and --help-.* +# command-line options +import modules ; +import assert ; +import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ; +import sequence ; +import set ; +import project ; +import print ; +import os ; +import version ; +import path ; + +# List of possible modules, but which really aren't. +# +.not-modules = + boost-build bootstrap site-config test user-config + -tools allyourbase boost-base features python stlport testing unit-tests ; + +# The help system options are parsed here and handed off to the doc +# module to translate into documentation requests and actions. The +# understood options are: +# +# --help-disable-
#//Comeau C++
+ +import toolset ; +import feature ; +import toolset : flags ; +import common ; +import generators ; + +import unix ; +import como ; + +feature.extend-subfeature toolset como : platform : linux ; + +toolset.inherit-generators como-linux + como linux : unix ; +generators.override como-linux.prebuilt : builtin.lib-generator ; +generators.override como-linux.searched-lib-generator : searched-lib-generator ; +toolset.inherit-flags como-linux : unix ; +toolset.inherit-rules como-linux : gcc ; + +generators.register-c-compiler como-linux.compile.c++ : CPP : OBJ + : como linux ; +generators.register-c-compiler como-linux.compile.c : C : OBJ + : como linux ; + + +rule init ( version ? : command * : options * ) +{ + local condition = [ common.check-init-parameters como-linux + : version $(version) ] ; + + command = [ common.get-invocation-command como-linux : como + : $(command) ] ; + + common.handle-options como-linux : $(condition) : $(command) : $(options) ; +} + + +flags como-linux C++FLAGS off : --no_exceptions ; +flags como-linux C++FLAGS on : --exceptions ; + +flags como-linux CFLAGS off : --no_inlining ; +flags como-linux CFLAGS on full : --inlining ; + +flags como-linux CFLAGS off : -O0 ; +flags como-linux CFLAGS speed : -O3 ; +flags como-linux CFLAGS space : -Os ; + +flags como-linux CFLAGS on : -g ; +flags como-linux LINKFLAGS on : -g ; + +flags como-linux FINDLIBS : m ; +flags como-linux FINDLIBS : rt ; + +flags como-linux CFLAGS ; +flags como-linux C++FLAGS ; +flags como-linux DEFINES ; +flags como-linux UNDEFS ; +flags como-linux HDRS ; +flags como-linux STDHDRS ; +flags como-linux LINKFLAGS ; +flags como-linux ARFLAGS ; + +flags como-linux.link LIBRARIES ; +flags como-linux.link LINKPATH ; +flags como-linux.link FINDLIBS-ST ; +flags como-linux.link FINDLIBS-SA ; + +flags como-linux.link RPATH ; +flags como-linux.link RPATH_LINK ; + + +actions link bind LIBRARIES +{ + $(CONFIG_COMMAND) $(LINKFLAGS) -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1 +} + +actions link.dll bind LIBRARIES +{ + $(CONFIG_COMMAND) $(LINKFLAGS) -shared -o "$(<[1])" "$(>)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" "$(LIBRARIES)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) 2>&1 +} + +actions compile.c +{ + $(CONFIG_COMMAND) -c --c99 --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1 +} + +actions compile.c++ +{ + $(CONFIG_COMMAND) -tused -c --long_long -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" 2>&1 +} + +actions archive +{ + ar rcu $(<) $(>) +} diff --git a/src/boost/tools/build/src/tools/como-win.jam b/src/boost/tools/build/src/tools/como-win.jam new file mode 100644 index 000000000..3225ece1a --- /dev/null +++ b/src/boost/tools/build/src/tools/como-win.jam @@ -0,0 +1,117 @@ +# (C) Copyright David Abrahams 2001. +# (C) Copyright MetaCommunications, Inc. 2004. + +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# The following #// line will be used by the regression test table generation +# program as the column heading for HTML tables. Must not include a version +# number. +#//Comeau
C++
+ +import common ; +import como ; +import feature ; +import generators ; +import toolset : flags ; + +feature.extend-subfeature toolset como : platform : win ; + + +# Initializes the Comeau toolset for windows. The command is the command which +# invokes the compiler. You should either set environment variable +# COMO_XXX_INCLUDE where XXX is the used backend (as described in the +# documentation), or pass that as part of command, e.g: +# +# using como-win : 4.3 : "set COMO_BCC_INCLUDE=C:/include &&" como.exe ; +# +rule init ( version ? : command * : options * ) +{ + local condition = [ common.check-init-parameters como-win + : version $(version) ] ; + + command = [ common.get-invocation-command como-win : como.exe : + $(command) ] ; + + common.handle-options como-win : $(condition) : $(command) : $(options) ; +} + +generators.register-c-compiler como-win.compile.c++ : CPP : OBJ + : como win ; +generators.register-c-compiler como-win.compile.c : C : OBJ + : como win ; + + +generators.register-linker como-win.link + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : EXE + : como win ; + +# Note that status of shared libraries support is not clear, so we do not define +# the link.dll generator. +generators.register-archiver como-win.archive + : OBJ : STATIC_LIB + : como win ; + + +flags como-win C++FLAGS off : --no_exceptions ; +flags como-win C++FLAGS on : --exceptions ; + +flags como-win CFLAGS off : --no_inlining ; +flags como-win CFLAGS on full : --inlining ; + + +# The following seems to be VC-specific options. At least, when I uncomment +# then, Comeau with bcc as backend reports that bcc32 invocation failed. +# +#flags como-win CFLAGS on : /Zi ; +#flags como-win CFLAGS off : /Od ; + + +flags como-win CFLAGS ; +flags como-win CFLAGS : -D_WIN32 ; # Make sure that we get the Boost Win32 platform config header. +flags como-win CFLAGS multi : -D_MT ; # Make sure that our config knows that threading is on. +flags como-win C++FLAGS ; +flags como-win DEFINES ; +flags como-win UNDEFS ; +flags como-win HDRS ; +flags como-win SYSHDRS ; +flags como-win LINKFLAGS ; +flags como-win ARFLAGS ; +flags como-win NO_WARN ; + +#flags como-win STDHDRS : $(COMO_INCLUDE_PATH) ; +#flags como-win STDLIB_PATH : $(COMO_STDLIB_PATH)$(SLASH) ; + +flags como-win LIBPATH ; +flags como-win LIBRARIES ; +flags como-win FINDLIBS ; +flags como-win FINDLIBS ; + +nl = " +" ; + + +# For como, we repeat all libraries so that dependencies are always resolved. 
+# +actions link bind LIBRARIES +{ + $(CONFIG_COMMAND) --no_version --no_prelink_verbose $(LINKFLAGS) -o "$(<[1]:S=)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" "$(FINDLIBS:S=.lib)" +} + +actions compile.c +{ + $(CONFIG_COMMAND) -c --c99 -e5 --no_version --display_error_number --diag_suppress=9,21,161,748,940,962 -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<:D=)" "$(>)" +} + +actions compile.c++ +{ + $(CONFIG_COMMAND) -c -e5 --no_version --no_prelink_verbose --display_error_number --long_long --diag_suppress=9,21,161,748,940,962 --diag_error=461 -D__STL_LONG_LONG -U$(UNDEFS) -D$(DEFINES) $(WARN) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -I"$(SYSHDRS)" -o "$(<)" "$(>)" +} + +actions archive +{ + $(CONFIG_COMMAND) --no_version --no_prelink_verbose --prelink_object @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" + lib $(ARFLAGS) /nologo /out:"$(<:S=.lib)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" +} diff --git a/src/boost/tools/build/src/tools/como.jam b/src/boost/tools/build/src/tools/como.jam new file mode 100644 index 000000000..3dd5bb4e4 --- /dev/null +++ b/src/boost/tools/build/src/tools/como.jam @@ -0,0 +1,75 @@ +# Copyright Vladimir Prus 2004. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.como]] += Comeau C/C++ Compiler + +The `como-linux` and the `como-win` modules supports the +http://www.comeaucomputing.com/[Comeau C/C++ Compiler] on Linux and +Windows respectively. + +The module is initialized using the following syntax: + +---- +using como : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +If the command is not specified, B2 will search for a binary +named `como` in PATH. + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +Before using the Windows version of the compiler, you need to setup +necessary environment variables per compiler's documentation. In +particular, the COMO_XXX_INCLUDE variable should be set, where XXX +corresponds to the used backend C compiler. + +|# # end::doc[] + +# This is a generic 'como' toolset. Depending on the current system, it +# forwards either to 'como-linux' or 'como-win' modules. 
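# A minimal configuration sketch following the `using` syntax documented
# above; the version, command path, and flag are placeholders rather than
# recommended values:
#
#   using como : 4.3 : /opt/como/bin/como : <cxxflags>--long_long ;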
+ +import feature ; +import os ; +import toolset ; + +feature.extend toolset : como ; +feature.subfeature toolset como : platform : : propagated link-incompatible ; + +rule init ( * : * ) +{ + if [ os.name ] = LINUX + { + toolset.using como-linux : + $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + else + { + toolset.using como-win : + $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + + } +} diff --git a/src/boost/tools/build/src/tools/convert.jam b/src/boost/tools/build/src/tools/convert.jam new file mode 100644 index 000000000..3b59fcd34 --- /dev/null +++ b/src/boost/tools/build/src/tools/convert.jam @@ -0,0 +1,62 @@ +# Copyright (c) 2009 Vladimir Prus +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Implements 'convert' target that takes a bunch of source and +# tries to convert each one to the specified type. +# +# For example: +# +# convert objects obj : a.cpp b.cpp ; +# + +import targets ; +import generators ; +import project ; +import type ; +import "class" : new ; + +class convert-target-class : typed-target +{ + rule __init__ ( name : project : type + : sources * : requirements * : default-build * : usage-requirements * ) + { + typed-target.__init__ $(name) : $(project) : $(type) + : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ; + } + + rule construct ( name : source-targets * : property-set ) + { + local r = [ generators.construct $(self.project) : $(self.type) + : [ property-set.create [ $(property-set).raw ] # [ feature.expand + $(self.type) ] + # ] + : $(source-targets) ] ; + if ! $(r) + { + errors.error "unable to construct" [ full-name ] ; + } + + return $(r) ; + } + +} + +rule convert ( name type : sources * : requirements * : default-build * + : usage-requirements * ) +{ + local project = [ project.current ] ; + + # This is a circular module dependency, so it must be imported here + modules.import targets ; + targets.main-target-alternative + [ new convert-target-class $(name) : $(project) : [ type.type-from-rule-name $(type) ] + : [ targets.main-target-sources $(sources) : $(name) ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] + ] ; +} +IMPORT $(__name__) : convert : : convert ; diff --git a/src/boost/tools/build/src/tools/cray.jam b/src/boost/tools/build/src/tools/cray.jam new file mode 100644 index 000000000..9364b5511 --- /dev/null +++ b/src/boost/tools/build/src/tools/cray.jam @@ -0,0 +1,1158 @@ +# Copyright 2001 David Abrahams +# Copyright 2004, 2005 Markus Schoepflin +# Copyright 2011 John Maddock +# Copyright 2013, 2017-2018 Cray, Inc. +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# README.md +# +# This toolset is for the Cray Compiling Environment (CCE). +# +# The assembler, linker, and archiver are the same as those used in the +# `gcc` toolset. Therefore, there is some duplication of code between the +# `gcc` toolset and this toolset. +# +# # CCE Introduction +# +# Users want to compile and run massively parallel applications on Cray +# supercomputers. 
Typically, the user compiles code on a login node of the +# supercomputer and then runs the compiled program on multiple compute +# nodes using a batch control system. This means the user is almost always +# cross compiling. +# +# But, they're not just cross compiling. In order for a program to run on +# a Cray supercomputer it has to link to particular libraries. There are +# three general categories of libraries that user programs must link to: +# +# - Network libraries: Enable communication between processes on different +# compute nodes. Depends on the network hardware in the supercomputer. +# - Compute node libraries: Depends on the hardware on the targeted +# compute nodes. +# - Language extension libraries: Depends on the language extensions used +# by the program (e.g. OpenMP, Unified Parallel C, et cetera). +# +# Instead of forcing users to invoke the compiler with a bunch of +# libraries listed on the command line, CCE decides what libraries to link +# based on the environment. This is primarily controlled by loading and +# unloading modules (with the `module` command) to create a cross +# compiling and linking environment suitable for the particular hardware +# on the targeted Cray supercomputer. +# +# CCE compilers come in two parts: the compiler itself, and the compiler +# driver. Invoking a compiler directly is not supported. We must always +# invoke the compiler through a compiler driver: either `cc` for C code, +# `CC` for C++ code, or `ftn` for Fortran code. The compiler driver is +# responsible for gathering information from the environment and invoking +# the selected compiler with the appropriate command line options. +# +# For more information on CCE, search for Cray publication S-2529 on the +# Cray publications website (https://pubs.cray.com). + +import "class" : new ; +import common ; +import feature ; +import gcc ; +import generators ; +import os ; +import regex ; +import set ; +import toolset ; +import type ; +import unix ; + +### +### 'init' +### + +rule init ( : : options * : requirements * ) +{ + + # User cannot specify a 'version' in their 'using' statement. Compiler + # version is always controlled by loading and unloading modules in the + # user's environment. + + # User cannot specify a 'command' in their 'using' statement. Using a + # single 'command' argument only makes sense when a single executable can + # compile different types of code (e.g. gcc will compile C or C++ based on + # the file name extensions). In CCE, you have to invoke one of the three + # compiler drivers: cc for C code, CC for C++ code, or ftn for Fortran + # code. Each compiler driver compiles a single type of source code. It is + # possible to let the user pass in three 'command' arguments, one for each + # driver, but that seems like more effort that it's worth. + + local toolset = cray ; + + check-prgenv-module $(toolset) ; + + local command-c = [ validate-command $(toolset) cc ] ; + local command-cxx = [ validate-command $(toolset) CC ] ; + local command-fortran = [ validate-command $(toolset) ftn ] ; + + # Archive builder. + local command-ar = [ validate-command $(toolset) ar ] ; + + # The 'command' variables always have one element, but they may contain + # spaces (e.g. if 'command' is an absolute path and some path components + # have spaces). + + local version = ; + local developer-build = ; + { + local version-string = [ SHELL "\"$(command-cxx)\" -VV 2>&1" ] ; + local version-components = [ MATCH "Version ([0-9]+).([0-9]+).([a-zA-Z0-9]+)" : $(version-string) ] ; + if ! 
[ MATCH "([0-9]+)" : $(version-components[3]) ] + { + + # The last component of the version is not a series of digits. This means + # we're probably using a developer build of CCE (i.e. a compiler built by + # a Cray employee). Developer builds report versions like '8.7.x'. + + developer-build = true ; + + # We want to treat developer builds as though they are the highest + # possible patch version of the release. Effectively, we want to turn + # '8.7.x' into '8.7.99'. + + version-components = $(version-components[1]) $(version-components[2]) 99 ; + + } + + version = $(version-components:J=.) ; + } + + local build = ; + if $(developer-build) + { + + # If this is a developer build, we want to add the build subfeature to the + # compiler. + + local version-string = [ SHELL "\"$(command-cxx)\" -VV 2>&1" ] ; + build = [ MATCH "[(][0-9]+_([0-9a-fA-F]+)[)]" : $(version-string) ] ; + + # Truncate build hash to 7 characters + build = [ MATCH "(.......)................................." : $(build) ] ; + } + + # IMPORTANT: 'set-cray-feature-defaults' causes the B2 tests to + # fail. I tried using an 'init' argument called 'ignore-cray-defaults' and + # setting up 'test-config.jam' to pass 'ignore-cray-defaults' during + # testing, but I couldn't get the test to read my 'test-config.jam' file + # when running tests individually. So, I just comment out + # 'set-cray-feature-defaults' during testing. + + set-cray-feature-defaults ; + + { + + # 'check-init-parameters' ensures that each time a toolset is initialized, + # it is initialized with a unique configuration. The return value is a + # B2 property condition which uniquely identifies this + # configured instance of this toolset. Typically, toolsets use the + # returned condition as the conditional in a 'toolset.flags' call to set + # flags specific to this configuration of this toolset. + + local identifying-condition = [ common.check-init-parameters $(toolset) $(requirements) : version $(version) : build $(build) ] ; + + # 'handle-options' uses 'toolset.flags' to set 'CONFIG_COMMAND' variables + # on targets when this toolset is used. The 'CONFIG_COMMAND' variables + # specify the commands to call for compiling. This would be more relevant + # if our 'init' rule had arguments that might affect the command that is + # invoked (e.g. in many toolsets 'version' affects the name of the + # compiler command). For now, we'll do this because it is a common pattern + # in toolsets, and we may need it in the future. + + handle-options + $(toolset) + : $(identifying-condition) + : $(command-c) $(command-cxx) $(command-fortran) $(command-ar) + : $(options) ; + + # Add compiler version to 'VERSION' variable on all targets. 'VERSION' is + # not used in any actions, but it is used in some updating rule + # procedures. + + toolset.flags $(toolset) VERSION $(identifying-condition) : [ numeric-version $(version) ] ; + } +} + +rule check-prgenv-module ( toolset ) +{ + + local compiler = [ os.environ PE_ENV ] ; + compiler = $(compiler:L) ; + + # We could check that environment variable CRAY_PRGENV$PE_ENV is set to + # "loaded", but this seems unnecessary and redundant. + + local default-compiler = cray ; + + if ! 
$(compiler) + { + log-warning $(toolset) : no PrgEnv module loaded + : falling back to PrgEnv-$(default-compiler) + : please load the PrgEnv-$(default-compiler) module next time ; + compiler = $(default-compiler) ; + } + + if $(compiler) != $(default-compiler) + { + log-error $(toolset) : compiler '$(compiler)' not supported + : toolset initialization failed + : please load the PrgEnv-$(default-compiler) module next time ; + # Do not abort, as suggested by: + # https://www.bfgroup.xyz/b2/manual/release/index.html#bbv2.extending.toolset_modules. + } +} + +rule set-cray-feature-defaults ( ) +{ + + # CCE users expect that using the 'cray' toolset without any explicit + # options will give them the same result as invoking CCE without any + # explicit options. So, we set feature defaults to match the default CCE + # options. + # + # The decision to turn off by default was a tough one. + # When CCE produces debugging symbols, it disables all inlining. This + # causes a decrease in performance, which the user probably was not + # expecting since they thought they were compiling with default CCE + # options. + + feature.set-default cxxstd-dialect : gnu ; + feature.set-default debug-symbols : off ; + feature.set-default optimization : default ; + feature.set-default inlining : default ; + feature.set-default vectorize : default ; +} + +### +### Command line options +### + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + + # Check if '--debug-configuration' was passed on the command line. This is + # inspired by 'common.jam' and other modules. + + # Variable names with a '.' prefix are intended to be globals. + # + # Refer to: CONTRIBUTING.adoc + + # The Jam language uses dynamic scoping. Setting '.debug-configuration' in + # this module influences the behavior of methods called from this module. + + .debug-configuration = true ; +} + +if [ MATCH (--debug-driver) : [ modules.peek : ARGV ] ] +{ + + .debug-driver = true ; +} + +### +### Features +### + +feature.extend toolset : cray ; + +# Typically, extending '' with the value 'cray' would cause +# 'cray' to be the default '' as long as it is the first value +# added to ''. However, we already imported the 'gcc' toolset, so +# 'cray' is not the first value added to ''. Therefore, we need +# to call 'feature.set-default'. +# +# If the build request specifies a '' (e.g. on the command line), +# then the '' feature default is ignored. However, if the 'cray' +# toolset is selected in 'user-config.jam' (e.g. with 'using cray ;'), +# then the build request will use the '' feature default. +# Therefore, we must use 'feature.set-default' so that selecting the +# 'cray' toolset in 'user-config.jam' works correctly. + +feature.set-default toolset : cray ; + +# CCE is different from other compilers in that it optimizes, inlines, and +# vectorizes by default. B2 assumes that 'off' is the default for +# all compilers. However, for CCE, 'off' and 'default' have different +# meanings. For CCE, 'off' requires an additional command line argument to +# turn the feature off. 'default' will not include an additional command +# line argument, but will do optimization, inlining, and vectorizing at +# whatever default level CCE uses. + +feature.extend optimization : default ; +feature.extend inlining : default ; +feature.extend vectorize : default ; + +### +### Flags +### + +# Updating rules are named in a dotted hierarchy. 
For example: +# +# compile +# \_ compile.c++ +# \_ compile.c++.preprocess +# \_ compile.c +# \_ compile.c.preprocess +# +# This naming convention allows us to apply flags to multiple children in +# the hierarchy. For example, if we apply a flag to 'compile.c++', that +# flag is also applied to its child 'compile.c++.preprocess'. If we apply +# a flag to 'compile', then that flag is applied to all children under +# 'compile'. + +toolset.flags cray.compile OPTIONS shared : -h pic ; + +toolset.flags cray.compile OPTIONS default ; # Blank. +toolset.flags cray.compile OPTIONS off : -O 0 ; +toolset.flags cray.compile OPTIONS speed : -O 3 ; +toolset.flags cray.compile OPTIONS space ; # Blank. CCE does not optimize for space. + +toolset.flags cray.compile OPTIONS default ; # Blank. +toolset.flags cray.compile OPTIONS off : -h ipa0 ; +toolset.flags cray.compile OPTIONS on ; # Blank. CCE does inlining by default. +toolset.flags cray.compile OPTIONS full : -h ipa5 ; + +toolset.flags cray.compile OPTIONS default ; # Blank. +toolset.flags cray.compile OPTIONS off : -h vector0 ; +toolset.flags cray.compile OPTIONS on ; # Blank. CCE vectorizes by default. +toolset.flags cray.compile OPTIONS full : -h vector3 ; + +toolset.flags cray.link FINDLIBS-SA multi : rt ; # Not sure if this is correct. + +toolset.flags cray.link OPTIONS shared : -h pic ; + +{ + # + # Link flags copied from 'gcc.jam'. + # + + local toolset = cray ; + local generic-os = [ set.difference [ feature.values ] : aix darwin vxworks solaris osf hpux ] ; + # Strip the binary when no debugging is needed. We use --strip-all flag + # as opposed to -s since icc (intel's compiler) is generally + # option-compatible with and inherits from the gcc toolset, but does not + # support -s. + toolset.flags $(toolset).link OPTIONS $(generic-os)/on : -Wl,--strip-all ; + toolset.flags $(toolset).link RPATH $(generic-os) : ; + toolset.flags $(toolset).link RPATH_OPTION $(generic-os) : -rpath ; + toolset.flags $(toolset).link RPATH_LINK $(generic-os) : ; + toolset.flags $(toolset).link START-GROUP $(generic-os) : -Wl,--start-group ; + toolset.flags $(toolset).link END-GROUP $(generic-os) : -Wl,--end-group ; + + # gnu ld has the ability to change the search behaviour for libraries + # referenced by the -l switch. These modifiers are -Bstatic and + # -Bdynamic and change search for -l switches that follow them. The + # following list shows the tried variants. Search stops at the first + # variant that has a match. + # + # *nix: -Bstatic -lxxx + # libxxx.a + # + # *nix: -Bdynamic -lxxx + # libxxx.so + # libxxx.a + # + # windows (mingw, cygwin) -Bstatic -lxxx + # libxxx.a + # xxx.lib + # + # windows (mingw, cygwin) -Bdynamic -lxxx + # libxxx.dll.a + # xxx.dll.a + # libxxx.a + # xxx.lib + # cygxxx.dll (*) + # libxxx.dll + # xxx.dll + # libxxx.a + # + # (*) This is for cygwin + # Please note that -Bstatic and -Bdynamic are not a guarantee that a + # static or dynamic lib indeed gets linked in. The switches only change + # search patterns! + + # On *nix mixing shared libs with static runtime is not a good idea. 
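+
+    # Purely illustrative (the library names are invented): with the
+    # prefixes set just below, a resulting link line may contain a fragment
+    # such as
+    #
+    #   ... -Wl,-Bstatic -lmystatic -Wl,-Bdynamic -lmyshared ...
+    #
+    # so each searched library is looked up in the mode requested for it,
+    # following the search patterns described above.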
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX $(generic-os)/shared : -Wl,-Bstatic ; + toolset.flags $(toolset).link FINDLIBS-SA-PFX $(generic-os)/shared : -Wl,-Bdynamic ; + + toolset.flags $(toolset).link HAVE_SONAME $(generic-os) : "" ; + toolset.flags $(toolset).link SONAME_OPTION $(generic-os) : -h ; + + # See note [1] + toolset.flags $(toolset).link OPTIONS $(generic-os)/static : -static ; + + # [1] + # For static we made sure there are no dynamic libraries in the + # link. On HP-UX not all system libraries exist as archived libraries (for + # example, there is no libunwind.a), so, on this platform, the -static option + # cannot be specified. +} + +# Flags for 'free' features ('free' features are features that do not have +# a pre-defined set of values). + +toolset.flags cray.compile USER_OPTIONS ; +toolset.flags cray.compile.c++ USER_OPTIONS ; +toolset.flags cray.compile.asm USER_OPTIONS ; +toolset.flags cray.compile DEFINES ; +toolset.flags cray.compile INCLUDES ; + +toolset.flags cray.link USER_OPTIONS ; +toolset.flags cray.link LINKPATH ; +toolset.flags cray.link FINDLIBS-ST ; +toolset.flags cray.link FINDLIBS-SA ; +toolset.flags cray.link LIBRARIES ; + +toolset.flags cray.archive AROPTIONS ; + +### +### Actions +### + +actions compile.c++ +{ + "$(CONFIG_COMMAND_CXX)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -c -o "$(<)" "$(>)" $(DRIVER_OPTIONS) +} + +actions compile.c +{ + "$(CONFIG_COMMAND_C)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -c -o "$(<)" "$(>)" $(DRIVER_OPTIONS) +} + +actions compile.asm +{ + "$(CONFIG_COMMAND_CXX)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -c -o "$(<)" "$(>)" $(DRIVER_OPTIONS) +} + +actions compile.c++.preprocess +{ + "$(CONFIG_COMMAND_CXX)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -E "$(>)" >"$(<)" $(DRIVER_OPTIONS) +} + +actions compile.c.preprocess +{ + "$(CONFIG_COMMAND_C)" $(OPTIONS) $(USER_OPTIONS) -D$(SPACE)$(DEFINES) -I$(SPACE)"$(INCLUDES)" -E "$(>)" >"$(<)" $(DRIVER_OPTIONS) +} + +# We don't want to invoke 'ld' (the linker) directly for 'link', since we +# want to give the CCE compiler driver a chance to modify the command line +# it passes to 'ld'. +# +# The question is: which CCE compiler driver do we use? The driver for C, +# the driver for C++, or the driver for Fortran? +# +# Here are things that definitely do not work: +# +# - Using the driver for C doesn't work when linking C++ programs, because +# things like 'std::cout' are not available in C, they are only +# available in C++. +# +# We use the driver for C++ below since we are primarily interested in +# compiling Boost, which is written in C++. Also, the C++ driver will +# properly link C code as well. 
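+#
+# Rough illustration only -- the object, path, and library names here are
+# invented, and the complete option set comes from the action bodies below:
+#
+#   CC -L"/opt/mylibs" -o "app" a.o b.o -lm -Wl,-Bdynamic -dynamic
+#
+# where 'CC' is the C++ driver located by 'validate-command' in 'init', and
+# '-Wl,-Bdynamic' / '-dynamic' are what 'link-procedure' adds for a shared
+# link request.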
+ +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND_CXX)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) $(DRIVER_OPTIONS) +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND_CXX)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) $(DRIVER_OPTIONS) +} + +actions piecemeal archive +{ + "$(.AR)" $(AROPTIONS) rsc "$(<)" "$(>)" +} + +### +### Updating rules +### + +# These are the actual updating rules that apply the associated actions +# when called. + +rule compile.c++ ( targets * : sources * : properties * ) +{ + compile-c++-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile.c ( targets * : sources * : properties * ) +{ + compile-c-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile.asm ( targets * : sources * : properties * ) +{ + compile-asm-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile.c++.preprocess ( targets * : sources * : properties * ) +{ + compile-c++-preprocess-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile.c.preprocess ( targets * : sources * : properties * ) +{ + compile-c-preprocess-procedure $(targets) : $(sources) : $(properties) ; +} + +rule link ( targets * : sources * : properties * ) +{ + link-procedure $(targets) : $(sources) : $(properties) ; +} + +rule link.dll ( targets * : sources * : properties * ) +{ + link-dll-procedure $(targets) : $(sources) : $(properties) ; +} + +rule archive ( targets * : sources * : properties * ) +{ + archive-procedure $(targets) : $(sources) : $(properties) ; +} + +# These are the procedure portions of the updating rules. Calling the +# procedure portion may modify the targets, but it will not apply actions +# to the targets. This allows us to reuse the procedure portions of the +# updating rules without applying the same actions to targets. 
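+#
+# For example, 'compile-asm-procedure' below simply forwards to
+# 'compile-c++-procedure': assembling then gets the same target variables
+# (C++ standard, debug symbols, SPACE, driver debugging) set up once, while
+# only the 'compile.asm' action is ever applied to the target.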
+ +rule compile-c++-procedure ( targets * : sources * : properties * ) +{ + set-cxxstd-procedure $(targets) : $(sources) : $(properties) ; + set-cxxstd-dialect-procedure $(targets) : $(sources) : $(properties) ; + set-debug-symbols-procedure $(targets) : $(sources) : $(properties) ; + add-space-procedure $(targets) : $(sources) : $(properties) ; + debug-driver-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile-c-procedure ( targets * : sources * : properties * ) +{ + set-debug-symbols-procedure $(targets) : $(sources) : $(properties) ; + add-space-procedure $(targets) : $(sources) : $(properties) ; + debug-driver-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile-asm-procedure ( targets * : sources * : properties * ) +{ + compile-c++-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile-c++-preprocess-procedure ( targets * : sources * : properties * ) +{ + compile-c++-procedure $(targets) : $(sources) : $(properties) ; +} + +rule compile-c-preprocess-procedure ( targets * : sources * : properties * ) +{ + compile-c-procedure $(targets) : $(sources) : $(properties) ; +} + +rule link-procedure ( targets * : sources * : properties * ) +{ + set-cxxstd-procedure $(targets) : $(sources) : $(properties) ; + set-cxxstd-dialect-procedure $(targets) : $(sources) : $(properties) ; + gcc-link-procedure $(targets) : $(sources) : $(properties) ; + debug-driver-procedure $(targets) : $(sources) : $(properties) ; + + # CCE driver command line flags for linking executables. + + local link = [ feature.get-values : $(properties) ] ; + switch $(link) + { + case shared : + DRIVER_OPTIONS on $(<) += -dynamic ; + case static : + DRIVER_OPTIONS on $(<) += -static ; + } + + # The link command line from the 'gcc' toolset includes: + # + # '$(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA)' + # + # The 'FINDLIBS-ST' and 'FINDLIBS-SA' variables are the libraries + # specified by the '' and '' + # features, respectively. The 'FINDLIBS-ST-PFX' is typically + # '-Wl,-Bstatic'. The 'FINDLIBS-SA-PFX' is typically '-Wl,-Bdynamic'. + # + # The '-Bstatic' and '-Bdynamic' flags passed to the linker tell the + # linker how to link all of the following libraries. The flag is in effect + # until it is overridden by another '-B' flag on the command line. + # + # So, it makes sense that the 'gcc' toolset includes these flags, so the + # '' and '' libraries are linked + # properly. + # + # The last flag that is set ('-Bdynamic') affects the link type for any + # other libraries on the command line. In the 'gcc' toolset, this is okay, + # since there are no other libraries specified on the command line after + # these flags. However, when the CCE compiler driver invokes the linker, + # it adds additional libraries to the command line based on what modules + # are loaded in the environment. So, the last '-B' flag on the CCE driver + # command line affects the link type for all libraries that CCE + # automatically appends. + # + # Therefore, we have to set the final '-B' flag to the link type we want + # the CCE libraries to be linked with. Appending to the 'OPTIONS' variable + # seems reasonable. 
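+
+    # Net effect, for illustration: when shared linking is requested, the
+    # final '-B' search-mode switch we put on the driver command line is
+    # '-Wl,-Bdynamic', so the libraries CCE appends on its own are linked
+    # dynamically; for static linking it is '-Wl,-Bstatic' instead.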
+ + local link = [ feature.get-values : $(properties) ] ; + switch $(link) + { + case shared : + OPTIONS on $(<) += -Wl,-Bdynamic ; + case static : + OPTIONS on $(<) += -Wl,-Bstatic ; + } +} + +rule link-dll-procedure ( targets * : sources * : properties * ) +{ + set-cxxstd-procedure $(targets) : $(sources) : $(properties) ; + set-cxxstd-dialect-procedure $(targets) : $(sources) : $(properties) ; + gcc-link-dll-procedure $(targets) : $(sources) : $(properties) ; + debug-driver-procedure $(targets) : $(sources) : $(properties) ; + + # CCE driver command line flags for linking shared libraries. + + DRIVER_OPTIONS on $(<) += -shared ; +} + +rule archive-procedure ( targets * : sources * : properties * ) +{ + gcc-archive-procedure $(targets) : $(sources) : $(properties) ; + debug-driver-procedure $(targets) : $(sources) : $(properties) ; +} + +# Utility procedure rules intended to be called from updating rules. + +rule gcc-link-procedure ( targets * : sources * : properties * ) +{ + + # Copied from 'gcc.jam'. + + SPACE on $(targets) = " " ; + gcc.quote-rpath $(targets) ; +} + +rule gcc-link-dll-procedure ( targets * : sources * : properties * ) +{ + + # Copied from 'gcc.jam'. + + SPACE on $(targets) = " " ; + gcc.quote-rpath $(targets) ; +} + +rule gcc-archive-procedure ( targets * : sources * : properties * ) +{ + + # Copied from 'gcc.jam'. + + # Always remove archive and start again. Here is the rationale from + # + # Andre Hentz: + # + # I had a file, say a1.c, that was included into liba.a. I moved a1.c to + # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd + # errors. After some debugging I traced it back to the fact that a1.o was + # *still* in liba.a + # + # Rene Rivera: + # + # Originally removing the archive was done by splicing an RM onto the + # archive action. That makes archives fail to build on NT when they have + # many files because it will no longer execute the action directly and blow + # the line length limit. Instead we remove the file in a different action, + # just before building the archive. + # + local clean.a = $(targets[1])(clean) ; + TEMPORARY $(clean.a) ; + NOCARE $(clean.a) ; + LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; + DEPENDS $(clean.a) : $(sources) ; + DEPENDS $(targets) : $(clean.a) ; + common.RmTemps $(clean.a) : $(targets) ; +} + +rule add-space-procedure ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +rule set-cxxstd-procedure ( targets * : sources * : properties * ) +{ + + # Translate '' into a standard recognized by CCE. + + local version = [ on $(targets[1]) return $(VERSION) ] ; + + local cxxstd = [ feature.get-values cxxstd : $(properties) ] ; + local cray-cxxstd = ; + + local unsupported-values = 2a 20 ; # I don't know what '2a' means. + if $(cxxstd) && $(cxxstd) in $(unsupported-values) + { + + log-warning cray : ignoring unsupported property '$(cxxstd)' ; + + # Set to default value, or blank if default is unsupported. 
+ + local default-value = [ get-default-feature-value cxxstd ] ; + if $(default-value) in $(unsupported-values) + { + cxxstd = ; + } + else + { + cxxstd = $(default-value) ; + } + } + + switch $(cxxstd) + { + case 98 : cray-cxxstd = 03 ; + case 03 : cray-cxxstd = 03 ; + case 0x : cray-cxxstd = 11 ; + case 11 : cray-cxxstd = 11 ; + case 1y : cray-cxxstd = 14 ; + case 14 : cray-cxxstd = 14 ; + case 1z : cray-cxxstd = 17 ; + case 17 : cray-cxxstd = 17 ; + case latest : + cray-cxxstd = [ latest-cray-cxxstd $(version) ] ; + } + + # If the 'cray-cxxstd' is not supported by this compiler version, we just + # let the command line fail. + + # If 'cxxstd' was blank, then 'cray-cxxstd' is also blank, and nothing is + # added to the command line. The compiler just uses it's default C++ + # standard. + + # Apply final options. + local space = " " ; + OPTIONS on $(targets) += -h$(space)std=c++$(cray-cxxstd) ; +} + +rule set-cxxstd-dialect-procedure ( targets * : sources * : properties * ) +{ + + # Translate '' into '-h [no]conform' and '-h [no]gnu' + # options. + + local version = [ on $(targets[1]) return $(VERSION) ] ; + + local cxxstd-dialect = [ feature.get-values cxxstd-dialect : $(properties) ] ; + local cray-conform = ; + local cray-gnu = ; + + local unsupported-values = ms ; + if $(cxxstd-dialect) && $(cxxstd-dialect) in $(unsupported-values) + { + + log-warning cray : ignoring unsupported property '$(cxxstd-dialect)' ; + + # Set to default value, or blank if default is unsupported. + + local default-value = [ get-default-feature-value cxxstd-dialect ] ; + if $(default-value) in $(unsupported-values) + { + cxxstd-dialect = ; + } + else + { + cxxstd-dialect = $(default-value) ; + } + } + + switch $(cxxstd-dialect) + { + case gnu : cray-conform = noconform ; + cray-gnu = gnu ; + case iso : cray-conform = conform ; + cray-gnu = nognu ; + } + + if [ has-conform-option $(version) ] = false + { + # The '-h [no]conform' option is ignored in recent versions of CCE. + cray-conform = ; + } + + # If 'cxxstd-dialect' was blank, then 'cray-conform' and 'cray-gnu' are + # also blank, and nothing is added to the command line. The compiler just + # uses it's default C++ dialect. + + # Apply final options. + local space = " " ; + OPTIONS on $(targets) += -h$(space)$(cray-conform) + -h$(space)$(cray-gnu) ; +} + +rule set-debug-symbols-procedure ( targets * : sources * : properties * ) +{ + + local debug-symbols = [ feature.get-values : $(properties) ] ; + if $(debug-symbols) = "on" + { + local optimization = [ feature.get-values : $(properties) ] ; + local debug-option = ; + if $(optimization) = off + { + debug-option = 0 ; + } + else + { + debug-option = 3 ; + } + + local space = " " ; + OPTIONS on $(targets) += -G$(space)$(debug-option) ; + } +} + +rule debug-driver-procedure ( targets * : sources * : properties * ) +{ + if $(.debug-driver) + { + + # Passing '-vv' to the CCE driver causes it to output the command lines + # for the underlying tools that it invokes. + + DRIVER_OPTIONS on $(<) += -vv ; + } +} + +### +### Generators +### + +class cray-linking-generator : gcc-linking-generator +{ + rule action-class ( ) + { + return action ; + } +} + +# We reuse some generator classes from the 'unix' toolset. Specifically, +# we are reusing generators for the following updating actions: +# +# - 'archive' +# - 'searched-lib-generator' +# - 'prebuilt' +# +# Inheriting these generators is like using the same generator classes as +# the 'unix' toolset, but pointing them to the 'cray' updating rules. 
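+#
+# Note (descriptive only): 'unix.link' and 'unix.link.dll' are listed as
+# exceptions below because this toolset registers its own
+# 'cray-linking-generator' instances for 'cray.link' and 'cray.link.dll'
+# further down.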
+ +toolset.inherit-generators cray : unix : unix.link unix.link.dll ; + +# The 'C-compiling-generator' class adds source paths to the '' +# property. + +generators.register [ new C-compiling-generator + cray.compile.c++ + : CPP + : OBJ + : cray ] ; +generators.register [ new C-compiling-generator + cray.compile.c + : C + : OBJ + : cray ] ; +generators.register [ new C-compiling-generator + cray.compile.asm + : ASM + : OBJ + : cray ] ; +generators.register [ new C-compiling-generator + cray.compile.c++.preprocess + : CPP + : PREPROCESSED_CPP + : cray ] ; +generators.register [ new C-compiling-generator + cray.compile.c.preprocess + : C + : PREPROCESSED_C + : cray ] ; +generators.register [ new cray-linking-generator + cray.link + : LIB OBJ + : EXE + : cray ] ; +generators.register [ new cray-linking-generator + cray.link.dll + : LIB OBJ + : SHARED_LIB + : cray ] ; + +# Tell B2 to prefer 'cray' generators over other valid +# generators. This is used to resolve a tie when B2 finds that +# there is more than one viable generator for a particular build request. + +generators.override cray.prebuilt : builtin.prebuilt ; +generators.override cray.searched-lib-generator : searched-lib-generator ; + +type.set-generated-target-suffix PREPROCESSED_CPP : cray : i ; +type.set-generated-target-suffix PREPROCESSED_C : cray : i ; + +### +### Utility rules +### + +rule validate-command ( toolset command ) +{ + local found-command = [ common.find-tool $(command) ] ; + if $(found-command) && $(.debug-configuration) + { + log-notice $(toolset) : command '$(command)' found at [ common.get-absolute-tool-path $(found-command) ] ; + } + if ! $(found-command) + { + log-warning $(toolset) : command '$(command)' not found ; + found-command = $(command) ; + } + return $(found-command) ; +} + +local rule options-helper ( rule-or-module variable-name condition * : feature options * ) +{ + toolset.flags $(rule-or-module) $(variable-name) $(condition) : [ feature.get-values $(feature) : $(options) ] : unchecked ; +} + +rule handle-options ( + toolset + : toolset-condition * + : command-c command-cxx command-fortran command-ar + : options * +) +{ + + # Configures some common 'toolset.flags' options. In particular, this rule + # sets the compiler command name to invoke. Inspired by + # 'common.handle-options'. + + # We cannot use a single 'CONFIG_COMMAND' variable because each CCE driver + # can only handle a single source code language. Therefore, we have to + # give actions a way to specify which driver they intend to use, and we + # accomplish this by providing multiple 'CONFIG_COMMAND' variables to the + # action. We cannot set the language through a flag in the 'OPTIONS' + # variable the way the 'gcc' toolset does. + + toolset.flags $(toolset) CONFIG_COMMAND_C $(toolset-condition) : $(command-c) : unchecked ; + toolset.flags $(toolset) CONFIG_COMMAND_CXX $(toolset-condition) : $(command-cxx) : unchecked ; + toolset.flags $(toolset) CONFIG_COMMAND_FORTRAN $(toolset-condition) : $(command-fortran) : unchecked ; + toolset.flags $(toolset).archive .AR $(toolset-condition) : $(command-ar) : unchecked ; + + # The following flags are applied to all targets built by this + # configuration of this toolset. This particular configuration of this + # toolset is identified by '$(toolset-condition)'. This allows the user to + # specify 'options' in their 'using' statement, and those options will be + # applied to all targets built by this configuration of this toolset. 
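+
+    # Illustration only -- the flag values are invented, not CCE defaults:
+    #
+    #   using cray : : : <cxxflags>"-h msglevel_3" <linkflags>"-Wl,-M" ;
+    #
+    # (the two empty arguments are the unused version and command slots)
+    # would pass '-h msglevel_3' to every C++ compile and '-Wl,-M' to every
+    # link performed by this configuration of the toolset, via the
+    # USER_OPTIONS variables set below.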
+ + options-helper $(toolset).compile USER_OPTIONS $(toolset-condition) : $(options) ; + options-helper $(toolset).compile USER_OPTIONS $(toolset-condition) : $(options) ; + options-helper $(toolset).compile.c++ USER_OPTIONS $(toolset-condition) : $(options) ; + options-helper $(toolset).compile.fortran USER_OPTIONS $(toolset-condition) : $(options) ; + options-helper $(toolset).compile.asm USER_OPTIONS $(toolset-condition) : $(options) ; + options-helper $(toolset).compile DEFINES $(toolset-condition) : $(options) ; + options-helper $(toolset).compile INCLUDES $(toolset-condition) : $(options) ; + + options-helper $(toolset).link USER_OPTIONS $(toolset-condition) : $(options) ; + options-helper $(toolset).link LINKPATH $(toolset-condition) : $(options) ; + options-helper $(toolset).link FINDLIBS-ST $(toolset-condition) : $(options) ; + options-helper $(toolset).link FINDLIBS-SA $(toolset-condition) : $(options) ; + options-helper $(toolset).link LIBRARIES $(toolset-condition) : $(options) ; + + options-helper $(toolset).archive AROPTIONS $(toolset-condition) : $(options) ; +} + +rule latest-cray-cxxstd ( compiler-version ) +{ + # Select latest 'cray-cxxstd' based on compiler version. + + local cray-cxxstd = 03 ; + + if $(compiler-version) >= [ numeric-version 8.6 ] + { + cray-cxxstd = 14 ; + } + + return $(cray-cxxstd) ; +} + +rule has-conform-option ( compiler-version ) +{ + + # Returns 'true' or 'false'. Returns empty list if the 'compiler-version' + # is not supported. + + local result = true ; + + if $(compiler-version) >= [ numeric-version 8.6 ] + { + result = false ; + } + + return $(result) ; +} + +local rule justify-right ( pad-char elements * ) +{ + + # Returns a list of 'elements' where each 'element' is at least 2 + # characters long. If an 'element' is less than two characters long, pads + # 'element' with 'pad-char' to make it 2 characters long. + + local result = ; + local p = $(pad-char) ; + for local e in $(elements) + { + switch $(e) + { + case ?? : result += $(e) ; + case ? : result += $(p)$(e) ; + case * : result += $(p)$(p) ; + } + } + return $(result) ; +} + +local rule list-justify-left ( pad-elem elements * ) +{ + + # Add 'pad-elem' to 'elements' list until it has 4 elements. If 'elements' + # list already had 4 or more elements, returns the first 4 elements in + # 'elements' list. + + local tally = x ; + local result = ; + for local e in $(elements) + { + if $(tally) != xxxxx + { + result += $(e) ; + tally = $(tally)x ; + } + } + + while $(tally) != xxxxx + { + result += $(pad-elem) ; + tally = $(tally)x ; + } + + return $(result) ; +} + +local rule numeric-version ( dotted-version ) +{ + + # Returns a numeric representation of version that can be compared + # directly with comparison operators. + + local result = [ regex.split $(dotted-version) "[.]" ] ; + result = [ list-justify-left 0 $(result) ] ; + result = [ justify-right 0 $(result) ] ; + result = $(result:J="") ; + + return $(result) ; +} + +local rule get-default-feature-value ( feature-name ) +{ + local default-property = [ feature.defaults $(feature-name) ] ; + local default-value = [ feature.get-values $(feature-name) : $(default-property) ] ; + return $(default-value) ; +} + +rule log ( log-level prefix ? : * ) +{ + for local message-arg in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 + { + local message = $($(message-arg)) ; + if $(message) + { + ECHO "$(log-level):" "$(prefix):" $(message) ; + } + } +} + +rule log-error ( prefix ? 
: * ) +{ + log error $(prefix) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) : $(20) : $(21) : $(22) : $(23) : $(24) ; +} + +rule log-warning ( prefix ? : * ) +{ + log warning $(prefix) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) : $(20) : $(21) : $(22) : $(23) : $(24) ; +} + +rule log-notice ( prefix ? : * ) +{ + log notice $(prefix) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) : $(20) : $(21) : $(22) : $(23) : $(24) ; +} + +rule __test__ ( ) +{ + import assert ; + + assert.result 08060000 : numeric-version 8.6 ; + assert.result 08061500 : numeric-version 8.6.15 ; + assert.result 08061501 : numeric-version 8.6.15.1 ; + assert.result 08061501 : numeric-version 8.6.15.1.2 ; + + local a = [ numeric-version 8.6 ] ; + local b = [ numeric-version 8.5.9 ] ; + + # 'assert.equal x : y' forces the test to fail. It's like saying 'assert + # false'. + + if ! ( $(a) > $(b) ) + { + assert.equal x : y ; + } + + if ! ( $(b) < $(a) ) + { + assert.equal x : y ; + } + + if ! ( $(a) >= $(b) ) + { + assert.equal x : y ; + } + + if ! ( $(a) >= $(a) ) + { + assert.equal x : y ; + } + + if ! ( $(b) <= $(a) ) + { + assert.equal x : y ; + } + + if ! ( $(b) <= $(b) ) + { + assert.equal x : y ; + } + + if ! ( $(a) = $(a) ) + { + assert.equal x : y ; + } + + if ! ( $(a) != $(b) ) + { + assert.equal x : y ; + } +} diff --git a/src/boost/tools/build/src/tools/cw-config.jam b/src/boost/tools/build/src/tools/cw-config.jam new file mode 100644 index 000000000..a84008d8f --- /dev/null +++ b/src/boost/tools/build/src/tools/cw-config.jam @@ -0,0 +1,34 @@ +#~ Copyright 2005 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. +#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Automatic configuration for CodeWarrior toolset. To use, just import this module. + +import os ; +import toolset : using ; + +if [ os.name ] = NT +{ + for local R in 9 8 7 + { + local cw-path = [ W32_GETREG + "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)" + : "PATH" ] ; + local cw-version = [ W32_GETREG + "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions\\CodeWarrior for Windows R$(R)" + : "VERSION" ] ; + cw-path ?= [ W32_GETREG + "HKEY_LOCAL_MACHINE\\SOFTWARE\\Metrowerks\\CodeWarrior for Windows\\$(R).0" + : "PATH" ] ; + cw-version ?= $(R).0 ; + + if $(cw-path) + { + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO "notice:" using cw ":" $(cw-version) ":" "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ; + } + using cw : $(cw-version) : "$(cw-path)\\Other Metrowerks Tools\\Command Line Tools\\mwcc.exe" ; + } + } +} diff --git a/src/boost/tools/build/src/tools/cw.jam b/src/boost/tools/build/src/tools/cw.jam new file mode 100644 index 000000000..9078c7307 --- /dev/null +++ b/src/boost/tools/build/src/tools/cw.jam @@ -0,0 +1,304 @@ +# Copyright (C) Reece H Dunn 2004 +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.cw]] += Code Warrior + +The `cw` module support CodeWarrior compiler, originally produced by +Metrowerks and presently developed by Freescale. 
B2 supports +only the versions of the compiler that target x86 processors. All such +versions were released by Metrowerks before acquisition and are not sold +any longer. The last version known to work is 9.4. + +The module is initialized using the following syntax: + +---- +using cw : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +If the command is not specified, B2 will search for a binary +named `mwcc` in default installation paths and in PATH. + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +`setup`:: + The command that sets up environment variables prior to invoking the + compiler. If not specified, `cwenv.bat` alongside the compiler binary + will be used. +`compiler`:: + The command that compiles C and C++ sources. If not specified, `mwcc` + will be used. The command will be invoked after the setup script was + executed and adjusted the PATH variable. +`linker`:: + The command that links executables and dynamic libraries. If not + specified, `mwld` will be used. The command will be invoked after the + setup script was executed and adjusted the PATH variable. + +|# # end::doc[] + +# based on the msvc.jam toolset + +import property ; +import generators ; +import os ; +import type ; +import toolset : flags ; +import errors : error ; +import feature : feature get-values ; +import path ; +import sequence : unique ; +import common ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +feature.extend toolset : cw ; + +toolset.add-requirements cw,shared:multi ; + +nl = " +" ; + +rule init ( version ? 
: command * : options * ) +{ + # TODO: fix the $(command[1]) = $(compiler) issue + + setup = [ get-values : $(options) ] ; + setup ?= cwenv.bat ; + compiler = [ get-values : $(options) ] ; + compiler ?= mwcc ; + linker = [ get-values : $(options) ] ; + linker ?= mwld ; + + local condition = [ common.check-init-parameters cw : + version $(version) ] ; + + command = [ common.get-invocation-command cw : mwcc.exe : $(command) : + [ default-paths $(version) ] ] ; + + common.handle-options cw : $(condition) : $(command) : $(options) ; + + local root = [ feature.get-values : $(options) ] ; + if $(command) + { + command = [ common.get-absolute-tool-path $(command[-1]) ] ; + } + local tool-root = $(command) ; + + setup = $(tool-root)\\$(setup) ; + + # map the batch file in setup so it can be executed + + other-tools = $(tool-root:D) ; + root ?= $(other-tools:D) ; + + flags cw.link RUN_PATH $(condition) : + "$(root)\\Win32-x86 Support\\Libraries\\Runtime" + "$(root)\\Win32-x86 Support\\Libraries\\Runtime\\Libs\\MSL_All-DLLs" ; + + setup = "set \"CWFOLDER="$(root)"\" && call \""$(setup)"\" > nul " ; + + if [ os.name ] = NT + { + setup = $(setup)" +" ; + } + else + { + setup = "cmd /S /C "$(setup)" \"&&\" " ; + } + + # bind the setup command to the tool so it can be executed before the + # command + + local prefix = $(setup) ; + + flags cw.compile .CC $(condition) : $(prefix)$(compiler) ; + flags cw.link .LD $(condition) : $(prefix)$(linker) ; + flags cw.archive .LD $(condition) : $(prefix)$(linker) ; + + if [ MATCH "^([89]\\.)" : $(version) ] + { + if [ os.name ] = NT + { + # The runtime libraries + flags cw.compile CFLAGS static/single/off : -runtime ss ; + flags cw.compile CFLAGS static/single/on : -runtime ssd ; + + flags cw.compile CFLAGS static/multi/off : -runtime sm ; + flags cw.compile CFLAGS static/multi/on : -runtime smd ; + + flags cw.compile CFLAGS shared/off : -runtime dm ; + flags cw.compile CFLAGS shared/on : -runtime dmd ; + } + } +} + + +local rule default-paths ( version ? 
) # FIXME +{ + local possible-paths ; + local ProgramFiles = [ common.get-program-files-dir ] ; + + # TODO: add support for cw8 and cw9 detection + + local version-6-path = $(ProgramFiles)"\\Metrowerks\\CodeWarrior" ; + possible-paths += $(version-6-path) ; + + # perform post-processing + + possible-paths + = $(possible-paths)"\\Other Metrowerks Tools\\Command Line Tools" ; + + possible-paths += [ modules.peek : PATH Path path ] ; + + return $(possible-paths) ; +} + + + + +## declare generators + +generators.register-c-compiler cw.compile.c++ : CPP : OBJ : cw ; +generators.register-c-compiler cw.compile.c : C : OBJ : cw ; + +generators.register-linker cw.link + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : EXE + : cw + ; +generators.register-linker cw.link.dll + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : SHARED_LIB IMPORT_LIB + : cw + ; + +generators.register-archiver cw.archive + : OBJ + : STATIC_LIB + : cw + ; + +## compilation phase + +flags cw WHATEVER ; + +flags cw.compile CFLAGS on : -g ; +flags cw.compile CFLAGS off : -O0 ; +flags cw.compile CFLAGS speed : -O4,p ; +flags cw.compile CFLAGS space : -O4,s ; +flags cw.compile CFLAGS off : -inline off ; +flags cw.compile CFLAGS on : -inline on ; +flags cw.compile CFLAGS full : -inline all ; +flags cw.compile CFLAGS off : -Cpp_exceptions off ; + + +flags cw.compile CFLAGS on : -RTTI on ; +flags cw.compile CFLAGS off : -RTTI off ; + +flags cw.compile CFLAGS on : -w on ; +flags cw.compile CFLAGS off : -w off ; +flags cw.compile CFLAGS all : -w all ; +flags cw.compile CFLAGS extra : -w all ; +flags cw.compile CFLAGS pedantic : -w all ; +flags cw.compile CFLAGS on : -w error ; + +flags cw.compile USER_CFLAGS : ; +flags cw.compile.c++ USER_CFLAGS : ; + +flags cw.compile DEFINES ; +flags cw.compile UNDEFS ; +flags cw.compile INCLUDES ; + +actions compile.c +{ + $(.CC) -c -cwd include -lang c -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")" +} +actions compile.c++ +{ + $(.CC) -c -cwd include -lang c++ -U$(UNDEFS) $(CFLAGS) $(USER_CFLAGS) -I- -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)")" +} + +## linking phase + +flags cw.link DEF_FILE ; + +flags cw LINKFLAGS : -search ; +flags cw LINKFLAGS on : -g ; +flags cw LINKFLAGS console : -subsystem console ; +flags cw LINKFLAGS gui : -subsystem windows ; +flags cw LINKFLAGS wince : -subsystem wince ; +flags cw LINKFLAGS native : -subsystem native ; +flags cw LINKFLAGS auto : -subsystem auto ; + +flags cw LINKFLAGS LIB/static : -library ; + +flags cw.link USER_LINKFLAGS ; +flags cw.link LINKPATH ; + +flags cw.link FINDLIBS_ST ; +flags cw.link FINDLIBS_SA ; +flags cw.link LIBRARY_OPTION cw : "" : unchecked ; +flags cw.link LIBRARIES_MENTIONED_BY_FILE : ; + +rule link.dll ( targets + : sources * : properties * ) +{ + DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ; +} + +if [ os.name ] in NT +{ + actions archive + { + if exist "$(<[1])" DEL "$(<[1])" + $(.LD) -library -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" + } +} +else # cygwin +{ + actions archive + { + _bbv2_out_="$(<)" + if test -f "$_bbv2_out_" ; then + _bbv2_existing_="$(<:W)" + fi + $(.LD) -library -o "$(<:W)" $_bbv2_existing_ @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" + } +} + +actions link 
bind DEF_FILE +{ + $(.LD) -o "$(<[1]:W)" -L"$(LINKPATH)" $(LINKFLAGS) $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" +} + +actions link.dll bind DEF_FILE +{ + $(.LD) -shared -o "$(<[1]:W)" -implib "$(<[2]:W)" -L"$(LINKPATH)" $(LINKFLAGS) -f"$(DEF_FILE)" $(USER_LINKFLAGS) @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES_MENTIONED_BY_FILE) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" +} + diff --git a/src/boost/tools/build/src/tools/cygwin.jam b/src/boost/tools/build/src/tools/cygwin.jam new file mode 100644 index 000000000..1348e3757 --- /dev/null +++ b/src/boost/tools/build/src/tools/cygwin.jam @@ -0,0 +1,12 @@ +# Copyright 2004 Vladimir Prus. +# Copyright 2016 Steven Watanabe +# Copyright 2017 Peter Dimov +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Provides utility functions for handling cygwin paths + +rule cygwin-to-windows-path ( path ) +{ + return [ MATCH "(.*)[\n]+" : [ SHELL "cygpath -w $(path)" ] ] ; +} diff --git a/src/boost/tools/build/src/tools/darwin.jam b/src/boost/tools/build/src/tools/darwin.jam new file mode 100644 index 000000000..45432d2f1 --- /dev/null +++ b/src/boost/tools/build/src/tools/darwin.jam @@ -0,0 +1,511 @@ +# Copyright 2003 Christopher Currie +# Copyright 2006 Dave Abrahams +# Copyright 2003, 2004, 2005, 2006 Vladimir Prus +# Copyright 2005-2007 Mat Marcus +# Copyright 2005-2007 Adobe Systems Incorporated +# Copyright 2007-2010 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ +# for explanation why it's a separate toolset. + +import feature : feature ; +import toolset : flags ; +import type ; +import common ; +import generators ; +import path : basename ; +import version ; +import property-set ; +import regex ; +import errors ; + +## Use a framework. +feature framework : : free ; + +## The MacOSX version to compile for, which maps to the SDK to use (sysroot). +feature macosx-version : : propagated link-incompatible symmetric optional ; + +## The minimal MacOSX version to target. +feature macosx-version-min : : propagated optional ; + +## A dependency, that is forced to be included in the link. +feature force-load : : free dependency incidental ; + +############################################################################# + +_ = " " ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +feature.extend toolset : darwin ; +import gcc ; +toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ; + +generators.override darwin.prebuilt : builtin.prebuilt ; +generators.override darwin.searched-lib-generator : searched-lib-generator ; + +# Override default do-nothing generators. +generators.override darwin.compile.c.pch : pch.default-c-pch-generator ; +generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ; + +type.set-generated-target-suffix PCH : darwin : gch ; + +toolset.inherit-rules darwin : gcc : localize ; +toolset.inherit-flags darwin : gcc + : static + arm/32 + arm/64 + arm/ + x86/32 + x86/64 + x86/ + power/32 + power/64 + power/ + full ; + +# Options: +# +# PATH +# Platform root path. 
The common autodetection will set this to +# "/Developer". And when a command is given it will be set to +# the corresponding "*.platform/Developer" directory. +# +rule init ( version ? : command * : options * : requirement * ) +{ + # First time around, figure what is host OSX version + if ! $(.host-osx-version) + { + .host-osx-version = [ MATCH "^([0-9.]+)" + : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ; + if $(.debug-configuration) + { + ECHO notice\: OSX version on this machine is $(.host-osx-version) ; + } + } + + # - The root directory of the tool install. + local root = [ feature.get-values : $(options) ] ; + + # - The bin directory where to find the commands to execute. + local bin ; + + # - The configured compile driver command. + local command = [ common.get-invocation-command darwin : g++ : $(command) ] ; + + # The version as reported by the compiler + local real-version ; + + # - Autodetect the root and bin dir if not given. + if $(command) + { + bin ?= [ common.get-absolute-tool-path $(command[1]) ] ; + if $(bin) = "/usr/bin" + { + root ?= /Developer ; + } + else + { + local r = $(bin:D) ; + r = $(r:D) ; + root ?= $(r) ; + } + } + + # - Autodetect the version if not given. + if $(command) + { + # - The 'command' variable can have multiple elements. When calling + # the SHELL builtin we need a single string. + local command-string = $(command:J=" ") ; + real-version = [ MATCH "^([0-9.]+)" + : [ SHELL "$(command-string) -dumpversion" ] ] ; + version ?= $(real-version) ; + } + + .real-version.$(version) = $(real-version) ; + + # - Define the condition for this toolset instance. + local condition = + [ common.check-init-parameters darwin $(requirement) : version $(version) ] ; + + # - Set the toolset generic common options. + common.handle-options darwin : $(condition) : $(command) : $(options) ; + + real-version = [ regex.split $(real-version) \\. ] ; + # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates. + if [ version.version-less $(real-version) : 4 0 ] + { + flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ; + } + # - GCC 4.2 and higher in Darwin does not have -Wno-long-double. + if [ version.version-less $(real-version) : 4 2 ] + { + flags darwin.compile OPTIONS $(condition) : -Wno-long-double ; + } + # - GCC on Darwin with -pedantic, suppress unsupported long long warning + flags darwin.compile OPTIONS $(condition)/pedantic : -Wno-long-long ; + + # - GCC on El Capitan (10.11) does not support -finline-functions + if "10.11.0" <= $(.host-osx-version) + { + flags darwin.compile OPTIONS $(condition)/full : -Wno-inline ; + } + + # - The symbol strip program. + local strip ; + if in $(options) + { + # We can turn off strip by specifying it as empty. In which + # case we switch to using the linker to do the strip. + flags darwin.link.dll OPTIONS + $(condition)/LIB/shared/32/on : -Wl,-x ; + flags darwin.link.dll OPTIONS + $(condition)/LIB/shared//on : -Wl,-x ; + flags darwin.link OPTIONS + $(condition)/EXE/32/on : -s ; + flags darwin.link OPTIONS + $(condition)/EXE//on : -s ; + } + else + { + # Otherwise we need to find a strip program to use. And hence + # also tell the link action that we need to use a strip + # post-process. 
+ flags darwin.link NEED_STRIP $(condition)/on : "" ; + strip = + [ common.get-invocation-command darwin + : strip : [ feature.get-values : $(options) ] : $(bin) : search-path ] ; + flags darwin.link .STRIP $(condition) : $(strip[1]) ; + if $(.debug-configuration) + { + ECHO notice\: using strip for $(condition) at $(strip[1]) ; + } + } + + # - The archive builder (libtool is the default as creating + # archives in darwin is complicated. + local archiver = + [ common.get-invocation-command darwin + : libtool : [ feature.get-values : $(options) ] : $(bin) : search-path ] ; + flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ; + if $(.debug-configuration) + { + ECHO notice\: using archiver for $(condition) at $(archiver[1]) ; + } + + # - Initialize the SDKs available in the root for this tool. + local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ; + + #~ ECHO --- ; + #~ ECHO --- bin :: $(bin) ; + #~ ECHO --- root :: $(root) ; + #~ ECHO --- version :: $(version) ; + #~ ECHO --- condition :: $(condition) ; + #~ ECHO --- strip :: $(strip) ; + #~ ECHO --- archiver :: $(archiver) ; + #~ ECHO --- sdks :: $(sdks) ; + #~ ECHO --- ; + #~ EXIT ; +} + +# Add and set options for a discovered SDK version. +local rule init-sdk ( condition * : root ? : version + : version-feature ? ) +{ + local rule version-to-feature ( version + ) + { + switch $(version[1]) + { + case appletv* : + { + return $(version[1])-$(version[2-]:J=.) ; + } + case iphone* : + { + return $(version[1])-$(version[2-]:J=.) ; + } + case mac* : + { + return $(version[2-]:J=.) ; + } + case * : + { + return $(version:J=.) ; + } + } + } + + if $(version-feature) + { + if $(.debug-configuration) + { + ECHO notice\: available sdk for $(condition)/$(version-feature) at $(root) ; + } + + # Add the version to the features for specifying them. + if ! $(version-feature) in [ feature.values macosx-version ] + { + feature.extend macosx-version : $(version-feature) ; + } + if ! $(version-feature) in [ feature.values macosx-version-min ] + { + feature.extend macosx-version-min : $(version-feature) ; + } + + # Set the flags the version needs to compile with, first + # generic options. + flags darwin.compile OPTIONS $(condition)/$(version-feature) + : -isysroot $(root) ; + flags darwin.link OPTIONS $(condition)/$(version-feature) + : -isysroot $(root) ; + + # Then device variation options. + switch $(version[1]) + { + case appletvsim* : + { + local N = $(version[2]) ; + if ! $(version[3]) { N += 00 ; } + else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; } + else { N += 0$(version[3]) ; } + if ! $(version[4]) { N += 00 ; } + else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; } + else { N += 0$(version[4]) ; } + N = $(N:J=) ; + flags darwin.compile OPTIONS $(version-feature) + : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; + flags darwin.link OPTIONS $(version-feature) + : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; + } + + case appletv* : + { + flags darwin.compile OPTIONS $(version-feature) + : -mtvos-version-min=$(version[2-]:J=.) ; + flags darwin.link OPTIONS $(version-feature) + : -mtvos-version-min=$(version[2-]:J=.) ; + } + + case iphonesim* : + { + local N = $(version[2]) ; + if ! $(version[3]) { N += 00 ; } + else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; } + else { N += 0$(version[3]) ; } + if ! $(version[4]) { N += 00 ; } + else if [ regex.match (..) 
: $(version[4]) ] { N += $(version[4]) ; } + else { N += 0$(version[4]) ; } + N = $(N:J=) ; + flags darwin.compile OPTIONS $(version-feature) + : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; + flags darwin.link OPTIONS $(version-feature) + : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ; + } + + case iphone* : + { + flags darwin.compile OPTIONS $(version-feature) + : -miphoneos-version-min=$(version[2-]:J=.) ; + flags darwin.link OPTIONS $(version-feature) + : -miphoneos-version-min=$(version[2-]:J=.) ; + } + + case mac* : + { + flags darwin.compile OPTIONS $(version-feature) + : -mmacosx-version-min=$(version[2-]:J=.) ; + flags darwin.link OPTIONS $(version-feature) + : -mmacosx-version-min=$(version[2-]:J=.) ; + } + } + + if $(version[3]) > 0 + { + # We have a minor version of an SDK. We want to set up + # previous minor versions, plus the current minor version. + # So we recurse to set up the previous minor versions, up to + # the current version. + local minor-minus-1 = [ CALC $(version[3]) - 1 ] ; + return + [ init-sdk $(condition) : $(root) + : $(version[1-2]) $(minor-minus-1) : [ version-to-feature $(version[1-2]) $(minor-minus-1) ] ] + $(version-feature) ; + } + else + { + return $(version-feature) ; + } + } + else if $(version[4]) + { + # We have a patch version of an SDK. We want to set up + # both the specific patch version, and the minor version. + # So we recurse to set up the patch version. Plus the minor version. + return + [ init-sdk $(condition) : $(root) + : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ] + [ init-sdk $(condition) : $(root) + : $(version) : [ version-to-feature $(version) ] ] ; + } + else + { + # Yes, this is intentionally recursive. + return + [ init-sdk $(condition) : $(root) + : $(version) : [ version-to-feature $(version) ] ] ; + } +} + +# Determine the MacOSX SDK versions installed and their locations. +local rule init-available-sdk-versions ( condition * : root ? ) +{ + root ?= /Developer ; + local sdks-root = $(root)/SDKs ; + local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk AppleTVOS*.sdk AppleTVSimulator*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ; + local result ; + for local sdk in $(sdks) + { + local sdk-match = [ MATCH "([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)?" 
: $(sdk:D=) ] ; + local sdk-platform = $(sdk-match[1]:L) ; + local sdk-version = $(sdk-match[2-]) ; + if $(sdk-version) + { + switch $(sdk-platform) + { + case macosx : + { + sdk-version = mac $(sdk-version) ; + } + case appletvos : + { + sdk-version = appletv $(sdk-version) ; + } + case appletvsimulator : + { + sdk-version = appletvsim $(sdk-version) ; + } + case iphoneos : + { + sdk-version = iphone $(sdk-version) ; + } + case iphonesimulator : + { + sdk-version = iphonesim $(sdk-version) ; + } + case * : + { + sdk-version = $(sdk-version:J=-) ; + } + } + result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ; + } + } + return $(result) ; +} + +rule compile.m ( targets * : sources * : properties * ) +{ + LANG on $(<) = "-x objective-c" ; + gcc.set-fpic-options $(targets) : $(sources) : $(properties) ; +} + +actions compile.m +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +rule compile.mm ( targets * : sources * : properties * ) +{ + LANG on $(<) = "-x objective-c++" ; + gcc.set-fpic-options $(targets) : $(sources) : $(properties) ; +} + +actions compile.mm +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +# Set the max header padding to allow renaming of libs for installation. +flags darwin.link.dll OPTIONS : -headerpad_max_install_names ; + +# To link the static runtime we need to link to all the core runtime libraries. +flags darwin.link OPTIONS static + : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ; + +# Strip as much as possible when optimizing. +flags darwin.link OPTIONS speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ; +flags darwin.link OPTIONS space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ; + +# Dynamic/shared linking. +flags darwin.compile OPTIONS shared : -dynamic ; + +# Misc options. +flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ; +#~ flags darwin.link OPTIONS : -fexceptions ; + +# Add the framework names to use. +flags darwin.link FRAMEWORK ; + +# +flags darwin.link FORCE_LOAD ; + +# This is flag is useful for debugging the link step +# uncomment to see what libtool is doing under the hood +#~ flags darwin.link.dll OPTIONS : -Wl,-v ; + +# set up the -F option to include the paths to any frameworks used. +local rule prepare-framework-path ( target + ) +{ + # The -framework option only takes basename of the framework. + # The -F option specifies the directories where a framework + # is searched for. So, if we find feature + # with some path, we need to generate property -F option. + local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ; + + # Be sure to generate no -F if there's no path. + for local framework-path in $(framework-paths) + { + if $(framework-path) != "" + { + FRAMEWORK_PATH on $(target) += -F$(framework-path) ; + } + } +} + +rule link ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ; + prepare-framework-path $(<) ; +} + +# Note that using strip without any options was reported to result in broken +# binaries, at least on OS X 10.5.5, see: +# http://svn.boost.org/trac/boost/ticket/2347 +# So we pass -S -x. 
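+#
+# Framework handling, by way of an invented example: a target carrying
+# <framework>/Library/Frameworks/Example.framework in its properties gets
+# "-F/Library/Frameworks" through FRAMEWORK_PATH (see
+# 'prepare-framework-path' above) and "-framework Example" on the link
+# command line below.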
+actions link bind LIBRARIES FORCE_LOAD +{ + "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS) + $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)" +} + +rule link.dll ( targets * : sources * : properties * ) +{ + prepare-framework-path $(<) ; +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS) +} + +# We use libtool instead of ar to support universal binary linking +# TODO: Find a way to use the underlying tools, i.e. lipo, to do this. +actions piecemeal archive +{ + "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)" +} diff --git a/src/boost/tools/build/src/tools/darwin.py b/src/boost/tools/build/src/tools/darwin.py new file mode 100644 index 000000000..f03d63f35 --- /dev/null +++ b/src/boost/tools/build/src/tools/darwin.py @@ -0,0 +1,57 @@ +# Copyright (C) Christopher Currie 2003. Permission to copy, use, +# modify, sell and distribute this software is granted provided this +# copyright notice appears in all copies. This software is provided +# "as is" without express or implied warranty, and with no claim as to +# its suitability for any purpose. + +# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ +# for explanation why it's a separate toolset. + +import common, gcc, builtin +from b2.build import feature, toolset, type, action, generators +from b2.util.utility import * + +toolset.register ('darwin') + +toolset.inherit_generators ('darwin', [], 'gcc') +toolset.inherit_flags ('darwin', 'gcc') +toolset.inherit_rules ('darwin', 'gcc') + +def init (version = None, command = None, options = None): + options = to_seq (options) + + condition = common.check_init_parameters ('darwin', None, ('version', version)) + + command = common.get_invocation_command ('darwin', 'g++', command) + + common.handle_options ('darwin', condition, command, options) + + gcc.init_link_flags ('darwin', 'darwin', condition) + +# Darwin has a different shared library suffix +type.set_generated_target_suffix ('SHARED_LIB', ['darwin'], 'dylib') + +# we need to be able to tell the type of .dylib files +type.register_suffixes ('dylib', 'SHARED_LIB') + +feature.feature ('framework', [], ['free']) + +toolset.flags ('darwin.compile', 'OPTIONS', 'shared', ['-dynamic']) +toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp']) +toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates']) + +toolset.flags ('darwin.link', 'FRAMEWORK', '') + +# This is flag is useful for debugging the link step +# uncomment to see what libtool is doing under the hood +# toolset.flags ('darwin.link.dll', 'OPTIONS', None, '[-Wl,-v']) + +action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) + +# TODO: how to set 'bind LIBRARIES'? 
+action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) + +def darwin_archive (manager, targets, sources, properties): + pass + +action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"']) diff --git a/src/boost/tools/build/src/tools/diab.jam b/src/boost/tools/build/src/tools/diab.jam new file mode 100644 index 000000000..c07572af8 --- /dev/null +++ b/src/boost/tools/build/src/tools/diab.jam @@ -0,0 +1,131 @@ +# Copyright 2015, Wind River Inc. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# +# Diab C++ Compiler +# + +import feature generators common ; +import toolset : flags ; +import os ; + +feature.extend toolset : diab ; + +# Inherit from Unix toolset to get library ordering magic. +toolset.inherit diab : unix ; + +generators.override diab.prebuilt : builtin.lib-generator ; +generators.override diab.prebuilt : builtin.prebuilt ; +generators.override diab.searched-lib-generator : searched-lib-generator ; + + +rule init ( version ? : command * : options * ) +{ + local condition = [ common.check-init-parameters diab : version $(version) ] ; + + local command = [ common.get-invocation-command diab : dcc : $(command) ] ; + + if $(command) + { + local root = [ common.get-absolute-tool-path $(command[-1]) ] ; + + if $(root) + { + flags diab .root linux : "\"$(root)\"/" ; + flags diab .root windows : $(root:T)/ ; + } + } + # If we can't find 'CC' anyway, at least show 'CC' in the commands + command ?= CC ; + + common.handle-options diab : $(condition) : $(command) : $(options) ; +} + +generators.register-c-compiler diab.compile.c++ : CPP : OBJ : diab ; +generators.register-c-compiler diab.compile.c : C : OBJ : diab ; + + +# unlike most compilers, Diab defaults to static linking. 
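+# Configuration sketch; the version number and install path are illustrative:
+#   using diab ;                              # dcc located via PATH
+#   using diab : 5.9.4 : /opt/diab/bin/dcc ;  # explicit version and command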
+# flags cxx LINKFLAGS static : ; +flags diab.compile OPTIONS on : -g ; +flags diab.link OPTIONS on : -g ; + +flags diab.compile OPTIONS off : ; +flags diab.compile OPTIONS speed : -speed ; +flags diab.compile OPTIONS space : -size ; + +# flags diab.compile OPTIONS off : -Xinline=0 ; +# flags diab.compile OPTIONS on : -Xinline=10 ; +# flags diab.compile OPTIONS full : -Xinline=50 ; + +flags diab.compile OPTIONS ; +flags diab.compile.c++ OPTIONS ; +flags diab.compile DEFINES ; + +flags diab.compile.c++ OPTIONS off : -Xno-exceptions ; +# So Dinkum STL knows when exceptions are disabled +flags diab.compile.c++ DEFINES off : _NO_EX=1 ; +flags diab.compile.c++ DEFINES off : _NO_RTTI ; +flags diab.compile INCLUDES ; +flags diab.link OPTIONS ; + +flags diab.compile OPTIONS shared : -Xpic ; +#flags diab.compile OPTIONS static : ; +# get VxWorks link options from shell environment +flags diab.link OPTIONS static : [ os.environ LDFLAGS_STATIC ] ; +flags diab.link.dll OPTIONS : [ os.environ LDFLAGS_SO ] ; +flags diab.link OPTIONS shared : [ os.environ LDFLAGS_DYNAMIC ] ; + +flags diab.link LOPTIONS shared : -Xdynamic -Xshared -Xpic ; + +flags diab.link LIBPATH ; +flags diab.link LIBRARIES ; +flags diab.link FINDLIBS-ST ; +flags diab.link FINDLIBS-SA ; + +actions link bind LIBRARIES +{ + $(CONFIG_COMMAND) $(OPTIONS) $(LOPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) +} + +# When creating dynamic libraries, we don't want to be warned about unresolved +# symbols, therefore all unresolved symbols are marked as expected by +# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool +# chain. + +actions link.dll bind LIBRARIES +{ + $(.root:E=)dplus $(OPTIONS) $(LOPTIONS) "$(LIBRARIES)" -o "$(<[1])" -L$(LIBPATH) "$(>)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) +} + +#rule compile.asm ( targets * : sources * : properties * ) +#{ +# setup-fpic $(targets) : $(sources) : $(properties) ; +# setup-address-model $(targets) : $(sources) : $(properties) ; +#} + +actions compile.asm +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + + + +actions compile.c +{ + $(.root:E=)dcc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + $(.root:E=)dplus -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" +} + +# Always create archive from scratch. See the gcc toolet for rationale. +RM = [ common.rm-command ] ; +actions together piecemeal archive +{ + $(RM) "$(<)" + dar rc $(<) $(>) +} diff --git a/src/boost/tools/build/src/tools/dmc.jam b/src/boost/tools/build/src/tools/dmc.jam new file mode 100644 index 000000000..bf308824a --- /dev/null +++ b/src/boost/tools/build/src/tools/dmc.jam @@ -0,0 +1,174 @@ +# Digital Mars C++ + +# (C) Copyright Christof Meerwald 2003. +# (C) Copyright Aleksey Gurtovoy 2004. +# (C) Copyright Arjan Knepper 2006. +# +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.dmc]] += Digital Mars C/C++ Compiler + +The `dmc` module supports the http://www.digitalmars.com/[Digital Mars +C++ compiler.] + +The module is initialized using the following syntax: + +---- +using dmc : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. 
+ +If the command is not specified, B2 will search for a binary +named `dmc` in PATH. + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +|# # end::doc[] + +# The following #// line will be used by the regression test table generation +# program as the column heading for HTML tables. Must not include version number. +#//Digital
Mars C++
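+# Configuration sketch; the version number and install path are illustrative.
+# Extra options are passed with the cflags/cxxflags/compileflags/linkflags
+# properties documented above:
+#   using dmc ;
+#   using dmc : 8.57 : "C:/dm/bin/dmc.exe" ;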
+ +import feature generators common ; +import toolset : flags ; +import sequence regex ; + +feature.extend toolset : dmc ; + +rule init ( version ? : command * : options * ) +{ + local condition = [ common.check-init-parameters dmc : version $(version) ] ; + + local command = [ common.get-invocation-command dmc : dmc : $(command) ] ; + command ?= dmc ; + + common.handle-options dmc : $(condition) : $(command) : $(options) ; + + if $(command) + { + command = [ common.get-absolute-tool-path $(command[-1]) ] ; + } + root = $(command:D) ; + + if $(root) + { + # DMC linker is sensitive the the direction of slashes, and + # won't link if forward slashes are used in command. + root = [ sequence.join [ regex.split $(root) "/" ] : "\\" ] ; + flags dmc .root $(condition) : $(root)\\bin\\ ; + } + else + { + flags dmc .root $(condition) : "" ; + } +} + + +# Declare generators +generators.register-linker dmc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : dmc ; +generators.register-linker dmc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : dmc ; + +generators.register-archiver dmc.archive : OBJ : STATIC_LIB : dmc ; +generators.register-c-compiler dmc.compile.c++ : CPP : OBJ : dmc ; +generators.register-c-compiler dmc.compile.c : C : OBJ : dmc ; + + +# Declare flags +# dmc optlink has some limitation on the amount of debug-info included. Therefore only linenumbers are enabled in debug builds. +# flags dmc.compile OPTIONS on : -g ; +flags dmc.compile OPTIONS on : -gl ; +flags dmc.link OPTIONS on : /CO /NOPACKF /DEBUGLI ; +flags dmc.link OPTIONS off : /PACKF ; + +flags dmc.compile OPTIONS off : -S -o+none ; +flags dmc.compile OPTIONS speed : -o+time ; +flags dmc.compile OPTIONS space : -o+space ; +flags dmc.compile OPTIONS on : -Ae ; +flags dmc.compile OPTIONS on : -Ar ; +# FIXME: +# Compiling sources to be linked into a shared lib (dll) the -WD cflag should be used +# Compiling sources to be linked into a static lib (lib) or executable the -WA cflag should be used +# But for some reason the -WD cflag is always in use. +# flags dmc.compile OPTIONS shared : -WD ; +# flags dmc.compile OPTIONS static : -WA ; + +# Note that these two options actually imply multithreading support on DMC +# because there is no single-threaded dynamic runtime library. Specifying +# multi would be a bad idea, though, because no option would be +# matched when the build uses the default settings of dynamic +# and single. 
+flags dmc.compile OPTIONS off/shared : -ND ; +flags dmc.compile OPTIONS on/shared : -ND ; + +flags dmc.compile OPTIONS off/static/single : ; +flags dmc.compile OPTIONS on/static/single : ; +flags dmc.compile OPTIONS off/static/multi : -D_MT ; +flags dmc.compile OPTIONS on/static/multi : -D_MT ; + +flags dmc.compile OPTIONS : ; +flags dmc.compile.c++ OPTIONS : ; + +flags dmc.compile DEFINES : ; +flags dmc.compile INCLUDES : ; + +flags dmc.link ; +flags dmc.archive OPTIONS ; + +flags dmc LIBPATH ; +flags dmc LIBRARIES ; +flags dmc FINDLIBS ; +flags dmc FINDLIBS ; + +actions together link bind LIBRARIES +{ + "$(.root)link" $(OPTIONS) /NOI /DE /XN "$(>)" , "$(<[1])" ,, $(LIBRARIES) user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def" +} + +actions together link.dll bind LIBRARIES +{ + echo LIBRARY "$(<[1])" > $(<[2]:B).def + echo DESCRIPTION 'A Library' >> $(<[2]:B).def + echo EXETYPE NT >> $(<[2]:B).def + echo SUBSYSTEM WINDOWS >> $(<[2]:B).def + echo CODE EXECUTE READ >> $(<[2]:B).def + echo DATA READ WRITE >> $(<[2]:B).def + "$(.root)link" $(OPTIONS) /NOI /DE /XN /ENTRY:_DllMainCRTStartup /IMPLIB:"$(<[2])" "$(>)" $(LIBRARIES) , "$(<[1])" ,, user32.lib kernel32.lib "$(FINDLIBS:S=.lib)" , "$(<[2]:B).def" +} + +actions compile.c +{ + "$(.root)dmc" -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(.root)dmc" -cpp -c -Ab $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o"$(<)" "$(>)" +} + +actions together piecemeal archive +{ + "$(.root)lib" $(OPTIONS) -c -n -p256 "$(<)" "$(>)" +} diff --git a/src/boost/tools/build/src/tools/docutils.jam b/src/boost/tools/build/src/tools/docutils.jam new file mode 100644 index 000000000..7506ee985 --- /dev/null +++ b/src/boost/tools/build/src/tools/docutils.jam @@ -0,0 +1,125 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Support for docutils ReStructuredText processing. + +import type ; +import scanner ; +import generators ; +import os ; +import common ; +import toolset ; +import path ; +import feature : feature ; +import property ; +import errors ; + +.initialized = ; + +type.register ReST : rst ; + +class rst-scanner : common-scanner +{ + rule __init__ ( paths * ) + { + common-scanner.__init__ . $(paths) ; + } + + rule pattern ( ) + { + return "^[ ]*\\.\\.[ ]+include::[ ]+([^ +]+)" + "^[ ]*\\.\\.[ ]+image::[ ]+([^ +]+)" + "^[ ]*\\.\\.[ ]+figure::[ ]+([^ +]+)" + ; + } +} + +scanner.register rst-scanner : include ; +type.set-scanner ReST : rst-scanner ; + +generators.register-standard docutils.html : ReST : HTML ; + +rule init ( docutils-dir ? : tools-dir ? ) +{ + docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ; + tools-dir ?= $(docutils-dir)/tools ; + + if ! $(.initialized) + { + .initialized = true ; + if $(docutils-dir) + { + .docutils-dir = $(docutils-dir) ; + .tools-dir = $(tools-dir:R="") ; + + .setup = [ + common.prepend-path-variable-command PYTHONPATH + : $(.docutils-dir) $(.docutils-dir)/extras ] ; + RST2XXX = [ common.find-tool rst2html ] ; + } + else + { + RST2XXX_PY = [ common.find-tool rst2html.py ] ; + } + } +} + +rule html ( target : source : properties * ) +{ + if ! [ on $(target) return $(RST2XXX) ] + { + local python-cmd = [ property.select : $(properties) ] ; + if ! $(.tools-dir) && ! $(RST2XXX_PY) { + errors.user-error + "The docutils module is used, but not configured. 
" + : "" + : "Please modify your user-config.jam or project-config.jam to contain:" + : "" + : " using docutils : ;" + : "" + : "On Ubuntu, 'docutils-common' package will create /usr/share/docutils." + : "Other flavours of Linux likely have docutils as package as well." + : "On Windows, you can install from http://docutils.sourceforge.net/." + ; + } + + if $(RST2XXX_PY) + { + if $(RST2XXX_PY:D) + { + # If we have a path to the rst2html.py script, we need to use + # the python interpreter to load it up. + RST2XXX on $(target) = $(python-cmd:G=:E="python") $(RST2XXX_PY) ; + } + else + { + # Otherwise, bare rst2html.py, we can just exec that directly. + # This work for both Nix, and the standard Windows Python installs. + RST2XXX on $(target) = $(RST2XXX_PY) ; + } + } + else + { + RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ; + } + } +} + + +feature docutils : : free ; +feature docutils-html : : free ; +feature docutils-cmd : : free ; +toolset.flags docutils COMMON-FLAGS : ; +toolset.flags docutils HTML-FLAGS : ; +toolset.flags docutils RST2XXX : ; + +actions html +{ + $(.setup) + "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<) +} + diff --git a/src/boost/tools/build/src/tools/doxproc.py b/src/boost/tools/build/src/tools/doxproc.py new file mode 100644 index 000000000..92ec1c311 --- /dev/null +++ b/src/boost/tools/build/src/tools/doxproc.py @@ -0,0 +1,859 @@ +#!/usr/bin/python +# Copyright 2006 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +''' +Processing of Doxygen generated XML. +''' + +import os +import os.path +import sys +import time +import string +import getopt +import glob +import re +import xml.dom.minidom + + +def usage(): + print ''' +Usage: + %s options + +Options: + --xmldir Directory with the Doxygen xml result files. + --output Write the output BoostBook to the given location. + --id The ID of the top level BoostBook section. + --title The title of the top level BoostBook section. + --enable-index Generate additional index sections for classes and + types. +''' % ( sys.argv[0] ) + + +def get_args( argv = sys.argv[1:] ): + spec = [ + 'xmldir=', + 'output=', + 'id=', + 'title=', + 'enable-index', + 'help' ] + options = { + '--xmldir' : 'xml', + '--output' : None, + '--id' : 'dox', + '--title' : 'Doxygen' + } + ( option_pairs, other ) = getopt.getopt( argv, '', spec ) + map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs ) + + if '--help' in options: + usage() + sys.exit(1) + + return { + 'xmldir' : options['--xmldir'], + 'output' : options['--output'], + 'id' : options['--id'], + 'title' : options['--title'], + 'index' : '--enable-index' in options + } + +def if_attribute(node, attribute, true_value, false_value=None): + if node.getAttribute(attribute) == 'yes': + return true_value + else: + return false_value + +class Doxygen2BoostBook: + + def __init__( self, **kwargs ): + ## + self.args = kwargs + self.args.setdefault('id','') + self.args.setdefault('title','') + self.args.setdefault('last_revision', time.asctime()) + self.args.setdefault('index', False) + self.id = '%(id)s.reference' % self.args + self.args['id'] = self.id + #~ This is our template BoostBook document we insert the generated content into. + self.boostbook = xml.dom.minidom.parseString(''' +
+ %(title)s + + Headers + + + Classes + + + Index + +
+''' % self.args ) + self.section = { + 'headers' : self._getChild('library-reference',id='%(id)s.headers' % self.args), + 'classes' : self._getChild('index',id='%(id)s.classes' % self.args), + 'index' : self._getChild('index',id='%(id)s.index' % self.args) + } + #~ Remove the index sections if we aren't generating it. + if not self.args['index']: + self.section['classes'].parentNode.removeChild(self.section['classes']) + self.section['classes'].unlink() + del self.section['classes'] + self.section['index'].parentNode.removeChild(self.section['index']) + self.section['index'].unlink() + del self.section['index'] + #~ The symbols, per Doxygen notion, that we translated. + self.symbols = {} + #~ Map of Doxygen IDs and BoostBook IDs, so we can translate as needed. + self.idmap = {} + #~ Marks generation, to prevent redoing it. + self.generated = False + + #~ Add an Doxygen generated XML document to the content we are translating. + def addDox( self, document ): + self._translateNode(document.documentElement) + + #~ Turns the internal XML tree into an output UTF-8 string. + def tostring( self ): + self._generate() + #~ return self.boostbook.toprettyxml(' ') + return self.boostbook.toxml('utf-8') + + #~ Does post-processing on the partial generated content to generate additional info + #~ now that we have the complete source documents. + def _generate( self ): + if not self.generated: + self.generated = True + symbols = self.symbols.keys() + symbols.sort() + #~ Populate the header section. + for symbol in symbols: + if self.symbols[symbol]['kind'] in ('header'): + self.section['headers'].appendChild(self.symbols[symbol]['dom']) + for symbol in symbols: + if self.symbols[symbol]['kind'] not in ('namespace', 'header'): + container = self._resolveContainer(self.symbols[symbol], + self.symbols[self.symbols[symbol]['header']]['dom']) + if container.nodeName != 'namespace': + ## The current BoostBook to Docbook translation doesn't + ## respect, nor assign, IDs to inner types of any kind. + ## So nuke the ID entry so as not create bogus links. + del self.idmap[self.symbols[symbol]['id']] + container.appendChild(self.symbols[symbol]['dom']) + self._rewriteIDs(self.boostbook.documentElement) + + #~ Rewrite the various IDs from Doxygen references to the newly created + #~ BoostBook references. + def _rewriteIDs( self, node ): + if node.nodeName in ('link'): + if node.getAttribute('linkend') in self.idmap: + #~ A link, and we have someplace to repoint it at. + node.setAttribute('linkend',self.idmap[node.getAttribute('linkend')]) + else: + #~ A link, but we don't have a generated target for it. + node.removeAttribute('linkend') + elif hasattr(node,'hasAttribute') and node.hasAttribute('id') and node.getAttribute('id') in self.idmap: + #~ Simple ID, and we have a translation. + node.setAttribute('id',self.idmap[node.getAttribute('id')]) + #~ Recurse, and iterate, depth-first traversal which turns out to be + #~ left-to-right and top-to-bottom for the document. 
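+ #~ (Illustrative example: for a document a(b(c), d), the firstChild /
+ #~ nextSibling recursion below visits a, b, c, d, i.e. document order.)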
+ if node.firstChild: + self._rewriteIDs(node.firstChild) + if node.nextSibling: + self._rewriteIDs(node.nextSibling) + + def _resolveContainer( self, cpp, root ): + container = root + for ns in cpp['namespace']: + node = self._getChild('namespace',name=ns,root=container) + if not node: + node = container.appendChild( + self._createNode('namespace',name=ns)) + container = node + for inner in cpp['name'].split('::'): + node = self._getChild(name=inner,root=container) + if not node: + break + container = node + return container + + def _setID( self, id, name ): + self.idmap[id] = name.replace('::','.').replace('/','.') + #~ print '--| setID:',id,'::',self.idmap[id] + + #~ Translate a given node within a given context. + #~ The translation dispatches to a local method of the form + #~ "_translate[_context0,...,_contextN]", and the keyword args are + #~ passed along. If there is no translation handling method we + #~ return None. + def _translateNode( self, *context, **kwargs ): + node = None + names = [ ] + for c in context: + if c: + if not isinstance(c,xml.dom.Node): + suffix = '_'+c.replace('-','_') + else: + suffix = '_'+c.nodeName.replace('-','_') + node = c + names.append('_translate') + names = map(lambda x: x+suffix,names) + if node: + for name in names: + if hasattr(self,name): + return getattr(self,name)(node,**kwargs) + return None + + #~ Translates the children of the given parent node, appending the results + #~ to the indicated target. For nodes not translated by the translation method + #~ it copies the child over and recurses on that child to translate any + #~ possible interior nodes. Hence this will translate the entire subtree. + def _translateChildren( self, parent, **kwargs ): + target = kwargs['target'] + for n in parent.childNodes: + child = self._translateNode(n,target=target) + if child: + target.appendChild(child) + else: + child = n.cloneNode(False) + if hasattr(child,'data'): + child.data = re.sub(r'\s+',' ',child.data) + target.appendChild(child) + self._translateChildren(n,target=child) + + #~ Translate the given node as a description, into the description subnode + #~ of the target. If no description subnode is present in the target it + #~ is created. + def _translateDescription( self, node, target=None, tag='description', **kwargs ): + description = self._getChild(tag,root=target) + if not description: + description = target.appendChild(self._createNode(tag)) + self._translateChildren(node,target=description) + return description + + #~ Top level translation of: ..., + #~ translates the children. + def _translate_doxygen( self, node ): + #~ print '_translate_doxygen:', node.nodeName + result = [] + for n in node.childNodes: + newNode = self._translateNode(n) + if newNode: + result.append(newNode) + return result + + #~ Top level translation of: + #~ + #~ + #~ + #~ ... + #~ + #~ ... + #~ + #~ ... + #~ + #~ builds the class and symbol sections, if requested. + def _translate_doxygenindex( self, node ): + #~ print '_translate_doxygenindex:', node.nodeName + if self.args['index']: + entries = [] + classes = [] + #~ Accumulate all the index entries we care about. 
+ for n in node.childNodes: + if n.nodeName == 'compound': + if n.getAttribute('kind') not in ('file','dir','define'): + cpp = self._cppName(self._getChildData('name',root=n)) + entry = { + 'name' : cpp['name'], + 'compoundname' : cpp['compoundname'], + 'id' : n.getAttribute('refid') + } + if n.getAttribute('kind') in ('class','struct'): + classes.append(entry) + entries.append(entry) + for m in n.childNodes: + if m.nodeName == 'member': + cpp = self._cppName(self._getChildData('name',root=m)) + entry = { + 'name' : cpp['name'], + 'compoundname' : cpp['compoundname'], + 'id' : n.getAttribute('refid') + } + if hasattr(m,'getAttribute') and m.getAttribute('kind') in ('class','struct'): + classes.append(entry) + entries.append(entry) + #~ Put them in a sensible order. + entries.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower())) + classes.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower())) + #~ And generate the BoostBook for them. + self._translate_index_(entries,target=self.section['index']) + self._translate_index_(classes,target=self.section['classes']) + return None + + #~ Translate a set of index entries in the BoostBook output. The output + #~ is grouped into groups of the first letter of the entry names. + def _translate_index_(self, entries, target=None, **kwargs ): + i = 0 + targetID = target.getAttribute('id') + while i < len(entries): + dividerKey = entries[i]['name'][0].upper() + divider = target.appendChild(self._createNode('indexdiv',id=targetID+'.'+dividerKey)) + divider.appendChild(self._createText('title',dividerKey)) + while i < len(entries) and dividerKey == entries[i]['name'][0].upper(): + iename = entries[i]['name'] + ie = divider.appendChild(self._createNode('indexentry')) + ie = ie.appendChild(self._createText('primaryie',iename)) + while i < len(entries) and entries[i]['name'] == iename: + ie.appendChild(self.boostbook.createTextNode(' (')) + ie.appendChild(self._createText( + 'link',entries[i]['compoundname'],linkend=entries[i]['id'])) + ie.appendChild(self.boostbook.createTextNode(')')) + i += 1 + + #~ Translate a ..., + #~ by retranslating with the "kind" of compounddef. + def _translate_compounddef( self, node, target=None, **kwargs ): + return self._translateNode(node,node.getAttribute('kind')) + + #~ Translate a .... For + #~ namespaces we just collect the information for later use as there is no + #~ currently namespaces are not included in the BoostBook format. In the future + #~ it might be good to generate a namespace index. + def _translate_compounddef_namespace( self, node, target=None, **kwargs ): + namespace = { + 'id' : node.getAttribute('id'), + 'kind' : 'namespace', + 'name' : self._getChildData('compoundname',root=node), + 'brief' : self._getChildData('briefdescription',root=node), + 'detailed' : self._getChildData('detaileddescription',root=node), + 'parsed' : False + } + if namespace['name'] in self.symbols: + if not self.symbols[namespace['name']]['parsed']: + self.symbols[namespace['name']]['parsed'] = True + #~ for n in node.childNodes: + #~ if hasattr(n,'getAttribute'): + #~ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) + else: + self.symbols[namespace['name']] = namespace + #~ self._setID(namespace['id'],namespace['name']) + return None + + #~ Translate a ..., which + #~ forwards to the kind=struct as they are the same. + def _translate_compounddef_class( self, node, target=None, **kwargs ): + return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs) + + #~ Translate a ... into: + #~
+ #~ + #~ ... + #~ + #~
+ def _translate_compounddef_struct( self, node, tag='struct', target=None, **kwargs ): + result = None + includes = self._getChild('includes',root=node) + if includes: + ## Add the header into the output table. + self._translate_compounddef_includes_(includes,includes,**kwargs) + ## Compounds are the declared symbols, classes, types, etc. + ## We add them to the symbol table, along with the partial DOM for them + ## so that they can be organized into the output later. + compoundname = self._getChildData('compoundname',root=node) + compoundname = self._cppName(compoundname) + self._setID(node.getAttribute('id'),compoundname['compoundname']) + struct = self._createNode(tag,name=compoundname['name'].split('::')[-1]) + self.symbols[compoundname['compoundname']] = { + 'header' : includes.firstChild.data, + 'namespace' : compoundname['namespace'], + 'id' : node.getAttribute('id'), + 'kind' : tag, + 'name' : compoundname['name'], + 'dom' : struct + } + ## Add the children which will be the members of the struct. + for n in node.childNodes: + self._translateNode(n,target=struct,scope=compoundname['compoundname']) + result = struct + return result + + #~ Translate a ..., + def _translate_compounddef_includes_( self, node, target=None, **kwargs ): + name = node.firstChild.data + if name not in self.symbols: + self._setID(node.getAttribute('refid'),name) + self.symbols[name] = { + 'kind' : 'header', + 'id' : node.getAttribute('refid'), + 'dom' : self._createNode('header', + id=node.getAttribute('refid'), + name=name) + } + return None + + #~ Translate a ... into: + #~ + #~ ... + #~ + def _translate_basecompoundref( self, ref, target=None, **kwargs ): + inherit = target.appendChild(self._createNode('inherit', + access=ref.getAttribute('prot'))) + self._translateChildren(ref,target=inherit) + return + + #~ Translate: + #~ + #~ + #~ ... + #~ ... + #~ ... + #~ ... + #~ + #~ ... + #~ + #~ Into: + #~ + def _translate_templateparamlist( self, templateparamlist, target=None, **kwargs ): + template = target.appendChild(self._createNode('template')) + for param in templateparamlist.childNodes: + if param.nodeName == 'param': + type = self._getChildData('type',root=param) + defval = self._getChild('defval',root=param) + paramKind = None + if type in ('class','typename'): + paramKind = 'template-type-parameter' + else: + paramKind = 'template-nontype-parameter' + templateParam = template.appendChild( + self._createNode(paramKind, + name=self._getChildData('declname',root=param))) + if paramKind == 'template-nontype-parameter': + template_type = templateParam.appendChild(self._createNode('type')) + self._translate_type( + self._getChild('type',root=param),target=template_type) + if defval: + value = self._getChildData('ref',root=defval.firstChild) + if not value: + value = self._getData(defval) + templateParam.appendChild(self._createText('default',value)) + return template + + #~ Translate: + #~ ... + #~ Into: + #~ ... + def _translate_briefdescription( self, brief, target=None, **kwargs ): + self._translateDescription(brief,target=target,**kwargs) + return self._translateDescription(brief,target=target,tag='purpose',**kwargs) + + #~ Translate: + #~ ... + #~ Into: + #~ ... + def _translate_detaileddescription( self, detailed, target=None, **kwargs ): + return self._translateDescription(detailed,target=target,**kwargs) + + #~ Translate: + #~ ... + #~ With kind specific translation. 
+ def _translate_sectiondef( self, sectiondef, target=None, **kwargs ): + self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs) + + #~ Translate non-function sections. + def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ): + for n in sectiondef.childNodes: + if hasattr(n,'getAttribute'): + self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) + return None + + #~ Translate: + #~ ... + def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ): + return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) + + #~ Translate: + #~ ... + def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs): + return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) + + #~ Translate: + #~ ... + #~ All the various function group translations end up here for which + #~ they are translated into: + #~ + #~ ... + #~ + def _translate_sectiondef_func_( self, sectiondef, name='functions', target=None, **kwargs ): + members = target.appendChild(self._createNode('method-group',name=name)) + for n in sectiondef.childNodes: + if hasattr(n,'getAttribute'): + self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs) + return members + + #~ Translate: + #~ ... + def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ): + return self._translate_sectiondef_func_(sectiondef, + name='public member functions',target=target,**kwargs) + + #~ Translate: + #~ ... + def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs): + return self._translate_sectiondef_func_(sectiondef, + name='public static functions',target=target,**kwargs) + + #~ Translate: + #~ ... + def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ): + return self._translate_sectiondef_func_(sectiondef, + name='protected member functions',target=target,**kwargs) + + #~ Translate: + #~ ... + def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs): + return self._translate_sectiondef_func_(sectiondef, + name='private static functions',target=target,**kwargs) + + #~ Translate: + #~ ... + def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ): + return self._translate_sectiondef_func_(sectiondef, + name='private member functions',target=target,**kwargs) + + #~ Translate: + #~
...
...
+ def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ): + return self._translate_sectiondef_func_(sectiondef, + name=self._getChildData('header', root=sectiondef),target=target,**kwargs) + + #~ Translate: + #~ + #~ ... + #~ + #~ To: + #~ + #~ ... + #~ + def _translate_memberdef_typedef( self, memberdef, target=None, scope=None, **kwargs ): + self._setID(memberdef.getAttribute('id'), + scope+'::'+self._getChildData('name',root=memberdef)) + typedef = target.appendChild(self._createNode('typedef', + id=memberdef.getAttribute('id'), + name=self._getChildData('name',root=memberdef))) + typedef_type = typedef.appendChild(self._createNode('type')) + self._translate_type(self._getChild('type',root=memberdef),target=typedef_type) + return typedef + + #~ Translate: + #~ + #~ ... + #~ + #~ To: + #~ + #~ ... + #~ + def _translate_memberdef_function( self, memberdef, target=None, scope=None, **kwargs ): + name = self._getChildData('name',root=memberdef) + self._setID(memberdef.getAttribute('id'),scope+'::'+name) + ## Check if we have some specific kind of method. + if name == scope.split('::')[-1]: + kind = 'constructor' + target = target.parentNode + elif name == '~'+scope.split('::')[-1]: + kind = 'destructor' + target = target.parentNode + elif name == 'operator=': + kind = 'copy-assignment' + target = target.parentNode + else: + kind = 'method' + method = target.appendChild(self._createNode(kind, + # id=memberdef.getAttribute('id'), + name=name, + cv=' '.join([ + if_attribute(memberdef,'const','const','').strip() + ]), + specifiers=' '.join([ + if_attribute(memberdef,'static','static',''), + if_attribute(memberdef,'explicit','explicit',''), + if_attribute(memberdef,'inline','inline','') + ]).strip() + )) + ## We iterate the children to translate each part of the function. + for n in memberdef.childNodes: + self._translateNode(memberdef,'function',n,target=method) + return method + + #~ Translate: + #~ ... + def _translate_memberdef_function_templateparamlist( + self, templateparamlist, target=None, **kwargs ): + return self._translate_templateparamlist(templateparamlist,target=target,**kwargs) + + #~ Translate: + #~ ... + #~ To: + #~ ...? + def _translate_memberdef_function_type( self, resultType, target=None, **kwargs ): + methodType = self._createNode('type') + self._translate_type(resultType,target=methodType) + if methodType.hasChildNodes(): + target.appendChild(methodType) + return methodType + + #~ Translate: + #~ ... + def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ): + result = self._translateDescription(description,target=target,**kwargs) + ## For functions if we translate the brief docs to the purpose they end up + ## right above the regular description. And since we just added the brief to that + ## on the previous line, don't bother with the repetition. + # result = self._translateDescription(description,target=target,tag='purpose',**kwargs) + return result + + #~ Translate: + #~ ... + def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ): + return self._translateDescription(description,target=target,**kwargs) + + #~ Translate: + #~ ... + def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ): + return self._translateDescription(description,target=target,**kwargs) + + #~ Translate: + #~ ... 
+ def _translate_memberdef_function_param( self, param, target=None, **kwargs ): + return self._translate_param(param,target=target,**kwargs) + + #~ Translate: + #~ + #~ ... + #~ ... + #~ + #~ To: + #~ + #~ ... + #~ + def _translate_memberdef_variable( self, memberdef, target=None, scope=None, **kwargs ): + self._setID(memberdef.getAttribute('id'), + scope+'::'+self._getChildData('name',root=memberdef)) + data_member = target.appendChild(self._createNode('data-member', + id=memberdef.getAttribute('id'), + name=self._getChildData('name',root=memberdef))) + data_member_type = data_member.appendChild(self._createNode('type')) + self._translate_type(self._getChild('type',root=memberdef),target=data_member_type) + + #~ Translate: + #~ + #~ ... + #~ ... + #~ + #~ To: + #~ + #~ ... + #~ + def _translate_memberdef_enum( self, memberdef, target=None, scope=None, **kwargs ): + self._setID(memberdef.getAttribute('id'), + scope+'::'+self._getChildData('name',root=memberdef)) + enum = target.appendChild(self._createNode('enum', + id=memberdef.getAttribute('id'), + name=self._getChildData('name',root=memberdef))) + for n in memberdef.childNodes: + self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs) + return enum + + #~ Translate: + #~ + #~ + #~ ... + #~ ... + #~ + #~ + #~ To: + #~ + #~ ... + #~ + def _translate_memberdef_enum_enumvalue( self, enumvalue, target=None, scope=None, **kwargs ): + self._setID(enumvalue.getAttribute('id'), + scope+'::'+self._getChildData('name',root=enumvalue)) + value = target.appendChild(self._createNode('enumvalue', + id=enumvalue.getAttribute('id'), + name=self._getChildData('name',root=enumvalue))) + initializer = self._getChild('initializer',root=enumvalue) + if initializer: + self._translateChildren(initializer, + target=target.appendChild(self._createNode('default'))) + return value + + #~ Translate: + #~ + #~ ... + #~ ... + #~ ... + #~ + #~ To: + #~ + #~ ... + #~ ... + #~ + def _translate_param( self, param, target=None, **kwargs): + parameter = target.appendChild(self._createNode('parameter', + name=self._getChildData('declname',root=param))) + paramtype = parameter.appendChild(self._createNode('paramtype')) + self._translate_type(self._getChild('type',root=param),target=paramtype) + defval = self._getChild('defval',root=param) + if defval: + self._translateChildren(self._getChild('defval',root=param),target=parameter) + return parameter + + #~ Translate: + #~ ... + def _translate_ref( self, ref, **kwargs ): + return self._translateNode(ref,ref.getAttribute('kindref')) + + #~ Translate: + #~ ... + #~ To: + #~ ... + def _translate_ref_compound( self, ref, **kwargs ): + result = self._createNode('link',linkend=ref.getAttribute('refid')) + classname = result.appendChild(self._createNode('classname')) + self._translateChildren(ref,target=classname) + return result + + #~ Translate: + #~ ... + #~ To: + #~ ... + def _translate_ref_member( self, ref, **kwargs ): + result = self._createNode('link',linkend=ref.getAttribute('refid')) + self._translateChildren(ref,target=result) + return result + + #~ Translate: + #~ ... + def _translate_type( self, type, target=None, **kwargs ): + result = self._translateChildren(type,target=target,**kwargs) + #~ Filter types to clean up various readability problems, most notably + #~ with really long types. 
+ xml = target.toxml('utf-8'); + if ( + xml.startswith('boost::mpl::') or + xml.startswith('BOOST_PP_') or + re.match('boost::(lazy_)?(enable|disable)_if',xml) + ): + while target.firstChild: + target.removeChild(target.firstChild) + target.appendChild(self._createText('emphasis','unspecified')) + return result + + def _getChild( self, tag = None, id = None, name = None, root = None ): + if not root: + root = self.boostbook.documentElement + for n in root.childNodes: + found = True + if tag and found: + found = found and tag == n.nodeName + if id and found: + if n.hasAttribute('id'): + found = found and n.getAttribute('id') == id + else: + found = found and n.hasAttribute('id') and n.getAttribute('id') == id + if name and found: + found = found and n.hasAttribute('name') and n.getAttribute('name') == name + if found: + #~ print '--|', n + return n + return None + + def _getChildData( self, tag, **kwargs ): + return self._getData(self._getChild(tag,**kwargs),**kwargs) + + def _getData( self, node, **kwargs ): + if node: + text = self._getChild('#text',root=node) + if text: + return text.data.strip() + return '' + + def _cppName( self, type ): + parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':')) + result = { + 'compoundname' : parts.group(1), + 'namespace' : parts.group(1).split('::')[0:-1], + 'name' : parts.group(1).split('::')[-1], + 'specialization' : parts.group(2) + } + if result['namespace'] and len(result['namespace']) > 0: + namespace = '::'.join(result['namespace']) + while ( + len(result['namespace']) > 0 and ( + namespace not in self.symbols or + self.symbols[namespace]['kind'] != 'namespace') + ): + result['name'] = result['namespace'].pop()+'::'+result['name'] + namespace = '::'.join(result['namespace']) + return result + + def _createNode( self, tag, **kwargs ): + result = self.boostbook.createElement(tag) + for k in kwargs.keys(): + if kwargs[k] != '': + if k == 'id': + result.setAttribute('id',kwargs[k]) + else: + result.setAttribute(k,kwargs[k]) + return result + + def _createText( self, tag, data, **kwargs ): + result = self._createNode(tag,**kwargs) + data = data.strip() + if len(data) > 0: + result.appendChild(self.boostbook.createTextNode(data)) + return result + + +def main( xmldir=None, output=None, id=None, title=None, index=False ): + #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output) + + input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) ) + input.sort + translator = Doxygen2BoostBook(id=id, title=title, index=index) + #~ Feed in the namespaces first to build up the set of namespaces + #~ and definitions so that lookup is unambiguous when reading in the definitions. 
+ namespace_files = filter( + lambda x: + os.path.basename(x).startswith('namespace'), + input) + decl_files = filter( + lambda x: + not os.path.basename(x).startswith('namespace') and not os.path.basename(x).startswith('_'), + input) + for dox in namespace_files: + #~ print '--|',os.path.basename(dox) + translator.addDox(xml.dom.minidom.parse(dox)) + for dox in decl_files: + #~ print '--|',os.path.basename(dox) + translator.addDox(xml.dom.minidom.parse(dox)) + + if output: + output = open(output,'w') + else: + output = sys.stdout + if output: + output.write(translator.tostring()) + + +main( **get_args() ) diff --git a/src/boost/tools/build/src/tools/doxygen-config.jam b/src/boost/tools/build/src/tools/doxygen-config.jam new file mode 100644 index 000000000..1a0c827c2 --- /dev/null +++ b/src/boost/tools/build/src/tools/doxygen-config.jam @@ -0,0 +1,11 @@ +#~ Copyright 2005, 2006 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. +#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Automatic configuration for Doxygen tools. To use, just import this module. + +import toolset : using ; + +ECHO "warning: doxygen-config.jam is deprecated. Use 'using doxygen ;' instead." ; + +using doxygen ; diff --git a/src/boost/tools/build/src/tools/doxygen.jam b/src/boost/tools/build/src/tools/doxygen.jam new file mode 100644 index 000000000..71ee78abf --- /dev/null +++ b/src/boost/tools/build/src/tools/doxygen.jam @@ -0,0 +1,782 @@ +# Copyright 2003, 2004 Douglas Gregor +# Copyright 2003, 2004, 2005 Vladimir Prus +# Copyright 2006 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines rules to handle generation of various outputs from source +# files documented with doxygen comments. The supported transformations are: +# +# * Source -> Doxygen XML -> BoostBook XML +# * Source -> Doxygen HTML +# +# The type of transformation is selected based on the target requested. For +# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an +# empty suffix, will produce a .xml and .boostbook. For Doxygen +# HTML specifying a target with an ".html" suffix will produce a directory +# with the Doxygen html files, and a .html file redirecting to +# that directory. + +import alias ; +import boostbook ; +import "class" : new ; +import common ; +import feature ; +import make ; +import modules ; +import generators ; +import os ; +import param ; +import path ; +import print ; +import project ; +import property ; +import stage ; +import targets ; +import toolset ; +import type ; +import utility ; +import xsltproc ; +import virtual-target ; + + +# Use to specify extra configuration parameters. These get translated into a +# doxyfile which configures the building of the docs. +feature.feature "doxygen:param" : : free ; + +# Specify the "boost.doxygen.header.prefix" XSLT option. +feature.feature prefix : : free ; + +# Specify the "boost.doxygen.reftitle" XSLT option. +feature.feature reftitle : : free ; + +# Which processor to use for various translations from Doxygen. +feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ; + +# To generate, or not, index sections. +feature.feature doxygen.doxproc.index : no yes : propagated incidental ; + +# The ID for the resulting BoostBook reference section. 
+feature.feature doxygen.doxproc.id : : free ; + +# The title for the resulting BoostBook reference section. +feature.feature doxygen.doxproc.title : : free ; + +# Location for images when generating XML +feature.feature "doxygen:xml-imagedir" : : free ; + +# Indicates whether the entire directory should be deleted +feature.feature doxygen.rmdir : off on : optional incidental ; + +# Doxygen configuration input file. +type.register DOXYFILE : doxyfile ; + +# Doxygen XML multi-file output. +type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ; + +# Doxygen XML coallesed output. +type.register DOXYGEN_XML : doxygen : XML ; + +# Doxygen HTML multifile directory. +type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ; + +# Redirection HTML file to HTML multifile directory. +type.register DOXYGEN_HTML : : HTML ; + +type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ; + + +# Initialize the Doxygen module. Parameters are: +# name: the name of the 'doxygen' executable. If not specified, the name +# 'doxygen' will be used +# +rule init ( name ? ) +{ + if ! $(.initialized) + { + .initialized = true ; + + .doxproc = [ modules.binding $(__name__) ] ; + .doxproc = $(.doxproc:D)/doxproc.py ; + + generators.register-composing doxygen.headers-to-doxyfile + : H HPP CPP MARKDOWN : DOXYFILE ; + generators.register-standard doxygen.run + : DOXYFILE : DOXYGEN_XML_MULTIFILE ; + generators.register-standard doxygen.xml-dir-to-boostbook + : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : doxproc ; + generators.register-xslt doxygen.xml-to-boostbook + : DOXYGEN_XML : BOOSTBOOK : xsltproc ; + generators.register-xslt doxygen.collect + : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ; + generators.register-standard doxygen.run + : DOXYFILE : DOXYGEN_HTML_MULTIFILE ; + generators.register-standard doxygen.html-redirect + : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ; + generators.register-standard doxygen.copy-latex-pngs + : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ; + + IMPORT $(__name__) : doxygen : : doxygen ; + } + + if $(name) + { + modify-config ; + .doxygen = $(name) ; + check-doxygen ; + } + + if ! $(.doxygen) + { + check-doxygen ; + } +} + + +local rule freeze-config ( ) +{ + if ! $(.initialized) + { + import errors ; + errors.user-error doxygen must be initialized before it can be used. ; + } + if ! $(.config-frozen) + { + .config-frozen = true ; + if [ .is-cygwin ] + { + .is-cygwin = true ; + } + } +} + + +local rule modify-config ( ) +{ + if $(.config-frozen) + { + import errors ; + errors.user-error "Cannot change doxygen after it has been used." 
; + } +} + + +local rule check-doxygen ( ) +{ + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO "notice:" using doxygen ":" $(.doxygen) ; + } + local extra-paths ; + if [ os.name ] = NT + { + local ProgramFiles = [ modules.peek : ProgramFiles ] ; + if $(ProgramFiles) + { + extra-paths = "$(ProgramFiles:J= )" ; + } + else + { + extra-paths = "C:\\Program Files" ; + } + } + .doxygen = [ common.get-invocation-command doxygen : doxygen : $(.doxygen) : + $(extra-paths) ] ; +} + + +rule name ( ) +{ + freeze-config ; + return $(.doxygen) ; +} + + +local rule .is-cygwin ( ) +{ + if [ os.on-windows ] + { + local file = [ path.make [ modules.binding $(__name__) ] ] ; + local dir = [ path.native [ path.join [ path.parent $(file) ] doxygen ] + ] ; + local command = cd \"$(dir)\" "&&" \"$(.doxygen)\" + windows-paths-check.doxyfile 2>&1 ; + command = $(command:J=" ") ; + result = [ SHELL $(command) ] ; + if [ MATCH "(Parsing file /)" : $(result) ] + { + return true ; + } + } +} + + +# Runs Doxygen on the given Doxygen configuration file (the source) to generate +# the Doxygen files. The output is dumped according to the settings in the +# Doxygen configuration file, not according to the target! Because of this, we +# essentially "touch" the target file, in effect making it look like we have +# really written something useful to it. Anyone that uses this action must deal +# with this behavior. +# +actions doxygen-action +{ + $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)" +} + + +# Runs the Python doxproc XML processor. +# +actions doxproc +{ + python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)" +} + + +rule translate-path ( path ) +{ + freeze-config ; + if [ os.on-windows ] + { + if [ os.name ] = CYGWIN + { + if $(.is-cygwin) + { + return $(path) ; + } + else + { + return $(path:W) ; + } + } + else + { + if $(.is-cygwin) + { + match = [ MATCH "^(.):(.*)" : $(path) ] ; + if $(match) + { + return /cygdrive/$(match[1])$(match[2]:T) ; + } + else + { + return $(path:T) ; + } + } + else + { + return $(path) ; + } + } + } + else + { + return $(path) ; + } +} + +toolset.uses-features doxygen.headers-to-doxyfile : "" ; + +# Generates a doxygen configuration file (doxyfile) given a set of C++ sources +# and a property list that may contain features. +# +rule headers-to-doxyfile ( target : sources * : properties * ) +{ + local text = "# Generated by B2 version 2" ; + + local output-dir ; + + # Translate into command line flags. + for local param in [ feature.get-values : $(properties) ] + { + local namevalue = [ MATCH "([^=]*)=(.*)" : $(param) ] ; + if $(namevalue[1]) = OUTPUT_DIRECTORY + { + output-dir = [ translate-path [ utility.unquote $(namevalue[2]) ] ] + ; + text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ; + } + else + { + text += "$(namevalue[1]) = $(namevalue[2])" ; + } + } + + if ! $(output-dir) + { + output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ; + text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ; + } + + local headers ; + for local header in $(sources:G=) + { + header = [ translate-path $(header) ] ; + headers += \"$(header)\" ; + } + + # Doxygen generates LaTex by default. So disable it unconditionally, or at + # least until someone needs, and hence writes support for, LaTex output. + text += "GENERATE_LATEX = NO" ; + text += "INPUT = $(headers:J= )" ; + print.output $(target) plain ; + print.text $(text) : true ; +} + +toolset.uses-features doxygen.run : "" ; + +# Run Doxygen. 
See doxygen-action for a description of the strange properties of +# this rule. +# +rule run ( target : source : properties * ) +{ + freeze-config ; + if on in $(properties) + { + local output-dir = [ path.make [ MATCH + "OUTPUT_DIRECTORY=\"?([^\"]*)" : $(properties) ] ] ; + local html-dir = [ path.make [ MATCH HTML_OUTPUT=(.*) : + $(properties) ] ] ; + if $(output-dir) && $(html-dir) && + [ path.glob $(output-dir) : $(html-dir) ] + { + HTMLDIR on $(target) = [ path.native [ path.join $(output-dir) + $(html-dir) ] ] ; + rm-htmldir $(target) ; + } + } + doxygen-action $(target) : $(source) ; + NAME on $(target) = $(.doxygen) ; + RM on $(target) = [ modules.peek common : RM ] ; + *.XML on $(target) = [ path.native [ path.join [ path.make [ on $(target) + return $(LOCATE) ] ] $(target:B:S=) *.xml ] ] ; +} + + +if [ os.name ] = NT +{ + RMDIR = rmdir /s /q ; +} +else +{ + RMDIR = rm -rf ; +} + +actions quietly rm-htmldir +{ + $(RMDIR) $(HTMLDIR) +} + + +# The rules below require BoostBook stylesheets, so we need some code to check +# that the boostbook module has actually been initialized. +# +rule check-boostbook ( ) +{ + if ! [ modules.peek boostbook : .initialized ] + { + import errors ; + errors.user-error + : The boostbook module is not initialized you have attempted to use + : the 'doxygen' toolset, which requires BoostBook, but never + : initialized BoostBook. + : "Hint:" add 'using boostbook \;' to your user-config.jam. ; + } +} + + +# Collect the set of Doxygen XML files into a single XML source file that can be +# handled by an XSLT processor. The source is completely ignored (see +# doxygen-action), because this action picks up the Doxygen XML index file xml/ +# index.xml. This is because we can not teach Doxygen to act like a NORMAL +# program and take a "-o output.xml" argument (grrrr). The target of the +# collection will be a single Doxygen XML file. +# +rule collect ( target : source : properties * ) +{ + check-boostbook ; + local collect-xsl-dir + = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ; + local source-path + = [ path.make [ on $(source) return $(LOCATE) ] ] ; + local collect-path + = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ; + local native-path + = [ path.native $(collect-path) ] ; + local real-source + = [ path.native [ path.join $(collect-path) index.xml ] ] ; + xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl) + : doxygen.xml.path=$(native-path) ; +} + +toolset.uses-features doxygen.xml-to-boostbook : ; + +# Translate Doxygen XML into BoostBook. 
+# +rule xml-to-boostbook ( target : source : properties * ) +{ + check-boostbook ; + local xsl-dir = [ boostbook.xsl-dir ] ; + local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen + doxygen2boostbook.xsl ] ] ; + + local xslt-properties = $(properties) ; + for local prefix in [ feature.get-values : $(properties) ] + { + xslt-properties += "boost.doxygen.header.prefix=$(prefix)" ; + } + for local title in [ feature.get-values : $(properties) ] + { + xslt-properties += "boost.doxygen.reftitle=$(title)" ; + } + + xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ; +} + + +toolset.flags doxygen.xml-dir-to-boostbook OPTIONS yes : + --enable-index ; +toolset.flags doxygen.xml-dir-to-boostbook ID ; +toolset.flags doxygen.xml-dir-to-boostbook TITLE ; + + +rule xml-dir-to-boostbook ( target : source : properties * ) +{ + DOXPROC on $(target) = $(.doxproc) ; + LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ; + doxygen.doxproc $(target) : $(source:S=) ; +} + + +# Generate the HTML redirect to HTML dir index.html file. +# +rule html-redirect ( target : source : properties * ) +{ + local uri = "$(target:B)/index.html" ; + print.output $(target) plain ; + print.text +" + + + + + + + + + Automatic redirection failed, please go to $(uri). + + +" + : true ; +} + +rule copy-latex-pngs ( target : source : requirements * ) +{ + local directory = [ path.native [ feature.get-values + : $(requirements) ] ] ; + local location = [ on $(target) return $(LOCATE) ] ; + + local pdf-location = [ path.native [ path.join [ path.make $(location) ] + [ path.make $(directory) ] ] ] ; + local html-location = [ path.native [ path.join . html [ path.make + $(directory) ] ] ] ; + + common.MkDir $(pdf-location) ; + common.MkDir $(html-location) ; + + DEPENDS $(target) : $(pdf-location) $(html-location) ; + + if [ os.name ] = NT + { + CP on $(target) = copy /y ; + FROM on $(target) = \\*.png ; + TOHTML on $(target) = .\\html\\$(directory) ; + TOPDF on $(target) = \\$(directory) ; + } + else + { + CP on $(target) = cp ; + FROM on $(target) = /*.png ; + TOHTML on $(target) = ./html/$(directory) ; + TOPDF on $(target) = $(target:D)/$(directory) ; + } +} + +actions copy-latex-pngs +{ + $(CP) $(>:S=)$(FROM) $(TOHTML) + $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF) + echo "Stamped" > "$(<)" +} + + +# Building latex images for doxygen XML depends on latex, dvips, and gs being in +# your PATH. This is true for most Unix installs, but not on Win32, where you +# will need to install MkTex and Ghostscript and add these tools to your path. + +actions check-latex +{ + latex -version >$(<) +} + +actions check-dvips +{ + dvips -version >$(<) +} + +if [ os.name ] = "NT" +{ + actions check-gs + { + gswin32c -version >$(<) + } +} +else +{ + actions check-gs + { + gs -version >$(<) + } +} + + +local rule check-tools-targets ( project ) +{ + if ! $(.check-tools-targets) + { + # Find the root project. + # + # This is a best effort attempt to avoid using different locations for + # storing *.check files depending on which project imported the doxygen + # toolset first. The files are stored in a location related to the + # project's root project. Note that this location may change depending + # on the folder the build was run from in case the build uses multiple + # related projects with their own Jamroot separate modules. + local project-module = [ $(project).project-module ] ; + local root-module = [ project.get-jamroot-module $(project-module) ] ; + if ! 
$(root-module) + { + import errors ; + if [ project.is-config-module $(project-module) ] + { + errors.user-error doxygen targets can not be declared in Boost + Build's configuration modules. ; + } + else + { + errors.user-error doxygen targets can not be declared in + standalone projects. : use a Jamfile/Jamroot project + instead. ; + } + } + local root-project = [ project.target $(root-module) ] ; + + local targets = + [ new file-target latex.check : : $(root-project) : [ new action : + doxygen.check-latex ] ] + [ new file-target dvips.check : : $(root-project) : [ new action : + doxygen.check-dvips ] ] + [ new file-target gs.check : : $(root-project) : [ new action : + doxygen.check-gs ] ] ; + + for local target in $(targets) + { + .check-tools-targets += [ virtual-target.register $(target) ] ; + } + } + return $(.check-tools-targets) ; +} + + +project.initialize $(__name__) ; +project doxygen ; + +class doxygen-check-tools-target-class : basic-target +{ + rule construct ( name : sources * : property-set ) + { + IMPORT doxygen : check-tools-targets : $(__name__) : + doxygen.check-tools-targets ; + return [ property-set.empty ] [ doxygen.check-tools-targets [ project ] + ] ; + } +} + + +# Declares a metatarget for collecting version information on different external +# tools used in this module. +# +rule check-tools ( target ) +{ + freeze-config ; + targets.create-metatarget doxygen-check-tools-target-class : + [ project.current ] : $(target) ; +} + + +# User-level rule to generate HTML files or BoostBook XML from a set of headers +# via Doxygen. +# +rule doxygen ( target : sources + : requirements * : default-build * : + usage-requirements * ) +{ + param.handle-named-params + sources requirements default-build usage-requirements ; + requirements += none ; + freeze-config ; + local project = [ project.current ] ; + + if $(target:S) = .html + { + # Build an HTML directory from the sources. + local html-location = [ feature.get-values : $(requirements) + ] ; + local output-dir ; + if [ $(project).get build-dir ] + { + # Explicitly specified build dir. Add html at the end. + output-dir = [ path.join [ $(project).build-dir ] + $(html-location:E=html) ] ; + } + else + { + # Trim 'bin' from implicit build dir, for no other reason than + # backward compatibility. + output-dir = [ path.join [ path.parent [ $(project).build-dir ] ] + $(html-location:E=html) ] ; + } + output-dir = [ path.root $(output-dir) [ path.pwd ] ] ; + local output-dir-native = [ path.native $(output-dir) ] ; + requirements = [ property.change $(requirements) : ] ; + + # The doxygen configuration file. + targets.create-typed-target DOXYFILE : $(project) : $(target:S=.tag) + : $(sources) + : $(requirements) + GENERATE_HTML=YES + GENERATE_XML=NO + "OUTPUT_DIRECTORY=\"$(output-dir-native)\"" + HTML_OUTPUT=$(target:B) + : $(default-build) ; + $(project).mark-target-as-explicit $(target:S=.tag) ; + + # The html directory to generate by running doxygen. + targets.create-typed-target DOXYGEN_HTML_MULTIFILE : $(project) + : $(target:S=.dir) # Name. + : $(target:S=.tag) # Sources. + : $(requirements) + "OUTPUT_DIRECTORY=\"$(output-dir-native)\"" + HTML_OUTPUT=$(target:B) + : $(default-build) ; + $(project).mark-target-as-explicit $(target:S=.dir) ; + + # The redirect html file into the generated html. + targets.create-typed-target DOXYGEN_HTML : $(project) : $(target) + : $(target:S=.dir) # Sources. + : $(requirements) $(output-dir) + : $(default-build) ; + } + else + { + # Build a BoostBook XML file from the sources. 
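    # Illustrative sketch only (not from the upstream file): a project Jamfile
    # typically reaches this BoostBook branch with a declaration along the
    # lines of the following, assuming 'using doxygen ;' and 'using boostbook ;'
    # have been configured; the target name and glob pattern are made-up
    # examples:
    #
    #   import doxygen ;
    #   doxygen autodoc : [ glob include/*.hpp ] : <doxygen:param>EXTRACT_ALL=YES ;
    #
    # Naming the target with an .html suffix instead selects the HTML branch
    # handled earlier in this rule.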
+ local location-xml = [ feature.get-values : $(requirements) ] + ; + requirements = [ property.change $(requirements) : ] ; + local target-xml = $(target:B=$(target:B)-xml) ; + + # Check whether we need to build images. + local images-location = [ feature.get-values : + $(requirements) ] ; + if $(images-location) + { + # Prepare a metatarget for collecting used external tool version + # information. We use only one such metatarget as they always + # produce the same files and we do not want to deal with multiple + # metatargets having matching names, causing 'ambiguous variants' + # errors. + if ! $(.check-tools) + { + # FIXME: Since we have the check-tools target object reference, + # see how we can use that instead of having to construct a valid + # target reference string for use in property + # values. + local project-id = --doxygen.check-tools-project-- ; + local target-id = --doxygen.check-tools-- ; + local pm = [ $(project).project-module ] ; + project.register-id $(project-id) : $(pm) ; + check-tools $(target-id) ; + .check-tools = /$(project-id)//$(target-id) ; + } + + doxygen $(target).doxygen-xml-images.html : $(sources) : + $(requirements) + on + QUIET=YES + WARNINGS=NO + WARN_IF_UNDOCUMENTED=NO + $(.check-tools) ; + $(project).mark-target-as-explicit $(target).doxygen-xml-images.html + ; + + targets.create-typed-target DOXYGEN_XML_IMAGES : $(project) + : $(target).doxygen-xml-images # Name. + : $(target).doxygen-xml-images.html # Sources. + : $(requirements) + : $(default-build) ; + $(project).mark-target-as-explicit $(target).doxygen-xml-images ; + + if ! [ MATCH (/)$ : $(images-location) ] + { + images-location = $(images-location)/ ; + } + + requirements += + $(target).doxygen-xml-images + boost.doxygen.formuladir=$(images-location) ; + } + + # The doxygen configuration file. + targets.create-typed-target DOXYFILE : $(project) : $(target-xml:S=.tag) + : $(sources) + : $(requirements) + GENERATE_HTML=NO + GENERATE_XML=YES + XML_OUTPUT=$(target-xml) + : $(default-build) ; + $(project).mark-target-as-explicit $(target-xml:S=.tag) ; + + # The Doxygen XML directory for the processed source files. + targets.create-typed-target DOXYGEN_XML_MULTIFILE : $(project) + : $(target-xml:S=.dir) # Name. + : $(target-xml:S=.tag) # Sources. + : $(requirements) + : $(default-build) ; + $(project).mark-target-as-explicit $(target-xml:S=.dir) ; + + # The resulting BoostBook file is generated by the processor tool. The + # tool can be either the xsltproc plus accompanying XSL scripts. Or it + # can be the python doxproc.py script. + targets.create-typed-target BOOSTBOOK : $(project) : $(target-xml) + : $(target-xml:S=.dir) # Sources. + : $(requirements) + : $(default-build) ; + $(project).mark-target-as-explicit $(target-xml) ; + + stage $(target:S=.xml) # Name. + : $(target-xml) # Sources. + : $(requirements) + $(location-xml:E=.) + $(target:S=.xml) + : $(default-build) ; + $(project).mark-target-as-explicit $(target:S=.xml) ; + + # TODO: See why this alias target is used here instead of simply naming + # the previous stage target $(target) and having it specify the alias + # target's usage requirements directly. 
+ alias $(target) : : $(requirements) : $(default-build) : + $(usage-requirements) $(target:S=.xml) ; + } +} diff --git a/src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile new file mode 100644 index 000000000..9b969df9c --- /dev/null +++ b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.doxyfile @@ -0,0 +1,3 @@ +INPUT = windows-paths-check.hpp +GENERATE_HTML = NO +GENERATE_LATEX = NO diff --git a/src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp b/src/boost/tools/build/src/tools/doxygen/windows-paths-check.hpp new file mode 100644 index 000000000..e69de29bb diff --git a/src/boost/tools/build/src/tools/embarcadero.jam b/src/boost/tools/build/src/tools/embarcadero.jam new file mode 100644 index 000000000..c561bfc94 --- /dev/null +++ b/src/boost/tools/build/src/tools/embarcadero.jam @@ -0,0 +1,640 @@ +# Copyright (c) 2020 Edward Diener +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.borland]] += Embarcadero C++ Compiler + +The `embarcadero` module supports the 32-bit command line C++ compiler +bcc32x and the 64-bit command line C++ compiler bcc64, both clang-based, +running on Microsoft Windows. These are the clang-based Windows compilers +for all versions of Embarcadero C++. + +The module is initialized using the following syntax: + +---- +using embarcadero : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +`version`: + +The version should be the compiler version if specified. if the +version is not specified Boost Build will find the latest installed +version of Embarcadero C++ and use that for the version. If the version +is specified Boost Build does not check if this matches any particular +version of Embarcadero C++, so you may use the version as a mnemonic to +configure separate 'versions'. + +`c++-compile-command`: + +If the c++-compile-command is not specified, Boost.Build will default to the +bcc64 compiler. If you specify a compiler option of 32 the +default compiler will be bcc32x. In either case when the command is not given +Boost Build will assume the compiler is in the PATH. So it is not necessary +to specify a command if you accept the default compiler and the Embarcadero +C++ binary directory is in the PATH. + +If the command is specified it will be used as is to invoke the compiler. +If the command has either 'bcc32x(.exe)' or 'bcc64(.exe)' in it Boost Build +will use the appropriate compiler to configure the toolset. If the command +does not have either 'bcc32x(.exe)' or 'bcc64(.exe)' in it, Boost Build +will use the default compiler to configure the toolset. If you have your +own command, which does not have 'bcc32x(.exe)' in it but invokes the +'bcc32x(.exe)' compiler, specify the 32 compiler option. + +`compiler options`: + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +and C++ sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. 
+ +`asmflags`:: +Specifies additional command line options that will be passed to the assembler. + +`archiveflags`:: +Specifies additional command line options that will be passed to the archiver, +which creates a static library. + +`address-model`:: +This option can be used to specify the default compiler as specified in the +dicsussion above of the c++-compile-command. Otherwise the address model +is not used to initialize the toolset. + +`user-interface`:: +Specifies the user interface for applications. Valid choices are `console` +for a console applicatiuon and `gui` for a Windows application. + +`root`:: +Normallly Boost Build will automatically be able to determine the root of +the Embarcadero C++ installation. It does this in various ways, but primarily +by checking a registry entry. If you specify the root it will use that +path, and the root you specify should be the full path to the Embarcadero +C++ installation on your machine ( without a trailing \ or / ). You should +not need to specify this option unless Boost Build can not find the +Embarcadero C++ root directory. + +Examples:: + +using embarcadero ; + +Configures the toolset to use the latest version, with bcc64 as the compiler. +The bcc64 compiler must be in the PATH. + +using embarcadero : 7.40 ; + +Configures the toolset to use the 7.40 version, with bcc64 as the compiler. +The bcc64 compiler must be in the PATH. + +using embarcadero : 7.40 : bcc32x ; +using embarcadero : 7.40 : : 32 ; + +Configures the toolset to use the 7.40 version, with bcc32x as the compiler. +The bcc32x compiler must be in the PATH. + +using embarcadero : : c:/some_path/bcc64 ; + +Configures the toolset to use the latest version, with full command specified. + +using embarcadero : : full_command : 32 ; + +Configures the toolset to use the latest version, with full command specified +and bcc32x as the compiler. + +using embarcadero : : : c:/root_path ; + +Configures the toolset to use the latest version, with bcc64 as the compiler +and the root directory of the installation specified. The bcc64 compiler must +be in the PATH. + +|# # end::doc[] + +import clang-linux ; +import path ; +import os ; +import type ; +import common ; +import toolset ; +import feature ; +import toolset : flags ; +import clang ; +import gcc ; +import generators ; +import errors ; + +feature.extend toolset : embarcadero ; + +toolset.inherit-generators embarcadero : clang-linux ; +generators.override embarcadero.prebuilt : builtin.lib-generator ; +generators.override embarcadero.prebuilt : builtin.prebuilt ; +generators.override embarcadero.searched-lib-generator : searched-lib-generator ; + +toolset.inherit-rules embarcadero : clang-linux ; +toolset.inherit-flags embarcadero + : clang-linux + : shared + shared + multi + multi/windows + gnu + gnu11 + libc++ + windows/static + windows/shared + off + 98 + 03 + 0x + 11 + 1y + 14 + 1z + 17 + 2a + 20 + latest + 98/iso + 03/iso + 0x/iso + 11/iso + 1y/iso + 14/iso + 1z/iso + 17/iso + 2a/iso + 20/iso + latest/iso + ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] { + .debug-configuration = true ; +} + +rule init ( version ? : command * : options * ) +{ + + local compiler = bcc64 ; + local preprocessor = cpp64 ; + local amodel = 64 ; + + local optam = [ feature.get-values : $(options) ] ; + + if $(optam) + { + if $(optam) = 32 + { + compiler = bcc32x ; + preprocessor = cpp32x ; + amodel = 32 ; + } + else if ! 
( $(optam) = 64 ) + { + $(optam) = "" ; + } + } + + command = [ common.get-invocation-command embarcadero : $(compiler) : $(command) ] ; + + switch $(command[1]:BL) + { + case bcc32x : + compiler = bcc32x ; + preprocessor = cpp32x ; + amodel = 32 ; + case bcc64 : + compiler = bcc64 ; + preprocessor = cpp64 ; + amodel = 64 ; + case "bcc32x.exe" : + compiler = bcc32x ; + preprocessor = cpp32x ; + amodel = 32 ; + case "bcc64.exe" : + compiler = bcc64 ; + preprocessor = cpp64 ; + amodel = 64 ; + } + + if $(optam) && $(optam) != $(amodel) + { + errors.user-error "embarcadero initialization: compiler and address model" : + "the compiler '$(compiler)' does not match the address-model of '$(optam)'" ; + } + else + { + # Determine the version + if $(command) { + local command-string = "$(command)" ; + command-string = $(command-string:J=" ") ; + local soutput = [ SHELL "$(command-string) --version" ] ; + version ?= [ MATCH "Embarcadero C[+][+] ([0-9.]+)" : $(soutput) ] ; + cl_version = [ MATCH ".+version[ ]+([0-9.]+)" : $(soutput) ] ; + if ! $(cl_version) + { + cl_version = 5.0 ; + } + } + + local condition = [ common.check-init-parameters embarcadero : version $(version) ] ; + handle-options $(condition) : $(command) : $(options) ; + + # Support for the Embarcadero root directory. If the Embarcadero binary + # directory is not in the PATH we need to tell the underlying clang + # implementation where to find the Embarcadero header/library files + # and set the correct runtime path so that we can execute Embarcadero + # programs and find Embarcadero DLLs. + + local root = [ feature.get-values : $(options) ] ; + + # 1) Look in registry + + if ! $(root) + { + + local sdkdir = [ get_sdk_dir ] ; + + if $(sdkdir) + { + + local bdsv = [ get_bds_version $(sdkdir) ] ; + + if $(bdsv) + { + + local has_dec = [ MATCH "(.+[.])" : $(bdsv) ] ; + local bdsv_full ; + + if ! $(has_dec) + { + bdsv_full = $(bdsv).0 ; + } + + local troot = [ W32_GETREG "HKEY_LOCAL_MACHINE\\SOFTWARE\\Embarcadero\\BDS\\$(bdsv)" : RootDir ] ; + + if $(troot) + { + troot = $(troot:T) ; + troot = [ concatenate $(troot) : name ] ; + root = $(troot:D) ; + } + else + { + troot = [ W32_GETREG "HKEY_CURRENT_USER\\SOFTWARE\\Embarcadero\\BDS\\$(bdsv)" : RootDir ] ; + if $(troot) + { + troot = $(troot:T) ; + troot = [ concatenate $(troot) : name ] ; + root = $(troot:D) ; + } + else if $(bdsv_full) + { + troot = [ W32_GETREG "HKEY_LOCAL_MACHINE\\SOFTWARE\\Embarcadero\\BDS\\$(bdsv_full)" : RootDir ] ; + if $(troot) + { + troot = $(troot:T) ; + troot = [ concatenate $(troot) : name ] ; + root = $(troot:D) ; + } + else + { + troot = [ W32_GETREG "HKEY_CURRENT_USER\\SOFTWARE\\Embarcadero\\BDS\\$(bdsv_full)" : RootDir ] ; + if $(troot) + { + troot = $(troot:T) ; + troot = [ concatenate $(troot) : name ] ; + root = $(troot:D) ; + } + } + } + } + } + } + } + + + # 2) Look for path in the command + + if ! $(root) + { + + local cpath = $(command[1]:D) ; + + if $(cpath) + { + root = $(cpath:P) ; + } + } + + # 3) Search for the directory of the command + + if ! $(root) + { + + local pdirs = [ path.programs-path ] ; + + for local dir in $(pdirs) + { + + local match = [ MATCH "/(.:.+)" : $(dir) ] ; + + if $(match) + { + dir = "$(match)" ; + } + + if [ CHECK_IF_FILE $(dir)/$(command) ] + { + root = $(dir:P) ; + break ; + } + if [ CHECK_IF_FILE $(dir)/$(command).exe ] + { + root = $(dir:P) ; + break ; + } + } + } + + if ! 
$(root) + { + errors.user-error "Embarcadero toolset initialization: the root directory for the Embarcadero installation can not be found" ; + } + else + { + + local lib_path = $(root)/bin $(root)/bin64 $(root)/Bpl C:/Users/Public/Documents/Embarcadero ; + if $(.debug-configuration) + { + ECHO "notice:" using Embarcadero libraries with clang compilation"::" $(condition) "::" $(lib_path) ; + } + flags embarcadero.link RUN_PATH $(condition) : $(lib_path) ; + + local system_include_option = "-isystem " ; + local system_include_directories = $(root)/include/windows/crtl $(root)/include/windows/sdk $(root)/include/windows/rtl $(root)/include/dinkumware64 ; + + local lib_dir_release ; + local lib_dir_debug ; + local archiver ; + local arflags ; + local implib ; + local assembler ; + local asmflags ; + local asmoutput ; + + if $(compiler) = bcc32x + { + lib_dir_release = $(root)/lib/win32c/release $(root)/lib/win32c/release/psdk ; + lib_dir_debug = $(root)/lib/win32c/debug ; + archiver = tlib ; + arflags = /P512 ; + implib = implib ; + assembler = $(root)/bin/tasm32 ; + + # /ml makes all symbol names case-sensitive + + asmflags = /ml ; + asmoutput = "," ; + } + else if $(compiler) = bcc64 + { + + lib_dir_release = $(root)/lib/win64/release $(root)/lib/win64/release/psdk ; + lib_dir_debug = $(root)/lib/win64/debug ; + archiver = tlib64 ; + arflags = /P2048 ; + implib = mkexp ; + } + + flags embarcadero.compile .EMB_SYSINC $(condition) : $(system_include_option)$(system_include_directories) ; + flags embarcadero.link LINKPATH $(condition)/release : $(lib_dir_release) ; + flags embarcadero.link LINKPATH $(condition)/debug : $(lib_dir_debug) $(lib_dir_release) ; + flags embarcadero.archive .AR $(condition) : $(root)/bin/$(archiver) ; + flags embarcadero.archive .ARFLAGS $(condition) : $(arflags) ; + flags embarcadero.asm .ASM $(condition) : $(assembler) ; + flags embarcadero.asm .ASMFLAGS $(condition) : $(asmflags) ; + flags embarcadero.asm .ASMOUTPUT $(condition) : $(asmoutput) ; + flags embarcadero.asm USER_OPTIONS $(condition) : [ feature.get-values : $(options) ] ; + flags embarcadero.archive AROPTIONS $(condition) : [ feature.get-values : $(options) ] ; + flags embarcadero.link.dll .IMPLIB_COMMAND $(condition) : $(root)/bin/$(implib) ; + + local mte = [ feature.get-values : $(options) ] ; + + if $(mte) + { + flags embarcadero OPTIONS EXE/$(condition) : $(mte) ; + } + else + { + flags embarcadero OPTIONS EXE/$(condition) : console ; + } + } + } +} + +local rule concatenate ( path : name ) + { + + local result ; + local has_ending_slash = [ MATCH ".*([/\\])$" : $(path) ] ; + local has_backward_slash = [ MATCH ".*([\\])" : $(path) ] ; + + if $(has_ending_slash) + { + result = $(path)$(name) ; + } + else if $(has_backward_slash) + { + result = $(path)"\\"$(name) ; + } + else + { + result = $(path)"/"$(name) ; + } + return $(result) ; + } + +local rule get_sdk_dir ( ) + { + + local ret ; + local appdata = [ os.environ APPDATA ] ; + + if $(appdata) + { + ret = $(appdata:T)/Embarcadero/BDS ; + } + return $(ret) ; + } + +local rule get_bds_version ( sdir ) + { + + local ret ; + local flist = [ GLOB $(sdir) : * ] ; + + if $(flist) + { + + local dirs ; + + for local file in $(flist) + { + if ! 
[ CHECK_IF_FILE $(file) ] + { + dirs += $(file) ; + } + } + if $(dirs) + { + + local ldir = $(dirs[-1]) ; + + ret = $(ldir:B) ; + } + } + return $(ret) ; + } + +local rule handle-options ( condition * : command * : options * ) +{ + if $(.debug-configuration) + { + ECHO "notice:" will use '$(command)' for embarcadero, condition + $(condition:E=(empty)) ; + } + + flags embarcadero CONFIG_COMMAND $(condition) : $(command) ; + + flags embarcadero.compile OPTIONS $(condition) : + [ feature.get-values : $(options) ] ; + + flags embarcadero.compile.c++ OPTIONS $(condition) : + [ feature.get-values : $(options) ] ; + + flags embarcadero.link OPTIONS $(condition) : + [ feature.get-values : $(options) ] ; +} + +############################################################################### +# Declare generators + +type.set-generated-target-suffix OBJ : embarcadero windows 64 : o ; +type.set-generated-target-suffix OBJ : embarcadero windows 32 : obj ; +type.set-generated-target-suffix STATIC_LIB : embarcadero windows 64 : a ; +type.set-generated-target-suffix STATIC_LIB : embarcadero windows 32 : lib ; +type.set-generated-target-suffix IMPORT_LIB : embarcadero windows 64 : a ; +type.set-generated-target-suffix IMPORT_LIB : embarcadero windows 32 : lib ; + +generators.register-linker embarcadero.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : embarcadero ; +generators.register-linker embarcadero.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : embarcadero ; + +generators.register-archiver embarcadero.archive : OBJ : STATIC_LIB : embarcadero ; +generators.register-c-compiler embarcadero.compile.c++ : CPP : OBJ : embarcadero ; +generators.register-c-compiler embarcadero.compile.c : C : OBJ : embarcadero ; +generators.register-c-compiler embarcadero.compile.asm : ASM : OBJ : embarcadero 64 ; +generators.register-standard embarcadero.asm : ASM : OBJ : embarcadero 32 ; + +# Flags + +local opt_console = -tC ; +local opt_shared = -tD ; +local opt_mt = -tM ; +local opt_drtl = -tR ; +local opt_dapp = -tW ; +local opt_compile_flags = -DNDEBUG ; +local opt_lflags = "-lS:1048576 -lSc:4098 -lH:1048576 -lHc:8192" ; + +flags embarcadero OPTIONS console : $(opt_console) ; +flags embarcadero OPTIONS gui : $(opt_dapp) ; +flags embarcadero OPTIONS shared : $(opt_drtl) ; +flags embarcadero OPTIONS LIB/shared : $(opt_shared) ; +flags embarcadero OPTIONS multi : $(opt_mt) ; +flags embarcadero.compile OPTIONS release : $(opt_compile_flags) ; +flags embarcadero.link OPTIONS : $(opt_lflags) ; +flags embarcadero.archive AROPTIONS ; +flags embarcadero.asm USER_OPTIONS ; +flags embarcadero.compile OPTIONS 32 : -m32 ; +flags embarcadero.compile OPTIONS 64 : -m64 ; +flags embarcadero.link OPTIONS 32 : -m32 ; +flags embarcadero.link OPTIONS 64 : -m64 ; +flags embarcadero.link .EMBLRSP release : _emb_lpr ; +flags embarcadero.link .EMBLRSP debug : _emb_lpd ; +flags embarcadero.compile .EMBCRSP release : _emb_sir ; +flags embarcadero.compile .EMBCRSP debug : _emb_sid ; + +nl = " +" ; + +rule compile.c++ ( targets * : sources * : properties * ) { +} + +actions compile.c++ { + "$(CONFIG_COMMAND)" -c -x c++ @"@($(<[1]:DBW)$(.EMBCRSP)$(<[1]:S).rsp:E=$(nl)"$(.EMB_SYSINC)")" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" +} + +rule compile.c ( targets * : sources * : properties * ) +{ +} + +actions compile.c +{ + "$(CONFIG_COMMAND)" -c -x c @"@($(<[1]:DBW)$(.EMBCRSP)$(<[1]:S).rsp:E=$(nl)"$(.EMB_SYSINC)")" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" +} + 
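# Illustrative sketch only (not from the upstream file): following the
# option-value syntax documented at the top of this module, a user-config.jam
# entry that selects the 32-bit bcc32x compiler and passes a few extra options
# might look roughly like this (the version number and flags are made-up
# examples):
#
#   using embarcadero : 7.40 : : <address-model>32 <user-interface>gui <cxxflags>-O2 ;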
+rule archive ( targets * : sources * : properties * ) +{ +} + +actions updated together piecemeal archive +{ + "$(.AR)" $(AROPTIONS) $(.ARFLAGS) /u /a /C "$(<)" +-"$(>)" +} + +rule link ( targets * : sources * : properties * ) { +} + +rule link.dll ( targets * : sources * : properties * ) { +} + +actions link bind LIBRARIES { + "$(CONFIG_COMMAND)" @"@($(<[1]:DBW)$(.EMBLRSP)$(<[1]:S).rsp:E=$(nl)-L"$(LINKPATH)")" -o "$(<)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) $(USER_OPTIONS) +} + +actions link.dll bind LIBRARIES { + "$(CONFIG_COMMAND)" @"@($(<[1]:DBW)$(.EMBLRSP)$(<[1]:S).rsp:E=$(nl)-L"$(LINKPATH)")" -o "$(<[1])" @"@($(<[1]:W).rsp:E=$(nl)"$(>)")" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) $(USER_OPTIONS) && "$(.IMPLIB_COMMAND)" "$(<[2])" "$(<[1])" +} + +rule asm ( targets * : sources * : properties * ) +{ +} + +actions asm +{ + $(.ASM) $(.ASMFLAGS) $(USER_OPTIONS) "$(>)" $(.ASMOUTPUT) "$(<)" +} + +rule compile.asm ( targets * : sources * : properties * ) +{ + LANG on $(<) = "-x assembler-with-cpp" ; +} + +actions compile.asm +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} diff --git a/src/boost/tools/build/src/tools/emscripten.jam b/src/boost/tools/build/src/tools/emscripten.jam new file mode 100644 index 000000000..180dfd302 --- /dev/null +++ b/src/boost/tools/build/src/tools/emscripten.jam @@ -0,0 +1,105 @@ +# Copyright Rene Rivera 2016 +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; +import os ; +import toolset ; +import common ; +import gcc ; +import type ; + +feature.feature embind : off on : propagated ; +feature.feature closure : off on full : propagated ; +feature.feature link-optimization : off on full : propagated ; + +rule init ( version ? 
: command * : options * ) +{ + command = [ common.get-invocation-command emscripten + : emcc + : $(command) ] ; + + # Determine the version + if $(command) + { + local command-string = \"$(command)\" ; + command-string = $(command-string:J=" ") ; + version ?= [ MATCH "([0-9.]+)" + : [ SHELL "$(command-string) --version" ] ] ; + } + + local condition = [ common.check-init-parameters emscripten + : version $(version) ] ; + + common.handle-options emscripten : $(condition) : $(command) : $(options) ; +} + +feature.extend toolset : emscripten ; + +toolset.inherit-generators emscripten emscripten + : gcc + : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch + ; +toolset.inherit-rules emscripten : gcc ; +toolset.inherit-flags emscripten : gcc + : + off speed space + off on + off on + off on + ; + +type.set-generated-target-suffix EXE : emscripten : "js" ; +type.set-generated-target-suffix OBJ : emscripten : "bc" ; +type.set-generated-target-suffix STATIC_LIB : emscripten : "bc" ; + +toolset.flags emscripten.compile OPTIONS ; +toolset.flags emscripten.compile OPTIONS ; +toolset.flags emscripten.compile.c++ OPTIONS ; + +toolset.flags emscripten.compile OPTIONS off : -O0 ; +toolset.flags emscripten.compile OPTIONS speed : -O3 ; +toolset.flags emscripten.compile OPTIONS space : -Oz ; +toolset.flags emscripten.link OPTIONS off : -O0 ; +toolset.flags emscripten.link OPTIONS speed : -O3 ; +toolset.flags emscripten.link OPTIONS space : -O3 ; + +toolset.flags emscripten.compile OPTIONS on : --profiling-funcs ; + +toolset.flags emscripten.compile OPTIONS off : -fno-inline ; +toolset.flags emscripten.compile OPTIONS on : -Wno-inline ; +toolset.flags emscripten.compile OPTIONS full : -Wno-inline ; + +toolset.flags emscripten OPTIONS off : -g0 ; +toolset.flags emscripten OPTIONS on : -g4 -s DEMANGLE_SUPPORT=1 ; +toolset.flags emscripten OPTIONS off : -fno-rtti ; + +toolset.flags emscripten.link OPTIONS on : --bind ; +toolset.flags emscripten.link OPTIONS on : --closure 1 ; +toolset.flags emscripten.link OPTIONS full : --closure 2 ; +toolset.flags emscripten.link OPTIONS off : --llvm-lto 0 ; +toolset.flags emscripten.link OPTIONS on : --llvm-lto 1 ; +toolset.flags emscripten.link OPTIONS full : --llvm-lto 3 ; + +actions compile.c +{ + "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions archive +{ + "$(CONFIG_COMMAND)" $(AROPTIONS) -r -o "$(<)" "$(>)" +} + +toolset.flags emscripten.link USER_OPTIONS ; + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) +} diff --git a/src/boost/tools/build/src/tools/features/__init_features__.jam b/src/boost/tools/build/src/tools/features/__init_features__.jam new file mode 100644 index 000000000..ff04722a9 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/__init_features__.jam @@ -0,0 +1,23 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Here we automatically define any "feature" modules in this directory. 
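# For example, a sibling file named cflags-feature.jam matches the glob below
# and is imported as the module "features/cflags-feature"; loading that module
# runs its feature.feature call, which is what actually registers the feature.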
+ +local key = feature ; + +import os path modules ; + +.this-module's-file = [ modules.binding $(__name__) ] ; +.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ; +.to-load-jamfiles = [ path.glob $(.this-module's-dir) : *-$(key).jam ] ; +.to-load-modules = [ MATCH ^(.*)\.jam$ : $(.to-load-jamfiles) ] ; + +# A loop over all matched modules in this directory +for local m in $(.to-load-modules) +{ + m = [ path.basename $(m) ] ; + m = $(key)s/$(m) ; + import $(m) ; +} diff --git a/src/boost/tools/build/src/tools/features/address-model-feature.jam b/src/boost/tools/build/src/tools/features/address-model-feature.jam new file mode 100644 index 000000000..e6b416a29 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/address-model-feature.jam @@ -0,0 +1,22 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.address-model]]`address-model`:: +*Allowed values:* `32`, `64`. ++ +Specifies if 32-bit or 64-bit code should be generated by the compiler. Whether +this feature works depends on the used compiler, its version, how the compiler +is configured, and the values of the `architecture` `instruction-set` features. +Please see the section <> for details. + +|# # end::doc[] + +feature.feature address-model + : 16 32 64 32_64 + : propagated optional ; diff --git a/src/boost/tools/build/src/tools/features/allow-feature.jam b/src/boost/tools/build/src/tools/features/allow-feature.jam new file mode 100644 index 000000000..913251dbf --- /dev/null +++ b/src/boost/tools/build/src/tools/features/allow-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.allow]]`allow`:: +This feature is used to allow specific generators to run. For example, Qt tools +can only be invoked when Qt library is used. In that case, `qt` will be +in usage requirement of the library. + +|# # end::doc[] + +feature.feature allow + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/architecture-feature.jam b/src/boost/tools/build/src/tools/features/architecture-feature.jam new file mode 100644 index 000000000..3c9d92f32 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/architecture-feature.jam @@ -0,0 +1,52 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.architecture]]`architecture`:: +*Allowed values:* `x86`, `ia64`, `sparc`, `power`, `mips`, `mips1`, `mips2`, +`mips3`, `mips4`, `mips32`, `mips32r2`, `mips64`, `parisc`, `arm`, +`s390x`, `loongarch`. ++ +Specifies the general processor family to generate code for. 
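As an illustration (an editor's sketch, not part of the original text; the
target and source names are invented), a target that must be built as 64-bit
ARM code can state both the processor family and the address width in its
requirements:

----
exe server : server.cpp : <architecture>arm <address-model>64 ;
----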
+ +|# # end::doc[] + +feature.feature architecture + : + # x86 and x86-64 + x86 + + # ia64 + ia64 + + # Sparc + sparc + + # RS/6000 & PowerPC + power + + # LoongArch + loongarch + + # MIPS/SGI + mips mips1 mips2 mips3 mips4 mips32 mips32r2 mips64 + + # HP/PA-RISC + parisc + + # Advanced RISC Machines + arm + + # RISC-V + riscv + + # z Systems (aka s390x) + s390x + : + propagated optional + ; diff --git a/src/boost/tools/build/src/tools/features/archiveflags-feature.jam b/src/boost/tools/build/src/tools/features/archiveflags-feature.jam new file mode 100644 index 000000000..017153631 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/archiveflags-feature.jam @@ -0,0 +1,18 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.archiveflags]]`archiveflags`:: +The value of this feature is passed without modification to the archiver tool +when creating static libraries. + +|# # end::doc[] + +feature.feature archiveflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/asmflags-feature.jam b/src/boost/tools/build/src/tools/features/asmflags-feature.jam new file mode 100644 index 000000000..39c203de8 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/asmflags-feature.jam @@ -0,0 +1,17 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.asmflags]]`asmflags`:: +The value of this feature is passed without modification to the assembler. + +|# # end::doc[] + +feature.feature asmflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/build-feature.jam b/src/boost/tools/build/src/tools/features/build-feature.jam new file mode 100644 index 000000000..90e3b2692 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/build-feature.jam @@ -0,0 +1,22 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.build]]`build`:: +*Allowed values:* `no` ++ +Used to conditionally disable build of a target. If `no` is in +properties when building a target, build of that target is skipped. Combined +with conditional requirements this allows you to skip building some target in +configurations where the build is known to fail. + +|# # end::doc[] + +feature.feature build + : yes no + : optional ; diff --git a/src/boost/tools/build/src/tools/features/cflags-feature.jam b/src/boost/tools/build/src/tools/features/cflags-feature.jam new file mode 100644 index 000000000..2f0b9a53f --- /dev/null +++ b/src/boost/tools/build/src/tools/features/cflags-feature.jam @@ -0,0 +1,21 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.cflags]]`cflags`; `cxxflags`; `linkflags`:: +The value of these features is passed without modification to the corresponding +tools. 
For `cflags` that is both the C and {CPP} compilers, for `cxxflags` that +is the {CPP} compiler, and for `linkflags` that is the linker. The features are +handy when you are trying to do something special that cannot be achieved by a +higher-level feature in B2. + +|# # end::doc[] + +feature.feature cflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/compileflags-feature.jam b/src/boost/tools/build/src/tools/features/compileflags-feature.jam new file mode 100644 index 000000000..9448542c2 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/compileflags-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2020 RenĂ© Ferdinand Rivera Morell +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.compileflags]]`compileflags`:: +The value of this feature is passed without modification to the corresponding +tools. The values from the `compileflags` is applied to all compilation of any +language for the tools. + +|# # end::doc[] + +feature.feature compileflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/conditional-feature.jam b/src/boost/tools/build/src/tools/features/conditional-feature.jam new file mode 100644 index 000000000..5eca03899 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/conditional-feature.jam @@ -0,0 +1,31 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.conditional]]`conditional`:: +Used to introduce indirect conditional requirements. The value should have the +form: ++ +---- +@rulename +---- ++ +where _rulename_ should be a name of a rule with the following signature: ++ +---- +rule rulename ( properties * ) +---- ++ +The rule will be called for each target with its properties and should return +any additional properties. See also section <> for an example. + +|# # end::doc[] + +feature.feature conditional + : + : incidental free ; diff --git a/src/boost/tools/build/src/tools/features/coverage-feature.jam b/src/boost/tools/build/src/tools/features/coverage-feature.jam new file mode 100644 index 000000000..c07b33d83 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/coverage-feature.jam @@ -0,0 +1,22 @@ +# Copyright 2019 Rene Rivera +# Copyright 2019 Hans Dembinski +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.coverage]]`coverage`:: +*Allowed values:* `off`, `on`. ++ +Enables code instrumentation to generate coverage data during execution. + +|# # end::doc[] + +feature.feature coverage + : + off # Disable coverage generation for the tool (default). + on # Enable coverage generation for the tool. + : incidental propagated ; diff --git a/src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam b/src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam new file mode 100644 index 000000000..a0feea64a --- /dev/null +++ b/src/boost/tools/build/src/tools/features/cxx-template-depth-feature.jam @@ -0,0 +1,39 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; +import numbers ; + +#| tag::doc[] + +[[bbv2.builtin.features.cpp-template-depth]]`c++-template-depth`:: +*Allowed values:* Any positive integer. ++ +Allows configuring a {CPP} compiler with the maximal template instantiation +depth parameter. Specific toolsets may or may not provide support for this +feature depending on whether their compilers provide a corresponding +command-line option. ++ +NOTE: Due to some internal details in the current B2 implementation it +is not possible to have features whose valid values are all positive integer. +As a workaround a large set of allowed values has been defined for this feature +and, if a different one is needed, user can easily add it by calling the +feature.extend rule. + +|# # end::doc[] + +# TODO: This should be upgraded as soon as Boost Build adds support for custom +# validated feature values or at least features allowing any positive integral +# value. See related Boost Build related trac ticket #194. + +feature.feature c++-template-depth + : + [ numbers.range 64 1024 : 64 ] + [ numbers.range 20 1000 : 10 ] + # Maximum template instantiation depth guaranteed for ANSI/ISO C++ + # conforming programs. + 17 + : + incidental optional propagated ; diff --git a/src/boost/tools/build/src/tools/features/cxxabi-feature.jam b/src/boost/tools/build/src/tools/features/cxxabi-feature.jam new file mode 100644 index 000000000..9ca0f2b05 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/cxxabi-feature.jam @@ -0,0 +1,18 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.cxxabi]]`c++abi`:: +Selects a specific variant of C++ ABI if the compiler supports several. + + +|# # end::doc[] + +feature.feature c++abi + : + : propagated optional ; diff --git a/src/boost/tools/build/src/tools/features/cxxflags-feature.jam b/src/boost/tools/build/src/tools/features/cxxflags-feature.jam new file mode 100644 index 000000000..fe4d97208 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/cxxflags-feature.jam @@ -0,0 +1,17 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.cxxflags]]`cxxflags`:: +See <`>>. + +|# # end::doc[] + +feature.feature cxxflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/cxxstd-feature.jam b/src/boost/tools/build/src/tools/features/cxxstd-feature.jam new file mode 100644 index 000000000..4494ed300 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/cxxstd-feature.jam @@ -0,0 +1,50 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.cxxstd]]`cxxstd`:: +*Allowed values*: `98`, `03`, `0x`, `11`, `1y`, `14`, `1z`, `17`, `2a`, `20`, +`latest`. ++ +Specifies the version of the C++ Standard Language to build with. All the +official versions of the standard since "98" are included. It is also possible +to specify using the experimental, work in progress, `latest` version. 
Some +compilers specified intermediate versions for the experimental versions leading +up to the released standard version. Those are included following the GNU +nomenclature as `0x`, `1y`, `1z`, and `2a`. Depending on the compiler `latest` +would map to one of those. + +NOTE: This is an `optional` feature. Hence when not specified the compiler +default behaviour is used. + +NOTE: Please consult the toolset specific documentation for which `cxxstd` +is supported. + +|# # end::doc[] + +feature.feature cxxstd + : 98 03 0x 11 1y 14 1z 17 2a 20 2b 23 2c 26 latest + : optional composite propagated ; + +#| tag::doc[] + +[[bbv2.builtin.features.cxxstd-dialect]]`cxxstd-dialect`:: +*Subfeature of* `cxxstd` ++ +*Allowed values*: `iso`, `gnu`, `ms`. ++ +Indicates if a non-standard dialect should be used. These usually have +either/or extensions or platform specific functionality. Not specifying the +dialect will default to 'iso' which will attempt to use ISO C++ Standard +conformance to the best of the compiler's ability. + +|# # end::doc[] + +feature.subfeature cxxstd : dialect + : iso gnu ms + : composite propagated ; diff --git a/src/boost/tools/build/src/tools/features/debug-feature.jam b/src/boost/tools/build/src/tools/features/debug-feature.jam new file mode 100644 index 000000000..197ef7400 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/debug-feature.jam @@ -0,0 +1,34 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.debug-symbols]]`debug-symbols`:: +*Allowed values:* `on`, `off`. ++ +Specifies if produced object files, executables, and libraries should include +debug information. Typically, the value of this feature is implicitly set by +the `variant` feature, but it can be explicitly specified by the user. The most +common usage is to build release variant with debugging information. + +|# # end::doc[] + +feature.feature debug-symbols + : on off + : propagated ; + +#| tag::prof-doc[] + +[[bbv2.builtin.features.profiling]]`profiling`:: +*Allowed values:* `off`, `on`. ++ +Enables generation of extra code to write profile information. +|# # end::prof-doc[] + +feature.feature profiling + : off on + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/define-feature.jam b/src/boost/tools/build/src/tools/features/define-feature.jam new file mode 100644 index 000000000..497bc829f --- /dev/null +++ b/src/boost/tools/build/src/tools/features/define-feature.jam @@ -0,0 +1,30 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.define]]`define`:: +Specifies a preprocessor symbol that should be defined on the command line. +You may either specify just the symbol, which will be defined without any +value, or both the symbol and the value, separated by equal sign. + +|# # end::doc[] + +feature.feature define + : + : free ; + +#| tag::undef-doc[] + +[[bbv2.builtin.features.undef]]`undef`:: +Specifies a preprocessor symbol to undefine. 
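For example (an editor's sketch, not part of the original text; the target and
symbol names are invented), a symbol can be defined with or without a value,
and removed again for a particular target:

----
exe app : app.cpp : <define>NDEBUG <define>VERSION=3 <undef>ENABLE_TRACE ;
----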
+ +|# # end::undef-doc[] + +feature.feature undef + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/dependency-feature.jam b/src/boost/tools/build/src/tools/features/dependency-feature.jam new file mode 100644 index 000000000..61a075fc4 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/dependency-feature.jam @@ -0,0 +1,62 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# The following features are incidental since they have no effect on built +# products. Not making them incidental will result in problems in corner cases, +# e.g.: +# +# unit-test a : a.cpp : b ; +# lib b : a.cpp b ; +# +# Here, if is not incidental, we would decide we have two targets for +# a.obj with different properties and complain about it. +# +# Note that making a feature incidental does not mean it is ignored. It may be +# ignored when creating a virtual target, but the rest of build process will use +# them. + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.dependency]]`dependency`:: +Introduces a dependency on the target named by the value of this feature (so it +will be brought up-to-date whenever the target being declared is). The +dependency is not used in any other way. + +|# # end::doc[] + +feature.feature dependency + : + : free dependency incidental ; + +#| tag::impl-doc[] + +[[bbv2.builtin.features.implicit-dependency]]`implicit-dependency`:: +Indicates that the target named by the value of this feature may produce files +that are included by the sources of the target being declared. See the section +<> for more information. + +|# # end::impl-doc[] + +feature.feature implicit-dependency + : + : free dependency incidental ; + +#| tag::use-doc[] + +[[bbv2.builtin.features.use]]`use`:: +Introduces a dependency on the target named by the value of this feature (so it +will be brought up-to-date whenever the target being declared is), and adds its +usage requirements to the build properties of the target being declared. The +dependency is not used in any other way. The primary use case is when you want +the usage requirements (such as `#include` paths) of some library to be +applied, but do not want to link to it. + +|# # end::use-doc[] + +feature.feature use + : + : free dependency incidental ; diff --git a/src/boost/tools/build/src/tools/features/dll-feature.jam b/src/boost/tools/build/src/tools/features/dll-feature.jam new file mode 100644 index 000000000..f6bb25f96 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/dll-feature.jam @@ -0,0 +1,73 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.dll-path]]`dll-path`:: +Specifies an additional directory where the system should look for shared +libraries when the executable or shared library is run. This feature only +affects Unix compilers. Please see +<> +in <> for details. + +|# # end::doc[] + +feature.feature dll-path + : + : free path ; + +#| tag::hardcode-doc[] + +[[bbv2.builtin.features.hardcode-dll-paths]]`hardcode-dll-paths`:: +*Allowed values:* `true`, `false`. ++ +Controls automatic generation of dll-path properties. ++ +This property is specific to Unix systems. 
If an executable is built with +`true`, the generated binary will contain the list of all +the paths to the used shared libraries. As the result, the executable can be +run without changing system paths to shared libraries or installing the +libraries to system paths. This is very convenient during development. Please +see the <> for details. Note that on Mac OSX, +the paths are unconditionally hardcoded by the linker, and it is not possible +to disable that behavior + +|# # end::hardcode-doc[] + +feature.feature hardcode-dll-paths + : true false + : incidental ; + +# An internal feature that holds the paths of all dependency shared libraries. +# On Windows, it is needed so that we can add all those paths to PATH when +# running applications. On Linux, it is needed to add proper -rpath-link command +# line options. +feature.feature xdll-path + : + : free path ; + +#| tag::def-doc[] + +[[bbv2.builtin.features.def-file]]`def-file`:: +Provides a means to specify def-file for windows DLLs. + +|# # end::def-doc[] + +feature.feature def-file + : + : free dependency ; + +#| tag::suppress-doc[] + +[[bbv2.builtin.features.suppress-import-lib]]`suppress-import-lib`:: +Suppresses creation of import library by the linker. + +|# # end::suppress-doc[] + +feature.feature suppress-import-lib + : false true + : incidental ; diff --git a/src/boost/tools/build/src/tools/features/exception-feature.jam b/src/boost/tools/build/src/tools/features/exception-feature.jam new file mode 100644 index 000000000..c24c10a81 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/exception-feature.jam @@ -0,0 +1,47 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# TODO: Documentation. + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.exception-handling]]`exception-handling`:: +*Allowed values:* `on`, `off`. ++ +Disables exceptions. + +|# # end::doc[] + +feature.feature exception-handling + : on off + : propagated ; + +#| tag::asynch-doc[] + +[[bbv2.builtin.features.asynch-exceptions]]`asynch-exceptions`:: +*Allowed values:* `off`, `on`. ++ +Selects whether there is support for asynchronous EH (e.g. catching SEGVs). + +|# # end::asynch-doc[] + +feature.feature asynch-exceptions + : off on + : propagated ; + +#| tag::doc[] + +[[bbv2.builtin.features.extern-c-nothrow]]`extern-c-nothrow`:: +*Allowed values:* `off`, `on`. ++ +Selects whether all `extern "C"` functions are considered `nothrow` by default. + +|# # end::doc[] + +feature.feature extern-c-nothrow + : off on + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/fflags-feature.jam b/src/boost/tools/build/src/tools/features/fflags-feature.jam new file mode 100644 index 000000000..3c58f2107 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/fflags-feature.jam @@ -0,0 +1,18 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.fflags]]`fflags`:: +The value of this feature is passed without modification to the tool when +compiling Fortran sources. 
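A minimal sketch (editor's example, not part of the original text), assuming a
toolset with Fortran support such as gcc/gfortran is configured; the flag shown
is only an illustration:

----
project : requirements <fflags>-ffixed-form ;
----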
+ +|# # end::doc[] + +feature.feature fflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/file-feature.jam b/src/boost/tools/build/src/tools/features/file-feature.jam new file mode 100644 index 000000000..2a00b6e90 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/file-feature.jam @@ -0,0 +1,18 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.file]]`file`:: +When used in requirements of a prebuilt library target this feature specifies +the path to the library file. See <> for examples. + +|# # end::doc[] + +feature.feature file + : + : free dependency incidental ; diff --git a/src/boost/tools/build/src/tools/features/find-lib-feature.jam b/src/boost/tools/build/src/tools/features/find-lib-feature.jam new file mode 100644 index 000000000..84e07e581 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/find-lib-feature.jam @@ -0,0 +1,42 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.find-shared-library]]`find-shared-library`:: +Adds a shared library to link to. Usually link:#bbv2.tasks.libraries[`lib`] +targets should be preferred over using this feature. + +|# # end::doc[] + +feature.feature find-shared-library + : + : free ; #order-sensitive ; + +#| tag::doc[] + +[[bbv2.builtin.features.find-static-library]]`find-static-library`:: +Adds a static library to link to. Usually link:#bbv2.tasks.libraries[`lib`] +targets should be preferred over using this feature. + +|# # end::doc[] + +feature.feature find-static-library + : + : free ; #order-sensitive ; + +#| tag::path-doc[] + +[[bbv2.builtin.features.library-path]]`library-path`:: +Adds to the list of directories which will be used by the linker to search for +libraries. + +|# # end::path-doc[] + +feature.feature library-path + : + : free path ; #order-sensitive ; diff --git a/src/boost/tools/build/src/tools/features/flags-feature.jam b/src/boost/tools/build/src/tools/features/flags-feature.jam new file mode 100644 index 000000000..56dec0cc8 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/flags-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.flags]]`flags`:: +This feature is used for generic, i.e. non-language specific, flags for tools. +The value of this feature is passed without modification to the tool that will +build the target. + +|# # end::doc[] + +feature.feature flags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/force-include-feature.jam b/src/boost/tools/build/src/tools/features/force-include-feature.jam new file mode 100644 index 000000000..8d98374a3 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/force-include-feature.jam @@ -0,0 +1,21 @@ +# Copyright 2020 Nikita Kniazev +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.force-include]]`force-include`:: +Specifies an include path that has to be included in a way like if +`#include "file"` appeared as the first line of every target's source file. + +The include order is not guaranteed if used multiple times on a single target. + +|# # end::doc[] + +feature.feature "force-include" + : + : free + ; diff --git a/src/boost/tools/build/src/tools/features/include-feature.jam b/src/boost/tools/build/src/tools/features/include-feature.jam new file mode 100644 index 000000000..254dcbfe9 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/include-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.include]]`include`:: +Specifies an additional include path that is to be passed to C and {CPP} +compilers. + +|# # end::doc[] + +feature.feature "include" + : + : free path #order-sensitive + ; diff --git a/src/boost/tools/build/src/tools/features/instruction-set-feature.jam b/src/boost/tools/build/src/tools/features/instruction-set-feature.jam new file mode 100644 index 000000000..003c674f4 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/instruction-set-feature.jam @@ -0,0 +1,73 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.instruction-set]]`instruction-set`:: +*Allowed values:* depends on the used toolset. ++ +Specifies for which specific instruction set the code should be generated. The +code in general might not run on processors with older/different instruction +sets. ++ +While B2 allows a large set of possible values for this features, +whether a given value works depends on which compiler you use. Please see +the section <> for details. 
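For instance (an editor's sketch, not part of the original text; the target
name and chosen value are invented), with a GCC or Clang style toolset one
might request code tuned for a specific CPU:

----
exe encoder : encoder.cpp : <instruction-set>haswell ;
----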
+ +|# # end::doc[] + +feature.feature instruction-set + : + # x86 and x86-64 + native i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3 + pentium3m pentium-m pentium4 pentium4m prescott nocona core2 corei7 corei7-avx core-avx-i + conroe conroe-xe conroe-l allendale merom merom-xe kentsfield kentsfield-xe penryn wolfdale + yorksfield nehalem sandy-bridge ivy-bridge haswell broadwell skylake skylake-avx512 cannonlake + icelake-client icelake-server cascadelake cooperlake tigerlake + atom + k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp athlon-mp k8 opteron athlon64 athlon-fx + k8-sse3 opteron-sse3 athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 bdver4 btver1 + btver2 znver1 znver2 + winchip-c6 winchip2 + c3 c3-2 c7 + + # ia64 + itanium itanium1 merced itanium2 mckinley + + # Sparc + v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934 + sparclet tsc701 v9 ultrasparc ultrasparc3 + + # RS/6000 & PowerPC + 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400 + 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2 + power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a + + # MIPS + 4kc 4km 4kp 4ksc 4kec 4kem 4kep 4ksd 5kc 5kf 20kc 24kc 24kf2_1 24kf1_1 24kec + 24kef2_1 24kef1_1 34kc 34kf2_1 34kf1_1 34kn 74kc 74kf2_1 74kf1_1 74kf3_2 1004kc + 1004kf2_1 1004kf1_1 i6400 i6500 interaptiv loongson2e loongson2f loongson3a + gs464 gs464e gs264e m4k m14k m14kc m14ke m14kec m5100 m5101 octeon octeon+ octeon2 + octeon3 orion p5600 p6600 r2000 r3000 r3900 r4000 r4400 r4600 r4650 r4700 r5900 + r6000 r8000 rm7000 rm9000 r10000 r12000 r14000 r16000 sb1 sr71000 vr4100 vr4111 + vr4120 vr4130 vr4300 vr5000 vr5400 vr5500 xlr xlp + + # HP/PA-RISC + 700 7100 7100lc 7200 7300 8000 + + # Advanced RISC Machines + armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312 + armv7 armv7s + + cortex-a9+vfpv3 cortex-a53 cortex-r5 cortex-r5+vfpv3-d16 + + # z Systems (aka s390x) + z196 zEC12 z13 z14 z15 + + : + propagated optional + ; diff --git a/src/boost/tools/build/src/tools/features/internal-feature.jam b/src/boost/tools/build/src/tools/features/internal-feature.jam new file mode 100644 index 000000000..0b323de30 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/internal-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# TODO: Documentation. + +import feature ; + +# Internal feature. +feature.feature library-file + : + : free dependency ; + +# Internal feature used to store the name of a bjam action to call when building +# a target. +feature.feature action + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/library-feature.jam b/src/boost/tools/build/src/tools/features/library-feature.jam new file mode 100644 index 000000000..0db67fe36 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/library-feature.jam @@ -0,0 +1,22 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.library]]`library`:: +This feature is almost equivalent to the +<`>> feature, except that it takes +effect only for linking. 
When you want to link all targets in a Jamfile to +certain library, the `` feature is preferred over `X` -- the +latter will add the library to all targets, even those that have nothing to do +with libraries. + +|# # end::doc[] + +feature.feature library + : + : free dependency incidental ; diff --git a/src/boost/tools/build/src/tools/features/link-feature.jam b/src/boost/tools/build/src/tools/features/link-feature.jam new file mode 100644 index 000000000..86cb86d09 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/link-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.link]]`link`:: +*Allowed values:* `shared`, `static` ++ +Controls how libraries are built. + +|# # end::doc[] + +feature.feature link + : shared static + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/linkflags-feature.jam b/src/boost/tools/build/src/tools/features/linkflags-feature.jam new file mode 100644 index 000000000..266aed50d --- /dev/null +++ b/src/boost/tools/build/src/tools/features/linkflags-feature.jam @@ -0,0 +1,17 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.linkflags]]`linkflags`:: +See <`>>. + +|# # end::doc[] + +feature.feature linkflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/local-visibility-feature.jam b/src/boost/tools/build/src/tools/features/local-visibility-feature.jam new file mode 100644 index 000000000..373a2c234 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/local-visibility-feature.jam @@ -0,0 +1,27 @@ +# Copyright 2018 Andrey Semashev +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.local-visibility]]`local-visibility`:: +*Allowed values:* `global`, `protected`, `hidden`. ++ +This feature has the same effect as the +<> feature but is intended +to be used by targets that require a particular symbol visibility. Unlike the +`visibility` feature, `local-visibility` is not inherited by the target +dependencies and only affects the target to which it is applied. ++ +The `local-visibility` feature supports the same values with the same meaning +as the `visibility` feature. By default, if `local-visibility` is not specified +for a target, the value of the `visibility` feature is used. + +|# # end::doc[] + +feature.feature local-visibility + : global protected hidden + : optional ; diff --git a/src/boost/tools/build/src/tools/features/location-feature.jam b/src/boost/tools/build/src/tools/features/location-feature.jam new file mode 100644 index 000000000..2730199a1 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/location-feature.jam @@ -0,0 +1,18 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.location]]`location`:: +Specifies the build directory for a target. The feature is used primarily with +<`>> rule. 
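A minimal sketch (editorial, not from the patch; the target and directory names are hypothetical) of the typical pairing of `location` with an `install` target:

----
# Hypothetical Jamfile fragment: stage the built executable into ./dist
# by setting <location> on an install target.
install dist : hello : <location>dist ;
----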
+ +|# # end::doc[] + +feature.feature location + : + : free path ; diff --git a/src/boost/tools/build/src/tools/features/location-prefix-feature.jam b/src/boost/tools/build/src/tools/features/location-prefix-feature.jam new file mode 100644 index 000000000..b60d07215 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/location-prefix-feature.jam @@ -0,0 +1,18 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.location-prefix]]`location-prefix`:: +Sets the build directory for a target as the project’s build directory prefixed +with the value of this feature. See section <> for an example. + +|# # end::doc[] + +feature.feature location-prefix + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/lto-feature.jam b/src/boost/tools/build/src/tools/features/lto-feature.jam new file mode 100644 index 000000000..359c2f99b --- /dev/null +++ b/src/boost/tools/build/src/tools/features/lto-feature.jam @@ -0,0 +1,46 @@ +# Copyright 2019 Dmitry Arkhipov +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.lto]]`lto`:: +*Allowed values:* `on`. ++ +Enables link time optimizations (also known as interprocedural optimizations or +whole-program optimizations). Currently supported toolsets are <>, +clang and <>. The feature is optional. + +|# # end::doc[] + +feature.feature lto + : on + : optional propagated ; + +#| tag::doc[] + +[[bbv2.builtin.features.lto-mode]]`lto-mode`:: +*Subfeature of* `lto` ++ +*Allowed values:* `full`, `thin`, `fat`. ++ +Specifies the type of LTO to use. ++ +`full`::: Use monolithic LTO: when linking, all input is merged into a single + module. +`thin`::: Use clang's ThinLTO: each compiled file contains a summary of the + module; these summaries are merged into a single index. This avoids + merging all modules together, which greatly reduces linking time. +`fat`::: Produce gcc's fat LTO objects: compiled files contain both the + intermediate language suitable for LTO and object code suitable for regular + linking. + +|# # end::doc[] + +feature.subfeature lto + : mode + : full thin fat + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/name-feature.jam b/src/boost/tools/build/src/tools/features/name-feature.jam new file mode 100644 index 000000000..97998ad28 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/name-feature.jam @@ -0,0 +1,22 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.name]]`name`:: +When used in requirements of a prebuilt library target this feature specifies +the name of the library (the name of the library file without any +platform-specific suffixes or prefixes). See <> for examples. ++ +When used in requirements of an `` target it specifies the name of the +target file.
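For example (an editorial sketch assuming a prebuilt zlib-style library; the names and paths are hypothetical), `name` is usually combined with `search` on a sourceless `lib` target:

----
# Hypothetical Jamfile fragment: declare a prebuilt library whose file is
# named libz.so / z.lib (platform-dependent) and lives in a custom directory.
lib z : : <name>z <search>/opt/zlib/lib ;
----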
+ +|# # end::doc[] + +feature.feature name + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/objcflags-feature.jam b/src/boost/tools/build/src/tools/features/objcflags-feature.jam new file mode 100644 index 000000000..b94e6f1e0 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/objcflags-feature.jam @@ -0,0 +1,32 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.mflags]]`mflags`:: +The value of this feature is passed without modification to the tool when +compiling Objective C sources. + + +|# # end::doc[] + +feature.feature mflags + : + : free optional ; + +#| tag::doc[] + +[[bbv2.builtin.features.mmflags]]`mmflags`:: +The value of this feature is passed without modification to the tool when +compiling Objective {CPP} sources. + + +|# # end::doc[] + +feature.feature mmflags + : + : free optional ; diff --git a/src/boost/tools/build/src/tools/features/optimization-feature.jam b/src/boost/tools/build/src/tools/features/optimization-feature.jam new file mode 100644 index 000000000..a9a89302f --- /dev/null +++ b/src/boost/tools/build/src/tools/features/optimization-feature.jam @@ -0,0 +1,46 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.optimization]]`optimization`:: +*Allowed values:* `off`, `speed`, `space`. ++ +Enables optimization. `speed` optimizes for faster code, `space` optimizes for +smaller binary. + +|# # end::doc[] + +feature.feature optimization + : off speed space + : propagated ; + +#| tag::inline-doc[] + +[[bbv2.builtin.features.inlining]]`inlining`:: +*Allowed values:* `off`, `on`, `full`. ++ +Enables inlining. + +|# # end::inline-doc[] + +feature.feature inlining + : off on full + : propagated ; + +#| tag::vector-doc[] + +[[bbv2.builtin.features.vectorize]]`vectorize`:: +*Allowed values:* `off`, `on`, `full`. ++ +Enables vectorization. + +|# # end::vector-doc[] + +feature.feature vectorize + : off on full + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/os-feature.jam b/src/boost/tools/build/src/tools/features/os-feature.jam new file mode 100644 index 000000000..ced1fc1ff --- /dev/null +++ b/src/boost/tools/build/src/tools/features/os-feature.jam @@ -0,0 +1,95 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; +import modules ; +import os ; + +.os-names = + aix android appletv bsd cygwin darwin freebsd haiku hpux iphone linux + netbsd openbsd osf qnx qnxnto sgi solaris unix unixware windows vms vxworks + freertos + + # Not actually an OS -- used for targeting bare metal where object + # format is ELF. This catches both -elf and -eabi gcc targets as well + # as other compilers targeting ELF. It is not clear how often we need + # the 'elf' key as opposed to other bare metal targets, but let us + # stick with gcc naming. + elf + ; + +# Feature used to determine which OS we're on. New and +# features should be used instead. 
+local os = [ modules.peek : OS ] ; +feature.feature os : $(os) : propagated link-incompatible ; + +# Translates from bjam current OS to the os tags used in host-os and +# target-os, i.e. returns the running host-os. +# +local rule default-host-os ( ) +{ + local host-os ; + if [ os.name ] in $(.os-names:U) + { + host-os = [ os.name ] ; + } + else + { + switch [ os.name ] + { + case NT : host-os = windows ; + case AS400 : host-os = unix ; + case MINGW : host-os = windows ; + case BSDI : host-os = bsd ; + case COHERENT : host-os = unix ; + case DRAGONFLYBSD : host-os = bsd ; + case IRIX : host-os = sgi ; + case HAIKU : host-os = haiku ; + case MACOSX : host-os = darwin ; + case KFREEBSD : host-os = freebsd ; + case LINUX : host-os = linux ; + case VMS : host-os = vms ; + case SUNOS : + ECHO + "SunOS is not a supported operating system." + "We believe last version of SunOS was released in 1992, " + "so if you get this message, something is very wrong with " + "configuration logic. Please report this as a bug. " ; + EXIT ; + case * : host-os = unix ; + } + } + return $(host-os:L) ; +} + + +# The two OS features define a known set of abstract OS names. The host-os is +# the OS under which bjam is running. Even though this should really be a fixed +# property we need to list all the values to prevent unknown value errors. Both +# set the default value to the current OS to account for the default use case of +# building on the target OS. +feature.feature host-os : $(.os-names) ; +feature.set-default host-os : [ default-host-os ] ; + +#| tag::doc[] + +[[bbv2.builtin.features.target-os]]`target-os`:: +*Allowed values:* `aix`, `android`, `appletv`, `bsd`, `cygwin`, `darwin`, +`freebsd`, `haiku`, `hpux`, `iphone`, `linux`, `netbsd`, `openbsd`, `osf`, +`qnx`, `qnxnto`, `sgi`, `solaris`, `unix`, `unixware`, `windows`, `vms`, +`vxworks`, `freertos`. ++ +Specifies the operating system for which the code is to be generated. The +compiler you used should be the compiler for that operating system. This option +causes B2 to use naming conventions suitable for that operating +system, and adjust build process accordingly. For example, with gcc, it +controls if import libraries are produced for shared libraries or not. ++ +See the section <> for details of cross-compilation. + +|# # end::doc[] + +feature.feature target-os : $(.os-names) : propagated link-incompatible ; +feature.set-default target-os : [ default-host-os ] ; diff --git a/src/boost/tools/build/src/tools/features/relevant-feature.jam b/src/boost/tools/build/src/tools/features/relevant-feature.jam new file mode 100644 index 000000000..417feee8c --- /dev/null +++ b/src/boost/tools/build/src/tools/features/relevant-feature.jam @@ -0,0 +1,48 @@ +# Copyright 2017 Steven Watanabe +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.relevant]]`relevant`:: +*Allowed values:* the name of any feature. ++ +Indicates which other features are relevant for a given target. It is usually +not necessary to manage it explicitly, as B2 can deduce it in most +cases. Features which are not relevant will not affect target paths, and will +not cause conflicts. 
++ +* A feature will be considered relevant if any of the following are true ++ +** It is referenced by `toolset.flags` or `toolset.uses-features` +** It is used by the requirements of a generator +** It is a sub-feature of a relevant feature +** It has a sub-feature which is relevant +** It is a composite feature, and any composed feature is relevant +** It affects target alternative selection for a main target +** It is a propagated feature and is relevant for any dependency +** It is relevant for any dependency created by the same main target +** It is used in the condition of a conditional property and the corresponding + value is relevant +** It is explicitly named as relevant ++ +* Relevant features cannot be automatically deduced in the following cases: ++ +** Indirect conditionals. Solution: return properties of the form +`result-feature:condition-feature` ++ +NOTE: This isn't really a conditional, although for most purposes it functions +like one. In particular, it does not support multiple comma-separated elements +in the condition, and it does work correctly even in contexts where conditional +properties are not allowed +** Action rules that read properties. Solution: add toolset.uses-features to + tell B2 that the feature is actually used. +** Generators and targets that manipulate property-sets directly. Solution: + set manually. + +|# # end::doc[] + +feature.feature relevant : : incidental free ; diff --git a/src/boost/tools/build/src/tools/features/response-file-feature.jam b/src/boost/tools/build/src/tools/features/response-file-feature.jam new file mode 100644 index 000000000..2aaca0e77 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/response-file-feature.jam @@ -0,0 +1,28 @@ +# Copyright 2020 RenĂ© Ferdinand Rivera Morell +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.response-file]]`response-file`:: +*Allowed values:* `auto`, `file`, `contents`. ++ +Controls whether a response file is used, or not, during the build of the +applicable target. For `file` a response file is created and the filename +replaced in the action. For `contents` the contents (`:E=`) is replaced +in the action and no response file is created. For `auto` either a response +file is created, or the contents replaced, based on the length of the +contents such that if the contents fits within the limits of the command +execution line length limits the contents is replaced. Otherwise a +response file is created and the filename is replaced in the actions. ++ +Supported for `clang-linux` and `msvc` toolsets. + +|# # end::doc[] + +feature.feature response-file + : auto file contents + : incidental ; diff --git a/src/boost/tools/build/src/tools/features/rtti-feature.jam b/src/boost/tools/build/src/tools/features/rtti-feature.jam new file mode 100644 index 000000000..f767831c6 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/rtti-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.rtti]]`rtti`:: +*Allowed values:* `on`, `off`. ++ +Disables run-time type information. 
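As a usage sketch (editorial; the target name is hypothetical), RTTI can be disabled for a single target through its requirements:

----
# Hypothetical Jamfile fragment: compile this library with RTTI turned off.
lib tight : tight.cpp : <rtti>off ;
----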
+ +|# # end::doc[] + +feature.feature rtti + : on off + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/runtime-feature.jam b/src/boost/tools/build/src/tools/features/runtime-feature.jam new file mode 100644 index 000000000..b27faff7f --- /dev/null +++ b/src/boost/tools/build/src/tools/features/runtime-feature.jam @@ -0,0 +1,40 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.runtime-debugging]]`runtime-debugging`:: +*Allowed values:* `on`, `off`. ++ +Specifies whether produced object files, executables, and libraries should +include behavior useful only for debugging, such as asserts. Typically, the +value of this feature is implicitly set by the `variant` feature, but it can be +explicitly specified by the user. The most common usage is to build release +variant with debugging output. + +|# # end::doc[] + +feature.feature runtime-debugging + : on off + : propagated ; + +#| tag::doc[] + +[[bbv2.builtin.features.runtime-link]]`runtime-link`:: +*Allowed values:* `shared`, `static` ++ +Controls if a static or shared C/{CPP} runtime should be used. There are some +restrictions how this feature can be used, for example on some compilers an +application using static runtime should not use shared libraries at all, and on +some compilers, mixing static and shared runtime requires extreme care. Check +your compiler documentation for more details. + +|# # end::doc[] + +feature.feature runtime-link + : shared static + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/sanitizers-feature.jam b/src/boost/tools/build/src/tools/features/sanitizers-feature.jam new file mode 100644 index 000000000..e7bf8097c --- /dev/null +++ b/src/boost/tools/build/src/tools/features/sanitizers-feature.jam @@ -0,0 +1,63 @@ +# Copyright 2019 Damian Jarek +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::addr-doc[] + +[[bbv2.builtin.features.address-sanitizer]]`address-sanitizer`:: +*Allowed values:* `on`, `norecover`. ++ +Enables address sanitizer. Value `norecover` disables recovery for the +sanitizer. The feature is optional, thus no sanitizer is enabled by default. + +|# # end::addr-doc[] + +feature.feature address-sanitizer + : on norecover + : propagated optional ; + +#| tag::leak-doc[] + +[[bbv2.builtin.features.leak-sanitizer]]`leak-sanitizer`:: +*Allowed values:* `on`, `norecover`. ++ +Enables leak sanitizer. Value `norecover` disables recovery for the +sanitizer. The feature is optional, thus no sanitizer is enabled by default. + +|# # end::leak-doc[] + +feature.feature leak-sanitizer + : on norecover + : propagated optional ; + +#| tag::thread-doc[] + +[[bbv2.builtin.features.thread-sanitizer]]`thread-sanitizer`:: +*Allowed values:* `on`, `norecover`. ++ +Enables thread sanitizer. Value `norecover` disables recovery for the +sanitizer. The feature is optional, thus no sanitizer is enabled by default. + +|# # end::thread-doc[] + +feature.feature thread-sanitizer + : on norecover + : propagated optional ; + +#| tag::undef-doc[] + +[[bbv2.builtin.features.undefined-sanitizer]]`undefined-sanitizer`:: +*Allowed values:* `on`, `norecover`. ++ +Enables undefined behavior sanitizer. Value `norecover` disables recovery for +the sanitizer. 
The feature is optional, thus no sanitizer is enabled by +default. + +|# # end::undef-doc[] + +feature.feature undefined-sanitizer + : on norecover + : propagated optional ; diff --git a/src/boost/tools/build/src/tools/features/search-feature.jam b/src/boost/tools/build/src/tools/features/search-feature.jam new file mode 100644 index 000000000..cee5622f7 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/search-feature.jam @@ -0,0 +1,20 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.search]]`search`:: +When used in requirements of a prebuilt library target this feature adds to the +list of directories to search for the library file. See <> +for examples. + +|# # end::doc[] + +feature.feature search + : + : free path #order-sensitive + ; diff --git a/src/boost/tools/build/src/tools/features/source-feature.jam b/src/boost/tools/build/src/tools/features/source-feature.jam new file mode 100644 index 000000000..3725cfd53 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/source-feature.jam @@ -0,0 +1,22 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.source]]`source`:: +The `X` property has the same effect on building a target as putting X +in the list of sources. It is useful when you want to add the same source to +all targets in the project (you can put `` in requirements) or to +conditionally include a source (using conditional requirements, see +the section <>. See also the +<`>> feature. + +|# # end::doc[] + +feature.feature source + : + : free dependency incidental ; diff --git a/src/boost/tools/build/src/tools/features/stdlib-feature.jam b/src/boost/tools/build/src/tools/features/stdlib-feature.jam new file mode 100644 index 000000000..4b83021d5 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/stdlib-feature.jam @@ -0,0 +1,29 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.stdlib]]`stdlib`:: +*Allowed values*: `native`, `gnu`, `gnu11`, `libc++`, `sun-stlport`, `apache`. ++ +Specifies C++ standard library to link to and in some cases the library ABI to +use: ++ +`native`::: Use compiler's default. +`gnu`::: Use GNU Standard Library (a.k.a. pass:[libstdc++]) with the old ABI. +`gnu11`::: Use GNU Standard Library with the new ABI. +`libc++`::: Use LLVM pass:[libc++]. +`sun-stlport`::: Use the STLport implementation of the standard library + provided with the Solaris Studio compiler. +`apache`::: Use the Apache stdcxx version 4 C++ standard library provided with + the Solaris Studio compiler. 
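A brief sketch (editorial, assuming a configured clang toolset; the target name is hypothetical) of requesting a particular standard library:

----
# Hypothetical Jamfile fragment: ask for LLVM libc++ for this executable.
exe app : app.cpp : <stdlib>libc++ ;
----

The same request can also be made globally on the command line, e.g. `b2 toolset=clang stdlib=libc++`.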
+ +|# # end::doc[] + +feature.feature stdlib + : native gnu gnu11 libc++ sun-stlport apache + : propagated composite ; diff --git a/src/boost/tools/build/src/tools/features/strip-feature.jam b/src/boost/tools/build/src/tools/features/strip-feature.jam new file mode 100644 index 000000000..60f4c1a35 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/strip-feature.jam @@ -0,0 +1,25 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.strip]]`strip`:: +*Allowed values:* `off`, `on`. ++ +Controls whether the binary should be stripped -- that is have everything not +necessary to running removed. ++ +NOTE: This feature will show up in target paths of everything, not just +binaries. + +|# # end::doc[] + +# TODO: Should fix that when implementing feature relevance. + +feature.feature strip + : off on + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/tag-feature.jam b/src/boost/tools/build/src/tools/features/tag-feature.jam new file mode 100644 index 000000000..e11a3bd58 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/tag-feature.jam @@ -0,0 +1,39 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.tag]]`tag`:: +Used to customize the name of the generated files. The value should have the +form: ++ +---- +@rulename +---- ++ +where _rulename_ should be a name of a rule with the following signature: ++ +---- +rule tag ( name : type ? : property-set ) +---- ++ +The rule will be called for each target with the default name computed by +B2, the type of the target, and property set. The rule can either +return a string that must be used as the name of the target, or an empty +string, in which case the default name will be used. ++ +Most typical use of the `tag` feature is to encode build properties, or library +version in library target names. You should take care to return non-empty +string from the tag rule only for types you care about -- otherwise, you might +end up modifying names of object files, generated header file and other targets +for which changing names does not make sense. + +|# # end::doc[] + +feature.feature tag + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/threadapi-feature.jam b/src/boost/tools/build/src/tools/features/threadapi-feature.jam new file mode 100644 index 000000000..35945ded0 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/threadapi-feature.jam @@ -0,0 +1,39 @@ +# Copyright 2017 Alexander Karzhenkov +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import property-set ; +import feature : feature ; +import toolset ; +import features/os-feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.threadapi]]`threadapi`:: +*Allowed values:* `pthread`, `win32`. ++ +Selects threading implementation. The default is `win32` if `` is +`windows` and `pthread` otherwise. 
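For instance (an editorial sketch; whether a pthread implementation is available when targeting Windows depends on the toolchain, e.g. MinGW with winpthreads), the default can be overridden per target:

----
# Hypothetical Jamfile fragment: force the pthread API even on Windows.
exe server : server.cpp : <threading>multi <threadapi>pthread ;
----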
+ +|# # end::doc[] + +feature threadapi : pthread win32 : symmetric propagated ; +toolset.add-defaults windows:win32 ; + +rule get-default ( property-set ) +{ + local api = pthread ; + if [ $(property-set).get ] = windows { api = win32 ; } + return $(api) ; +} + +# Obsolete rule that didn't quite work. Remove this +# after all references to it have been cleaned up. +rule detect ( properties * ) +{ + # local ps = [ property-set.create $(properties) ] ; + # local api = [ $(ps).get ] ; + # if ! $(api) { api = [ get-default $(ps) ] ; } + # return $(api) threadapi:target-os ; +} diff --git a/src/boost/tools/build/src/tools/features/threading-feature.jam b/src/boost/tools/build/src/tools/features/threading-feature.jam new file mode 100644 index 000000000..c199057d0 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/threading-feature.jam @@ -0,0 +1,24 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.threading]]`threading`:: +*Allowed values:* `single`, `multi` ++ +Controls whether the project should be built in multi-threaded mode. This feature +does not necessarily change code generation in the compiler, but it causes the +compiler to link to additional or different runtime libraries, and to define +additional preprocessor symbols (for example, `_MT` on Windows and `_REENTRANT` +on Linux). How those symbols affect the compiled code depends on the code +itself. + +|# # end::doc[] + +feature.feature threading + : single multi + : propagated ; diff --git a/src/boost/tools/build/src/tools/features/toolset-feature.jam b/src/boost/tools/build/src/tools/features/toolset-feature.jam new file mode 100644 index 000000000..7e9c6c238 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/toolset-feature.jam @@ -0,0 +1,20 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.toolset]]`toolset`:: +*Allowed values:* any of the toolset modules. ++ +Selects the toolset that will be used to build binary targets. The full list of +toolset modules is in the <> section. + +|# # end::doc[] + +feature.feature toolset + : + : implicit propagated symmetric ; diff --git a/src/boost/tools/build/src/tools/features/translate-path-feature.jam b/src/boost/tools/build/src/tools/features/translate-path-feature.jam new file mode 100644 index 000000000..c99f4655d --- /dev/null +++ b/src/boost/tools/build/src/tools/features/translate-path-feature.jam @@ -0,0 +1,34 @@ +# Copyright 2020 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.translate-path]]`translate-path`:: +Used to introduce custom path feature translation. The value should have the +form: ++ +---- +@rulename +---- ++ +where _rulename_ should be a name of a rule with the following signature: ++ +---- +rule rulename ( feature value : properties * : project-id : project-location ) +---- ++ +The rule is called for each target with the `feature` of a path property, +the path property value, target properties, the target project ID, and +the target project location.
It should return the translated path value, +or return nothing if it does not perform path translation, in which case the +default path translation is used. + +|# # end::doc[] + +feature.feature translate-path + : + : incidental free ; diff --git a/src/boost/tools/build/src/tools/features/user-interface-feature.jam b/src/boost/tools/build/src/tools/features/user-interface-feature.jam new file mode 100644 index 000000000..40f6ecd9b --- /dev/null +++ b/src/boost/tools/build/src/tools/features/user-interface-feature.jam @@ -0,0 +1,28 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.user-interface]]`user-interface`:: +*Allowed values:* `console`, `gui`, `wince`, `native`, `auto`. ++ +Specifies the environment for the executable, which affects the entry point +symbol (or entry point function) that the linker will select. This feature is +Windows-specific. ++ +`console`::: console application. +`gui`::: application does not require a console (it is supposed to create its + own windows). +`wince`::: application is intended to run on a device that has a version of the + Windows CE kernel. +`native`::: application runs without a subsystem environment. +`auto`::: application runs in the POSIX subsystem in Windows. + +|# # end::doc[] + +feature.feature user-interface + : console gui wince native auto ; diff --git a/src/boost/tools/build/src/tools/features/variant-feature.jam b/src/boost/tools/build/src/tools/features/variant-feature.jam new file mode 100644 index 000000000..13f73da97 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/variant-feature.jam @@ -0,0 +1,114 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; +import errors ; +import property ; + +#| tag::doc[] + +[[bbv2.builtin.features.variant]]`variant`:: +*Allowed values:* `debug`, `release`, `profile`. ++ +A feature combining several low-level features, making it easy to +request common build configurations. ++ +The value `debug` expands to ++ +---- +off on off on +---- ++ +The value `release` expands to ++ +---- +speed off full off +---- ++ +The value `profile` expands to the same as `release`, plus: ++ +---- +on on +---- ++ +Users can define their own build variants using the `variant` rule +from the `common` module. ++ +NOTE: Runtime debugging is on in debug builds to suit the expectations of +people used to various IDEs. + +|# # end::doc[] + +feature.feature variant + : + : implicit composite propagated symmetric ; + +# Declares a new variant. +# +# First determines explicit properties for this variant, by refining parents' +# explicit properties with the passed explicit properties. The result is +# remembered and will be used if this variant is used as a parent. +# +# Second, determines the full property set for this variant by adding to the +# explicit properties default values for all missing non-symmetric properties. +# +# Lastly, makes the appropriate value of the 'variant' property expand to the full +# property set. +# +rule variant ( name # Name of the variant + : parents-or-properties * # Specifies parent variants, if + # 'explicit-properties' are given, and + # explicit-properties or parents otherwise. + : explicit-properties * # Explicit properties. + ) +{ + local parents ; + if !
$(explicit-properties) + { + if $(parents-or-properties[1]:G) + { + explicit-properties = $(parents-or-properties) ; + } + else + { + parents = $(parents-or-properties) ; + } + } + else + { + parents = $(parents-or-properties) ; + } + + # The problem is that we have to check for conflicts between base variants. + if $(parents[2]) + { + errors.error "multiple base variants are not yet supported" ; + } + + local inherited ; + # Add explicitly specified properties for parents. + for local p in $(parents) + { + # TODO: This check may be made stricter. + if ! [ feature.is-implicit-value $(p) ] + { + errors.error "Invalid base variant" $(p) ; + } + + inherited += $(.explicit-properties.$(p)) ; + } + property.validate $(explicit-properties) ; + explicit-properties = [ property.refine $(inherited) + : $(explicit-properties) ] ; + + # Record explicitly specified properties for this variant. We do this after + # inheriting parents' properties so they affect other variants derived from + # this one. + .explicit-properties.$(name) = $(explicit-properties) ; + + feature.extend variant : $(name) ; + feature.compose $(name) : $(explicit-properties) ; +} +IMPORT $(__name__) : variant : : variant ; diff --git a/src/boost/tools/build/src/tools/features/version-feature.jam b/src/boost/tools/build/src/tools/features/version-feature.jam new file mode 100644 index 000000000..e4c6acb0a --- /dev/null +++ b/src/boost/tools/build/src/tools/features/version-feature.jam @@ -0,0 +1,19 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.version]]`version`:: +This feature isn't used by any of the builtin tools, but can be used, for +example, to adjust target's name via <`>> +feature. + +|# # end::doc[] + +feature.feature version + : + : free ; diff --git a/src/boost/tools/build/src/tools/features/visibility-feature.jam b/src/boost/tools/build/src/tools/features/visibility-feature.jam new file mode 100644 index 000000000..dadbc6743 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/visibility-feature.jam @@ -0,0 +1,46 @@ +# Copyright 2018 Andrey Semashev +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.visibility]]`visibility`:: +*Allowed values:* `global`, `protected`, `hidden`. ++ +Specifies the default symbol visibility in compiled binaries. Not all values +are supported on all platforms and on some platforms (for example, Windows) +symbol visibility is not supported at all. ++ +The supported values have the following meaning: ++ +`global`::: a.k.a. "default" in gcc documentation. Global symbols are + considered public, they are exported from shared libraries and can be + redefined by another shared library or executable. +`protected`::: a.k.a. "symbolic". Protected symbols are exported from shared + ibraries but cannot be redefined by another shared library or executable. + This mode is not supported on some platforms, for example OS X. +`hidden`::: Hidden symbols are not exported from shared libraries and cannot + be redefined by a different shared library or executable loaded in a process. + In this mode, public symbols have to be explicitly marked in the source code + to be exported from shared libraries. This is the recommended mode. 
++ +By default compiler default visibility mode is used (no compiler flags are +added). ++ +NOTE: In Boost super-project Jamroot file this property is set to the default +value of `hidden`. This means that Boost libraries are built with hidden +visibility by default, unless the user overrides it with a different +`visibility` or a library sets a different `local-visibility` (see below). + +|# # end::doc[] + +feature.feature visibility + : global protected hidden + : optional composite propagated ; + +feature.compose global : global ; +feature.compose protected : protected ; +feature.compose hidden : hidden ; diff --git a/src/boost/tools/build/src/tools/features/warnings-feature.jam b/src/boost/tools/build/src/tools/features/warnings-feature.jam new file mode 100644 index 000000000..a2eac7807 --- /dev/null +++ b/src/boost/tools/build/src/tools/features/warnings-feature.jam @@ -0,0 +1,41 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import feature ; + +#| tag::doc[] + +[[bbv2.builtin.features.warnings]]`warnings`:: +*Allowed values:* `on`, `all`, `extra`, `pedantic`, `off`. ++ +Controls the warning level of compilers. ++ +`on`::: enable default/"reasonable" warning level. +`all`::: enable most warnings. +`extra`::: enable extra, possibly conflicting, warnings. +`pedantic`::: enable likely inconsequential, and conflicting, warnings. +`off`::: disable all warnings. ++ +Default value is `all`. + +|# # end::doc[] + +feature.feature warnings + : on all extra pedantic off + : incidental propagated ; + +#| tag::doc[] + +[[bbv2.builtin.features.warnings-as-errors]]`warnings-as-errors`:: +*Allowed values:* `off`, `on`. ++ +Makes it possible to treat warnings as errors and abort compilation on a +warning. + +|# # end::doc[] + +feature.feature warnings-as-errors + : off on + : incidental propagated ; diff --git a/src/boost/tools/build/src/tools/flags.jam b/src/boost/tools/build/src/tools/flags.jam new file mode 100644 index 000000000..e5b2dbde0 --- /dev/null +++ b/src/boost/tools/build/src/tools/flags.jam @@ -0,0 +1,152 @@ +# Copyright 2018 Steven Watanabe +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# defines the check-has-flag rule. + +import "class" ; +import common ; +import feature : feature ; +import generators ; +import make ; +import print ; +import project ; +import toolset : flags ; + +rule init ( ) +{ + if ! $(.initialized) + { + .initialized = true ; + project.push-current ; + project.initialize $(__name__) ; + project /check/flags ; + .project = [ project.current ] ; + make empty.c : : @write-main ; + make empty.cpp : : @write-main ; + obj empty.obj : empty.cpp ; + project : requirements on ; + project.pop-current ; + } +} + +rule write-main ( target : : properties * ) +{ + print.output $(target) ; + print.text "int main() { return 0; }\n" : yes ; +} + +# Applies true-properties if the toolset recognizes a specific flag. +# Otherwise applies false-properties. +# +# Option must be one of , , or . +# +# Example:: +# +# exe foo : foo.cpp : +# [ check-has-flag -std=c++11 : -std=c++11 ] ; +# +rule check-has-flag ( option message ? : true-properties * : false-properties * ) +{ + init ; + local id = [ MD5 $(option) ] ; + + if ! 
$(.targets.$(id)) + { + project.push-current $(.project) ; + switch $(option:G) + { + case : obj flags_$(id) : empty.c : $(option) ; + case : obj flags_$(id) : empty.cpp : $(option) ; + case : exe flags_$(id) : empty.obj : $(option) ; + case * : + import errors ; + errors.user-error "Don't know how to check $(option:G)" ; + } + project.pop-current ; + .targets.$(id) = true ; + } + message ?= "has $(option:G=)" ; + return [ check-target-builds /check/flags//flags_$(id) $(message) + : $(true-properties) : $(false-properties) ] ; +} + +IMPORT $(__name__) : check-has-flag : : check-has-flag ; + +feature flags.check : on : optional composite ; +feature.compose on : on ; + +# Some compilers don't have an easy way to cause an error +# for unknown options. In this case, we need to check +# their stdout/stderr. This generator will copy it's +# source, but will cause an error if the given pattern +# matches the output from the source. +# + +feature flags.pattern : : free ; + +class flag-check-generator : generator +{ + rule __init__ ( type : requirements * : pattern ) + { + generator.__init__ flags.check-output : $(type) : $(type)(%_valid) : + $(requirements) on ; + self.pattern = $(pattern) ; + } + rule run ( project name ? : property-set : sources * ) + { + property-set = [ property-set.create + [ property.change [ $(property-set).raw ] : ] + $(self.pattern) ] ; + return [ generator.run $(project) $(name) + : $(property-set) : $(sources) ] ; + } + rule action-class ( ) + { + return non-scanning-action ; + } +} + +# These generator definitions should probably be moved to the individual toolsets. + +# msvc-7.1 uses 4002. Later versions use 9002. +generators.register + [ class.new flag-check-generator OBJ : msvc : "(D[94]002)" ] ; +generators.register + [ class.new flag-check-generator EXE : msvc : "(LNK4044)" ] ; +generators.register + [ class.new flag-check-generator OBJ : intel : "(#10006)" ] ; +generators.register + [ class.new flag-check-generator EXE : intel : "(#10006)" ] ; +generators.override flags.check-output : all ; + +rule check-output-callback ( targets * : source-targets * : ignored * : output ? ) +{ + if [ MATCH [ on $(targets) return $(PATTERN) ] : $(output) ] + { + FLAG_CHECK_COMMAND on $(targets) = illegal-ad22d215a8bbd73 ; + } +} + +IMPORT $(__name__) : check-output-callback : : flags.check-output-callback ; + +flags flags.check-output PATTERN : ; + +rule check-output ( targets * : sources * : properties * ) +{ + local action = [ on $(sources) return $(.action) ] ; + local all-sources ; + for local t in [ $(action).targets ] + { + all-sources += [ $(t).actualize ] ; + } + REBUILDS $(targets) : $(sources) ; + __ACTION_RULE__ on $(all-sources) = flags.check-output-callback $(targets) ; + common.copy $(targets[1]) : $(sources[1]) ; +} + +actions check-output +{ + $(FLAG_CHECK_COMMAND) +} diff --git a/src/boost/tools/build/src/tools/fop.jam b/src/boost/tools/build/src/tools/fop.jam new file mode 100644 index 000000000..8ce748273 --- /dev/null +++ b/src/boost/tools/build/src/tools/fop.jam @@ -0,0 +1,69 @@ +# Copyright (C) 2003-2004 Doug Gregor and Dave Abrahams. Distributed +# under the Boost Software License, Version 1.0. 
(See accompanying +# file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) +# +# This module defines rules to handle generation of PDF and +# PostScript files from XSL Formatting Objects via Apache FOP + +import generators ; +import common ; +import boostbook ; + +generators.register-standard fop.render.pdf : FO : PDF ; +generators.register-standard fop.render.ps : FO : PS ; + +# Initializes the fop toolset. +# +rule init ( fop-command ? : java-home ? : java ? ) +{ + local has-command = $(.has-command) ; + + if $(fop-command) + { + .has-command = true ; + } + + if $(fop-command) || ! $(has-command) + { + fop-command = [ common.get-invocation-command fop : fop : $(fop-command) + : [ modules.peek : FOP_DIR ] ] ; + } + + if $(fop-command) + { + .FOP_COMMAND = $(fop-command) ; + } + + if $(java-home) || $(java) + { + .FOP_SETUP = ; + + + # JAVA_HOME is the location that java was installed to. + + if $(java-home) + { + .FOP_SETUP += [ common.variable-setting-command JAVA_HOME : $(java-home) ] ; + } + + # JAVACMD is the location that of the java executable, useful for a + # non-standard java installation, where the executable isn't at + # $JAVA_HOME/bin/java. + + if $(java) + { + .FOP_SETUP += [ common.variable-setting-command JAVACMD : $(java) ] ; + } + } +} + +actions render.pdf +{ + $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) $(<) +} + +actions render.ps +{ + $(.FOP_SETUP) $(.FOP_COMMAND:E=fop) $(>) -ps $(<) +} diff --git a/src/boost/tools/build/src/tools/fortran.jam b/src/boost/tools/build/src/tools/fortran.jam new file mode 100644 index 000000000..3109f394d --- /dev/null +++ b/src/boost/tools/build/src/tools/fortran.jam @@ -0,0 +1,55 @@ +# Copyright (C) 2004 Toon Knapen +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# +# This file contains common settings for all fortran tools +# + +import "class" : new ; +import feature : feature ; + +import type ; +import generators ; +import common ; + +type.register FORTRAN : f F for f77 ; +type.register FORTRAN90 : f90 F90 ; + +feature fortran : : free ; +feature fortran90 : : free ; + +class fortran-compiling-generator : generator +{ + rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * ) + { + generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ; + } +} + +rule register-fortran-compiler ( id : source-types + : target-types + : requirements * : optional-properties * ) +{ + local g = [ new fortran-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ; + generators.register $(g) ; +} + +class fortran90-compiling-generator : generator +{ + rule __init__ ( id : source-types + : target-types + : requirements * : optional-properties * ) + { + generator.__init__ $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ; + } +} + +rule register-fortran90-compiler ( id : source-types + : target-types + : requirements * : optional-properties * ) +{ + local g = [ new fortran90-compiling-generator $(id) : $(source-types) : $(target-types) : $(requirements) : $(optional-properties) ] ; + generators.register $(g) ; +} + +# FIXME: this is ugly, should find a better way (we'd want client code to +# register all generators as "generator.some-rule", not with "some-module.some-rule".) 
+IMPORT $(__name__) : register-fortran-compiler : : generators.register-fortran-compiler ; +IMPORT $(__name__) : register-fortran90-compiler : : generators.register-fortran90-compiler ; diff --git a/src/boost/tools/build/src/tools/gcc.jam b/src/boost/tools/build/src/tools/gcc.jam new file mode 100644 index 000000000..47a113223 --- /dev/null +++ b/src/boost/tools/build/src/tools/gcc.jam @@ -0,0 +1,1268 @@ +# Copyright 2021 Nikita Kniazev +# Copyright 2001 David Abrahams +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2003 Vladimir Prus +# Copyright 2005 Reece H. Dunn +# Copyright 2006 Ilya Sokolov +# Copyright 2007 Roland Schwarz +# Copyright 2007 Boris Gubenko +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.gcc]] += GNU C++ + +The `gcc` module supports the http://gcc.gnu.org[GNU C++ compiler] on +Linux, a number of Unix-like system including SunOS and on Windows +(either http://www.cygwin.com[Cygwin] or http://www.mingw.org[MinGW]). + +The `gcc` module is initialized using the following syntax: + +---- +using gcc : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +If the version is not explicitly specified, it will be automatically +detected by running the compiler with the `-v` option. If the command is +not specified, the `g++` binary will be searched in PATH. + +The following options can be provided, using +_`option-value syntax`_: + +`asmflags`:: +Specifies additional compiler flags that will be used when compiling assembler +sources. + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`fflags`:: +Specifies additional compiler flags that will be used when compiling Fortran +sources. + +`mflags`:: +Specifies additional compiler flags that will be used when compiling +Objective-C sources. + +`mmflags`:: +Specifies additional compiler flags that will be used when compiling +Objective-C++ sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling any +language sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +`root`:: +Specifies root directory of the compiler installation. This option is +necessary only if it is not possible to detect this information from the +compiler command--for example if the specified compiler command is a user +script. + +`archiver`:: +Specifies the archiver command that is used to produce static +libraries. Normally, it is autodetected using gcc `-print-prog-name` +option or defaulted to `ar`, but in some cases you might want to +override it, for example to explicitly use a system version instead of +one included with gcc. + +`rc`:: +Specifies the resource compiler command that will be used with the +version of gcc that is being configured. This setting makes sense only +for Windows and only if you plan to use resource files. By default +`windres` will be used. + +`rc-type`:: +Specifies the type of resource compiler. The value can be either +`windres` for msvc resource compiler, or `rc` for borland's resource +compiler. 
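Putting the options above together, a hedged configuration sketch (the version number, paths, and flags are hypothetical) for a `user-config.jam` entry might look like:

----
# Hypothetical user-config.jam entry: a second gcc with extra C++ flags and
# an explicit archiver, using the <option>value syntax described above.
using gcc : 11 : /usr/local/bin/g++-11 :
    <cxxflags>-fconcepts
    <archiver>/usr/local/bin/gcc-ar-11 ;
----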
+ +In order to compile 64-bit applications, you have to specify +`address-model=64`, and the `instruction-set` feature should refer to a 64 +bit processor. Currently, those include `nocona`, `opteron`, `athlon64` and +`athlon-fx`. + +|# # end::doc[] + +import "class" : new ; +import common ; +import cygwin ; +import feature ; +import fortran ; +import generators ; +import os ; +import pch ; +import property ; +import property-set ; +import rc ; +import regex ; +import sequence ; +import set ; +import toolset ; +import type ; +import unix ; +import virtual-target ; +import errors ; + + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + + +feature.extend toolset : gcc ; + +toolset.inherit-generators gcc : unix : unix.link unix.link.dll ; +toolset.inherit-flags gcc : unix ; +toolset.inherit-rules gcc : unix ; + +generators.override gcc.prebuilt : builtin.prebuilt ; +generators.override gcc.searched-lib-generator : searched-lib-generator ; + +# Make gcc toolset object files use the "o" suffix on all platforms. +type.set-generated-target-suffix OBJ : gcc : o ; +type.set-generated-target-suffix OBJ : gcc windows : o ; +type.set-generated-target-suffix OBJ : gcc cygwin : o ; + + +# Initializes the gcc toolset for the given version. If necessary, command may +# be used to specify where the compiler is located. The parameter 'options' is a +# space-delimited list of options, each one specified as +# option-value. Valid option names are: cxxflags, linkflags and +# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or +# sun and the default value will be selected based on the current OS. +# Example: +# using gcc : 3.4 : : foo bar sun ; +# +# The compiler command to use is detected in three steps: +# 1) If an explicit command is specified by the user, it will be used and must +# be available. +# 2) If only a certain version is specified, it is enforced: +# - either the 'g++-VERSION' command must be available +# - or the default command 'g++' must be available and match the exact +# version. +# 3) Without user-provided restrictions use default 'g++'. +# +rule init ( version ? : command * : options * : requirement * ) +{ + # Information about the gcc command... + # The command. + command = [ common.find-compiler gcc : g++ : $(version) : $(command) ] ; + # The 'command' variable can have multiple elements but when calling the + # SHELL builtin we need a single string, and we need to quote elements + # with spaces. + local command-string = [ common.make-command-string $(command) ] ; + # The root directory of the tool install. + local root = [ feature.get-values : $(options) ] ; + # The bin directory where to find the command to execute. + local bin ; + # The compiler flavor. + local flavor = [ feature.get-values : $(options) ] ; + # vxworks build on windows uses csh that is neither mingw or cygwin + if [ feature.get-values : $(options) ] = vxworks + { + flavor ?= vxworks ; + } + # Autodetect the root and bin dir if not given. + if $(command) + { + bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; + root ?= $(bin:D) ; + } + local target-os ; + # Autodetect the version and flavor if not given. + if $(command) + { + local machine = [ MATCH "^([^ ]+)" : + [ SHELL "$(command-string) -dumpmachine" ] ] ; + if ! 
$(version) { # ?= operator does not short-circuit + version ?= [ get-short-version $(command-string) ] ; + } + switch $(machine:L) + { + case *mingw* : flavor ?= mingw ; + case *cygwin* : flavor ?= cygwin ; + } + switch $(machine:L) + { + case *mingw* : target-os ?= windows ; + case *cygwin* : target-os ?= cygwin ; + case *linux* : target-os ?= linux ; + # TODO: finish this list. + } + } + + local condition ; + condition = [ common.check-init-parameters gcc $(requirement) : version $(version) + : $(condition) ] ; + + common.handle-options gcc : $(condition) : $(command) : $(options) ; + + # Set the default target-os for this toolset. + if $(target-os) && ! [ feature.get-values : $(requirement) ] + { + local conditionx = [ regex.replace $(condition) "/" "," ] ; + toolset.add-defaults $(conditionx)\:$(target-os) ; + } + + # If gcc is installed in a non-standard location, we would need to add + # LD_LIBRARY_PATH when running programs created with it (for unit-test/run + # rules). + if $(command) + { + # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries + # and all must be added to LD_LIBRARY_PATH. The linker will pick the + # right onces. Note that we do not provide a clean way to build a 32-bit + # binary using a 64-bit compiler, but user can always pass -m32 + # manually. + local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ; + if $(.debug-configuration) + { + ECHO "notice:" using gcc libraries "::" $(condition) "::" $(lib_path) ; + } + toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ; + } + + # If we are not using a system gcc installation we should adjust the various + # programs as needed to prefer using their installation specific versions. + # This is essential for correct use of MinGW and for cross-compiling. + + # - Archive builder. + local archiver = [ common.get-invocation-command gcc + : [ .get-prog-name $(command-string) : ar : $(flavor) ] + : [ feature.get-values : $(options) ] + : $(bin) + : search-path ] ; + toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ; + if $(.debug-configuration) + { + ECHO "notice:" using gcc archiver "::" $(condition) "::" $(archiver[1]) ; + } + local arflags = [ feature.get-values : $(options) ] ; + toolset.flags gcc.archive .ARFLAGS $(condition) : $(arflags) ; + + # - Resource compiler. + local rc = [ common.get-invocation-command-nodefault gcc : windres : + [ feature.get-values : $(options) ] : $(bin) : search-path ] ; + local rc-type = [ feature.get-values : $(options) ] ; + rc-type ?= windres ; + if ! $(rc) + { + # If we can not find an RC compiler we fallback to a null one that + # creates empty object files. This allows the same Jamfiles to work + # across the board. The null RC uses assembler to create the empty + # objects, so configure that. + rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] + ; + rc-type = null ; + } + rc.configure $(rc) : $(condition) : $(rc-type) ; + + toolset.flags gcc VERSION $(condition) : [ regex.split $(version) "[.]" ] ; + + init-cxxstd-flags $(condition) : $(version) ; +} + +if [ os.name ] = NT +{ + # This causes single-line command invocation to not go through .bat files, + # thus avoiding command-line length limitations. + JAMSHELL = [ modules.peek : JAMSHELL ] ; + JAMSHELL ?= % ; +} + +rule get-full-version ( command-string ) +{ + # -dumpfullversion is only supported for gcc 7+. + # Passing both options works, as the first one that's + # recognized will be used. 
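+    # Illustrative (assumed) outputs only: a GCC 10 driver would print e.g.
+    # "10.2.0" for -dumpfullversion, while a pre-7 driver recognizes only
+    # -dumpversion and prints e.g. "4.9.3"; either way the leading
+    # digits-and-dots match below is what gets returned.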
+ return [ common.match-command-output version : "^([0-9.]+)" + : "$(command-string) -dumpfullversion -dumpversion" ] ; +} + +rule get-short-version ( command-string : single-digit-since ? ) +{ + local version = [ get-full-version $(command-string) ] ; + version = [ SPLIT_BY_CHARACTERS $(version) : . ] ; + + import version ; + if [ version.version-less $(version) : $(single-digit-since:E=5) ] + { + return $(version[1-2]:J=.) ; + } + + return $(version[1]) ; +} + +# Uses -print-prog-name to get the name of the tool. +# Converts the path to native form if using cygwin. +rule .get-prog-name ( command-string : tool : flavor ? ) +{ + local prog-name = [ NORMALIZE_PATH [ MATCH "(.*)[\n]+" : + [ SHELL "$(command-string) -print-prog-name=$(tool)" ] ] ] ; + + if $(flavor) = cygwin && [ os.name ] = NT + { + prog-name = [ cygwin.cygwin-to-windows-path $(prog-name) ] ; + } + return $(prog-name) ; +} + +### +### Functions that set options on the targets. +### + +local all-os = [ feature.values ] ; + +local rule compile-link-flags ( * ) +{ + toolset.flags gcc.compile OPTIONS $(1) : $(2) ; + toolset.flags gcc.link OPTIONS $(1) : $(2) ; +} + +{ + # This logic will add -fPIC for all compilations: + # + # lib a : a.cpp b ; + # obj b : b.cpp ; + # exe c : c.cpp a d ; + # obj d : d.cpp ; + # + # This all is fine, except that 'd' will be compiled with -fPIC even + # though it is not needed, as 'd' is used only in exe. However, it is + # hard to detect where a target is going to be used. Alternatively, we + # can set -fPIC only when main target type is LIB but than 'b' would be + # compiled without -fPIC which would lead to link errors on x86-64. So, + # compile everything with -fPIC. + # + # Yet another alternative would be to create a propagated + # feature and set it when building shared libraries, but that would be + # hard to implement and would increase the target path length even more. + + # On Windows, fPIC is the default, and specifying -fPIC explicitly leads + # to a warning. + local non-windows = [ set.difference $(all-os) : cygwin windows ] ; + compile-link-flags shared/$(non-windows) : -fPIC ; +} + +{ + # Handle address-model + compile-link-flags aix/32 : -maix32 ; + compile-link-flags aix/64 : -maix64 ; + + compile-link-flags hpux/32 : -milp32 ; + compile-link-flags hpux/64 : -mlp64 ; + + local generic-os = [ set.difference $(all-os) : aix hpux ] ; + local arch = power sparc x86 ; + compile-link-flags $(generic-os)/$(arch)/32 : -m32 ; + compile-link-flags $(generic-os)/$(arch)/64 : -m64 ; +} + +{ + # Handle threading + local rule threading-flags ( * ) + { + compile-link-flags multi/$(1) : $(2) ; + if $(3) + { + toolset.flags gcc.link FINDLIBS-SA multi/$(1) : $(3) ; + } + } + + threading-flags windows : -mthreads ; + threading-flags cygwin : -mthreads ; + threading-flags solaris : -pthreads : rt ; + threading-flags qnx : -pthread ; + + local bsd = [ MATCH ^(.*bsd)$ : $(all-os) ] ; + threading-flags $(bsd) : -pthread ; + + # iOS doesn't need pthread flag according to the https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/pthread.3.html + # The default system libraries include pthread functions. No additional libraries or CFLAGS are necessary to use this API. 
+ local no-threading = android beos haiku sgi darwin vxworks iphone appletv ; + local threading-generic-os = [ set.difference $(all-os) : $(no-threading) $(bsd) windows cygwin solaris qnx ] ; + threading-flags $(threading-generic-os) : -pthread : rt ; +} + +{ + local rule cxxstd-flags ( * ) + { + toolset.flags gcc.compile.c++ OPTIONS $(1) : $(2) ; + toolset.flags gcc.link OPTIONS $(1) : $(2) ; + } + + local cxxstd = [ feature.values ] ; + local dialects = [ feature.values ] ; + .cxxstd-dialects = [ set.difference $(dialects) : gnu iso ] ; + # C++ latest needs to be set up on a per-toolset basis + for local std in [ set.difference $(cxxstd) : latest ] + { + cxxstd-flags $(std)/iso : -std=c++$(std) ; + cxxstd-flags $(std)/gnu : -std=gnu++$(std) ; + # If we see this it's probably a mistake, but + # toolset.flags has no way to set up diagnostics. + cxxstd-flags $(std)/$(.cxxstd-dialects) : -std=c++$(std) ; + } + + local rule version-ge ( lhs : rhs ) + { + lhs = [ regex.split $(lhs) "[.]" ] ; + rhs = [ regex.split $(rhs) "[.]" ] ; + return [ sequence.compare $(rhs) : $(lhs) : numbers.less ] ; + } + # Version specific flags + local rule init-cxxstd-flags ( condition * : version ) + { + local std ; + if [ version-ge $(version) : 10 ] { std = 20 ; } + else if [ version-ge $(version) : 8 ] { std = 2a ; } + else if [ version-ge $(version) : 6 ] { std = 17 ; } + else if [ version-ge $(version) : 5 ] { std = 1z ; } + else if [ version-ge $(version) : 4.9 ] { std = 14 ; } + else if [ version-ge $(version) : 4.8 ] { std = 1y ; } + else if [ version-ge $(version) : 4.7 ] { std = 11 ; } + else if [ version-ge $(version) : 3.3 ] { std = 98 ; } + if $(std) + { + cxxstd-flags $(condition)/latest/iso : -std=c++$(std) ; + cxxstd-flags $(condition)/latest/gnu : -std=gnu++$(std) ; + cxxstd-flags $(condition)/latest/$(.cxxstd-dialects) : -std=c++$(std) ; + } + } +} + +generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : gcc ; +generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : gcc ; +generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : gcc ; +generators.register-c-compiler gcc.compile.c : C : OBJ : gcc ; +generators.register-c-compiler gcc.compile.asm : ASM : OBJ : gcc ; +generators.register-c-compiler gcc.compile.m : OBJECTIVE_C : OBJ : gcc ; +generators.register-c-compiler gcc.compile.mm : OBJECTIVE_CPP : OBJ : gcc ; + +generators.register [ new fortran-compiling-generator + gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : gcc ] ; + +rule compile.c++.preprocess ( targets * : sources * : properties * ) +{ + # Some extensions are compiled as C++ by default. For others, we need to + # pass -x c++. We could always pass -x c++ but distcc does not work with it. + if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C + { + LANG on $(<) = "-x c++" ; + } + DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; +} + +rule compile.c.preprocess ( targets * : sources * : properties * ) +{ + # If we use the name g++ then default file suffix -> language mapping does + # not work. So have to pass -x option. Maybe, we can work around this by + # allowing the user to specify both C and C++ compiler names. + #if $(>:S) != .c + #{ + LANG on $(<) = "-x c" ; + #} + DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; +} + +rule compile.c++ ( targets * : sources * : properties * ) +{ + # Some extensions are compiled as C++ by default. For others, we need to + # pass -x c++. We could always pass -x c++ but distcc does not work with it. + if ! 
$(>:S) in .cc .cp .cxx .cpp .c++ .C + { + LANG on $(<) = "-x c++" ; + } + DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; +} + +rule compile.c ( targets * : sources * : properties * ) +{ + # If we use the name g++ then default file suffix -> language mapping does + # not work. So have to pass -x option. Maybe, we can work around this by + # allowing the user to specify both C and C++ compiler names. + #if $(>:S) != .c + #{ + LANG on $(<) = "-x c" ; + #} + DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; +} + +rule compile.fortran ( targets * : sources * : properties * ) +{ +} + +actions compile.c++ bind PCH_FILE +{ + "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -include"$(PCH_FILE:S=)" -I"$(INCLUDES)" -include"$(FORCE_INCLUDES)" -c -o "$(<:W)" "$(>:W)" +} + +actions compile.c bind PCH_FILE +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -include"$(PCH_FILE:S=)" -I"$(INCLUDES)" -include"$(FORCE_INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++.preprocess bind PCH_FILE +{ + "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -include"$(PCH_FILE:S=)" -I"$(INCLUDES)" -include"$(FORCE_INCLUDES)" "$(>:W)" -E >"$(<:W)" +} + +actions compile.c.preprocess bind PCH_FILE +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -include"$(PCH_FILE:S=)" -I"$(INCLUDES)" -include"$(FORCE_INCLUDES)" "$(>)" -E >$(<) +} + +actions compile.fortran +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -include"$(PCH_FILE:S=)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +rule compile.asm ( targets * : sources * : properties * ) +{ + LANG on $(<) = "-x assembler-with-cpp" ; +} + +actions compile.asm +{ + "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.m +{ + "$(CONFIG_COMMAND)" -x objective-c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.mm +{ + "$(CONFIG_COMMAND)" -x objective-c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +### +### Precompiled header use and generation. +### + +# The compiler looks for a precompiled header in each directory just before it +# looks for the include file in that directory. The name searched for is the +# name specified in the #include directive with ".gch" suffix appended. The +# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended +# to the full header name. + +type.set-generated-target-suffix PCH : gcc : gch ; + +# GCC-specific pch generator. +class gcc-pch-generator : pch-generator +{ + import project ; + import property-set ; + import type ; + + rule run-pch ( project name ? : property-set : sources + ) + { + # Find the header in sources. Ignore any CPP sources. + local header ; + for local s in $(sources) + { + if [ type.is-derived [ $(s).type ] H ] + { + header = $(s) ; + } + } + + local pch-file = [ generator.run $(project) $(name) : $(property-set) + : $(header) ] ; + + # Return result of base class and pch-file property as + # usage-requirements. + return + [ $(pch-file[1]).add-raw $(pch-file[2-]) -Winvalid-pch ] + $(pch-file[2-]) + ; + } +} + +# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The +# latter have HPP type, but HPP type is derived from H. The type of compilation +# is determined entirely by the destination type. 
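+
+# As an illustration only (the cpp-pch rule comes from the pch module, not
+# from this file), a project Jamfile could request a gcc precompiled header
+# roughly like this:
+#
+#   cpp-pch pch : include/pch.hpp : <include>include ;
+#   exe hello : hello.cpp pch ;
+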
+generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : on gcc ] ; +generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : on gcc ] ; + +# Override default do-nothing generators. +generators.override gcc.compile.c.pch : pch.default-c-pch-generator ; +generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ; + +toolset.flags gcc.compile PCH_FILE on : ; + +actions compile.c++.pch +{ + "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -include"$(FORCE_INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c.pch +{ + "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -include"$(FORCE_INCLUDES)" -c -o "$(<)" "$(>)" +} + +### +### General options, like optimization. +### + +# Declare flags and action for compilation. +toolset.flags gcc.compile OPTIONS off : -O0 ; +toolset.flags gcc.compile OPTIONS speed : -O3 ; +toolset.flags gcc.compile OPTIONS space : -Os ; + +toolset.flags gcc.compile OPTIONS off : -fno-inline ; +toolset.flags gcc.compile OPTIONS on : -Wno-inline ; +toolset.flags gcc.compile OPTIONS full : -finline-functions -Wno-inline ; + +toolset.flags gcc.compile OPTIONS off : -w ; +toolset.flags gcc.compile OPTIONS on : -Wall ; +toolset.flags gcc.compile OPTIONS all : -Wall ; +toolset.flags gcc.compile OPTIONS extra : -Wall -Wextra ; +toolset.flags gcc.compile OPTIONS pedantic : -Wall -Wextra -pedantic ; +toolset.flags gcc.compile OPTIONS on : -Werror ; + +toolset.flags gcc.compile OPTIONS on : -g ; +toolset.flags gcc.compile OPTIONS on : -pg ; + +toolset.flags gcc.compile OPTIONS hidden : -fvisibility=hidden ; +toolset.flags gcc.compile.c++ OPTIONS hidden : -fvisibility-inlines-hidden ; +toolset.flags gcc.compile OPTIONS protected : -fvisibility=protected ; +toolset.flags gcc.compile OPTIONS protected/darwin : ; +toolset.flags gcc.compile OPTIONS global : -fvisibility=default ; + +toolset.flags gcc.compile.c++ OPTIONS off : -fno-exceptions ; +toolset.flags gcc.compile.c++ OPTIONS off : -fno-rtti ; + +# sanitizers +toolset.flags gcc.compile.c++ OPTIONS on : -fsanitize=address -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS norecover : -fsanitize=address -fno-sanitize-recover=address -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS on : -fsanitize=leak -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS norecover : -fsanitize=leak -fno-sanitize-recover=leak -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS on : -fsanitize=thread -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS norecover : -fsanitize=thread -fno-sanitize-recover=thread -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS on : -fsanitize=undefined -fno-omit-frame-pointer ; +toolset.flags gcc.compile.c++ OPTIONS norecover : -fsanitize=undefined -fno-sanitize-recover=undefined -fno-omit-frame-pointer ; + +toolset.flags gcc.compile.c++ OPTIONS on : --coverage ; + +# configure Dinkum STL to match compiler options +toolset.flags gcc.compile.c++ DEFINES off/vxworks : _NO_RTTI ; +toolset.flags gcc.compile.c++ DEFINES off/vxworks : _NO_EX=1 ; + +# LTO +toolset.flags gcc.compile OPTIONS on/full : -flto ; +toolset.flags gcc.link OPTIONS on/full : -flto ; + +toolset.flags gcc.compile OPTIONS on/fat : -flto -ffat-lto-objects ; +toolset.flags gcc.link OPTIONS on/fat : -flto ; + +# ABI selection +toolset.flags gcc.compile.c++ DEFINES gnu : _GLIBCXX_USE_CXX11_ABI=0 ; +toolset.flags gcc.compile.c++ DEFINES gnu11 : 
_GLIBCXX_USE_CXX11_ABI=1 ; + +### +### User free feature options. +### + +toolset.flags gcc.compile USER_OPTIONS ; +toolset.flags gcc.compile.c++ USER_OPTIONS ; +toolset.flags gcc.compile.asm USER_OPTIONS ; +toolset.flags gcc.compile DEFINES ; +toolset.flags gcc.compile INCLUDES ; +toolset.flags gcc.compile FORCE_INCLUDES ; +toolset.flags gcc.compile.c++ TEMPLATE_DEPTH ; +toolset.flags gcc.compile.fortran USER_OPTIONS ; +toolset.flags gcc.compile.m USER_OPTIONS ; +toolset.flags gcc.compile.mm USER_OPTIONS ; + +### +### Linking generators and actions. +### + +# Class checking that we do not try to use the static property +# while creating or using a shared library, since it is not supported by +# gcc/libc. +class gcc-linking-generator : unix-linking-generator +{ + rule run ( project name ? : property-set : sources + ) + { + local target-os = [ $(property-set).get ] ; + local no-static-link = true ; + switch $(target-os) + { + case vms : no-static-link = ; + case windows : no-static-link = ; + } + + local properties = [ $(property-set).raw ] ; + local reason ; + if $(no-static-link) && static in $(properties) + { + if shared in $(properties) + { + reason = On gcc, DLLs can not be built with + 'static'. ; + } + else if [ type.is-derived $(self.target-types[1]) EXE ] + { + for local s in $(sources) + { + local type = [ $(s).type ] ; + if $(type) && [ type.is-derived $(type) SHARED_LIB ] + { + reason = On gcc, using DLLs together with the + 'static' option is not possible. ; + } + } + } + } + if $(reason) + { + ECHO "warning:" $(reason) ; + ECHO "warning:" It is suggested to use 'static' together + with 'static'. ; + } + else + { + return [ unix-linking-generator.run $(project) $(name) : + $(property-set) : $(sources) ] ; + } + } +} + +# The set of permissible input types is different on mingw. So, define two sets +# of generators, with mingw generators selected when target-os=windows. + +local g ; +g = [ new gcc-linking-generator gcc.mingw.link + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : EXE + : gcc windows ] ; +$(g).set-rule-name gcc.link.mingw ; +generators.register $(g) ; + +g = [ new gcc-linking-generator gcc.mingw.link.dll + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : IMPORT_LIB SHARED_LIB + : gcc windows ] ; +$(g).set-rule-name gcc.link.dll.mingw ; +generators.register $(g) ; + +generators.register + [ new gcc-linking-generator gcc.link + : LIB OBJ + : EXE + : gcc ] ; +generators.register + [ new gcc-linking-generator gcc.link.dll + : LIB OBJ + : SHARED_LIB + : gcc ] ; + +generators.override gcc.mingw.link : gcc.link ; +generators.override gcc.mingw.link.dll : gcc.link.dll ; + +# Cygwin is similar to msvc and mingw in that it uses import libraries. While in +# simple cases, it can directly link to a shared library, it is believed to be +# slower, and not always possible. Define cygwin-specific generators here. + +g = [ new gcc-linking-generator gcc.cygwin.link + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : EXE + : gcc cygwin ] ; +$(g).set-rule-name gcc.link ; +generators.register $(g) ; + +g = [ new gcc-linking-generator gcc.cygwin.link.dll + : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB + : IMPORT_LIB SHARED_LIB + : gcc cygwin ] ; +$(g).set-rule-name gcc.link.dll ; +generators.register $(g) ; + +generators.override gcc.cygwin.link : gcc.link ; +generators.override gcc.cygwin.link.dll : gcc.link.dll ; + +# Declare flags for linking. +# First, the common flags. 
+toolset.flags gcc.link OPTIONS on : -g ; +toolset.flags gcc.link OPTIONS on : -pg ; +toolset.flags gcc.link USER_OPTIONS ; +toolset.flags gcc.link LINKPATH ; +toolset.flags gcc.link FINDLIBS-ST ; +toolset.flags gcc.link FINDLIBS-SA ; +toolset.flags gcc.link LIBRARIES ; + +# Specify compile flags for linker as well as they may be needed for LTO +toolset.flags gcc.link OPTIONS hidden : -fvisibility=hidden -fvisibility-inlines-hidden ; +toolset.flags gcc.link OPTIONS protected : -fvisibility=protected ; +toolset.flags gcc.link OPTIONS protected/darwin : ; +toolset.flags gcc.link OPTIONS global : -fvisibility=default ; + +# sanitizers +toolset.flags gcc.link OPTIONS on : -fsanitize=address -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS norecover : -fsanitize=address -fno-sanitize-recover=address -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS on : -fsanitize=leak -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS norecover : -fsanitize=leak -fno-sanitize-recover=leak -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS on : -fsanitize=thread -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS norecover : -fsanitize=thread -fno-sanitize-recover=thread -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS on : -fsanitize=undefined -fno-omit-frame-pointer ; +toolset.flags gcc.link OPTIONS norecover : -fsanitize=undefined -fno-sanitize-recover=undefined -fno-omit-frame-pointer ; + +toolset.flags gcc.link OPTIONS on : --coverage ; + +toolset.flags gcc.link.dll .IMPLIB-COMMAND windows : "-Wl,--out-implib," ; +toolset.flags gcc.link.dll .IMPLIB-COMMAND cygwin : "-Wl,--out-implib," ; + +# target specific link flags +{ + # aix + + # On AIX we *have* to use the native linker. + # + # The -bnoipath strips the prepending (relative) path of libraries from + # the loader section in the target library or executable. Hence, during + # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded + # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without + # this option, the prepending (relative) path + library name is + # hard-coded in the loader section, causing *only* this path to be + # searched during load-time. Note that the AIX linker does not have an + # -soname equivalent, this is as close as it gets. + # + # The -bbigtoc option instrcuts the linker to create a TOC bigger than 64k. + # This is necessary for some submodules such as math, but it does make running + # the tests a tad slower. + # + # The above options are definitely for AIX 5.x, and most likely also for + # AIX 4.x and AIX 6.x. For details about the AIX linker see: + # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf + # + toolset.flags gcc.link OPTIONS aix : -Wl,-bnoipath -Wl,-bbigtoc ; + + # See note [1] + toolset.flags gcc.link OPTIONS aix/static : -static ; + + # darwin + + # On Darwin, the -s option to ld does not work unless we pass -static, + # and passing -static unconditionally is a bad idea. So, do not pass -s + # at all and darwin.jam will use a separate 'strip' invocation. + toolset.flags gcc.link RPATH darwin : ; + # This does not support -R. + toolset.flags gcc.link RPATH_OPTION darwin : -rpath ; + # -rpath-link is not supported at all. 
+ + # See note [1] + toolset.flags gcc.link OPTIONS darwin/static : -static ; + + # vxworks + # On VxWorks we want to reflect what ever special flags have been set in the + # environment for the CPU we are targeting in the cross build + toolset.flags gcc.link OPTIONS vxworks/on : -Wl,--strip-all ; + toolset.flags gcc.link OPTIONS vxworks/static : [ os.environ LDFLAGS_STATIC ] ; + toolset.flags gcc.link.dll OPTIONS vxworks : [ os.environ LDFLAGS_SO ] ; + toolset.flags gcc.link OPTIONS vxworks/shared : [ os.environ LDFLAGS_DYNAMIC ] ; + + # default + + local generic-os = [ set.difference $(all-os) : aix darwin vxworks solaris osf hpux iphone appletv ] ; + # Strip the binary when no debugging is needed. We use --strip-all flag + # as opposed to -s since icc (intel's compiler) is generally + # option-compatible with and inherits from the gcc toolset, but does not + # support -s. + toolset.flags gcc.link OPTIONS $(generic-os)/on : + -Wl,--strip-all ; + toolset.flags gcc.link START-GROUP $(generic-os) : + -Wl,--start-group ; + toolset.flags gcc.link END-GROUP $(generic-os) : -Wl,--end-group ; + + local rpath-os = [ set.difference $(all-os) : aix darwin vxworks solaris osf hpux windows ] ; + toolset.flags gcc.link RPATH $(rpath-os) : ; + toolset.flags gcc.link RPATH_OPTION $(rpath-os) : -rpath ; + toolset.flags gcc.link RPATH_LINK $(rpath-os) : ; + + # gnu ld has the ability to change the search behaviour for libraries + # referenced by the -l switch. These modifiers are -Bstatic and + # -Bdynamic and change search for -l switches that follow them. The + # following list shows the tried variants. Search stops at the first + # variant that has a match. + # + # *nix: -Bstatic -lxxx + # libxxx.a + # + # *nix: -Bdynamic -lxxx + # libxxx.so + # libxxx.a + # + # windows (mingw, cygwin) -Bstatic -lxxx + # libxxx.a + # xxx.lib + # + # windows (mingw, cygwin) -Bdynamic -lxxx + # libxxx.dll.a + # xxx.dll.a + # libxxx.a + # xxx.lib + # cygxxx.dll (*) + # libxxx.dll + # xxx.dll + # libxxx.a + # + # (*) This is for cygwin + # Please note that -Bstatic and -Bdynamic are not a guarantee that a + # static or dynamic lib indeed gets linked in. The switches only change + # search patterns! + + # On *nix mixing shared libs with static runtime is not a good idea. + toolset.flags gcc.link FINDLIBS-ST-PFX $(generic-os)/shared : -Wl,-Bstatic ; + toolset.flags gcc.link FINDLIBS-SA-PFX $(generic-os)/shared : -Wl,-Bdynamic ; + + # On windows allow mixing of static and dynamic libs with static + # runtime is not a good idea. + toolset.flags gcc.link FINDLIBS-ST-PFX windows/static : -Wl,-Bstatic ; + toolset.flags gcc.link FINDLIBS-SA-PFX windows/static : -Wl,-Bdynamic ; + toolset.flags gcc.link OPTIONS windows/static : -Wl,-Bstatic ; + + toolset.flags gcc.link HAVE_SONAME $(generic-os) : "" ; + toolset.flags gcc.link SONAME_OPTION $(generic-os) : -h ; + + # See note [1] + toolset.flags gcc.link OPTIONS $(generic-os)/static : -static ; + + # hpux + + toolset.flags gcc.link OPTIONS hpux/on : -Wl,-s ; + + toolset.flags gcc.link HAVE_SONAME hpux : "" ; + toolset.flags gcc.link SONAME_OPTION hpux : +h ; + + # osf + + # No --strip-all, just -s. + toolset.flags gcc.link OPTIONS osf/on : -Wl,-s ; + toolset.flags gcc.link RPATH osf : ; + # This does not support -R. + toolset.flags gcc.link RPATH_OPTION osf : -rpath ; + # -rpath-link is not supported at all. 
+ + # See note [1] + toolset.flags gcc.link OPTIONS osf/static : -static ; + + # sun + + toolset.flags gcc.link OPTIONS solaris/on : -Wl,-s ; + + toolset.flags gcc.link RPATH solaris : ; + # Solaris linker does not have a separate -rpath-link, but allows using + # -L for the same purpose. + toolset.flags gcc.link LINKPATH solaris : ; + + # This permits shared libraries with non-PIC code on Solaris. + # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the + # following is not needed. Whether -fPIC should be hardcoded, is a + # separate question. + # AH, 2004/10/16: it is still necessary because some tests link against + # static libraries that were compiled without PIC. + toolset.flags gcc.link OPTIONS solaris : -mimpure-text ; + + # See note [1] + toolset.flags gcc.link OPTIONS solaris/static : -static ; + + # [1] + # For static we made sure there are no dynamic libraries in the + # link. On HP-UX not all system libraries exist as archived libraries (for + # example, there is no libunwind.a), so, on this platform, the -static option + # cannot be specified. +} + + +# Enclose the RPATH variable on 'targets' in double quotes, unless it is already +# enclosed in single quotes. This special casing is done because it is common to +# pass '$ORIGIN' to linker -- and it has to have single quotes to prevent shell +# expansion -- and if we add double quotes then the preventing properties of +# single quotes disappear. +# +rule quote-rpath ( targets * ) +{ + local r = [ on $(targets[1]) return $(RPATH) ] ; + if ! [ MATCH ('.*') : $(r) ] + { + r = \"$(r)\" ; + } + RPATH on $(targets) = $(r) ; +} + +# Declare actions for linking. +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; + quote-rpath $(targets) ; +} + +rule link.dll ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; + quote-rpath $(targets) ; +} + +rule link.mingw ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +rule link.dll.mingw ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link.mingw bind LIBRARIES +{ + "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" @($(<:T).rsp:O=FC:<=@":>=":E=$(START-GROUP) "$(>:T)" "$(LIBRARIES:T)" $(FINDLIBS-ST-PFX:T) -l$(FINDLIBS-ST:T) $(FINDLIBS-SA-PFX:T) -l$(FINDLIBS-SA:T) $(END-GROUP)) $(OPTIONS) $(USER_OPTIONS) +} + +actions link.dll.mingw bind LIBRARIES +{ + "$(CONFIG_COMMAND)" -L"$(LINKPATH)" "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" -shared @($(<[-1]:T).rsp:O=FC:<=@":>=":E=$(START-GROUP) "$(>:T)" "$(LIBRARIES:T)" $(FINDLIBS-ST-PFX:T) -l$(FINDLIBS-ST:T) $(FINDLIBS-SA-PFX:T) -l$(FINDLIBS-SA:T) $(END-GROUP)) $(OPTIONS) $(USER_OPTIONS) +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS) +} + +### +### Archive library generation. +### + +# Default value. 
Mostly for the sake of intel-linux that inherits from gcc, but +# does not have the same logic to set the .AR variable. We can put the same +# logic in intel-linux, but that is hardly worth the trouble as on Linux, 'ar' +# is always available. +.AR = ar ; +.ARFLAGS = rsc ; + +toolset.flags gcc.archive AROPTIONS ; + +rule archive ( targets * : sources * : properties * ) +{ + # Always remove archive and start again. Here is the rationale from + # + # Andre Hentz: + # + # I had a file, say a1.c, that was included into liba.a. I moved a1.c to + # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd + # errors. After some debugging I traced it back to the fact that a1.o was + # *still* in liba.a + # + # Rene Rivera: + # + # Originally removing the archive was done by splicing an RM onto the + # archive action. That makes archives fail to build on NT when they have + # many files because it will no longer execute the action directly and blow + # the line length limit. Instead we remove the file in a different action, + # just before building the archive. + # + local clean.a = $(targets[1])(clean) ; + TEMPORARY $(clean.a) ; + NOCARE $(clean.a) ; + LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; + DEPENDS $(clean.a) : $(sources) ; + DEPENDS $(targets) : $(clean.a) ; + common.RmTemps $(clean.a) : $(targets) ; +} + +# Declare action for creating static libraries. +# The letter 'r' means to add files to the archive with replacement. Since we +# remove archive, we do not care about replacement, but there is no option "add +# without replacement". +# The letter 'c' suppresses the warning in case the archive does not exists yet. +# That warning is produced only on some platforms, for whatever reasons. +# +actions piecemeal archive +{ + "$(.AR)" $(AROPTIONS) $(.ARFLAGS) "$(<)" "$(>)" +} + +### +### CPU architecture and instruction set options. +### + +local rule cpu-flags ( toolset variable : architecture : instruction-set + : + values + : default ? ) +{ + if $(default) + { + toolset.flags $(toolset) $(variable) + $(architecture)/ : $(values) ; + } + toolset.flags $(toolset) $(variable) + /$(instruction-set) + $(architecture)/$(instruction-set) + : $(values) ; +} + + +# Set architecture/instruction-set options. +# +# x86 and compatible +# The 'native' option appeared in gcc 4.2 so we cannot safely use it as default. +# Use i686 instead for 32-bit. 
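+#
+# For illustration of the cpu-flags helper above: a call such as
+#
+#   cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
+#
+# makes -march=core2 apply when instruction-set=core2 is requested, with or
+# without an explicit architecture=x86, and the optional trailing 'default'
+# argument (used for sparc v7 further down) additionally applies the flags to
+# that architecture when no instruction-set is given. The first flags line
+# below supplies the i686 default for 32-bit x86 builds.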
+toolset.flags gcc OPTIONS x86/32/ : -march=i686 ; +cpu-flags gcc OPTIONS : x86 : native : -march=native ; +cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ; +cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ; +cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ; +cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ; +cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ; +cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ; +cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ; +cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ; +cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ; +cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ; +cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ; +cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ; +cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ; +cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ; +cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : conroe : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : conroe-xe : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : conroe-l : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : allendale : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : wolfdale : -march=core2 -msse4.1 ; +cpu-flags gcc OPTIONS : x86 : merom : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : merom-xe : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : kentsfield : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : kentsfield-xe : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : yorksfield : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : penryn : -march=core2 ; +cpu-flags gcc OPTIONS : x86 : corei7 : -march=corei7 ; +cpu-flags gcc OPTIONS : x86 : nehalem : -march=corei7 ; +cpu-flags gcc OPTIONS : x86 : corei7-avx : -march=corei7-avx ; +cpu-flags gcc OPTIONS : x86 : sandy-bridge : -march=corei7-avx ; +cpu-flags gcc OPTIONS : x86 : core-avx-i : -march=core-avx-i ; +cpu-flags gcc OPTIONS : x86 : ivy-bridge : -march=core-avx-i ; +cpu-flags gcc OPTIONS : x86 : haswell : -march=core-avx-i -mavx2 -mfma -mbmi -mbmi2 -mlzcnt ; +cpu-flags gcc OPTIONS : x86 : broadwell : -march=broadwell ; +cpu-flags gcc OPTIONS : x86 : skylake : -march=skylake ; +cpu-flags gcc OPTIONS : x86 : skylake-avx512 : -march=skylake-avx512 ; +cpu-flags gcc OPTIONS : x86 : cannonlake : -march=skylake-avx512 -mavx512vbmi -mavx512ifma -msha ; +cpu-flags gcc OPTIONS : x86 : icelake-client : -march=icelake-client ; +cpu-flags gcc OPTIONS : x86 : icelake-server : -march=icelake-server ; +cpu-flags gcc OPTIONS : x86 : cascadelake : -march=skylake-avx512 -mavx512vnni ; +cpu-flags gcc OPTIONS : x86 : cooperlake : -march=cooperlake ; +cpu-flags gcc OPTIONS : x86 : tigerlake : -march=tigerlake ; +cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ; +cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ; +cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ; +cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ; +cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ; +cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ; +cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ; +cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ; +## +cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ; +cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ; +cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ; +cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ; +cpu-flags gcc OPTIONS : x86 : k8-sse3 : -march=k8-sse3 ; +cpu-flags gcc OPTIONS : x86 : opteron-sse3 : -march=opteron-sse3 ; 
+cpu-flags gcc OPTIONS : x86 : athlon64-sse3 : -march=athlon64-sse3 ; +cpu-flags gcc OPTIONS : x86 : amdfam10 : -march=amdfam10 ; +cpu-flags gcc OPTIONS : x86 : barcelona : -march=barcelona ; +cpu-flags gcc OPTIONS : x86 : bdver1 : -march=bdver1 ; +cpu-flags gcc OPTIONS : x86 : bdver2 : -march=bdver2 ; +cpu-flags gcc OPTIONS : x86 : bdver3 : -march=bdver3 ; +cpu-flags gcc OPTIONS : x86 : bdver4 : -march=bdver4 ; +cpu-flags gcc OPTIONS : x86 : btver1 : -march=btver1 ; +cpu-flags gcc OPTIONS : x86 : btver2 : -march=btver2 ; +cpu-flags gcc OPTIONS : x86 : znver1 : -march=znver1 ; +cpu-flags gcc OPTIONS : x86 : znver2 : -march=znver2 ; +cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ; +cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ; +cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ; +cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ; +cpu-flags gcc OPTIONS : x86 : c7 : -march=c7 ; +## +cpu-flags gcc OPTIONS : x86 : atom : -march=atom ; +# Sparc +cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 : default ; +cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ; +cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ; +cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ; +cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ; +cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ; +cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ; +cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ; +cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ; +cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ; +cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ; +cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ; +cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ; +cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ; +# RS/6000 & PowerPC +cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ; +cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ; +cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ; +cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ; +cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ; +cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ; +cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ; +cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ; +cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ; +cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ; +cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ; +cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ; +cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ; +cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ; +cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ; +cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ; +cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ; +cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ; +cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ; +cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ; +cpu-flags gcc OPTIONS : power : power : -mcpu=power ; +cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ; +cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ; +cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ; +cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ; +cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ; +cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ; +cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ; +cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ; +cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ; +cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ; +cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ; +cpu-flags gcc 
OPTIONS : s390x : z196 : -march=z196 ; +cpu-flags gcc OPTIONS : s390x : zEC12 : -march=zEC12 ; +cpu-flags gcc OPTIONS : s390x : z13 : -march=z13 ; +cpu-flags gcc OPTIONS : s390x : z14 : -march=z14 ; +cpu-flags gcc OPTIONS : s390x : z15 : -march=z15 ; +# ARM +cpu-flags gcc OPTIONS : arm : cortex-a9+vfpv3 : -mcpu=cortex-a9 -mfpu=vfpv3 -mfloat-abi=hard ; +cpu-flags gcc OPTIONS : arm : cortex-a53 : -mcpu=cortex-a53 ; +cpu-flags gcc OPTIONS : arm : cortex-r5 : -mcpu=cortex-r5 ; +cpu-flags gcc OPTIONS : arm : cortex-r5+vfpv3-d16 : -mcpu=cortex-r5 -mfpu=vfpv3-d16 -mfloat-abi=hard ; +# AIX variant of RS/6000 & PowerPC +toolset.flags gcc AROPTIONS 64/aix : "-X64" ; diff --git a/src/boost/tools/build/src/tools/gcc.py b/src/boost/tools/build/src/tools/gcc.py new file mode 100644 index 000000000..020efb6f5 --- /dev/null +++ b/src/boost/tools/build/src/tools/gcc.py @@ -0,0 +1,875 @@ +# Status: being ported by Steven Watanabe +# Base revision: 47077 +# TODO: common.jam needs to be ported +# TODO: generators.jam needs to have register_c_compiler. +# +# Copyright 2001 David Abrahams. +# Copyright 2002-2006 Rene Rivera. +# Copyright 2002-2003 Vladimir Prus. +# Copyright (c) 2005 Reece H. Dunn. +# Copyright 2006 Ilya Sokolov. +# Copyright 2007 Roland Schwarz +# Copyright 2007 Boris Gubenko. +# Copyright 2008 Steven Watanabe +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import os +import subprocess +import re + +import bjam + +from b2.tools import unix, common, rc, pch, builtin +from b2.build import feature, type, toolset, generators, property_set +from b2.build.property import Property +from b2.util.utility import os_name, on_windows +from b2.manager import get_manager +from b2.build.generators import Generator +from b2.build.toolset import flags +from b2.util.utility import to_seq + + + +__debug = None + +def debug(): + global __debug + if __debug is None: + __debug = "--debug-configuration" in bjam.variable("ARGV") + return __debug + +feature.extend('toolset', ['gcc']) + + +toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll']) +toolset.inherit_flags('gcc', 'unix') +toolset.inherit_rules('gcc', 'unix') + +generators.override('gcc.prebuilt', 'builtin.prebuilt') +generators.override('gcc.searched-lib-generator', 'searched-lib-generator') + +# Target naming is determined by types/lib.jam and the settings below this +# comment. +# +# On *nix: +# libxxx.a static library +# libxxx.so shared library +# +# On windows (mingw): +# libxxx.lib static library +# xxx.dll DLL +# xxx.lib import library +# +# On windows (cygwin) i.e. cygwin +# libxxx.a static library +# xxx.dll DLL +# libxxx.dll.a import library +# +# Note: user can always override by using the @rule +# This settings have been chosen, so that mingw +# is in line with msvc naming conventions. For +# cygwin the cygwin naming convention has been chosen. + +# Make the "o" suffix used for gcc toolset on all +# platforms +type.set_generated_target_suffix('OBJ', ['gcc'], 'o') +type.set_generated_target_suffix('STATIC_LIB', ['gcc', 'cygwin'], 'a') + +type.set_generated_target_suffix('IMPORT_LIB', ['gcc', 'cygwin'], 'dll.a') +type.set_generated_target_prefix('IMPORT_LIB', ['gcc', 'cygwin'], 'lib') + +__machine_match = re.compile('^([^ ]+)') +__version_match = re.compile('^([0-9.]+)') + +def init(version = None, command = None, options = None): + """ + Initializes the gcc toolset for the given version. 
If necessary, command may + be used to specify where the compiler is located. The parameter 'options' is a + space-delimited list of options, each one specified as + option-value. Valid option names are: cxxflags, linkflags and + linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun + and the default value will be selected based on the current OS. + Example: + using gcc : 3.4 : : foo bar sun ; + """ + + options = to_seq(options) + command = to_seq(command) + + # Information about the gcc command... + # The command. + command = to_seq(common.get_invocation_command('gcc', 'g++', command)) + # The root directory of the tool install. + root = feature.get_values('', options) + root = root[0] if root else '' + # The bin directory where to find the command to execute. + bin = None + # The flavor of compiler. + flavor = feature.get_values('', options) + flavor = flavor[0] if flavor else '' + # Autodetect the root and bin dir if not given. + if command: + if not bin: + bin = common.get_absolute_tool_path(command[-1]) + if not root: + root = os.path.dirname(bin) + # Autodetect the version and flavor if not given. + if command: + machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0] + machine = __machine_match.search(machine_info).group(1) + + version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0] + version = __version_match.search(version_info).group(1) + if not flavor and machine.find('mingw') != -1: + flavor = 'mingw' + + condition = None + if flavor: + condition = common.check_init_parameters('gcc', None, + ('version', version), + ('flavor', flavor)) + else: + condition = common.check_init_parameters('gcc', None, + ('version', version)) + + if command: + command = command[0] + + common.handle_options('gcc', condition, command, options) + + linker = feature.get_values('', options) + if not linker: + if os_name() == 'OSF': + linker = 'osf' + elif os_name() == 'HPUX': + linker = 'hpux' ; + else: + linker = 'gnu' + + init_link_flags('gcc', linker, condition) + + # If gcc is installed in non-standard location, we'd need to add + # LD_LIBRARY_PATH when running programs created with it (for unit-test/run + # rules). + if command: + # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries + # and all must be added to LD_LIBRARY_PATH. The linker will pick the + # right ones. Note that we don't provide a clean way to build 32-bit + # binary with 64-bit compiler, but user can always pass -m32 manually. + lib_path = [os.path.join(root, 'bin'), + os.path.join(root, 'lib'), + os.path.join(root, 'lib32'), + os.path.join(root, 'lib64')] + if debug(): + print 'notice: using gcc libraries ::', condition, '::', lib_path + toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path) + + # If it's not a system gcc install we should adjust the various programs as + # needed to prefer using the install specific versions. This is essential + # for correct use of MinGW and for cross-compiling. + + # - The archive builder. + archiver = common.get_invocation_command('gcc', + 'ar', feature.get_values('', options), [bin], path_last=True) + toolset.flags('gcc.archive', '.AR', condition, [archiver]) + if debug(): + print 'notice: using gcc archiver ::', condition, '::', archiver + + # - The resource compiler. 
+ rc_command = common.get_invocation_command_nodefault('gcc', + 'windres', feature.get_values('', options), [bin], path_last=True) + rc_type = feature.get_values('', options) + + if not rc_type: + rc_type = 'windres' + + if not rc_command: + # If we can't find an RC compiler we fallback to a null RC compiler that + # creates empty object files. This allows the same Jamfiles to work + # across the board. The null RC uses the assembler to create the empty + # objects, so configure that. + rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True) + rc_type = 'null' + rc.configure([rc_command], condition, ['' + rc_type]) + +###if [ os.name ] = NT +###{ +### # This causes single-line command invocation to not go through .bat files, +### # thus avoiding command-line length limitations. +### JAMSHELL = % ; +###} + +#FIXME: when register_c_compiler is moved to +# generators, these should be updated +builtin.register_c_compiler('gcc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['gcc']) +builtin.register_c_compiler('gcc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['gcc']) +builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['gcc']) +builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['gcc']) +builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['gcc']) + +# pch support + +# The compiler looks for a precompiled header in each directory just before it +# looks for the include file in that directory. The name searched for is the +# name specified in the #include directive with ".gch" suffix appended. The +# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to +# full name of the header. + +type.set_generated_target_suffix('PCH', ['gcc'], 'gch') + +# GCC-specific pch generator. +class GccPchGenerator(pch.PchGenerator): + + # Inherit the __init__ method + + def run_pch(self, project, name, prop_set, sources): + # Find the header in sources. Ignore any CPP sources. + header = None + for s in sources: + if type.is_derived(s.type(), 'H'): + header = s + + # Error handling: Base header file name should be the same as the base + # precompiled header name. + header_name = header.name() + header_basename = os.path.basename(header_name).rsplit('.', 1)[0] + if header_basename != name: + location = project.project_module + ###FIXME: + raise Exception() + ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ; + + pch_file = Generator.run(self, project, name, prop_set, [header]) + + # return result of base class and pch-file property as usage-requirements + # FIXME: what about multiple results from generator.run? + return (property_set.create([Property('pch-file', pch_file[0]), + Property('cflags', '-Winvalid-pch')]), + pch_file) + + # Calls the base version specifying source's name as the name of the created + # target. As result, the PCH will be named whatever.hpp.gch, and not + # whatever.gch. + def generated_targets(self, sources, prop_set, project, name = None): + name = sources[0].name() + return Generator.generated_targets(self, sources, + prop_set, project, name) + +# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The +# latter have HPP type, but HPP type is derived from H. The type of compilation +# is determined entirely by the destination type. 
+generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['on', 'gcc' ])) +generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['on', 'gcc' ])) + +# Override default do-nothing generators. +generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator') +generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator') + +flags('gcc.compile', 'PCH_FILE', ['on'], ['']) + +# Declare flags and action for compilation +flags('gcc.compile', 'OPTIONS', ['off'], ['-O0']) +flags('gcc.compile', 'OPTIONS', ['speed'], ['-O3']) +flags('gcc.compile', 'OPTIONS', ['space'], ['-Os']) + +flags('gcc.compile', 'OPTIONS', ['off'], ['-fno-inline']) +flags('gcc.compile', 'OPTIONS', ['on'], ['-Wno-inline']) +flags('gcc.compile', 'OPTIONS', ['full'], ['-finline-functions', '-Wno-inline']) + +flags('gcc.compile', 'OPTIONS', ['off'], ['-w']) +flags('gcc.compile', 'OPTIONS', ['on'], ['-Wall']) +flags('gcc.compile', 'OPTIONS', ['all'], ['-Wall', '-pedantic']) +flags('gcc.compile', 'OPTIONS', ['on'], ['-Werror']) + +flags('gcc.compile', 'OPTIONS', ['on'], ['-g']) +flags('gcc.compile', 'OPTIONS', ['on'], ['-pg']) + +flags('gcc.compile.c++', 'OPTIONS', ['off'], ['-fno-rtti']) +flags('gcc.compile.c++', 'OPTIONS', ['off'], ['-fno-exceptions']) + +# On cygwin and mingw, gcc generates position independent code by default, and +# warns if -fPIC is specified. This might not be the right way of checking if +# we're using cygwin. For example, it's possible to run cygwin gcc from NT +# shell, or using crosscompiling. But we'll solve that problem when it's time. +# In that case we'll just add another parameter to 'init' and move this login +# inside 'init'. +if not os_name () in ['CYGWIN', 'NT']: + # This logic will add -fPIC for all compilations: + # + # lib a : a.cpp b ; + # obj b : b.cpp ; + # exe c : c.cpp a d ; + # obj d : d.cpp ; + # + # This all is fine, except that 'd' will be compiled with -fPIC even though + # it's not needed, as 'd' is used only in exe. However, it's hard to detect + # where a target is going to be used. Alternative, we can set -fPIC only + # when main target type is LIB but than 'b' will be compiled without -fPIC. + # In x86-64 that will lead to link errors. So, compile everything with + # -fPIC. + # + # Yet another alternative would be to create propagated + # feature, and set it when building shared libraries, but that's hard to + # implement and will increase target path length even more. + flags('gcc.compile', 'OPTIONS', ['shared'], ['-fPIC']) + +if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX': + # OSF does have an option called -soname but it doesn't seem to work as + # expected, therefore it has been disabled. + HAVE_SONAME = '' + SONAME_OPTION = '-h' + + +flags('gcc.compile', 'USER_OPTIONS', [], ['']) +flags('gcc.compile.c++', 'USER_OPTIONS',[], ['']) +flags('gcc.compile', 'DEFINES', [], ['']) +flags('gcc.compile', 'INCLUDES', [], ['']) + +engine = get_manager().engine() + +engine.register_action('gcc.compile.c++.pch', + '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') + +engine.register_action('gcc.compile.c.pch', + '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') + + +def gcc_compile_cpp(targets, sources, properties): + # Some extensions are compiled as C++ by default. For others, we need to + # pass -x c++. We could always pass -x c++ but distcc does not work with it. 
+ extension = os.path.splitext (sources [0]) [1] + lang = '' + if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']: + lang = '-x c++' + get_manager().engine().set_target_variable (targets, 'LANG', lang) + engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) + +def gcc_compile_c(targets, sources, properties): + engine = get_manager().engine() + # If we use the name g++ then default file suffix -> language mapping does + # not work. So have to pass -x option. Maybe, we can work around this by + # allowing the user to specify both C and C++ compiler names. + #if $(>:S) != .c + #{ + engine.set_target_variable (targets, 'LANG', '-x c') + #} + engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) + +engine.register_action( + 'gcc.compile.c++', + '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' + + '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' + + '-c -o "$(<:W)" "$(>:W)"', + function=gcc_compile_cpp, + bound_list=['PCH_FILE']) + +engine.register_action( + 'gcc.compile.c', + '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' + + '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"', + function=gcc_compile_c, + bound_list=['PCH_FILE']) + +engine.register_action( + 'gcc.compile.c++.preprocess', + function=gcc_compile_cpp, + bound_list=['PCH_FILE'], + command=""" + $(CONFIG_COMMAND) $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)" + """ +) + +engine.register_action( + 'gcc.compile.c.preprocess', + function=gcc_compile_c, + bound_list=['PCH_FILE'], + command=""" + $(CONFIG_COMMAND) $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<) + """ +) + +def gcc_compile_asm(targets, sources, properties): + get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp') + +engine.register_action( + 'gcc.compile.asm', + '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"', + function=gcc_compile_asm) + + +class GccLinkingGenerator(unix.UnixLinkingGenerator): + """ + The class which check that we don't try to use the static + property while creating or using shared library, since it's not supported by + gcc/libc. + """ + def run(self, project, name, ps, sources): + # TODO: Replace this with the use of a target-os property. + + no_static_link = False + if bjam.variable('UNIX'): + no_static_link = True; + ##FIXME: what does this mean? +## { +## switch [ modules.peek : JAMUNAME ] +## { +## case * : no-static-link = true ; +## } +## } + + reason = None + if no_static_link and ps.get('runtime-link') == 'static': + if ps.get('link') == 'shared': + reason = "On gcc, DLL can't be build with 'static'." + elif type.is_derived(self.target_types[0], 'EXE'): + for s in sources: + source_type = s.type() + if source_type and type.is_derived(source_type, 'SHARED_LIB'): + reason = "On gcc, using DLLS together with the " +\ + "static options is not possible " + if reason: + print 'warning:', reason + print 'warning:',\ + "It is suggested to use 'static' together",\ + "with 'static'." 
; + return + else: + generated_targets = unix.UnixLinkingGenerator.run(self, project, + name, ps, sources) + return generated_targets + +if on_windows(): + flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,']) + generators.register( + GccLinkingGenerator('gcc.link', True, + ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], + [ 'EXE' ], + [ 'gcc' ])) + generators.register( + GccLinkingGenerator('gcc.link.dll', True, + ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], + ['IMPORT_LIB', 'SHARED_LIB'], + ['gcc'])) +else: + generators.register( + GccLinkingGenerator('gcc.link', True, + ['LIB', 'OBJ'], + ['EXE'], + ['gcc'])) + generators.register( + GccLinkingGenerator('gcc.link.dll', True, + ['LIB', 'OBJ'], + ['SHARED_LIB'], + ['gcc'])) + +# Declare flags for linking. +# First, the common flags. +flags('gcc.link', 'OPTIONS', ['on'], ['-g']) +flags('gcc.link', 'OPTIONS', ['on'], ['-pg']) +flags('gcc.link', 'USER_OPTIONS', [], ['']) +flags('gcc.link', 'LINKPATH', [], ['']) +flags('gcc.link', 'FINDLIBS-ST', [], ['']) +flags('gcc.link', 'FINDLIBS-SA', [], ['']) +flags('gcc.link', 'LIBRARIES', [], ['']) + +# For static we made sure there are no dynamic libraries in the +# link. On HP-UX not all system libraries exist as archived libraries (for +# example, there is no libunwind.a), so, on this platform, the -static option +# cannot be specified. +if os_name() != 'HPUX': + flags('gcc.link', 'OPTIONS', ['static'], ['-static']) + +# Now, the vendor specific flags. +# The parameter linker can be either gnu, darwin, osf, hpux or sun. +def init_link_flags(toolset, linker, condition): + """ + Now, the vendor specific flags. + The parameter linker can be either gnu, darwin, osf, hpux or sun. + """ + toolset_link = toolset + '.link' + if linker == 'gnu': + # Strip the binary when no debugging is needed. We use --strip-all flag + # as opposed to -s since icc (intel's compiler) is generally + # option-compatible with and inherits from the gcc toolset, but does not + # support -s. + + # FIXME: what does unchecked translate to? + flags(toolset_link, 'OPTIONS', map(lambda x: x + '/off', condition), ['-Wl,--strip-all']) # : unchecked ; + flags(toolset_link, 'RPATH', condition, ['']) # : unchecked ; + flags(toolset_link, 'RPATH_LINK', condition, ['']) # : unchecked ; + flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ; + flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ; + + # gnu ld has the ability to change the search behaviour for libraries + # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic + # and change search for -l switches that follow them. The following list + # shows the tried variants. + # The search stops at the first variant that has a match. + # *nix: -Bstatic -lxxx + # libxxx.a + # + # *nix: -Bdynamic -lxxx + # libxxx.so + # libxxx.a + # + # windows (mingw,cygwin) -Bstatic -lxxx + # libxxx.a + # xxx.lib + # + # windows (mingw,cygwin) -Bdynamic -lxxx + # libxxx.dll.a + # xxx.dll.a + # libxxx.a + # xxx.lib + # cygxxx.dll (*) + # libxxx.dll + # xxx.dll + # libxxx.a + # + # (*) This is for cygwin + # Please note that -Bstatic and -Bdynamic are not a guarantee that a + # static or dynamic lib indeed gets linked in. The switches only change + # search patterns! + + # On *nix mixing shared libs with static runtime is not a good idea. 
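+        # Note: the FINDLIBS-*-PFX values set just below are emitted directly
+        # in front of the matching -l options in the gcc.link action defined
+        # later in this module, so the resulting command lines look roughly
+        # like (library names 'foo' and 'bar' are only placeholders):
+        #
+        #     ... -Wl,-Bstatic -lfoo -Wl,-Bdynamic -lbar ...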
+ flags(toolset_link, 'FINDLIBS-ST-PFX', + map(lambda x: x + '/shared', condition), + ['-Wl,-Bstatic']) # : unchecked ; + flags(toolset_link, 'FINDLIBS-SA-PFX', + map(lambda x: x + '/shared', condition), + ['-Wl,-Bdynamic']) # : unchecked ; + + # On windows allow mixing of static and dynamic libs with static + # runtime. + flags(toolset_link, 'FINDLIBS-ST-PFX', + map(lambda x: x + '/static/windows', condition), + ['-Wl,-Bstatic']) # : unchecked ; + flags(toolset_link, 'FINDLIBS-SA-PFX', + map(lambda x: x + '/static/windows', condition), + ['-Wl,-Bdynamic']) # : unchecked ; + flags(toolset_link, 'OPTIONS', + map(lambda x: x + '/static/windows', condition), + ['-Wl,-Bstatic']) # : unchecked ; + + elif linker == 'darwin': + # On Darwin, the -s option to ld does not work unless we pass -static, + # and passing -static unconditionally is a bad idea. So, don't pass -s. + # at all, darwin.jam will use separate 'strip' invocation. + flags(toolset_link, 'RPATH', condition, ['']) # : unchecked ; + flags(toolset_link, 'RPATH_LINK', condition, ['']) # : unchecked ; + + elif linker == 'osf': + # No --strip-all, just -s. + flags(toolset_link, 'OPTIONS', map(lambda x: x + '/off', condition), ['-Wl,-s']) + # : unchecked ; + flags(toolset_link, 'RPATH', condition, ['']) # : unchecked ; + # This does not supports -R. + flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ; + # -rpath-link is not supported at all. + + elif linker == 'sun': + flags(toolset_link, 'OPTIONS', map(lambda x: x + '/off', condition), ['-Wl,-s']) + # : unchecked ; + flags(toolset_link, 'RPATH', condition, ['']) # : unchecked ; + # Solaris linker does not have a separate -rpath-link, but allows to use + # -L for the same purpose. + flags(toolset_link, 'LINKPATH', condition, ['']) # : unchecked ; + + # This permits shared libraries with non-PIC code on Solaris. + # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the + # following is not needed. Whether -fPIC should be hardcoded, is a + # separate question. + # AH, 2004/10/16: it is still necessary because some tests link against + # static libraries that were compiled without PIC. + flags(toolset_link, 'OPTIONS', map(lambda x: x + '/shared', condition), ['-mimpure-text']) + # : unchecked ; + + elif linker == 'hpux': + flags(toolset_link, 'OPTIONS', map(lambda x: x + '/off', condition), + ['-Wl,-s']) # : unchecked ; + flags(toolset_link, 'OPTIONS', map(lambda x: x + '/shared', condition), + ['-fPIC']) # : unchecked ; + + else: + # FIXME: + errors.user_error( + "$(toolset) initialization: invalid linker '$(linker)' " + + "The value '$(linker)' specified for is not recognized. " + + "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'") + +# Declare actions for linking. +def gcc_link(targets, sources, properties): + engine = get_manager().engine() + engine.set_target_variable(targets, 'SPACE', ' ') + # Serialize execution of the 'link' action, since running N links in + # parallel is just slower. For now, serialize only gcc links, it might be a + # good idea to serialize all links. 
+ engine.set_target_variable(targets, 'JAM_SEMAPHORE', 'gcc-link-semaphore') + +engine.register_action( + 'gcc.link', + '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' + + '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' + + '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' + + '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' + + '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' + + '$(OPTIONS) $(USER_OPTIONS)', + function=gcc_link, + bound_list=['LIBRARIES']) + +# Default value. Mostly for the sake of intel-linux that inherits from gcc, but +# does not have the same logic to set the .AR variable. We can put the same +# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is +# always available. +__AR = 'ar' + +flags('gcc.archive', 'AROPTIONS', [], ['']) + +def gcc_archive(targets, sources, properties): + # Always remove archive and start again. Here's rationale from + # + # Andre Hentz: + # + # I had a file, say a1.c, that was included into liba.a. I moved a1.c to + # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd + # errors. After some debugging I traced it back to the fact that a1.o was + # *still* in liba.a + # + # Rene Rivera: + # + # Originally removing the archive was done by splicing an RM onto the + # archive action. That makes archives fail to build on NT when they have + # many files because it will no longer execute the action directly and blow + # the line length limit. Instead we remove the file in a different action, + # just before building the archive. + clean = targets[0] + '(clean)' + bjam.call('TEMPORARY', clean) + bjam.call('NOCARE', clean) + engine = get_manager().engine() + engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE')) + engine.add_dependency(clean, sources) + engine.add_dependency(targets, clean) + engine.set_update_action('common.RmTemps', clean, targets) + +# Declare action for creating static libraries. +# The letter 'r' means to add files to the archive with replacement. Since we +# remove archive, we don't care about replacement, but there's no option "add +# without replacement". +# The letter 'c' suppresses the warning in case the archive does not exists yet. +# That warning is produced only on some platforms, for whatever reasons. +engine.register_action('gcc.archive', + '''"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" + ''', + function=gcc_archive, + flags=['piecemeal']) + +def gcc_link_dll(targets, sources, properties): + engine = get_manager().engine() + engine.set_target_variable(targets, 'SPACE', ' ') + engine.set_target_variable(targets, 'JAM_SEMAPHORE', 'gcc-link-semaphore') + engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME) + engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION) + +engine.register_action( + 'gcc.link.dll', + # Differ from 'link' above only by -shared. + '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' + + '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' + + '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' + + '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' + + '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' + + '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' + + '$(OPTIONS) $(USER_OPTIONS)', + function = gcc_link_dll, + bound_list=['LIBRARIES']) + +# Set up threading support. It's somewhat contrived, so perform it at the end, +# to avoid cluttering other code. 
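+#
+# As a rough summary of the branches below, <threading>multi typically adds:
+#
+#   mingw/cygwin           : -mthreads
+#   SunOS                  : -pthreads (and links librt)
+#   *BSD and DragonFly     : -pthread
+#   other unixes (Linux...): -pthread (and links librt)
+#
+# BeOS, IRIX and Darwin need nothing extra, and Haiku only links libroot.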
+
+if on_windows():
+    flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
+elif bjam.variable('UNIX'):
+    jamuname = bjam.variable('JAMUNAME')
+    host_os_name = jamuname[0]
+    if host_os_name.startswith('SunOS'):
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
+        flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+    elif host_os_name == 'BeOS':
+        # BeOS has no threading options, don't set anything here.
+        pass
+    elif host_os_name == 'Haiku':
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-lroot'])
+        # there is no -lrt on Haiku, and -pthread is implicit
+    elif host_os_name.endswith('BSD'):
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        # there is no -lrt on BSD
+    elif host_os_name == 'DragonFly':
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        # there is no -lrt on BSD - DragonFly is a FreeBSD variant,
+        # which annoyingly doesn't say it's a *BSD.
+    elif host_os_name == 'IRIX':
+        # gcc on IRIX does not support multi-threading, don't set anything here.
+        pass
+    elif host_os_name == 'Darwin':
+        # Darwin has no threading options, don't set anything here.
+        pass
+    else:
+        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+
+def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
+    #FIXME: for some reason this fails. Probably out of date feature code
+##    if default:
+##        flags(toolset, variable,
+##              ['<architecture>' + architecture + '/<instruction-set>'],
+##              values)
+    flags(toolset, variable,
+          #FIXME: same as above
+          [##'<architecture>/<instruction-set>' + instruction_set,
+           '<architecture>' + architecture + '/<instruction-set>' + instruction_set],
+          values)
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'native', ['-march=native'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'core2', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe-l', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'allendale', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'wolfdale', ['-march=core2', '-msse4.1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'merom', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'merom-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'kentsfield', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'kentsfield-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'yorksfield', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'penryn', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'corei7', ['-march=corei7']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'nehalem', ['-march=corei7']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'corei7-avx', ['-march=corei7-avx']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'sandy-bridge', ['-march=corei7-avx']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'core-avx-i', ['-march=core-avx-i']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'ivy-bridge', ['-march=core-avx-i']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'haswell', ['-march=core-avx-i', '-mavx2', '-mfma', '-mbmi', '-mbmi2', '-mlzcnt']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'broadwell', ['-march=broadwell']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'skylake', ['-march=skylake']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'skylake-avx512', ['-march=skylake-avx512']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'cannonlake', ['-march=skylake-avx512', '-mavx512vbmi', '-mavx512ifma', '-msha']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'icelake-client', ['-march=icelake-client']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'icelake-server', ['-march=icelake-server']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'cascadelake', ['-march=skylake-avx512', '-mavx512vnni']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'cooperlake', ['-march=cooperlake']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'tigerlake', ['-march=tigerlake']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp']) +## +cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'k8-sse3', ['-march=k8-sse3']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron-sse3', ['-march=opteron-sse3']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64-sse3', ['-march=athlon64-sse3']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'amdfam10', ['-march=amdfam10']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'barcelona', ['-march=barcelona']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver1', ['-march=bdver1']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver2', ['-march=bdver2']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver3', ['-march=bdver3']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'btver1', ['-march=btver1']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'btver2', ['-march=btver2']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'znver1', ['-march=znver1']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'znver2', ['-march=znver2']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2']) +cpu_flags('gcc', 'OPTIONS', 'x86', 'c7', ['-march=c7']) +## +cpu_flags('gcc', 'OPTIONS', 'x86', 'atom', ['-march=atom']) +# Sparc +flags('gcc', 'OPTIONS', ['sparc/32'], ['-m32']) +flags('gcc', 'OPTIONS', ['sparc/64'], ['-m64']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'], default=True) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress']) +cpu_flags('gcc', 
'OPTIONS', 'sparc', 'v8', ['-mcpu=v8']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3']) +# RS/6000 & PowerPC +flags('gcc', 'OPTIONS', ['power/32'], ['-m32']) +flags('gcc', 'OPTIONS', ['power/64'], ['-m64']) +cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403']) +cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505']) +cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601']) +cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602']) +cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603']) +cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e']) +cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604']) +cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e']) +cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620']) +cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630']) +cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740']) +cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400']) +cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450']) +cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750']) +cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801']) +cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821']) +cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823']) +cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860']) +cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970']) +cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540']) +cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power']) +cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2']) +cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3']) +cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4']) +cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5']) +cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc']) +cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64']) +cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios']) +cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1']) +cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2']) +cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc']) +cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64']) +cpu_flags('gcc', 'OPTIONS', 's390x', 'z196', ['-march=z196']) +cpu_flags('gcc', 'OPTIONS', 's390x', 'zEC12', ['-march=zEC12']) +cpu_flags('gcc', 'OPTIONS', 's390x', 'z13', ['-march=z13']) +cpu_flags('gcc', 'OPTIONS', 's390x', 'z14', ['-march=z14']) +cpu_flags('gcc', 'OPTIONS', 's390x', 'z15', ['-march=z15']) +# AIX variant of RS/6000 & PowerPC +flags('gcc', 'OPTIONS', ['power/32/aix'], ['-maix32']) +flags('gcc', 'OPTIONS', ['power/64/aix'], ['-maix64']) +flags('gcc', 'AROPTIONS', ['power/64/aix'], ['-X64']) diff --git a/src/boost/tools/build/src/tools/generate.jam b/src/boost/tools/build/src/tools/generate.jam new file mode 
100644 index 000000000..bd558d9cc --- /dev/null +++ b/src/boost/tools/build/src/tools/generate.jam @@ -0,0 +1,111 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Declares main target 'generate' used to produce targets by calling a +# user-provided rule that takes and produces virtual targets. + +import "class" : new ; +import errors ; +import feature ; +import param ; +import project ; +import property ; +import property-set ; +import targets ; +import regex ; + + +feature.feature generating-rule : : free ; + + +class generated-target-class : basic-target +{ + import errors ; + import indirect ; + import virtual-target ; + + rule __init__ ( name : project : sources * : requirements * + : default-build * : usage-requirements * ) + { + basic-target.__init__ $(name) : $(project) : $(sources) + : $(requirements) : $(default-build) : $(usage-requirements) ; + + if ! [ $(self.requirements).get ] + { + errors.user-error "The generate rule requires the " + "property to be set" ; + } + } + + rule construct ( name : sources * : property-set ) + { + local result ; + local gr = [ $(property-set).get ] ; + + # FIXME: this is a copy-paste from virtual-target.jam. We should add a + # utility rule to call a rule like this. + local rule-name = [ MATCH ^@(.*) : $(gr) ] ; + if $(rule-name) + { + if $(gr[2]) + { + local target-name = [ full-name ] ; + errors.user-error "Multiple properties" + "encountered for target $(target-name)." ; + } + + result = [ indirect.call $(rule-name) $(self.project) $(name) + : $(property-set) : $(sources) ] ; + + if ! $(result) + { + ECHO "warning: Unable to construct" [ full-name ] ; + } + } + + local ur ; + local targets ; + + if $(result) + { + if [ class.is-a $(result[1]) : property-set ] + { + ur = $(result[1]) ; + targets = $(result[2-]) ; + } + else + { + ur = [ property-set.empty ] ; + targets = $(result) ; + } + } + # FIXME: the following loop should be doable using sequence.transform or + # some similar utility rule. + local rt ; + for local t in $(targets) + { + rt += [ virtual-target.register $(t) ] ; + } + return $(ur) $(rt) ; + } +} + + +rule generate ( name : sources * : requirements * : default-build * + : usage-requirements * ) +{ + param.handle-named-params + sources requirements default-build usage-requirements ; + local project = [ project.current ] ; + + targets.main-target-alternative + [ new generated-target-class $(name) : $(project) + : [ targets.main-target-sources $(sources) : $(name) ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] + ] ; +} + +IMPORT $(__name__) : generate : : generate ; diff --git a/src/boost/tools/build/src/tools/generators/__init_generators__.jam b/src/boost/tools/build/src/tools/generators/__init_generators__.jam new file mode 100644 index 000000000..11ab5b9d8 --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/__init_generators__.jam @@ -0,0 +1,23 @@ +# Copyright 2017 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Here we automatically define any "generator" modules in this directory. 
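+#
+# For example, the file "archive-generator.jam" next to this one matches the
+# "*-generator.jam" glob used below and is therefore imported as the module
+# "generators/archive-generator".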
+ +local key = generator ; + +import os path modules ; + +.this-module's-file = [ modules.binding $(__name__) ] ; +.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ; +.to-load-jamfiles = [ path.glob $(.this-module's-dir) : *-$(key).jam ] ; +.to-load-modules = [ MATCH ^(.*)\.jam$ : $(.to-load-jamfiles) ] ; + +# A loop over all matched modules in this directory +for local m in $(.to-load-modules) +{ + m = [ path.basename $(m) ] ; + m = $(key)s/$(m) ; + import $(m) ; +} diff --git a/src/boost/tools/build/src/tools/generators/archive-generator.jam b/src/boost/tools/build/src/tools/generators/archive-generator.jam new file mode 100644 index 000000000..0d2484cea --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/archive-generator.jam @@ -0,0 +1,74 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; + +# The generator class for handling STATIC_LIB creation. +# +class archive-generator : generator +{ + import generators ; + import property-set ; + + rule __init__ ( id composing ? : source-types + : target-types + + : requirements * ) + { + composing ?= true ; + generator.__init__ $(id) $(composing) : $(source-types) + : $(target-types) : $(requirements) ; + } + + rule run ( project name ? : property-set : sources + ) + { + sources += [ $(property-set).get ] ; + + property-set = [ $(property-set).add-raw link ] ; + + local result = [ generator.run $(project) $(name) : $(property-set) + : $(sources) ] ; + + # For static linking, if we get a library in source, we can not directly + # link to it so we need to cause our dependencies to link to that + # library. There are two approaches: + # - adding the library to the list of returned targets. + # - using the usage requirements. + # The problem with the first is: + # + # lib a1 : : liba1.a ; + # lib a2 : a2.cpp a1 : static ; + # install dist : a2 ; + # + # here we will try to install 'a1', even though it is not necessary in + # the general case. With the second approach, even indirect dependants + # will link to the library, but it should not cause any harm. So, return + # all LIB sources together with created targets, so that dependants link + # to them. + local usage-requirements = link ; + if [ $(property-set).get ] = static + { + for local t in $(sources) + { + if [ $(t).type ] && [ type.is-derived [ $(t).type ] LIB ] + { + usage-requirements += $(t) ; + } + } + } + + return [ generators.add-usage-requirements $(result) : $(usage-requirements) ] ; + } +} + + +rule register-archiver ( id composing ? : source-types + : target-types + + : requirements * ) +{ + generators.register [ new archive-generator $(id) $(composing) + : $(source-types) : $(target-types) : $(requirements) ] ; +} + +IMPORT $(__name__) : register-archiver : : generators.register-archiver ; diff --git a/src/boost/tools/build/src/tools/generators/c-compiling-generator.jam b/src/boost/tools/build/src/tools/generators/c-compiling-generator.jam new file mode 100644 index 000000000..3de0d83a3 --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/c-compiling-generator.jam @@ -0,0 +1,70 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; +import virtual-target ; + +# Declare a special compiler generator. The only thing it does is changing the +# type used to represent 'action' in the constructed dependency graph to +# 'compile-action'. That class in turn adds additional include paths to handle +# cases when a source file includes headers which are generated themselves. +# +class C-compiling-generator : generator +{ + rule __init__ ( id : source-types + : target-types + : requirements * + : optional-properties * ) + { + generator.__init__ $(id) : $(source-types) : $(target-types) : + $(requirements) : $(optional-properties) ; + } + + rule action-class ( ) + { + return compile-action ; + } +} + + +rule register-c-compiler ( id : source-types + : target-types + : requirements * + : optional-properties * ) +{ + generators.register [ new C-compiling-generator $(id) : $(source-types) : + $(target-types) : $(requirements) : $(optional-properties) ] ; +} + +# FIXME: this is ugly, should find a better way (we would like client code to +# register all generators as "generators.some-rule" instead of +# "some-module.some-rule".) +# +IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ; + +class compile-action : action +{ + import sequence ; + + rule __init__ ( targets * : sources * : action-name : properties * ) + { + action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ; + } + + # For all virtual targets for the same dependency graph as self, i.e. which + # belong to the same main target, add their directories to the include path. + # + rule adjust-properties ( property-set ) + { + local s = [ $(self.targets[1]).creating-subvariant ] ; + if $(s) + { + return [ $(property-set).add-raw + [ $(s).implicit-includes "include" : H ] ] ; + } + else + { + return $(property-set) ; + } + } +} diff --git a/src/boost/tools/build/src/tools/generators/dummy-generator.jam b/src/boost/tools/build/src/tools/generators/dummy-generator.jam new file mode 100644 index 000000000..346c4afcc --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/dummy-generator.jam @@ -0,0 +1,20 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import generators ; + +# Generator that accepts everything and produces nothing. Useful as a general +# fallback for toolset-specific actions like PCH generation. +# +class dummy-generator : generator +{ + import property-set ; + + rule run ( project name ? : property-set : sources + ) + { + return [ property-set.empty ] ; + } +} diff --git a/src/boost/tools/build/src/tools/generators/lib-generator.jam b/src/boost/tools/build/src/tools/generators/lib-generator.jam new file mode 100644 index 000000000..0085f949b --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/lib-generator.jam @@ -0,0 +1,121 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; +import param ; +import project ; +import targets ; + +# The generator class for libraries (target type LIB). 
Depending on properties +# it will request building of the appropriate specific library type -- +# -- SHARED_LIB, STATIC_LIB or SHARED_LIB. +# +class lib-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) + : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : + $(17) : $(18) : $(19) ; + } + + rule run ( project name ? : property-set : sources * ) + { + # The lib generator is composing, and can be only invoked with an + # explicit name. This check is present in generator.run (and so in + # builtin.linking-generator) but duplicated here to avoid doing extra + # work. + if $(name) + { + local properties = [ $(property-set).raw ] ; + # Determine the needed target type. + local actual-type ; + # files can be generated by @rule feature + # in which case we do not consider it a SEARCHED_LIB type. + if ! in $(properties:G) && + ( in $(properties:G) || in $(properties:G) ) + { + actual-type = SEARCHED_LIB ; + } + else if in $(properties:G) + { + actual-type = LIB ; + } + else if shared in $(properties) + { + actual-type = SHARED_LIB ; + } + else + { + actual-type = STATIC_LIB ; + } + property-set = [ $(property-set).add-raw LIB link ] ; + # Construct the target. + local result = [ generators.construct $(project) $(name) : $(actual-type) + : $(property-set) : $(sources) ] ; + return [ $(result[1]).add-raw link ] $(result[2-]) ; + } + } + + rule viable-source-types ( ) + { + return * ; + } +} + +generators.register [ new lib-generator builtin.lib-generator : : LIB ] ; + +# The implementation of the 'lib' rule. Beyond standard syntax that rule allows +# simplified: "lib a b c ;". +# +rule lib ( names + : sources * : requirements * : default-build * : + usage-requirements * ) +{ + param.handle-named-params + sources requirements default-build usage-requirements ; + if $(names[2]) + { + if in $(requirements:G) + { + import errors ; + errors.user-error "When several names are given to the 'lib' rule" : + "it is not allowed to specify the feature." ; + } + if $(sources) + { + import errors ; + errors.user-error "When several names are given to the 'lib' rule" : + "it is not allowed to specify sources." ; + } + } + + # This is a circular module dependency so it must be imported here. + import targets ; + + local project = [ project.current ] ; + local result ; + + for local name in $(names) + { + local r = $(requirements) ; + # Support " lib a ; " and " lib a b c ; " syntax. + if ! $(sources) && ! in $(requirements:G) + && ! in $(requirements:G) + { + r += $(name) ; + } + result += [ targets.main-target-alternative + [ new typed-target $(name) : $(project) : LIB + : [ targets.main-target-sources $(sources) : $(name) ] + : [ targets.main-target-requirements $(r) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ] + ] ] ; + } + return $(result) ; +} +IMPORT $(__name__) : lib : : lib ; diff --git a/src/boost/tools/build/src/tools/generators/linking-generator.jam b/src/boost/tools/build/src/tools/generators/linking-generator.jam new file mode 100644 index 000000000..5c3f1a997 --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/linking-generator.jam @@ -0,0 +1,179 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; + +# The generator class for handling EXE and SHARED_LIB creation. +# +class linking-generator : generator +{ + import path ; + import project ; + import property-set ; + import type ; + + rule __init__ ( id + composing ? : # The generator will be composing if a non-empty + # string is passed or the parameter is not given. To + # make the generator non-composing, pass an empty + # string (""). + source-types + : + target-types + : + requirements * ) + { + composing ?= true ; + generator.__init__ $(id) $(composing) : $(source-types) + : $(target-types) : $(requirements) ; + } + + rule run ( project name ? : property-set : sources + ) + { + sources += [ $(property-set).get ] ; + + # Add properties for all searched libraries. + local extra = link ; + for local s in $(sources) + { + if [ $(s).type ] = SEARCHED_LIB + { + local search = [ $(s).search ] ; + extra += $(search) ; + } + } + + # It is possible that sources include shared libraries that did not came + # from 'lib' targets, e.g. .so files specified as sources. In this case + # we have to add extra dll-path properties and propagate extra xdll-path + # properties so that application linking to us will get xdll-path to + # those libraries. + local extra-xdll-paths ; + for local s in $(sources) + { + if [ $(s).type ] && [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ] + { + local location = [ path.root [ $(s).name ] + [ $(s).path ] ] ; + extra-xdll-paths += [ path.parent $(location) ] ; + } + } + + # Hardcode DLL paths only when linking executables. + # Pros: do not need to relink libraries when installing. + # Cons: "standalone" libraries (plugins, python extensions) can not + # hardcode paths to dependent libraries. + if [ $(property-set).get ] = true + && [ type.is-derived $(self.target-types[1]) EXE ] + { + local xdll-path = [ $(property-set).get ] ; + extra += $(xdll-path) $(extra-xdll-paths) ; + } + + if $(extra) + { + property-set = [ $(property-set).add-raw $(extra) ] ; + } + + local result = [ generator.run $(project) $(name) : $(property-set) + : $(sources) ] ; + + local ur ; + if $(result) + { + ur = [ extra-usage-requirements $(result[2-]) : $(property-set) ] ; + ur = [ $(ur).add-raw + link $(extra-xdll-paths) ] ; + ur = [ $(ur).add $(result[1]) ] ; + } + return $(ur) $(result[2-]) ; + } + + rule extra-usage-requirements ( created-targets * : property-set ) + { + local result = [ property-set.empty ] ; + local extra ; + + # Add appropriate usage requirements. + local raw = [ $(property-set).raw ] ; + if shared in $(raw) + { + local paths ; + local pwd = [ path.pwd ] ; + for local t in $(created-targets) + { + if [ type.is-derived [ $(t).type ] SHARED_LIB ] + { + paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ; + } + } + extra += $(paths:G=) ; + } + + # We need to pass features that we've got from sources, + # because if a shared library is built, exe using it needs to know paths + # to other shared libraries this one depends on in order to be able to + # find them all at runtime. + + # Just pass all features in property-set, it is theoretically possible + # that we will propagate features explicitly specified by + # the user, but then the user is to blame for using an internal feature. 
+ local values = [ $(property-set).get ] ; + extra += $(values:G=) ; + + if $(extra) + { + result = [ property-set.create $(extra) ] ; + } + return $(result) ; + } + + rule generated-targets ( sources + : property-set : project name ? ) + { + local sources2 ; # Sources to pass to inherited rule. + local properties2 ; # Properties to pass to inherited rule. + local libraries ; # Library sources. + + # Searched libraries are not passed as arguments to the linker but via + # some option. So, we pass them to the action using a property. + properties2 = [ $(property-set).raw ] ; + local fsa ; + local fst ; + for local s in $(sources) + { + if [ $(s).type ] && [ type.is-derived [ $(s).type ] SEARCHED_LIB ] + { + local name = [ $(s).name ] ; + if [ $(s).shared ] + { + fsa += $(name) ; + } + else + { + fst += $(name) ; + } + } + else + { + sources2 += $(s) ; + } + } + properties2 += $(fsa:J=&&) + $(fst:J=&&) ; + + return [ generator.generated-targets $(sources2) + : [ property-set.create $(properties2) ] : $(project) $(name) ] ; + } +} + + +rule register-linker ( id composing ? : source-types + : target-types + + : requirements * ) +{ + generators.register [ new linking-generator $(id) $(composing) + : $(source-types) : $(target-types) : $(requirements) ] ; +} + +IMPORT $(__name__) : register-linker : : generators.register-linker ; diff --git a/src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam b/src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam new file mode 100644 index 000000000..280ad9493 --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/prebuilt-lib-generator.jam @@ -0,0 +1,29 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; + +class prebuilt-lib-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) + : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : + $(17) : $(18) : $(19) ; + } + + rule run ( project name ? : property-set : sources * ) + { + local f = [ $(property-set).get ] ; + return $(f) $(sources) ; + } +} + +generators.register + [ new prebuilt-lib-generator builtin.prebuilt : : LIB : ] ; + +generators.override builtin.prebuilt : builtin.lib-generator ; diff --git a/src/boost/tools/build/src/tools/generators/searched-lib-generator.jam b/src/boost/tools/build/src/tools/generators/searched-lib-generator.jam new file mode 100644 index 000000000..cfde8de81 --- /dev/null +++ b/src/boost/tools/build/src/tools/generators/searched-lib-generator.jam @@ -0,0 +1,97 @@ +# Copyright 2002-2017 Rene Rivera +# Copyright 2002-2017 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; + +class searched-lib-generator : generator +{ + import property-set ; + + rule __init__ ( ) + { + # The requirements cause the generators to be tried *only* when we are + # building a lib target with a 'search' feature. This seems ugly --- all + # we want is to make sure searched-lib-generator is not invoked deep + # inside transformation search to produce intermediate targets. + generator.__init__ searched-lib-generator : : SEARCHED_LIB ; + } + + rule run ( project name ? 
: property-set : sources * ) + { + if $(name) + { + # If 'name' is empty, it means we have not been called to build a + # top-level target. In this case, we just fail immediately, because + # searched-lib-generator cannot be used to produce intermediate + # targets. + + local properties = [ $(property-set).raw ] ; + local shared ; + if shared in $(properties) + { + shared = true ; + } + + local search = [ feature.get-values : $(properties) ] ; + + local a = [ new null-action [ $(property-set).add-raw link search ] ] ; + local lib-name = [ feature.get-values : $(properties) ] ; + lib-name ?= $(name) ; + local t = [ new searched-lib-target $(lib-name) : $(project) + : $(shared) : $(search) : $(a) ] ; + # We return sources for a simple reason. If there is + # lib png : z : png ; + # the 'z' target should be returned, so that apps linking to 'png' + # will link to 'z', too. + return [ property-set.create $(search) link search name ] + [ virtual-target.register $(t) ] $(sources) ; + } + } +} + +generators.register [ new searched-lib-generator ] ; + +class searched-lib-target : abstract-file-target +{ + rule __init__ ( name + : project + : shared ? + : search * + : action + ) + { + abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project) + : $(action) : ; + + self.shared = $(shared) ; + self.search = $(search) ; + } + + rule shared ( ) + { + return $(self.shared) ; + } + + rule search ( ) + { + return $(self.search) ; + } + + rule actualize-location ( target ) + { + NOTFILE $(target) ; + } + + rule relevant ( ) + { + return [ property-set.create link search ] ; + } + + rule path ( ) + { + } +} diff --git a/src/boost/tools/build/src/tools/gettext.jam b/src/boost/tools/build/src/tools/gettext.jam new file mode 100644 index 000000000..71900b74f --- /dev/null +++ b/src/boost/tools/build/src/tools/gettext.jam @@ -0,0 +1,230 @@ +# Copyright 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module support GNU gettext internationalization utilities. +# +# It provides two main target rules: 'gettext.catalog', used for +# creating machine-readable catalogs from translations files, and +# 'gettext.update', used for update translation files from modified +# sources. +# +# To add i18n support to your application you should follow these +# steps. +# +# - Decide on a file name which will contain translations and +# what main target name will be used to update it. For example:: +# +# gettext.update update-russian : russian.po a.cpp my_app ; +# +# - Create the initial translation file by running:: +# +# bjam update-russian +# +# - Edit russian.po. For example, you might change fields like LastTranslator. +# +# - Create a main target for final message catalog:: +# +# gettext.catalog russian : russian.po ; +# +# The machine-readable catalog will be updated whenever you update +# "russian.po". The "russian.po" file will be updated only on explicit +# request. When you're ready to update translations, you should +# +# - Run:: +# +# bjam update-russian +# +# - Edit "russian.po" in appropriate editor. +# +# The next bjam run will convert "russian.po" into machine-readable form. +# +# By default, translations are marked by 'i18n' call. The 'gettext.keyword' +# feature can be used to alter this. 
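+#
+# Put together, a Jamfile for an application with a Russian translation might
+# contain (target and file names here are illustrative only):
+#
+#   exe my_app : main.cpp ;
+#   gettext.update update-russian : russian.po main.cpp my_app ;
+#   gettext.catalog russian : russian.po ;
+#
+# Running "bjam update-russian" refreshes russian.po from the sources; a
+# regular build then compiles russian.po into the machine-readable catalog.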
+ + +import targets ; +import property-set ; +import virtual-target ; +import "class" : new ; +import project ; +import type ; +import generators ; +import errors ; +import feature : feature ; +import toolset : flags ; +import regex ; + +.path = "" ; + +# Initializes the gettext module. +rule init ( path ? # Path where all tools are located. If not specified, + # they should be in PATH. + ) +{ + if $(.initialized) && $(.path) != $(path) + { + errors.error "Attempt to reconfigure with different path" ; + } + .initialized = true ; + if $(path) + { + .path = $(path)/ ; + } +} + +# Creates a main target 'name', which, when updated, will cause +# file 'existing-translation' to be updated with translations +# extracted from 'sources'. It's possible to specify main target +# in sources --- it which case all target from dependency graph +# of those main targets will be scanned, provided they are of +# appropricate type. The 'gettext.types' feature can be used to +# control the types. +# +# The target will be updated only if explicitly requested on the +# command line. +rule update ( name : existing-translation sources + : requirements * ) +{ + local project = [ project.current ] ; + + targets.main-target-alternative + [ new typed-target $(name) : $(project) : gettext.UPDATE : + $(existing-translation) $(sources) + : [ targets.main-target-requirements $(requirements) : $(project) ] + ] ; + $(project).mark-target-as-explicit $(name) ; +} + + +# The human editable source, containing translation. +type.register gettext.PO : po ; +# The machine readable message catalog. +type.register gettext.catalog : mo ; +# Intermediate type produce by extracting translations from +# sources. +type.register gettext.POT : pot ; +# Pseudo type used to invoke update-translations generator +type.register gettext.UPDATE ; + +# Identifies the keyword that should be used when scanning sources. +# Default: i18n +feature gettext.keyword : : free ; +# Contains space-separated list of sources types which should be scanned. +# Default: "C CPP" +feature gettext.types : : free ; + +generators.register-standard gettext.compile : gettext.PO : gettext.catalog ; + +class update-translations-generator : generator +{ + import regex : split ; + import property-set ; + + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + # The rule should be called with at least two sources. The first source + # is the translation (.po) file to update. The remaining sources are targets + # which should be scanned for new messages. All sources files for those targets + # will be found and passed to the 'xgettext' utility, which extracts the + # messages for localization. Those messages will be merged to the .po file. + rule run ( project name ? : property-set : sources * : multiple ? ) + { + local types = [ $(property-set).get ] ; + types ?= "C CPP" ; + types = [ regex.split $(types) " " ] ; + + local keywords = [ $(property-set).get ] ; + property-set = [ property-set.create $(keywords:G=) ] ; + + # First deterime the list of sources that must be scanned for + # messages. + local all-sources ; + # CONSIDER: I'm not sure if the logic should be the same as for 'stage': + # i.e. following dependency properties as well. 
+ for local s in $(sources[2-]) + { + all-sources += [ virtual-target.traverse $(s) : : include-sources ] ; + } + local right-sources ; + for local s in $(all-sources) + { + if [ $(s).type ] in $(types) + { + right-sources += $(s) ; + } + } + + local .constructed ; + if $(right-sources) + { + # Create the POT file, which will contain list of messages extracted + # from the sources. + local extract = + [ new action $(right-sources) : gettext.extract : $(property-set) ] ; + local new-messages = [ new file-target $(name) : gettext.POT + : $(project) : $(extract) ] ; + + # Create a notfile target which will update the existing translation file + # with new messages. + local a = [ new action $(sources[1]) $(new-messages) + : gettext.update-po-dispatch ] ; + local r = [ new notfile-target $(name) : $(project) : $(a) ] ; + .constructed = [ virtual-target.register $(r) ] ; + } + else + { + errors.error "No source could be scanned by gettext tools" ; + } + return $(.constructed) ; + } +} +generators.register [ new update-translations-generator gettext.update : : gettext.UPDATE ] ; + +flags gettext.extract KEYWORD ; +actions extract +{ + $(.path)xgettext -k$(KEYWORD:E=i18n) -o $(<) $(>) +} + +# Does really updating of po file. The tricky part is that +# we're actually updating one of the sources: +# $(<) is the NOTFILE target we're updating +# $(>[1]) is the PO file to be really updated. +# $(>[2]) is the PO file created from sources. +# +# When file to be updated does not exist (during the +# first run), we need to copy the file created from sources. +# In all other cases, we need to update the file. +rule update-po-dispatch +{ + NOCARE $(>[1]) ; + gettext.create-po $(<) : $(>) ; + gettext.update-po $(<) : $(>) ; + _ on $(<) = " " ; + ok on $(<) = "" ; + EXISTING_PO on $(<) = $(>[1]) ; +} + +# Due to fancy interaction of existing and updated, this rule can be called with +# one source, in which case we copy the lonely source into EXISTING_PO, or with +# two sources, in which case the action body expands to nothing. I'd really like +# to have "missing" action modifier. +actions quietly existing updated create-po bind EXISTING_PO +{ + cp$(_)"$(>[1])"$(_)"$(EXISTING_PO)"$($(>[2]:E=ok)) +} + +actions updated update-po bind EXISTING_PO +{ + $(.path)msgmerge$(_)-U$(_)"$(EXISTING_PO)"$(_)"$(>[1])" +} + +actions gettext.compile +{ + $(.path)msgfmt -o $(<) $(>) +} + +IMPORT $(__name__) : update : : gettext.update ; diff --git a/src/boost/tools/build/src/tools/gfortran.jam b/src/boost/tools/build/src/tools/gfortran.jam new file mode 100644 index 000000000..37fbd6cd1 --- /dev/null +++ b/src/boost/tools/build/src/tools/gfortran.jam @@ -0,0 +1,39 @@ +# Copyright (C) 2004 Toon Knapen +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import toolset : flags ; +import feature ; +import fortran ; + +rule init ( version ? 
: command * : options * ) +{ +} + +# Declare flags and action for compilation +flags gfortran OPTIONS ; + +flags gfortran OPTIONS off : -O0 ; +flags gfortran OPTIONS speed : -O3 ; +flags gfortran OPTIONS space : -Os ; + +flags gfortran OPTIONS on : -g ; +flags gfortran OPTIONS on : -pg ; + +flags gfortran OPTIONS shared/LIB : -fPIC ; + +flags gfortran DEFINES ; +flags gfortran INCLUDES ; + +rule compile.fortran +{ +} + +actions compile.fortran +{ + gcc -Wall $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)" +} + +generators.register-fortran-compiler gfortran.compile.fortran : FORTRAN FORTRAN90 : OBJ ; diff --git a/src/boost/tools/build/src/tools/hp_cxx.jam b/src/boost/tools/build/src/tools/hp_cxx.jam new file mode 100644 index 000000000..33514a002 --- /dev/null +++ b/src/boost/tools/build/src/tools/hp_cxx.jam @@ -0,0 +1,222 @@ +# Copyright 2001 David Abrahams. +# Copyright 2004, 2005 Markus Schoepflin. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.hp_cxx]] += HP C++ Compiler for Tru64 Unix + +The `hp_cxx` modules supports the +http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN[HP C++ Compiler] +for Tru64 Unix. + +The module is initialized using the following syntax: + +---- +using hp_cxx : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +If the command is not specified, B2 will search for a binary +named `hp_cxx` in PATH. + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +|# # end::doc[] + +# +# HP CXX compiler +# See http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN +# +# +# Notes on this toolset: +# +# - Because of very subtle issues with the default ansi mode, strict_ansi mode +# is used for compilation. One example of things that don't work correctly in +# the default ansi mode is overload resolution of function templates when +# mixed with non-template functions. +# +# - For template instantiation "-timplicit_local" is used. Previously, +# "-tlocal" has been tried to avoid the need for a template repository +# but this doesn't work with manually instantiated templates. "-tweak" +# has not been used to avoid the stream of warning messages issued by +# ar or ld when creating a library or linking an application. +# +# - Debug symbols are generated with "-g3", as this works both in debug and +# release mode. When compiling C++ code without optimization, we additionally +# use "-gall", which generates full symbol table information for all classes, +# structs, and unions. As this turns off optimization, it can't be used when +# optimization is needed. +# + +import feature generators common ; +import toolset : flags ; + +feature.extend toolset : hp_cxx ; +feature.extend c++abi : cxxarm ; + +# Inherit from Unix toolset to get library ordering magic. 
+toolset.inherit hp_cxx : unix ; + +generators.override hp_cxx.prebuilt : builtin.lib-generator ; +generators.override hp_cxx.prebuilt : builtin.prebuilt ; +generators.override hp_cxx.searched-lib-generator : searched-lib-generator ; + + +rule init ( version ? : command * : options * ) +{ + local condition = [ common.check-init-parameters hp_cxx : version $(version) ] ; + + local command = [ common.get-invocation-command hp_cxx : cxx : $(command) ] ; + + if $(command) + { + local root = [ common.get-absolute-tool-path $(command[-1]) ] ; + + if $(root) + { + flags hp_cxx .root $(condition) : "\"$(root)\"/" ; + } + } + # If we can't find 'cxx' anyway, at least show 'cxx' in the commands + command ?= cxx ; + + common.handle-options hp_cxx : $(condition) : $(command) : $(options) ; +} + +generators.register-c-compiler hp_cxx.compile.c++ : CPP : OBJ : hp_cxx ; +generators.register-c-compiler hp_cxx.compile.c : C : OBJ : hp_cxx ; + + + +# No static linking as far as I can tell. +# flags cxx LINKFLAGS static : -bstatic ; +flags hp_cxx.compile OPTIONS on : -g3 ; +flags hp_cxx.compile OPTIONS off/on : -gall ; +flags hp_cxx.link OPTIONS on : -g ; +flags hp_cxx.link OPTIONS off : -s ; + +flags hp_cxx.compile OPTIONS off : -O0 ; +flags hp_cxx.compile OPTIONS speed/on : -O2 ; +flags hp_cxx.compile OPTIONS speed : -O2 ; + +# This (undocumented) macro needs to be defined to get all C function +# overloads required by the C++ standard. +flags hp_cxx.compile.c++ OPTIONS : -D__CNAME_OVERLOADS ; + +# Added for threading support +flags hp_cxx.compile OPTIONS multi : -pthread ; +flags hp_cxx.link OPTIONS multi : -pthread ; + +flags hp_cxx.compile OPTIONS space/on : size ; +flags hp_cxx.compile OPTIONS space : -O1 ; +flags hp_cxx.compile OPTIONS off : -inline none ; + +# The compiler versions tried (up to V6.5-040) hang when compiling Boost code +# with full inlining enabled. So leave it at the default level for now. +# +# flags hp_cxx.compile OPTIONS full : -inline all ; + +flags hp_cxx.compile OPTIONS on : -pg ; +flags hp_cxx.link OPTIONS on : -pg ; + +# Selection of the object model. This flag is needed on both the C++ compiler +# and linker command line. + +# Unspecified ABI translates to '-model ansi' as most +# standard-conforming. +flags hp_cxx.compile.c++ OPTIONS : -model ansi : : hack-hack ; +flags hp_cxx.compile.c++ OPTIONS cxxarm : -model arm ; +flags hp_cxx.link OPTIONS : -model ansi : : hack-hack ; +flags hp_cxx.link OPTIONS cxxarm : -model arm ; + +# Display a descriptive tag together with each compiler message. This tag can +# be used by the user to explicitly suppress the compiler message. +flags hp_cxx.compile OPTIONS : -msg_display_tag ; + +flags hp_cxx.compile OPTIONS ; +flags hp_cxx.compile.c++ OPTIONS ; +flags hp_cxx.compile DEFINES ; +flags hp_cxx.compile INCLUDES ; +flags hp_cxx.link OPTIONS ; + +flags hp_cxx.link LIBPATH ; +flags hp_cxx.link LIBRARIES ; +flags hp_cxx.link FINDLIBS-ST ; +flags hp_cxx.link FINDLIBS-SA ; + +flags hp_cxx.compile.c++ TEMPLATE_DEPTH ; + +actions link bind LIBRARIES +{ + $(CONFIG_COMMAND) -noimplicit_include $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lrt -lm +} + +# When creating dynamic libraries, we don't want to be warned about unresolved +# symbols, therefore all unresolved symbols are marked as expected by +# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool +# chain. 
+ +actions link.dll bind LIBRARIES +{ + $(CONFIG_COMMAND) -shared -expect_unresolved \* -noimplicit_include $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) -lm +} + + +# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI +# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std +# is the default, no special flag is needed. +actions compile.c +{ + $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" +} + +# Note: The compiler is forced to compile the files as C++ (-x cxx) because +# otherwise it will silently ignore files with no file extension. +# +# Note: We deliberately don't suppress any warnings on the compiler command +# line, the user can always do this in a customized toolset later on. + +rule compile.c++ +{ + # We preprocess the TEMPLATE_DEPTH command line option here because we found + # no way to do it correctly in the actual action code. There we either get + # the -pending_instantiations parameter when no c++-template-depth property + # has been specified or we get additional quotes around + # "-pending_instantiations ". + local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ; + TEMPLATE_DEPTH on $(1) = "-pending_instantiations "$(template-depth) ; +} + +actions compile.c++ +{ + $(CONFIG_COMMAND) -x cxx -c -std strict_ansi -nopure_cname -noimplicit_include -timplicit_local -ptr "$(<[1]:D)/cxx_repository" $(OPTIONS) $(TEMPLATE_DEPTH) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)" +} + +# Always create archive from scratch. See the gcc toolet for rationale. +RM = [ common.rm-command ] ; +actions together piecemeal archive +{ + $(RM) "$(<)" + ar rc $(<) $(>) +} diff --git a/src/boost/tools/build/src/tools/hpfortran.jam b/src/boost/tools/build/src/tools/hpfortran.jam new file mode 100644 index 000000000..fc5998043 --- /dev/null +++ b/src/boost/tools/build/src/tools/hpfortran.jam @@ -0,0 +1,35 @@ +# Copyright (C) 2004 Toon Knapen +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import toolset : flags ; +import feature ; +import fortran ; + +rule init ( version ? : command * : options * ) +{ +} + +# Declare flags and action for compilation +flags hpfortran OPTIONS off : -O0 ; +flags hpfortran OPTIONS speed : -O3 ; +flags hpfortran OPTIONS space : -O1 ; + +flags hpfortran OPTIONS on : -g ; +flags hpfortran OPTIONS on : -pg ; + +flags hpfortran DEFINES ; +flags hpfortran INCLUDES ; + +rule compile.fortran +{ +} + +actions compile.fortran +{ + f77 +DD64 $(OPTIONS) -D$(DEFINES) -I$(INCLUDES) -c -o "$(<)" "$(>)" +} + +generators.register-fortran-compiler hpfortran.compile.fortran : FORTRAN : OBJ ; diff --git a/src/boost/tools/build/src/tools/ifort.jam b/src/boost/tools/build/src/tools/ifort.jam new file mode 100644 index 000000000..c23b02e1e --- /dev/null +++ b/src/boost/tools/build/src/tools/ifort.jam @@ -0,0 +1,44 @@ +# Copyright (C) 2004 Toon Knapen +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import toolset : flags ; +import feature ; +import fortran ; + +rule init ( version ? 
: command * : options * ) +{ +} + +# Declare flags and action for compilation +flags ifort OPTIONS ; + +flags ifort OPTIONS off : /Od ; +flags ifort OPTIONS speed : /O3 ; +flags ifort OPTIONS space : /O1 ; + +flags ifort OPTIONS on : "/debug:full" ; +flags ifort OPTIONS on : /Qprof_gen ; + +flags ifort.compile FFLAGS off/shared : /MD ; +flags ifort.compile FFLAGS on/shared : /MDd ; +flags ifort.compile FFLAGS off/static/single : /ML ; +flags ifort.compile FFLAGS on/static/single : /MLd ; +flags ifort.compile FFLAGS off/static/multi : /MT ; +flags ifort.compile FFLAGS on/static/multi : /MTd ; + +flags ifort DEFINES ; +flags ifort INCLUDES ; + +rule compile.fortran +{ +} + +actions compile.fortran +{ + ifort $(FFLAGS) $(OPTIONS) /names:lowercase /D$(DEFINES) /I"$(INCLUDES)" /c /object:"$(<)" "$(>)" +} + +generators.register-fortran-compiler ifort.compile.fortran : FORTRAN : OBJ ; diff --git a/src/boost/tools/build/src/tools/intel-darwin.jam b/src/boost/tools/build/src/tools/intel-darwin.jam new file mode 100644 index 000000000..87474e8f4 --- /dev/null +++ b/src/boost/tools/build/src/tools/intel-darwin.jam @@ -0,0 +1,188 @@ +# Copyright Vladimir Prus 2004. +# Copyright Noel Belcourt 2007. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import intel ; +import feature : feature ; +import os ; +import toolset ; +import toolset : flags ; +import gcc ; +import common ; +import errors ; +import generators ; + +feature.extend-subfeature toolset intel : platform : darwin ; + +toolset.inherit-generators intel-darwin + intel darwin + : gcc + # Don't inherit PCH generators. They were not tested, and probably + # don't work for this compiler. + : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch + ; + +generators.override intel-darwin.prebuilt : builtin.lib-generator ; +generators.override intel-darwin.prebuilt : builtin.prebuilt ; +generators.override intel-darwin.searched-lib-generator : searched-lib-generator ; + +toolset.inherit-rules intel-darwin : gcc ; +toolset.inherit-flags intel-darwin : gcc + : off on full space + off all on + extra pedantic + off on + x86/32 + x86/64 + ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +# Initializes the intel-darwin toolset +# version in mandatory +# name (default icc) is used to invoke the specified intel compiler +# compile and link options allow you to specify addition command line options for each version +rule init ( version ? 
: command * : options * ) +{ + local condition = [ common.check-init-parameters intel-darwin + : version $(version) ] ; + + command = [ common.get-invocation-command intel-darwin : icc + : $(command) : /opt/intel_cc_80/bin ] ; + + common.handle-options intel-darwin : $(condition) : $(command) : $(options) ; + + # handle + # local library-path = [ feature.get-values : $(options) ] ; + # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values : $(options) ] ; + + local root = [ feature.get-values : $(options) ] ; + local bin ; + if $(command) || $(root) + { + bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; + root ?= $(bin:D) ; + + if $(root) + { + # Libraries required to run the executable may be in either + # $(root)/lib (10.1 and earlier) + # or + # $(root)/lib/architecture-name (11.0 and later: + local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ; + if $(.debug-configuration) + { + ECHO notice\: using intel libraries "::" $(condition) "::" $(lib_path) ; + } + flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ; + } + } + + local m = [ MATCH (..).* : $(version) ] ; + local n = [ MATCH (.)\\. : $(m) ] ; + if $(n) { + m = $(n) ; + } + + local major = $(m) ; + + if $(major) = "9" { + flags intel-darwin.compile OPTIONS $(condition)/off : -Ob0 ; + flags intel-darwin.compile OPTIONS $(condition)/on : -Ob1 ; + flags intel-darwin.compile OPTIONS $(condition)/full : -Ob2 ; + flags intel-darwin.compile OPTIONS $(condition)/off : -vec-report0 ; + flags intel-darwin.compile OPTIONS $(condition)/on : -vec-report1 ; + flags intel-darwin.compile OPTIONS $(condition)/full : -vec-report5 ; + flags intel-darwin.link OPTIONS $(condition)/static : -static -static-libcxa -lstdc++ -lpthread ; + flags intel-darwin.link OPTIONS $(condition)/shared : -shared-libcxa -lstdc++ -lpthread ; + } + else { + flags intel-darwin.compile OPTIONS $(condition)/off : -inline-level=0 ; + flags intel-darwin.compile OPTIONS $(condition)/on : -inline-level=1 ; + flags intel-darwin.compile OPTIONS $(condition)/full : -inline-level=2 ; + flags intel-darwin.compile OPTIONS $(condition)/off : -vec-report0 ; + flags intel-darwin.compile OPTIONS $(condition)/on : -vec-report1 ; + flags intel-darwin.compile OPTIONS $(condition)/full : -vec-report5 ; + flags intel-darwin.link OPTIONS $(condition)/static : -static -static-intel -lstdc++ -lpthread ; + flags intel-darwin.link OPTIONS $(condition)/shared : -shared-intel -lstdc++ -lpthread ; + } + + local minor = [ MATCH ".*\\.(.).*" : $(version) ] ; + + # wchar_t char_traits workaround for compilers older than 10.2 + if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) { + flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ; + } + + # - Archive builder. 
+ local archiver = [ feature.get-values : $(options) ] ; + toolset.flags intel-darwin.archive .AR $(condition) : $(archiver[1]) ; +} + +SPACE = " " ; + +flags intel-darwin.compile OPTIONS ; +flags intel-darwin.compile.c++ OPTIONS ; +# flags intel-darwin.compile INCLUDES ; + +flags intel-darwin.compile OPTIONS space : -O1 ; # no specific space optimization flag in icc + +# +.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i + conroe conroe-xe conroe-l allendale merom + merom-xe kentsfield kentsfield-xe penryn wolfdale + yorksfield nehalem sandy-bridge ivy-bridge haswell + broadwell skylake skylake-avx512 cannonlake icelake-client + icelake-server cascadelake cooperlake tigerlake ; +.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3 + athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 + bdver4 btver1 btver2 znver1 znver2 ; +.cpu-type-x86-64 = $(.cpu-type-em64t) $(.cpu-type-amd64) ; + +flags intel-darwin.compile OPTIONS $(.cpu-type-x86-64)/32 : -m32 ; # -mcmodel=small ; +flags intel-darwin.compile OPTIONS $(.cpu-type-x86-64)/64 : -m64 ; # -mcmodel=large ; + +flags intel-darwin.compile.c OPTIONS off : -w0 ; +flags intel-darwin.compile.c OPTIONS on : -w1 ; +flags intel-darwin.compile.c OPTIONS all : -w2 ; +flags intel-darwin.compile.c OPTIONS extra : -w3 ; +flags intel-darwin.compile.c OPTIONS pedantic : -w3 -Wcheck ; +flags intel-darwin.compile.c OPTIONS on : -Werror-all ; + +flags intel-darwin.compile.c++ OPTIONS off : -w0 ; +flags intel-darwin.compile.c++ OPTIONS on : -w1 ; +flags intel-darwin.compile.c++ OPTIONS all : -w2 ; +flags intel-darwin.compile.c++ OPTIONS extra : -w3 ; +flags intel-darwin.compile.c++ OPTIONS pedantic : -w3 -Wcheck ; +flags intel-darwin.compile.c++ OPTIONS on : -Werror-all ; + +actions compile.c +{ + "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +# Declare actions for linking +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) +} diff --git a/src/boost/tools/build/src/tools/intel-linux.jam b/src/boost/tools/build/src/tools/intel-linux.jam new file mode 100644 index 000000000..5405ff55a --- /dev/null +++ b/src/boost/tools/build/src/tools/intel-linux.jam @@ -0,0 +1,311 @@ +# Copyright (c) 2003 Michael Stevens +# Copyright (c) 2011 Bryce Lelbach +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. 
(See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import toolset ; +import toolset : flags ; + +import common ; +import errors ; +import feature ; +import gcc ; +import generators ; +import intel ; +import numbers ; +import os ; +import path ; +import regex ; +import type ; + +feature.extend-subfeature toolset intel : platform : linux ; + +toolset.inherit-generators intel-linux + intel linux : gcc : gcc.mingw.link gcc.mingw.link.dll ; +generators.override intel-linux.prebuilt : builtin.lib-generator ; +generators.override intel-linux.prebuilt : builtin.prebuilt ; +generators.override intel-linux.searched-lib-generator : searched-lib-generator ; + +# Override default do-nothing generators. +generators.override intel-linux.compile.c.pch : pch.default-c-pch-generator ; +generators.override intel-linux.compile.c++.pch : pch.default-cpp-pch-generator ; + +type.set-generated-target-suffix PCH : intel linux : pchi ; + +toolset.inherit-rules intel-linux : gcc ; +toolset.inherit-flags intel-linux : gcc + : off on full + space speed + off all on + extra pedantic + off on + ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +.home = [ os.home-directories ] ; +.home = $(.home[1]) ; + +# Intel oneAPI 2020, and onward. +.bin(oneAPI) = + [ path.join $(.home) intel/oneapi/compiler/latest/linux/bin ] + /opt/intel/oneapi/compiler/latest/linux/bin ; +# Intel C++ Composer XE 2011 for Linux, aka Intel C++ Compiler XE 12.0, +# aka intel-linux-12.0. +.bin(12.0) = /opt/intel/bin ; +# Intel C++ Compiler 11.1. +.bin(11.1) = /opt/intel_cce_11.1.064.x86_64/bin ; +# Intel C++ Compiler 11.0. +.bin(11.0) = /opt/intel_cce_11.0.074.x86_64/bin ; +# Intel C++ Compiler 10.1. +.bin(10.1) = /opt/intel_cce_10.1.013_x64/bin ; +# Intel C++ Compiler 9.1. +.bin(9.1) = /opt/intel_cc_91/bin ; +# Intel C++ Compiler 9.0. +.bin(9.0) = /opt/intel_cc_90/bin ; +# Intel C++ Compiler 8.1. +.bin(8.1) = /opt/intel_cc_81/bin ; +# Intel C++ Compiler 8.0. +.bin(8.0) = /opt/intel_cc_80/bin ; + +rule init ( version ? : command * : options * ) +{ + local user_version = [ MATCH "([0-9.]+)" : $(version) ] ; + local user_command = $(command) ; + if $(user_version) + { + user_version = [ regex.split $(user_version) "[.]" ] ; + } + + local command_lib_path ; + local detected_version ; + local detected_command ; + local command_abs_path ; + if ! $(user_version) && ! $(user_command) + { + # If nothing given, try and discover the latest toolset available. + if ! $(detected_command) + { + local bin_paths = $(.bin(oneAPI)) ; + detected_command = [ common.find-tool icpx : $(bin_paths) ] ; + if $(detected_command) + { + command_abs_path = [ common.get-absolute-tool-path $(detected_command) ] ; + command_lib_path = $(command_abs_path)/../compiler/lib/intel64 ; + } + } + if ! $(detected_command) + { + local bin_paths = $(.bin(12.0)) $(.bin(11.1)) $(.bin(11.0)) ; + detected_command = [ common.find-tool icpc : $(bin_paths) ] ; + if $(detected_command) + { + command_abs_path = [ common.get-absolute-tool-path $(detected_command) ] ; + command_lib_path = $(command_abs_path)/../lib/x86_64 ; + } + } + if ! 
$(detected_command) + { + local bin_paths = $(.bin(10.1)) $(.bin(9.1)) $(.bin(9.0)) + $(.bin(8.1)) $(.bin(8.0)) ; + detected_command = [ common.find-tool icpc : $(bin_paths) ] ; + if $(detected_command) + { + command_abs_path = [ common.get-absolute-tool-path $(detected_command) ] ; + command_lib_path = $(command_abs_path)/../lib ; + } + } + if $(detected_command) + { + local version_cmd = "LD_LIBRARY_PATH=$(command_lib_path) $(detected_command) --version" ; + local version_output = [ SHELL $(version_cmd) ] ; + detected_version = [ MATCH "([0-9.]+)" : $(version_output) ] ; + } + } + else if $(user_command) + { + # If only a command given, determine the version from the command. + # Note, we assume that the user command does everything needed to + # property execute the command. + local version_cmd = $(user_command:J=" ") ; + local version_output = [ SHELL "$(version_cmd) --version" ] ; + detected_command = $(user_command) ; + detected_version = [ MATCH "([0-9.]+)" : $(version_output) ] ; + } + else if $(user_version) + { + # Only version given, try and find the command in the location for the version. + if [ numbers.less $(user_version[1]) 2020 ] + { + local version_xy = $(user_version[1]) $(user_version[2]) ; + local bin_paths = $(.bin($(version_xy:J=.))) ; + if $(bin_paths) + { + detected_command = [ common.find-tool icpc : $(bin_paths) : path-last ] ; + command_abs_path = [ common.get-absolute-tool-path $(detected_command) ] ; + } + if [ numbers.less $(user_version[1]) 11 ] + { + command_lib_path = $(command_abs_path)/../lib ; + } + else + { + command_lib_path = $(command_abs_path)/../lib/x86_64 ; + } + } + else + { + detected_command = [ common.find-tool icpx + : [ regex.replace-list $(.bin(oneAPI)) : "latest" : $(user_version:J=.) ] + : path-last ] ; + command_abs_path = [ common.get-absolute-tool-path $(detected_command) ] ; + command_lib_path = $(command_abs_path)/../compiler/lib/intel64 ; + } + if $(detected_command) + { + local version_cmd = "LD_LIBRARY_PATH=$(command_lib_path) $(detected_command) --version" ; + local version_output = [ SHELL $(version_cmd) ] ; + detected_version = [ MATCH "([0-9.]+)" : $(version_output) ] ; + } + } + + if $(.debug-configuration) + { + ECHO "notice: intel-linux command is" $(command:E=$(detected_command)) ; + } + + version ?= $(detected_version) ; + local condition = [ common.check-init-parameters intel-linux : version $(version) ] ; + + if $(.debug-configuration) + { + ECHO "notice: intel-linux version is" $(version) ; + } + + command ?= $(detected_command) ; + common.handle-options intel-linux : $(condition) : $(command) : $(options) ; + + local tool_version = $(detected_version) ; + if $(tool_version) + { + tool_version = [ regex.split $(tool_version) "[.]" ] ; + } + tool_version ?= $(user_version) ; + + if [ numbers.less $(tool_version[1]) 10 ] + { + flags intel-linux.compile OPTIONS $(condition)/off : "-Ob0" ; + flags intel-linux.compile OPTIONS $(condition)/on : "-Ob1" ; + flags intel-linux.compile OPTIONS $(condition)/full : "-Ob2" ; + flags intel-linux.compile OPTIONS $(condition)/space : "-O1" ; + flags intel-linux.compile OPTIONS $(condition)/speed : "-O3 -ip" ; + } + else if [ numbers.less $(tool_version[1]) 11 ] + { + flags intel-linux.compile OPTIONS $(condition)/off : "-inline-level=0" ; + flags intel-linux.compile OPTIONS $(condition)/on : "-inline-level=1" ; + flags intel-linux.compile OPTIONS $(condition)/full : "-inline-level=2" ; + flags intel-linux.compile OPTIONS $(condition)/space : "-O1" ; + flags intel-linux.compile OPTIONS 
$(condition)/speed : "-O3 -ip" ; + } + else # newer version of intel do have -Os (at least 11+, don't know about 10) + { + flags intel-linux.compile OPTIONS $(condition)/off : "-inline-level=0" ; + flags intel-linux.compile OPTIONS $(condition)/on : "-inline-level=1" ; + flags intel-linux.compile OPTIONS $(condition)/full : "-inline-level=2" ; + flags intel-linux.compile OPTIONS $(condition)/space : "-Os" ; + flags intel-linux.compile OPTIONS $(condition)/speed : "-O3 -ip" ; + } + if [ numbers.less $(tool_version[1]) 2020 ] + { + flags intel-linux.compile OPTIONS off : -w0 ; + flags intel-linux.compile OPTIONS on : -w1 ; + flags intel-linux.compile OPTIONS all : -w2 ; + flags intel-linux.compile OPTIONS extra : -w3 ; + flags intel-linux.compile OPTIONS pedantic : -w3 -Wcheck ; + flags intel-linux.compile OPTIONS on : -Werror-all ; + } + else + { + flags intel-linux.compile OPTIONS off : -w ; + flags intel-linux.compile OPTIONS on : -Wall ; + flags intel-linux.compile OPTIONS all : -Wall ; + flags intel-linux.compile OPTIONS extra : -Wall ; + flags intel-linux.compile OPTIONS pedantic : -Wall ; + flags intel-linux.compile OPTIONS on : -Werror-all ; + } + if $(.debug-configuration) + { + ECHO notice\: using intel libraries "::" $(condition) "::" $(command_lib_path) ; + } + flags intel-linux.compile RUN_PATH $(condition) : $(command_lib_path) ; + flags intel-linux.link RUN_PATH $(condition) : $(command_lib_path) ; +} + +_ = " " ; + +rule compile.c++ ( targets * : sources * : properties * ) +{ + DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; +} + +actions compile.c++ bind PCH_FILE +{ + LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -c -xc++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)" +} + +rule compile.c ( targets * : sources * : properties * ) +{ + DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ; +} + +actions compile.c bind PCH_FILE +{ + LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -c -xc $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -use-pch"$(PCH_FILE)" -c -o "$(<)" "$(>)" +} + +# +# Compiling a pch first deletes any existing *.pchi file, as Intel's compiler +# won't over-write an existing pch: instead it creates filename$1.pchi, filename$2.pchi +# etc - which appear not to do anything except take up disk space :-( +# +actions compile.c++.pch +{ + rm -f "$(<)" && LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)" +} + +actions compile.fortran +{ + LD_LIBRARY_PATH="$(RUN_PATH)" "ifort" -c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c.pch +{ + rm -f "$(<)" && LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)" +} + +rule link ( targets * : sources * : properties * ) +{ + _ on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(_)-Wl,"$(RPATH)" -Wl,-rpath-link$(_)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) +} + +rule link.dll ( targets * : sources * : properties * ) +{ + _ on $(targets) = " " ; +} + +# Differ from 'link' above only by -shared. 
+actions link.dll bind LIBRARIES +{ + LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(_)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(_)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS) +} diff --git a/src/boost/tools/build/src/tools/intel-vxworks.jam b/src/boost/tools/build/src/tools/intel-vxworks.jam new file mode 100644 index 000000000..de50613db --- /dev/null +++ b/src/boost/tools/build/src/tools/intel-vxworks.jam @@ -0,0 +1,192 @@ +# Copyright Wind River 2017. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import intel ; +import feature : feature ; +import os ; +import toolset ; +import toolset : flags ; +import gcc ; +import common ; +import errors ; +import generators ; + +feature.extend-subfeature toolset intel : platform : vxworks ; + +toolset.inherit-generators intel-vxworks + intel vxworks + : gcc + # Don't inherit PCH generators. They were not tested, and probably + # don't work for this compiler. + : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch + ; + +generators.override intel-vxworks.prebuilt : builtin.lib-generator ; +generators.override intel-vxworks.prebuilt : builtin.prebuilt ; +generators.override intel-vxworks.searched-lib-generator : searched-lib-generator ; + +toolset.inherit-rules intel-vxworks : gcc ; +toolset.inherit-flags intel-vxworks : gcc + : off on full space + off all on + extra pedantic + off on + x86/32 + x86/64 + ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +# Initializes the intel-vxworks toolset +# version in mandatory +# name (default icc) is used to invoke the specified intel compiler +# compile and link options allow you to specify addition command line options for each version +rule init ( version ? : command * : options * ) +{ + local condition = [ common.check-init-parameters intel-vxworks + : version $(version) ] ; + + command = [ common.get-invocation-command intel-vxworks : icc + : $(command) : /opt/intel_cc_80/bin ] ; + + common.handle-options intel-vxworks : $(condition) : $(command) : $(options) ; + + # handle + # local library-path = [ feature.get-values : $(options) ] ; + # flags intel-vxworks.link USER_OPTIONS $(condition) : [ feature.get-values : $(options) ] ; + + local root = [ feature.get-values : $(options) ] ; + local bin ; + if $(command) || $(root) + { + bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ; + root ?= $(bin:D) ; + + if $(root) + { + # Libraries required to run the executable may be in either + # $(root)/lib (10.1 and earlier) + # or + # $(root)/lib/architecture-name (11.0 and later: + local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ; + if $(.debug-configuration) + { + ECHO notice\: using intel libraries "::" $(condition) "::" $(lib_path) ; + } + flags intel-vxworks.link RUN_PATH $(condition) : $(lib_path) ; + } + } + + local m = [ MATCH (..).* : $(version) ] ; + local n = [ MATCH (.)\\. 
: $(m) ] ; + if $(n) { + m = $(n) ; + } + + local major = $(m) ; + + + flags intel-vxworks.compile OPTIONS $(condition)/off : -inline-level=0 ; + flags intel-vxworks.compile OPTIONS $(condition)/on : -inline-level=1 ; + flags intel-vxworks.compile OPTIONS $(condition)/full : -inline-level=2 ; + + flags intel-vxworks.link OPTIONS $(condition)/static : [ os.environ LDFLAGS_STATIC ] ; + flags intel-vxworks.link OPTIONS $(condition)/shared : [ os.environ LDFLAGS_DYNAMIC ] ; + flags intel-vxworks.compile OPTIONS $(condition)/shared : -fPIC ; + + local minor = [ MATCH ".*\\.(.).*" : $(version) ] ; + + +} + +SPACE = " " ; + +flags intel-vxworks.compile OPTIONS ; +flags intel-vxworks.compile.c++ OPTIONS ; +flags intel-vxworks.compile INCLUDES ; + + +.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i + conroe conroe-xe conroe-l allendale merom + merom-xe kentsfield kentsfield-xe penryn wolfdale + yorksfield nehalem sandy-bridge ivy-bridge haswell + broadwell skylake skylake-avx512 cannonlake + icelake-client icelake-server cascadelake cooperlake + tigerlake ; +.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3 + athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 btver1 btver2 + znver1 znver2 ; +.cpu-type-x86-64 = $(.cpu-type-em64t) $(.cpu-type-amd64) ; + +#flags intel-vxworks.compile OPTIONS $(.cpu-type-x86-64)/32 : -m32 ; # -mcmodel=small ; +#flags intel-vxworks.compile OPTIONS $(.cpu-type-x86-64)/64 : -m64 ; # -mcmodel=large ; + +flags intel-vxworks.compile.c OPTIONS off : -w0 ; +flags intel-vxworks.compile.c OPTIONS on : -w1 ; +flags intel-vxworks.compile.c OPTIONS all : -w2 ; +flags intel-vxworks.compile.c OPTIONS extra : -w3 ; +flags intel-vxworks.compile.c OPTIONS pedantic : -w3 -Wcheck ; +flags intel-vxworks.compile.c OPTIONS on : -Werror-all ; + +flags intel-vxworks.compile.c++ OPTIONS off : -w0 ; +flags intel-vxworks.compile.c++ OPTIONS on : -w1 ; +flags intel-vxworks.compile.c++ OPTIONS all : -w2 ; +flags intel-vxworks.compile.c++ OPTIONS extra : -w3 ; +flags intel-vxworks.compile.c++ OPTIONS pedantic : -w3 -Wcheck ; +flags intel-vxworks.compile.c++ OPTIONS on : -Werror-all ; + +actions compile.c +{ + "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +flags intel-vxworks ARFLAGS ; + +.AR = ar ; + +rule archive ( targets * : sources * : properties * ) +{ + # Always remove archive and start again. + # of the archive. 
+ # + local clean.a = $(targets[1])(clean) ; + TEMPORARY $(clean.a) ; + NOCARE $(clean.a) ; + LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; + DEPENDS $(clean.a) : $(sources) ; + DEPENDS $(targets) : $(clean.a) ; + common.RmTemps $(clean.a) : $(targets) ; +} + +actions piecemeal archive +{ + "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)" +} + +flags intel-vxworks.link USER_OPTIONS ; + +# Declare actions for linking +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) +} diff --git a/src/boost/tools/build/src/tools/intel-win.jam b/src/boost/tools/build/src/tools/intel-win.jam new file mode 100644 index 000000000..7084891e9 --- /dev/null +++ b/src/boost/tools/build/src/tools/intel-win.jam @@ -0,0 +1,544 @@ +# Copyright Vladimir Prus 2004. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Importing common is needed because the rules we inherit here depend on it. +# That is nasty. +import common ; +import errors ; +import feature ; +import intel ; +import msvc ; +import os ; +import set ; +import toolset ; +import generators ; +import type ; +import path ; +import numbers ; + +feature.extend-subfeature toolset intel : platform : win ; + +toolset.inherit-generators intel-win intel win : msvc ; +toolset.inherit-flags intel-win : msvc : : YLOPTION ; +toolset.inherit-rules intel-win : msvc ; + +# Override default do-nothing generators. +generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ; +generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ; +generators.override intel-win.compile.rc : rc.compile.resource ; +generators.override intel-win.compile.mc : mc.compile ; + +toolset.flags intel-win.compile PCH_SOURCE on : ; + +toolset.add-requirements intel-win,shared:multi ; + +# Initializes the intel toolset for windows +rule init ( version ? : # the compiler version + command * : # the command to invoke the compiler itself + options * # Additional option: + # either 'vc6', 'vc7', 'vc7.1' + # or 'native'(default). + ) +{ + if $(version) + { + configure $(version) : $(command) : $(options) ; + } + else + { + if $(command) + { + errors.error "Autodetect of version from command not implemented!" ; + } + local intel_versions = [ get-autodetect-versions () ] ; + if ! $(intel_versions) + { + errors.error "No intel compiler version found!" ; + } + else + { + local msvc-version = [ feature.get-values : $(options) ] ; # On auto config mode the user can still request a msvc backend. If some intel compiler doesn't support it, don't try to configure it! + msvc-version = [ get-msvc-version-from-vc-string $(msvc-version) ] ; + for local v in $(intel_versions) + { + if [ is-msvc-supported $(v) : $(msvc-version) ] + { + configure $(v) : : $(options) ; + } + } + } + } +} + +local rule configure ( version ? 
: command * : options * ) +{ + local compatibility = + [ feature.get-values : $(options) ] ; + # Allow to specify toolset and visual studio backend from commandline .e.g --toolset=intel-14.0-vc10 + local vc_in_version = [ MATCH "(vc[0-9]+(\\.[0-9]+)?)$" : $(version) ] ; + vc_in_version = $(vc_in_version[1]) ; + if $(compatibility) && $(vc_in_version) + { + if $(compatibility) != $(vc_in_version) + { + errors.error "feature compatibility and vc version in toolset present!" ; + } + } + + if $(vc_in_version) && ! $(compatibility) + { + # vc Version must be stripped before check-init-parameters is called! + version = [ MATCH (.+)-vc.+$ : $(version) ] ; + + compatibility = $(vc_in_version) ; + options += $(vc_in_version) ; + } + if $(compatibility) + { + configure-really $(version) : $(command) : $(options) : $(compatibility) ; + } + else + { + local msvc_versions = [ feature.values ] ; + if ! $(msvc_versions) + { + ECHO notice\: no msvc versions detected. trying auto detect ; + toolset.using msvc : all ; + msvc_versions = [ feature.values ] ; + } + if ! $(.iclvars-$(version)-supported-vcs) + { + errors.error "Supported msvc versions not known for intel $(version)" ; + } + + for local v in $(msvc_versions) + { + if [ MATCH "($(v))" : $(.iclvars-$(version)-supported-vcs) ] + { + # Strip trailing .0 from msvc version as intel compiler uses atm only major version for Qvc + local m = [ MATCH "([0-9]+).0$" : $(v) ] ; + if $(m) + { + v = $(m) ; + } + v = "vc$(v)" ; + local options_really = $(options) ; + options_really += $(v) ; + if $(.debug-configuration) + { + ECHO "configure: intel version: $(version) msvc version: $(v)" ; + } + configure-really $(version) : $(command) : $(options) : $(v) ; + } + } + if ! [ feature.values ] + { + errors.error "Failed to register an intel toolset!" ; + } + } +} + +local rule configure-really ( version ? : command * : options * : compatibility ) +{ + local rewrite-setupscript = [ feature.get-values : $(options) ] ; + local condition = [ common.check-init-parameters intel-win + : version $(version) : compatibility $(compatibility) ] ; + + local m = [ MATCH "([0-9]+).*" : $(version) ] ; + local major = $(m[1]) ; + if ! $(major) + { + errors.error "Major version not found: $(version)" ; + } + + local msvc-version = [ get-msvc-version-from-vc-string $(compatibility) ] ; + if ! $(msvc-version) + { + errors.user-error "Invalid value for compatibility option:" + $(compatibility) ; + } + + command = [ get-compiler-invocation-cmd $(major) : $(command) ] ; + + common.handle-options intel-win : $(condition) : $(command) : $(options) ; + + local root = [ feature.get-values : $(options) ] ; + if $(command) || $(root) + { + local bin ; + if $(command) + { + bin = [ common.get-absolute-tool-path $(command[-1]) ] ; + if $(bin) && ( $(major) = 12 || [ numbers.less 12 $(major) ] ) + { + bin = [ path.make $(bin) ] ; + bin = [ path.parent $(bin) ] ; + } + } + root ?= $(bin) ; + root = $(root)/ ; + } + + local setup ; + local setup_astk_bat ; + local setup_bat ; + if $(major) = 21 || [ numbers.less 21 $(major) ] + { + setup_astk_bat = "setvars_*.bat" ; + setup_bat = "setvars.bat" ; + } + else + { + setup_astk_bat = "iclvars_*.bat" ; + setup_bat = "iclvars.bat" ; + } + + setup = [ path.glob $(root) : $(setup_astk_bat) ] ; + if ! 
$(setup) + { + setup = [ path.join $(root) $(setup_bat) ] ; + setup = [ path.native $(setup) ] ; + } + + local target_types ; + local iclvars_vs_arg ; + if $(major) = 12 || [ numbers.less 12 $(major) ] + { + # if we have a known intel toolset check for visual studio compatibility + # if not trust parameters + if ! [ is-msvc-supported $(version) : $(msvc-version) ] + { + errors.error "msvc $(msvc-version) not supported for intel toolset version $(version)" ; + } + if $(.iclvars-version-alias-$(compatibility)) + { + iclvars_vs_arg = $(.iclvars-version-alias-$(compatibility)) ; + } + else + { + errors.error "Don't know what parameter to pass for vc version ( $(compatibility) )" ; + } + # There are two possible paths for the 64-bit intel compiler, + # one for the IA32-Intel64 cross compiler, and one for the native + # 64 bit compiler. We prefer the latter one if it's installed, + # and don't rely on whether the OS reports whether we're 64 or 32 bit + # as that really only tells us which subsystem bjam is running in: + # + local root_start ; + if $(major) = 21 || [ numbers.less 21 $(major) ] + { + root_start = [ path.join $(root) "compiler/latest/windows/bin" ] ; + root_start = [ path.native $(root_start) ] ; + } + else + { + root_start = $(root) ; + } + local intel64_path = [ path.join $(root_start) intel64 ] ; + if [ path.glob $(intel64_path) : icl.exe ] + { + target_types = ia32 intel64 ; + } + else + { + target_types = ia32 ia32_intel64 ; + } + } + else + { + target_types = default ; + iclvars_vs_arg = $(compatibility) ; + } + + local default-assembler-intel64 = ml64 ; + local default-assembler-ia32_intel64 = ml64 ; + local default-assembler-ia32 = "ml -coff" ; + assembler = [ feature.get-values : $(options) ] ; + + for local c in $(target_types) + { + local cpu-conditions ; + local setup-call ; + if $(major) = 12 || [ numbers.less 12 $(major) ] + { + cpu-conditions = $(condition)/$(.cpu-arch-$(c)) ; + + if ! 
$(setup) + { + # No setup script + } + else if $(rewrite-setupscript) = off || [ os.name ] != NT + { + setup-call = "call \"$(setup)\" $(c) $(iclvars_vs_arg) > nul " ; + } + else + { + if $(rewrite-setupscript) = always + { + toolset.flags intel-win .REWRITE-SETUP $(cpu-conditions) : true ; + } + toolset.flags intel-win .SETUP-SCRIPT $(cpu-conditions) : $(setup) ; + toolset.flags intel-win .SETUP-OPTIONS $(cpu-conditions) : "$(c) $(iclvars_vs_arg)" ; + } + } + else + { + setup-call = "call \""$(setup)"\" $(compatibility) > nul " ; + cpu-conditions = $(condition) ; + } + + if $(setup-call) + { + if [ os.name ] = NT + { + setup-call = $(setup-call)"\n " ; + } + else + { + setup-call = "cmd /S /C "$(setup-call)" \"&&\" " ; + } + toolset.flags intel-win .SETUP $(cpu-conditions) : $(setup-call) ; + } + + if $(.debug-configuration) + { + for local cond in $(cpu-conditions) + { + ECHO "notice: [intel-cfg] condition: '$(cond)', setup: '$(setup-call)'" ; + } + } + + local cpu-assembler = $(assembler) ; + cpu-assembler ?= $(default-assembler-$(c)) ; + + toolset.flags intel-win.compile .CC $(cpu-conditions) : icl ; + toolset.flags intel-win.link .LD $(cpu-conditions) : xilink /nologo ; + toolset.flags intel-win.archive .LD $(cpu-conditions) : xilink /lib /nologo ; + toolset.flags intel-win.link .MT $(cpu-conditions) : mt -nologo ; + toolset.flags intel-win.compile .ASM $(cpu-conditions) : $(cpu-assembler) -nologo ; + toolset.flags intel-win.compile .MC $(cpu-conditions) : mc ; + toolset.flags intel-win.compile .RC $(cpu-conditions) : rc ; + } + + # Depending on the settings, running of tests require some runtime DLLs. + toolset.flags intel-win RUN_PATH $(condition) : $(root) ; + + + local C++FLAGS ; + + C++FLAGS += /nologo ; + + # Reduce the number of spurious error messages + C++FLAGS += /Qwn5 /Qwd985 ; + + # Enable ADL + C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too + + # Disable Microsoft "secure" overloads in Dinkumware libraries since they + # cause compile errors with Intel versions 9 and 10. + if [ numbers.less $(major) 12 ] + { + C++FLAGS += -D_SECURE_SCL=0 ; + } + + if [ numbers.less 5 $(major) ] + { + C++FLAGS += "/Zc:forScope" ; # Add support for correct for loop scoping. + } + + # Add options recognized only by intel7 and above. + if $(major) = 7 || [ numbers.less 7 $(major) ] + { + C++FLAGS += /Qansi_alias ; + } + + if $(compatibility) = vc6 + { + C++FLAGS += + # Emulate VC6 + /Qvc6 + + # No wchar_t support in vc6 dinkum library. Furthermore, in vc6 + # compatibility-mode, wchar_t is not a distinct type from unsigned + # short. + -DBOOST_NO_INTRINSIC_WCHAR_T + ; + } + else + { + if [ numbers.less 5 $(major) ] + { + # Add support for wchar_t + C++FLAGS += "/Zc:wchar_t" + # Tell the dinkumware library about it. + -D_NATIVE_WCHAR_T_DEFINED + ; + } + } + + if $(compatibility) && $(compatibility) != native + { + C++FLAGS += /Q$(compatibility) ; + } + else + { + C++FLAGS += + -Qoption,cpp,--arg_dep_lookup + # The following options were intended to disable the Intel compiler's + # 'bug-emulation' mode, but were later reported to be causing ICE with + # Intel-Win 9.0. It is not yet clear which options can be safely used. 
+ # -Qoption,cpp,--const_string_literals + # -Qoption,cpp,--new_for_init + # -Qoption,cpp,--no_implicit_typename + # -Qoption,cpp,--no_friend_injection + # -Qoption,cpp,--no_microsoft_bugs + ; + } + + toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ; + # By default, when creating PCH, intel adds 'i' to the explicitly + # specified name of the PCH file. Of course, B2 is not + # happy when compiler produces not the file it was asked for. + # The option below stops this behaviour. + toolset.flags intel-win CFLAGS $(condition) : -Qpchi- ; + + if ! $(compatibility) + { + # If there's no backend version, assume 7.1. + compatibility = vc7.1 ; + } + + msvc-version = [ msvc.resolve-possible-msvc-version-alias $(msvc-version) ] ; + msvc.configure-version-specific intel-win : $(msvc-version) : $(condition) ; +} + +local rule get-autodetect-versions +{ + local result ; + for local v in $(.intel-autodetect-versions) + { + local major = [ MATCH "([0-9]+).*" : $(v) ] ; # Use only major version + if [ get-icl-path-from-environment $(major) ] + { + result += $(v) ; + } + } + return $(result) ; +} + +local rule get-icl-path-from-environment ( major_version ) +{ + local path = [ os.environ ICPP_COMPILER$(major_version) ] ; + if $(path) + { + path = [ path.make $(path) ] ; + local cmdpath ; + local subdirs = $(.icl-target-subdirectories) ; + while $(subdirs) + { + cmdpath = [ path.join $(path) "bin/$(subdirs[0])/icl.exe" ] ; + cmdpath = [ path.native $(cmdpath) ] ; + if [ path.exists $(cmdpath) ] + { + subdirs = ; + } else { + cmdpath = ; + subdirs = $(subdirs[2-]) ; + } + } + path = $(cmdpath) ; + } + return $(path) ; +} + +local rule get-compiler-invocation-cmd ( major_version : command * ) +{ + if $(command) + { + return [ common.get-invocation-command intel-win : icl.exe : $(command) ] ; + } + else + { + local path = [ get-icl-path-from-environment $(major_version) ] ; + return [ common.get-invocation-command intel-win : icl.exe : $(path) ] ; + } +} + +local rule is-msvc-supported ( intel-version : msvc-version ) +{ + if ! $(msvc-version) + { + return true ; + } + else + { + if $(.iclvars-$(intel-version)-supported-vcs) + { + if [ MATCH "($(msvc-version))" : $(.iclvars-$(intel-version)-supported-vcs) ] + { + return true ; + } + } + else + { + return true ; + } + } +} + +local rule get-msvc-version-from-vc-string ( vc-string ) +{ + local r = [ MATCH "^vc([0-9]+(\\.[0-9]+)?)$" : $(vc-string) ] ; + return $(r[1]) ; +} + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +# Copied from msvc.jam +# Supported CPU architectures. 
+.cpu-arch-ia32 = + / + /32 + x86/ + x86/32 ; + +.cpu-arch-intel64 = + /64 + x86/64 ; + +.cpu-arch-ia32_intel64 = + /64 + x86/64 ; + +.intel-autodetect-versions = 14.0 13.0 12.0 ; +.iclvars-12.0-supported-vcs = "10.0 9.0 8.0" ; +.iclvars-12.1-supported-vcs = "10.0 9.0 8.0" ; +.iclvars-13.0-supported-vcs = "11.0 10.0 9.0" ; +.iclvars-14.0-supported-vcs = "12.0 11.0 10.0 9.0" ; +.iclvars-15.0-supported-vcs = "12.0 11.0 10.0 9.0" ; +.iclvars-16.0-supported-vcs = "14.0 12.0 11.0 10.0 9.0" ; +.iclvars-17.0-supported-vcs = "14.1 14.0 12.0 11.0 10.0" ; +.iclvars-18.0-supported-vcs = "14.1 14.0 12.0 11.0 10.0" ; +.iclvars-19.0-supported-vcs = "14.2 14.1 14.0 12.0" ; +.iclvars-19.1-supported-vcs = "14.2 14.1 14.0 12.0" ; +.iclvars-21.1-supported-vcs = "14.2 14.1" ; +.iclvars-2021.1-supported-vcs = "14.2 14.1" ; +.iclvars-version-alias-vc14.2 = vs2019 ; +.iclvars-version-alias-vc14.1 = vs2017 ; +.iclvars-version-alias-vc14 = vs2015 ; +.iclvars-version-alias-vc12 = vs2013 ; +.iclvars-version-alias-vc11 = vs2012 ; +.iclvars-version-alias-vc10 = vs2010 ; +.iclvars-version-alias-vc9 = vs2008 ; +.iclvars-version-alias-vc8 = vs2005 ; +.icl-target-subdirectories = ia32 ia32_intel64 intel64 ; + +toolset.flags intel-win.link LIBRARY_OPTION intel : "" ; + +toolset.flags intel-win YLOPTION ; + diff --git a/src/boost/tools/build/src/tools/intel.jam b/src/boost/tools/build/src/tools/intel.jam new file mode 100644 index 000000000..6b1e1d6bf --- /dev/null +++ b/src/boost/tools/build/src/tools/intel.jam @@ -0,0 +1,85 @@ +# Copyright Vladimir Prus 2004. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.intel]] += Intel C++ + +The `intel-*` modules support the Intel C++ command-line compiler. + +The module is initialized using the following syntax: + +---- +using intel : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +If compiler command is not specified, then B2 will look in PATH +for an executable `icpc` (on Linux), or `icl.exe` (on Windows). + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +`root`:: +For the Linux version, specifies the root directory of the compiler installation. +This option is necessary only if it is not possible to detect this information +from the compiler command -- for example if the specified compiler command is +a user script. For the Windows version, specifies the directory of the +`iclvars.bat` file, for versions prior to 21 ( or 2021 ), or of the `setvars.bat`, +for versions from 21 ( or 2021 ) on up, for configuring the compiler. +Specifying the `root` option without specifying the compiler command allows the +end-user not to have to worry about whether they are compiling 32-bit or 64-bit code, +as the toolset will automatically configure the compiler for the appropriate address +model and compiler command using the `iclvars.bat` or `setvars.bat` batch file. 
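+
+For illustration only (the version number and paths below are hypothetical;
+substitute the values matching your own installation), a `user-config.jam`
+entry might look like one of the following:
+
+----
+# Auto-detect the compiler from PATH / the default install locations:
+using intel ;
+
+# Or configure one specific version, naming the compiler command and,
+# on Linux, the installation root explicitly:
+using intel : 19.0 : /opt/intel/bin/icpc : <root>/opt/intel ;
+----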
+ +|# # end::doc[] + +# This is a generic 'intel' toolset. Depending on the current +# system, it forwards either to 'intel-linux' or 'intel-win' +# modules. + +import feature ; +import os ; +import toolset ; + +feature.extend toolset : intel ; +feature.subfeature toolset intel : platform : : propagated link-incompatible ; + +rule init ( * : * ) +{ + if [ os.name ] = LINUX + { + toolset.using intel-linux : + $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + else if [ os.name ] = MACOSX + { + toolset.using intel-darwin : + $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + else + { + toolset.using intel-win : + $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } +} diff --git a/src/boost/tools/build/src/tools/lex.jam b/src/boost/tools/build/src/tools/lex.jam new file mode 100644 index 000000000..8fab09a0d --- /dev/null +++ b/src/boost/tools/build/src/tools/lex.jam @@ -0,0 +1,25 @@ +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import type ; +import generators ; +import feature ; +import toolset : flags ; + +feature.feature flex.prefix : : free ; +type.register LEX : l ; +type.register LEX++ : ll ; +generators.register-standard lex.lex : LEX : C ; +generators.register-standard lex.lex : LEX++ : CPP ; + +rule init ( ) +{ +} + +flags lex.lex PREFIX ; + +actions lex +{ + flex -P$(PREFIX) -o$(<) $(>) +} diff --git a/src/boost/tools/build/src/tools/libjpeg.jam b/src/boost/tools/build/src/tools/libjpeg.jam new file mode 100644 index 000000000..ac2a5d0d8 --- /dev/null +++ b/src/boost/tools/build/src/tools/libjpeg.jam @@ -0,0 +1,234 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the libjpeg library +# +# After 'using libjpeg', the following targets are available: +# +# /libjpeg//libjpeg -- The libjpeg library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import property ; +import property-set ; + +header = jpeglib.h ; + +# jpeglib.h requires stdio.h to be included first. +header-test = "#include \n#include \n" ; + +names = jpeg ; + +sources = jaricom.c jcapimin.c jcapistd.c jcarith.c jccoefct.c jccolor.c + jcdctmgr.c jchuff.c jcinit.c jcmainct.c jcmarker.c jcmaster.c + jcomapi.c jcparam.c jcprepct.c jcsample.c jctrans.c jdapimin.c + jdapistd.c jdarith.c jdatadst.c jdatasrc.c jdcoefct.c jdcolor.c + jddctmgr.c jdhuff.c jdinput.c jdmainct.c jdmarker.c jdmaster.c + jdmerge.c jdpostct.c jdsample.c jdtrans.c jerror.c jfdctflt.c + jfdctfst.c jfdctint.c jidctflt.c jidctfst.c jidctint.c jquant1.c ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the libjpeg library. +# +# libjpeg can be configured either to use pre-existing binaries +# or to build the library from source. +# +# Options for configuring a prebuilt libjpeg:: +# +# +# The directory containing the libjpeg binaries. +# +# Overrides the default library name. +# +# The directory containing the libjpeg headers. +# +# If none of these options is specified, then the environmental +# variables LIBJPEG_LIBRARY_PATH, LIBJPEG_NAME, and LIBJPEG_INCLUDE will +# be used instead. 
+# +# Options for building libjpeg from source:: +# +# +# The libjpeg source directory. Defaults to the environmental variable +# LIBJPEG_SOURCE. +# +# A rule which computes the actual name of the compiled +# libraries based on the build properties. Ignored +# when using precompiled binaries. +# +# The base name to use for the compiled library. Ignored +# when using precompiled binaries. +# +# Examples:: +# +# # Find libjpeg in the default system location +# using libjpeg ; +# # Build libjpeg from source +# using libjpeg : 8c : /home/steven/libjpeg-8c ; +# # Find libjpeg in /usr/local +# using libjpeg : 8c +# : /usr/local/include /usr/local/lib ; +# # Build libjpeg from source for msvc and find +# # prebuilt binaries for gcc. +# using libjpeg : 8c : C:/Devel/src/libjpeg-8c : msvc ; +# using libjpeg : 8c : : gcc ; +# +rule init ( + version ? + # The libjpeg version (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the libjpeg target + + : is-default ? + # Default configurations are only used when libjpeg + # has not yet been configured. This option is + # deprecated. A configuration will be treated + # as a default when none of , , + # , and are present. + ) +{ + local caller = [ project.current ] ; + + if ! $(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project libjpeg ; + } + + local library-path = [ feature.get-values : $(options) ] ; + local include-path = [ feature.get-values : $(options) ] ; + local source-path = [ feature.get-values : $(options) ] ; + local library-name = [ feature.get-values : $(options) ] ; + local tag = [ feature.get-values : $(options) ] ; + local build-name = [ feature.get-values : $(options) ] ; + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name) + { + is-default = true ; + } + + # Ignore environmental LIBJPEG_SOURCE if this initialization + # requested to search for a specific pre-built library. 
+ if $(library-path) || $(include-path) || $(library-name) + { + if $(source-path) || $(tag) || $(build-name) + { + errors.user-error "incompatible options for libjpeg:" + [ property.select : $(options) ] "and" + [ property.select : $(options) ] ; + } + } + else + { + source-path ?= [ modules.peek : LIBJPEG_SOURCE ] ; + } + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [libjpeg] libjpeg is already configured" ; + } + } + else + { + errors.user-error "libjpeg is already configured" ; + } + return ; + } + else if $(source-path) + { + build-name ?= jpeg ; + library-id = [ CALC $(library-id) + 1 ] ; + tag = [ MATCH ^@?(.*)$ : $(tag) ] ; + if $(tag) + { + tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ; + } + sources = [ path.glob $(source-path) : $(sources) ] ; + if $(.debug) + { + ECHO "notice: [libjpeg] Building libjpeg from source as $(build-name)" ; + if $(condition) + { + ECHO "notice: [libjpeg] Condition" [ $(condition).raw ] ; + } + if $(sources) + { + ECHO "notice: [libjpeg] found libjpeg source in $(source-path)" ; + } + else + { + ECHO "warning: [libjpeg] could not find libjpeg source in $(source-path)" ; + } + } + local target ; + if $(sources) { + target = [ targets.create-typed-target LIB : $(.project) + : $(build-name).$(library-id) + : $(sources) + : $(requirements) + @$(tag) + $(source-path) + msvc:_CRT_SECURE_NO_DEPRECATE + msvc:_SCL_SECURE_NO_DEPRECATE + : + : $(source-path) ] ; + } + + local mt = [ new ac-library libjpeg : $(.project) : $(condition) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + if $(target) + { + $(mt).set-target $(target) ; + } + targets.main-target-alternative $(mt) ; + } else { + if $(.debug) + { + ECHO "notice: [libjpeg] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [libjpeg] Condition" [ $(condition).raw ] ; + } + } + + local mt = [ new ac-library libjpeg : $(.project) : $(condition) : + $(include-path) : $(library-path) : $(library-name) : $(root) ] ; + $(mt).set-header $(header) ; + $(mt).set-header-test $(header-test) ; + $(mt).set-default-names $(names) ; + targets.main-target-alternative $(mt) ; + } + .configured.$(condition) = true ; +} diff --git a/src/boost/tools/build/src/tools/libpng.jam b/src/boost/tools/build/src/tools/libpng.jam new file mode 100644 index 000000000..873db8f96 --- /dev/null +++ b/src/boost/tools/build/src/tools/libpng.jam @@ -0,0 +1,229 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the libpng library +# +# After 'using libpng', the following targets are available: +# +# /libpng//libpng -- The libpng library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import property ; +import property-set ; + +header = png.h ; + +# On Windows, binary distributions of libpng and package managers +# name the library differently (e.g. vcpkg installs libpng16.lib). +# Listing popular names increases chances of successful look-up. 
+names = libpng libpng16 png png16 ; + +sources = png.c pngerror.c pngget.c pngmem.c pngpread.c pngread.c pngrio.c pngrtran.c pngrutil.c + pngset.c pngtrans.c pngwio.c pngwrite.c pngwtran.c pngwutil.c ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the libpng library. +# +# libpng can be configured either to use pre-existing binaries +# or to build the library from source. +# +# Options for configuring a prebuilt libpng:: +# +# +# The directory containing the libpng binaries. +# +# Overrides the default library name. +# +# The directory containing the libpng headers. +# +# If none of these options is specified, then the environmental +# variables LIBPNG_LIBRARY_PATH, LIBPNG_NAME, and LIBPNG_INCLUDE will +# be used instead. +# +# Options for building libpng from source:: +# +# +# The libpng source directory. Defaults to the environmental variable +# LIBPNG_SOURCE. +# +# A rule which computes the actual name of the compiled +# libraries based on the build properties. Ignored +# when using precompiled binaries. +# +# The base name to use for the compiled library. Ignored +# when using precompiled binaries. +# +# Examples:: +# +# # Find libpng in the default system location +# using libpng ; +# # Build libpng from source +# using libpng : 1.5.4 : /home/steven/libpng-1.5.4 ; +# # Find libpng in /usr/local +# using libpng : 1.5.4 +# : /usr/local/include /usr/local/lib ; +# # Build libpng from source for msvc and find +# # prebuilt binaries for gcc. +# using libpng : 1.5.4 : C:/Devel/src/libpng-1.5.4 : msvc ; +# using libpng : 1.5.4 : : gcc ; +# +rule init ( + version ? + # The libpng version (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the libpng target + + : is-default ? + # Default configurations are only used when libpng + # has not yet been configured. This option is + # deprecated. A configuration will be treated + # as a default when none of , , + # , and are present. + ) +{ + local caller = [ project.current ] ; + + if ! $(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project libpng ; + } + + local library-path = [ feature.get-values : $(options) ] ; + local include-path = [ feature.get-values : $(options) ] ; + local source-path = [ feature.get-values : $(options) ] ; + local library-name = [ feature.get-values : $(options) ] ; + local tag = [ feature.get-values : $(options) ] ; + local build-name = [ feature.get-values : $(options) ] ; + + if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name) + { + is-default = true ; + } + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + # Ignore environmental LIBPNG_SOURCE if this initialization + # requested to search for a specific pre-built library. 
+ if $(library-path) || $(include-path) || $(library-name) + { + if $(source-path) || $(tag) || $(build-name) + { + errors.user-error "incompatible options for libpng:" + [ property.select : $(options) ] "and" + [ property.select : $(options) ] ; + } + } + else + { + source-path ?= [ modules.peek : LIBPNG_SOURCE ] ; + } + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [libpng] libpng is already configured" ; + } + } + else + { + errors.user-error "libpng is already configured" ; + } + return ; + } + else if $(source-path) + { + build-name ?= png ; + library-id = [ CALC $(library-id) + 1 ] ; + tag = [ MATCH ^@?(.*)$ : $(tag) ] ; + if $(tag) + { + tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ; + } + sources = [ path.glob $(source-path) : $(sources) ] ; + if $(.debug) + { + ECHO "notice: [libpng] Building libpng from source as $(build-name)" ; + if $(condition) + { + ECHO "notice: [libpng] Condition" [ $(condition).raw ] ; + } + if $(sources) + { + ECHO "notice: [libpng] found libpng source in $(source-path)" ; + } + else + { + ECHO "warning: [libpng] could not find libpng source in $(source-path)" ; + } + } + local target ; + if $(sources) { + target = [ targets.create-typed-target LIB : $(.project) + : $(build-name).$(library-id) + : $(sources) + : $(requirements) + @$(tag) + $(source-path) + msvc:_CRT_SECURE_NO_DEPRECATE + msvc:_SCL_SECURE_NO_DEPRECATE + shared:LIBPNG_DLL + : + : $(source-path) ] ; + } + + local mt = [ new ac-library libpng : $(.project) : $(condition) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + if $(target) + { + $(mt).set-target $(target) ; + } + targets.main-target-alternative $(mt) ; + } else { + if $(.debug) + { + ECHO "notice: [libpng] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [libpng] Condition" [ $(condition).raw ] ; + } + } + + local mt = [ new ac-library libpng : $(.project) : $(condition) : + $(include-path) : $(library-path) : $(library-name) : $(root) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + targets.main-target-alternative $(mt) ; + } + .configured.$(condition) = true ; +} diff --git a/src/boost/tools/build/src/tools/libtiff.jam b/src/boost/tools/build/src/tools/libtiff.jam new file mode 100644 index 000000000..308381503 --- /dev/null +++ b/src/boost/tools/build/src/tools/libtiff.jam @@ -0,0 +1,227 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. 
(See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the libtiff library +# +# After 'using libtiff', the following targets are available: +# +# /libtiff//libtiff -- The libtiff library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import property ; +import property-set ; + +header = tiff.h ; +names = tiff ; + +sources = tif_aux.c tif_close.c tif_codec.c tif_color.c tif_compress.c tif_dir.c tif_dirinfo.c + tif_dirread.c tif_dirwrite.c tif_dumpmode.c tif_error.c tif_extension.c tif_fax3.c tif_fax3sm.c + tif_getimage.c tif_jbig.c tif_jpeg.c tif_jpeg_12.c tif_ojpeg.c tif_flush.c tif_luv.c tif_lzw.c + tif_next.c tif_open.c tif_packbits.c tif_pixarlog.c tif_predict.c tif_print.c tif_read.c tif_stream.cxx + tif_swab.c tif_strip.c tif_thunder.c tif_tile.c tif_version.c tif_warning.c tif_write.c tif_zip.c ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the libtiff library. +# +# libtiff can be configured either to use pre-existing binaries +# or to build the library from source. +# +# Options for configuring a prebuilt libtiff:: +# +# +# The directory containing the libtiff binaries. +# +# Overrides the default library name. +# +# The directory containing the libtiff headers. +# +# If none of these options is specified, then the environmental +# variables LIBTIFF_LIBRARY_PATH, LIBTIFF_NAME, and LIBTIFF_INCLUDE will +# be used instead. +# +# Options for building libtiff from source:: +# +# +# The libtiff source directory. Defaults to the environmental variable +# LIBTIFF_SOURCE. +# +# A rule which computes the actual name of the compiled +# libraries based on the build properties. Ignored +# when using precompiled binaries. +# +# The base name to use for the compiled library. Ignored +# when using precompiled binaries. +# +# Examples:: +# +# # Find libtiff in the default system location +# using libtiff ; +# # Build libtiff from source +# using libtiff : 4.0.1 : /home/steven/libtiff-4.0.1 ; +# # Find libtiff in /usr/local +# using libtiff : 4.0.1 +# : /usr/local/include /usr/local/lib ; +# # Build libtiff from source for msvc and find +# # prebuilt binaries for gcc. +# using libtiff : 4.0.1 : C:/Devel/src/libtiff-4.0.1 : msvc ; +# using libtiff : 4.0.1 : : gcc ; +# +rule init ( + version ? + # The libtiff version (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the libtiff target + + : is-default ? + # Default configurations are only used when libtiff + # has not yet been configured. This option is + # deprecated. A configuration will be treated + # as a default when none of , , + # , and are present. + ) +{ + local caller = [ project.current ] ; + + if ! $(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project libtiff ; + } + + local library-path = [ feature.get-values : $(options) ] ; + local include-path = [ feature.get-values : $(options) ] ; + local source-path = [ feature.get-values : $(options) ] ; + local library-name = [ feature.get-values : $(options) ] ; + local tag = [ feature.get-values : $(options) ] ; + local build-name = [ feature.get-values : $(options) ] ; + + if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! 
$(library-name) + { + is-default = true ; + } + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + # Ignore environmental LIBTIFF_SOURCE if this initialization + # requested to search for a specific pre-built library. + if $(library-path) || $(include-path) || $(library-name) + { + if $(source-path) || $(tag) || $(build-name) + { + errors.user-error "incompatible options for libtiff:" + [ property.select : $(options) ] "and" + [ property.select : $(options) ] ; + } + } + else + { + source-path ?= [ modules.peek : LIBTIFF_SOURCE ] ; + } + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [libtiff] libtiff is already configured" ; + } + } + else + { + errors.user-error "libtiff is already configured" ; + } + return ; + } + else if $(source-path) + { + build-name ?= tiff ; + library-id = [ CALC $(library-id) + 1 ] ; + tag = [ MATCH ^@?(.*)$ : $(tag) ] ; + if $(tag) + { + tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ; + } + sources = [ path.glob $(source-path) : $(sources) ] ; + if $(.debug) + { + ECHO "notice: [libtiff] Building libtiff from source as $(build-name)" ; + if $(condition) + { + ECHO "notice: [libtiff] Condition" [ $(condition).raw ] ; + } + if $(sources) + { + ECHO "notice: [libtiff] found libtiff source in $(source-path)" ; + } + else + { + ECHO "warning: [libtiff] could not find libtiff source in $(source-path)" ; + } + } + local target ; + if $(sources) { + target = [ targets.create-typed-target LIB : $(.project) + : $(build-name).$(library-id) + : $(sources) + : $(requirements) + @$(tag) + $(source-path) + msvc:_CRT_SECURE_NO_DEPRECATE + msvc:_SCL_SECURE_NO_DEPRECATE + : + : $(source-path) ] ; + } + + local mt = [ new ac-library libtiff : $(.project) : $(condition) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + if $(target) + { + $(mt).set-target $(target) ; + } + targets.main-target-alternative $(mt) ; + } else { + if $(.debug) + { + ECHO "notice: [libtiff] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [libtiff] Condition" [ $(condition).raw ] ; + } + } + + local mt = [ new ac-library libtiff : $(.project) : $(condition) : + $(include-path) : $(library-path) : $(library-name) : $(root) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + targets.main-target-alternative $(mt) ; + } + .configured.$(condition) = true ; +} diff --git a/src/boost/tools/build/src/tools/link.jam b/src/boost/tools/build/src/tools/link.jam new file mode 100644 index 000000000..e9e8851fe --- /dev/null +++ b/src/boost/tools/build/src/tools/link.jam @@ -0,0 +1,547 @@ +# Copyright 2012 Steven Watanabe +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +import os ; +import targets ; +import project ; +import "class" : new ; +import virtual-target ; +import configure ; +import path ; +import property ; +import property-set ; +import common ; + +rule get-root-project ( project ) +{ + # Find the root project. 
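+    # (Walk the parent-module chain until the next parent would be
+    # user-config, project-config, or nothing; for a target declared in,
+    # say, libs/foo/build this normally ends at the Jamroot project.
+    # The path is illustrative only.)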
+ local root-project = $(project) ; + root-project = [ $(root-project).project-module ] ; + while + [ project.attribute $(root-project) parent-module ] && + [ project.attribute $(root-project) parent-module ] != user-config && + [ project.attribute $(root-project) parent-module ] != project-config + { + root-project = [ project.attribute $(root-project) parent-module ] ; + } + return $(root-project) ; +} + +TOUCH = [ common.file-touch-command ] ; + +actions touch { + $(TOUCH) "$(<)" +} + +rule can-symlink ( project ) +{ + if ! $(.can-symlink) + { + local root-project = [ get-root-project $(project) ] ; + + local source-target = [ new file-target test-symlink-source : : + $(project) : [ new action : link.touch ] ] ; + local target = [ new file-target test-symlink : : + $(project) : [ new action $(source-target) : link.mklink ] ] ; + + if [ configure.try-build $(target) : [ property-set.empty ] : "symlinks supported" ] + { + .can-symlink = true ; + } + else + { + .can-symlink = false ; + } + } + if $(.can-symlink) = true + { + return true ; + } +} + +if [ os.name ] = NT +{ + +# Test for Windows junctions (mklink /J) +rule can-junction ( project ) +{ + if ! $(.can-junction) + { + local root-project = [ get-root-project $(project) ] ; + + local source-target = [ new file-target test-junction-source : : + $(project) : [ new action : common.mkdir ] ] ; + local target = [ new file-target test-junction : : + $(project) : [ new action $(source-target) : link.junction ] ] ; + + if [ configure.try-build $(target) : [ property-set.empty ] : "junctions supported" ] + { + .can-junction = true ; + } + else + { + .can-junction = false ; + } + } + if $(.can-junction) = true + { + return true ; + } +} + +} +else +{ + +.can-junction = false ; + +rule can-junction ( project ) +{ +} + +} + +rule can-hardlink ( project ) +{ + if ! $(.can-hardlink) + { + local root-project = [ get-root-project $(project) ] ; + + local source-target = [ new file-target test-hardlink-source : : + $(project) : [ new action : link.touch ] ] ; + # Use so that the destination link is created + # in a different directory. AFS refuses to make hard links + # between files in different directories, so we want to check + # it. + local target = [ new file-target test-hardlink : : + $(project) : [ new action $(source-target) : link.hardlink + : [ new property-set symlink ] + ] ] ; + + if [ configure.try-build $(target) : [ property-set.empty ] : "hardlinks supported" ] + { + .can-hardlink = true ; + } + else + { + .can-hardlink = false ; + } + } + if $(.can-hardlink) = true + { + return true ; + } +} + +class file-or-directory-reference : basic-target +{ + import virtual-target ; + import property-set ; + import path ; + + rule construct ( name : source-targets * : property-set ) + { + return [ property-set.empty ] [ virtual-target.from-file $(self.name) : + [ location ] : $(self.project) ] ; + } + + # Returns true if the referred file really exists. + rule exists ( ) + { + location ; + return $(self.file-path) ; + } + + # Returns the location of target. Needed by 'testing.jam'. + rule location ( ) + { + if ! $(self.file-location) + { + local source-location = [ $(self.project).get source-location ] ; + for local src-dir in $(source-location) + { + if ! 
$(self.file-location) + { + local location = [ path.root $(self.name) $(src-dir) ] ; + if [ path.exists [ path.native $(location) ] ] + { + self.file-location = $(src-dir) ; + self.file-path = $(location) ; + } + } + } + } + return $(self.file-location) ; + } +} + +class symlink-target-class : basic-target +{ + import path ; + import virtual-target ; + import link ; + import os ; + import type ; + rule construct ( name : source-target : property-set ) + { + local location = [ path.join + [ $(source-target).path ] [ $(source-target).name ] ] ; + local files = [ path.glob-tree $(location) : * ] ; + local targets ; + + # If we have symlinks, don't bother checking + # for hardlinks and junctions. + if ! [ link.can-symlink $(self.project) ] + { + link.can-junction $(self.project) ; + link.can-hardlink $(self.project) ; + } + + if [ $(property-set).get ] + { + property-set = [ property-set.create + [ property.select : [ $(property-set).raw ] ] ] ; + } + else + { + local path,relative-to-build-dir = [ $(property-set).target-path ] ; + local path = $(path,relative-to-build-dir[1]) ; + local relative-to-build-dir = $(path,relative-to-build-dir[2]) ; + + if $(relative-to-build-dir) + { + path = [ path.join [ $(self.project).build-dir ] $(path) ] ; + } + + property-set = [ property-set.create $(path) ] ; + } + + local a = [ new non-scanning-action $(source-target) : + link.do-link-recursively : $(property-set) ] ; + + local t = [ new notfile-target $(name) + : $(self.project) : $(a) ] ; + + return [ property-set.empty ] [ virtual-target.register $(t) ] ; + } +} + +rule do-file-link +{ + local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ; + local source = [ path.native [ path.relative-to [ path.pwd ] $(>) ] ] ; + local old-source = [ on $(target) return $(LINK-SOURCE) ] ; + if $(old-source) + { + import errors ; + errors.user-error + Cannot create link $(target) to $(source). : + Link previously defined to another file, $(old-source[1]). ; + } + LINK-SOURCE on $(target) = $(source) $(.current-target) ; + LOCATE on $(target) = . ; + DEPENDS $(.current-target) : $(target) ; + if $(.can-symlink) = true + { + DEPENDS $(target) : $(source) ; + link.mklink $(target) : $(source) ; + } + else if $(.can-hardlink) = true + { + DEPENDS $(target) : $(source) ; + link.hardlink $(target) : $(source) ; + } + else + { + DEPENDS $(target) : $(source) ; + common.copy $(target) : $(source) ; + } +} + +rule do-link +{ + local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ; + local source = [ path.native [ path.relative-to [ path.pwd ] $(>) ] ] ; + local relative = [ path.native [ path.relative-to [ path.parent $(<) ] $(>) ] ] ; + if ! [ on $(target) return $(MKLINK_OR_DIR) ] + { + LOCATE on $(target) = . ; + DEPENDS $(.current-target) : $(target) ; + mklink-or-dir $(target) : $(source) ; + } + if [ os.name ] = NT + { + if $(.can-symlink) = true + { + MKLINK_OR_DIR on $(target) = mklink /D \"$(target)\" \"$(relative)\" ; + } + else + { + # This function should only be called + # if either symlinks or junctions are supported. + # To get here $(.can-junction) must be true. + mklink-opt = /J ; + MKLINK_OR_DIR on $(target) = mklink /J \"$(target)\" \"$(source)\" ; + } + } + else + { + MKLINK_OR_DIR on $(target) = ln -s $(relative) $(target) ; + } +} + +rule force-update +{ + local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ; + ALWAYS $(target) ; +} + +rule do-split +{ + local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ; + if ! 
[ on $(target) return $(MKLINK_OR_DIR) ] + { + LOCATE on $(target) = . ; + DEPENDS $(.current-target) : $(target) ; + common.mkdir $(target) ; + } + MKLINK_OR_DIR on $(target) = mkdir \"$(target)\" ; +} + +rule do-rm +{ + local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ; + ALWAYS $(target) ; + RM on $(target) = rmdir ; + link.rm $(target) ; +} + +rule mklink-or-dir +{ + NOUPDATE $(<) ; +} + +actions mklink-or-dir +{ + $(MKLINK_OR_DIR) +} + +rule link-entries ( target : files * : split ? : deleted ? ) +{ + for local s in $(files) + { + local t = [ path.join $(target) [ path.basename $(s) ] ] ; + if ! $(.known-dirs.$(t)) + { + local t = [ path.native [ path.relative-to [ path.pwd ] $(t) ] ] ; + local s = [ path.native [ path.relative-to [ path.pwd ] $(target) ] ] ; + LOCATE on $(t) = . ; + DEPENDS $(t) : $(s) ; + NOUPDATE $(s) ; + } + if $(split) + { + link-recursively $(t) : $(s) : : $(deleted) ; + } + else + { + link-entries $(t) : [ path.glob $(s) : * ] ; + } + } + if ! $(.known-dirs.$(target)) + { + .known-dirs.$(target) += $(files) ; + .known-dirs.base.$(target) = $(.current-target) ; + } +} + +rule link-recursively ( target : source : no-recurse ? : deleted ? ) +{ + if $(deleted) { + force-update $(target) ; + } + + local split ; + if [ CHECK_IF_FILE [ path.native $(source) ] ] + { + do-file-link $(target) : $(source) ; + } + else if $(.known-dirs.$(target)) && ! $(no-recurse) + { + split = true ; + if ! $(.split-dirs.$(target)) + { + if [ READLINK [ path.native $(target) ] ] + { + if ! $(deleted) { + do-rm $(target) ; + deleted = true ; + .deleted-dirs.$(target) = true ; + } + } + local .current-target = $(.known-dirs.base.$(target)) ; + for local s in $(.known-dirs.$(target)) + { + local t = [ path.join $(target) [ path.basename $(s) ] ] ; + link-recursively $(t) : $(s) : flat : $(deleted) ; + } + do-split $(target) ; + } + else if $(.deleted-dirs.$(target)) + { + deleted = true ; + } + } + else if [ path.exists [ path.native $(target) ] ] && ! $(deleted) + { + local link-target = [ READLINK [ path.native $(target) ] ] ; + if $(link-target) + { + local full-path = + [ path.root [ path.make $(link-target) ] [ path.parent $(target) ] ] ; + # HACK: Take advantage of the fact that path.glob + # normalizes its arguments. If full-path and + # source are different, but both are empty, they + # will compare equal, but that's okay because + # for the purposes of this module, empty directories + # are equivalent. + if [ path.glob $(full-path) : * ] != [ path.glob $(source) : * ] + { + if ! $(deleted) { + do-rm $(target) ; + deleted = true ; + .deleted-dirs.$(target) = true ; + } + do-split $(target) ; + split = true ; + } + } + else + { + do-split $(target) ; + split = true ; + } + } + else if $(.can-symlink) = false && $(.can-junction) = false + { + if [ READLINK [ path.native $(target) ] ] + { + if ! $(deleted) { + do-rm $(target) ; + deleted = true ; + .deleted-dirs.$(target) = true ; + } + } + do-split $(target) ; + split = true ; + } + else + { + do-link $(target) : $(source) ; + } + + if $(split) + { + .split-dirs.$(target) = true ; + } + + if ! 
$(no-recurse) + { + link-entries $(target) : [ path.glob $(source) : * ] : $(split) : $(deleted) ; + } +} + +rule do-link-recursively ( target : source : properties * ) +{ + local target-path = [ property.select : $(properties) ] ; + local source-path = [ on $(source) return $(LOCATE) ] [ on $(source) return $(SEARCH) ] ; + + local absolute-target = [ path.root + [ path.join [ path.make $(target-path[1]:G=) ] + [ path.basename [ path.make $(source:G=) ] ] ] + [ path.pwd ] ] ; + + local absolute-source = [ path.root + [ path.root [ path.make $(source:G=) ] + [ path.make $(source-path[1]) ] ] + [ path.pwd ] ] ; + + local .current-target = $(target) ; + + link-recursively $(absolute-target) : $(absolute-source) ; +} + +rule mklink +{ + local target-path = [ on $(<) return $(LOCATE) ] [ on $(<) return $(SEARCH) ] . ; + local source-path = [ on $(>) return $(LOCATE) ] [ on $(>) return $(SEARCH) ] . ; + local relative-path = [ path.relative-to + [ path.parent [ path.join [ path.root [ path.make $(target-path[1]) ] [ path.pwd ] ] [ path.make $(<:G=) ] ] ] + [ path.join [ path.root [ path.make $(source-path[1]) ] [ path.pwd ] ] [ path.make $(>:G=) ] ] ] ; + + PATH_TO_SOURCE on $(<) = [ path.native $(relative-path) ] ; +} + +if [ os.name ] = NT +{ + +actions junction +{ + if exist "$(<)" rmdir "$(<)" + mklink /J "$(<)" "$(>)" +} + +actions mklink +{ + if exist "$(<)" del "$(<)" + mklink "$(<)" "$(PATH_TO_SOURCE)" +} + +actions hardlink +{ + if exist "$(<)" del "$(<)" + mklink /H "$(<)" "$(>)" +} + +actions rm +{ + rmdir "$(<)" +} + +} +else +{ + +actions mklink +{ + ln -f -s "$(PATH_TO_SOURCE)" "$(<)" +} + +actions hardlink +{ + ln -f "$(>)" "$(<)" +} + +actions rm +{ + rm "$(<)" +} + +} + +rule link-directory ( name : sources : requirements * : default-build * : usage-requirements * ) +{ + local project = [ project.current ] ; + sources = [ new file-or-directory-reference $(sources) : $(project) ] ; + targets.main-target-alternative $(sources) ; + return [ targets.main-target-alternative + [ new symlink-target-class $(name) : $(project) + : [ targets.main-target-sources $(sources) : $(name) : no-renaming ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build : $(project) ] + : [ targets.main-target-usage-requirements $(usage-requirements) : + $(project) ] ] ] ; +} + +IMPORT $(__name__) : link-directory : : link-directory ; diff --git a/src/boost/tools/build/src/tools/lzma.jam b/src/boost/tools/build/src/tools/lzma.jam new file mode 100644 index 000000000..b774ff27b --- /dev/null +++ b/src/boost/tools/build/src/tools/lzma.jam @@ -0,0 +1,134 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the lzma library +# +# After 'using lzma', the following targets are available: +# +# /lzma//lzma -- The lzma library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import property ; +import property-set ; + +header = lzma.h ; +# liblzma only needed for VisualC++ builds +names = lzma liblzma ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the lzma library. +# +# Options for configuring lzma:: +# +# +# The directory containing the lzma binaries. 
+# +# Overrides the default library name. +# +# The directory containing the lzma headers. +# +# If none of these options is specified, then the environmental +# variables LZMA_LIBRARY_PATH, LZMA_NAME, and LZMA_INCLUDE will +# be used instead. +# +# Examples:: +# +# # Find lzma in the default system location +# using lzma ; +# # Find lzma in /usr/local +# using lzma : 1.2.7 +# : /usr/local/include /usr/local/lib ; +# +rule init ( + version ? + # (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the target + + : is-default ? + # Default configurations are only used when + # not yet configured. This option is + # deprecated. A configuration will be treated + # as a default when none of , , + # , and are present. + ) +{ + local caller = [ project.current ] ; + + if ! $(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project lzma ; + } + + local library-path = [ feature.get-values : $(options) ] ; + local include-path = [ feature.get-values : $(options) ] ; + local library-name = [ feature.get-values : $(options) ] ; + + if ! $(options) + { + is-default = true ; + } + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [lzma] lzma is already configured" ; + } + } + else + { + errors.user-error "lzma is already configured" ; + } + return ; + } + else + { + if $(.debug) + { + ECHO "notice: [lzma] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [lzma] Condition" [ $(condition).raw ] ; + } + } + + local mt = [ new ac-library lzma : $(.project) : $(condition) : + $(include-path) : $(library-path) : $(library-name) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + targets.main-target-alternative $(mt) ; + } + .configured.$(condition) = true ; +} diff --git a/src/boost/tools/build/src/tools/make.jam b/src/boost/tools/build/src/tools/make.jam new file mode 100644 index 000000000..93e41028a --- /dev/null +++ b/src/boost/tools/build/src/tools/make.jam @@ -0,0 +1,69 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2003 Douglas Gregor +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines the 'make' main target rule. + +import "class" : new ; +import param ; +import project ; +import property-set ; +import targets ; + + +class make-target-class : basic-target +{ + import "class" : new ; + import indirect ; + import toolset ; + import type ; + import virtual-target ; + + rule __init__ ( name : project : sources * : requirements * + : default-build * : usage-requirements * ) + { + basic-target.__init__ $(name) : $(project) : $(sources) : + $(requirements) : $(default-build) : $(usage-requirements) ; + } + + rule construct ( name : source-targets * : property-set ) + { + local action-name = [ $(property-set).get ] ; + # 'm' will always be set -- we add '@' ourselves in the 'make' rule + # below. 
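+        # For example, a Jamfile declaring (with a hypothetical rule name)
+        #
+        #   make out.txt : in.txt : @do-something ;
+        #
+        # carries an <action>@do-something requirement, so the MATCH below
+        # yields 'do-something' for look-up in the declaring project module.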
+ local m = [ MATCH ^@(.*) : $(action-name) ] ; + + local relevant = [ toolset.relevant [ indirect.get-rule $(m[1]) ] ] ; + local a = [ new action $(source-targets) : $(m[1]) : [ $(property-set).add $(relevant) ] ] ; + local t = [ new file-target $(self.name) exact : [ type.type + $(self.name) ] : $(self.project) : $(a) ] ; + return $(relevant) [ virtual-target.register $(t) ] ; + } +} + + +# Declares the 'make' main target. +# +rule make ( target-name : sources * : generating-rule + : requirements * : + usage-requirements * ) +{ + param.handle-named-params + sources generating-rule requirements default-build usage-requirements ; + # The '@' sign causes the feature.jam module to qualify rule name with the + # module name of current project, if needed. + local m = [ MATCH ^(@).* : $(generating-rule) ] ; + if ! $(m) + { + generating-rule = @$(generating-rule) ; + } + targets.create-metatarget make-target-class : [ project.current ] : + $(target-name) : $(sources) : $(requirements) $(generating-rule) + : : $(usage-requirements) ; +} + + +IMPORT $(__name__) : make : : make ; diff --git a/src/boost/tools/build/src/tools/make.py b/src/boost/tools/build/src/tools/make.py new file mode 100644 index 000000000..f4a226d96 --- /dev/null +++ b/src/boost/tools/build/src/tools/make.py @@ -0,0 +1,59 @@ +# Status: ported. +# Base revision: 64068 + +# Copyright 2003 Dave Abrahams +# Copyright 2003 Douglas Gregor +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines the 'make' main target rule. + +from b2.build.targets import BasicTarget +from b2.build.virtual_target import Action, FileTarget +from b2.build import type +from b2.manager import get_manager +import b2.build.property_set + + +class MakeTarget(BasicTarget): + + def construct(self, name, source_targets, property_set): + + action_name = property_set.get("")[0] + action = Action(get_manager(), source_targets, action_name[1:], property_set) + target = FileTarget(self.name(), type.type(self.name()), + self.project(), action, exact=True) + return [ b2.build.property_set.empty(), + [self.project().manager().virtual_targets().register(target)]] + +def make (target_name, sources, generating_rule, + requirements=None, usage_requirements=None): + + target_name = target_name[0] + generating_rule = generating_rule[0] + if generating_rule[0] != '@': + generating_rule = '@' + generating_rule + + if not requirements: + requirements = [] + + + requirements.append("%s" % generating_rule) + + m = get_manager() + targets = m.targets() + project = m.projects().current() + engine = m.engine() + engine.register_bjam_action(generating_rule) + + targets.main_target_alternative(MakeTarget( + target_name, project, + targets.main_target_sources(sources, target_name), + targets.main_target_requirements(requirements, project), + targets.main_target_default_build([], project), + targets.main_target_usage_requirements(usage_requirements or [], project))) + +get_manager().projects().add_rule("make", make) + diff --git a/src/boost/tools/build/src/tools/mc.jam b/src/boost/tools/build/src/tools/mc.jam new file mode 100644 index 000000000..c6c770e80 --- /dev/null +++ b/src/boost/tools/build/src/tools/mc.jam @@ -0,0 +1,44 @@ +#~ Copyright 2005 Alexey Pakhunov. +#~ Distributed under the Boost Software License, Version 1.0. 
+#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Support for Microsoft message compiler tool. +# Notes: +# - there's just message compiler tool, there's no tool for +# extracting message strings from sources +# - This file allows to use Microsoft message compiler +# with any toolset. In msvc.jam, there's more specific +# message compiling action. + +import common ; +import generators ; +import feature : feature get-values ; +import toolset : flags ; +import type ; +import rc ; + +rule init ( ) +{ +} + +type.register MC : mc ; + + +# Command line options +feature mc-input-encoding : ansi unicode : free ; +feature mc-output-encoding : unicode ansi : free ; +feature mc-set-customer-bit : no yes : free ; + +flags mc.compile MCFLAGS ansi : -a ; +flags mc.compile MCFLAGS unicode : -u ; +flags mc.compile MCFLAGS ansi : -A ; +flags mc.compile MCFLAGS unicode : -U ; +flags mc.compile MCFLAGS no : ; +flags mc.compile MCFLAGS yes : -c ; + +generators.register-standard mc.compile : MC : H RC ; + +actions compile +{ + mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)" +} diff --git a/src/boost/tools/build/src/tools/mc.py b/src/boost/tools/build/src/tools/mc.py new file mode 100644 index 000000000..cfd635e13 --- /dev/null +++ b/src/boost/tools/build/src/tools/mc.py @@ -0,0 +1,46 @@ +# Copyright (c) 2005 Alexey Pakhunov. +# Copyright (c) 2011 Juraj Ivancic +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Support for Microsoft message compiler tool. +# Notes: +# - there's just message compiler tool, there's no tool for +# extracting message strings from sources +# - This file allows to use Microsoft message compiler +# with any toolset. In msvc.jam, there's more specific +# message compiling action. + +import bjam + +from b2.tools import common, rc +from b2.build import generators, type +from b2.build.toolset import flags +from b2.build.feature import feature +from b2.manager import get_manager + +def init(): + pass + +type.register('MC', ['mc']) + + +# Command line options +feature('mc-input-encoding', ['ansi', 'unicode'], ['free']) +feature('mc-output-encoding', ['unicode', 'ansi'], ['free']) +feature('mc-set-customer-bit', ['no', 'yes'], ['free']) + +flags('mc.compile', 'MCFLAGS', ['ansi'], ['-a']) +flags('mc.compile', 'MCFLAGS', ['unicode'], ['-u']) +flags('mc.compile', 'MCFLAGS', ['ansi'], ['-A']) +flags('mc.compile', 'MCFLAGS', ['unicode'], ['-U']) +flags('mc.compile', 'MCFLAGS', ['no'], []) +flags('mc.compile', 'MCFLAGS', ['yes'], ['-c']) + +generators.register_standard('mc.compile', ['MC'], ['H', 'RC']) + +get_manager().engine().register_action( + 'mc.compile', + 'mc $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"') diff --git a/src/boost/tools/build/src/tools/message.jam b/src/boost/tools/build/src/tools/message.jam new file mode 100644 index 000000000..8d99a7d67 --- /dev/null +++ b/src/boost/tools/build/src/tools/message.jam @@ -0,0 +1,62 @@ +# Copyright 2008 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Defines main target type 'message', that prints a message when built for the +# first time. 
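+#
+# A minimal usage sketch (hypothetical Jamfile):
+#
+#   message deprecation-note : "libfoo is deprecated;" "use libbar instead." ;
+#
+# The text is echoed once, the first time the target is built; the target
+# itself produces no files.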
+ +import project ; +import "class" : new ; +import targets ; +import property-set ; + +class message-target-class : basic-target +{ + rule set-message ( * ) + { + self.1 = $(1) ; + self.2 = $(2) ; + self.3 = $(3) ; + self.4 = $(4) ; + self.5 = $(5) ; + self.6 = $(6) ; + self.7 = $(7) ; + self.8 = $(8) ; + self.9 = $(9) ; + self.built = ; + } + + rule construct ( name : source-targets * : property-set ) + { + if ! $(self.built) + { + for i in 1 2 3 4 5 6 7 8 9 + { + if $(self.$(i)) + { + ECHO $(self.$(i)) ; + } + } + self.built = 1 ; + } + + return [ property-set.empty ] ; + } +} + + +rule message ( name : * ) +{ + local project = [ project.current ] ; + + local result = [ targets.main-target-alternative + [ new message-target-class $(name) : $(project) + : [ targets.main-target-sources : $(name) ] + : [ targets.main-target-requirements : $(project) ] + : [ targets.main-target-default-build : $(project) ] + : [ targets.main-target-usage-requirements : $(project) ] + ] ] ; + $(result).set-message $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + return $(result) ; +} +IMPORT $(__name__) : message : : message ; diff --git a/src/boost/tools/build/src/tools/message.py b/src/boost/tools/build/src/tools/message.py new file mode 100644 index 000000000..2fe93a3f2 --- /dev/null +++ b/src/boost/tools/build/src/tools/message.py @@ -0,0 +1,54 @@ +# Status: ported. +# Base revision: 64488. +# +# Copyright 2008, 2010 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Defines main target type 'message', that prints a message when built for the +# first time. + +import b2.build.targets as targets +import b2.build.property_set as property_set + +from b2.manager import get_manager + +class MessageTargetClass(targets.BasicTarget): + + def __init__(self, name, project, sources, requirements, default_build, + usage_requirements, *args): + targets.BasicTarget.__init__( + self, name, project, sources, requirements, default_build, usage_requirements) + self.args = args + self.built = False + + def construct(self, name, sources, ps): + + if not self.built: + for arg in self.args: + if type(arg) == type([]): + arg = " ".join(arg) + print arg + self.built = True + + return (property_set.empty(), []) + +def message(name, *args): + + if type(name) == type([]): + name = name[0] + + t = get_manager().targets() + project = get_manager().projects().current() + + return t.main_target_alternative( + MessageTargetClass( + name, project, + t.main_target_sources([], name), + t.main_target_requirements([], project), + t.main_target_default_build([], project), + t.main_target_usage_requirements([], project), + *args + )) + +get_manager().projects().add_rule("message", message) diff --git a/src/boost/tools/build/src/tools/midl.jam b/src/boost/tools/build/src/tools/midl.jam new file mode 100644 index 000000000..cff3725de --- /dev/null +++ b/src/boost/tools/build/src/tools/midl.jam @@ -0,0 +1,142 @@ +# Copyright (c) 2005 Alexey Pakhunov. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. 
(See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Microsoft Interface Definition Language (MIDL) related routines + +import common ; +import generators ; +import feature : feature get-values ; +import os ; +import scanner ; +import toolset : flags ; +import type ; + +rule init ( ) +{ +} + +type.register IDL : idl ; + +# A type library (.tlb) is generated by MIDL compiler and can be included +# to resources of an application (.rc). In order to be found by a resource +# compiler its target type should be derived from 'H' - otherwise +# the property '' will be ignored. +type.register MSTYPELIB : tlb : H ; + + +# Register scanner for MIDL files +class midl-scanner : scanner +{ + import path property-set regex scanner type virtual-target ; + + rule __init__ ( includes * ) + { + scanner.__init__ ; + + self.includes = $(includes) ; + + # List of quoted strings + self.re-strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ; + + # 'import' and 'importlib' directives + self.re-import = "import"$(self.re-strings)"[ \t]*;" ; + self.re-importlib = "importlib[ \t]*[(]"$(self.re-strings)"[)][ \t]*;" ; + + # C preprocessor 'include' directive + self.re-include-angle = "#[ \t]*include[ \t]*<(.*)>" ; + self.re-include-quoted = "#[ \t]*include[ \t]*\"(.*)\"" ; + } + + rule pattern ( ) + { + # Match '#include', 'import' and 'importlib' directives + return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" ; + } + + rule process ( target : matches * : binding ) + { + local included-angle = [ regex.transform $(matches) : $(self.re-include-angle) : 1 ] ; + local included-quoted = [ regex.transform $(matches) : $(self.re-include-quoted) : 1 ] ; + local imported = [ regex.transform $(matches) : $(self.re-import) : 1 3 ] ; + local imported_tlbs = [ regex.transform $(matches) : $(self.re-importlib) : 1 3 ] ; + + # CONSIDER: the new scoping rule seem to defeat "on target" variables. + local g = [ on $(target) return $(HDRGRIST) ] ; + local b = [ NORMALIZE_PATH $(binding:D) ] ; + + # Attach binding of including file to included targets. + # When target is directly created from virtual target + # this extra information is unnecessary. But in other + # cases, it allows to distinguish between two headers of the + # same name included from different places. 
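+        # E.g. (illustrative only) a foo.h included via "" from two different
+        # directories is tracked as <hdrgrist#dir1>foo.h and
+        # <hdrgrist#dir2>foo.h, so each copy gets its own SEARCH path below.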
+ local g2 = $(g)"#"$(b) ; + + included-angle = $(included-angle:G=$(g)) ; + included-quoted = $(included-quoted:G=$(g2)) ; + imported = $(imported:G=$(g2)) ; + imported_tlbs = $(imported_tlbs:G=$(g2)) ; + + local all = $(included-angle) $(included-quoted) $(imported) ; + + INCLUDES $(target) : $(all) ; + DEPENDS $(target) : $(imported_tlbs) ; + NOCARE $(all) $(imported_tlbs) ; + SEARCH on $(included-angle) = $(self.includes:G=) ; + SEARCH on $(included-quoted) = $(b) $(self.includes:G=) ; + SEARCH on $(imported) = $(b) $(self.includes:G=) ; + SEARCH on $(imported_tlbs) = $(b) $(self.includes:G=) ; + + scanner.propagate + [ type.get-scanner CPP : [ property-set.create $(self.includes) ] ] : + $(included-angle) $(included-quoted) : $(target) ; + + scanner.propagate $(__name__) : $(imported) : $(target) ; + } +} + +scanner.register midl-scanner : include ; +type.set-scanner IDL : midl-scanner ; + + +# Command line options +feature midl-stubless-proxy : yes no : propagated ; +feature midl-robust : yes no : propagated ; + +flags midl.compile.idl MIDLFLAGS yes : /Oicf ; +flags midl.compile.idl MIDLFLAGS no : /Oic ; +flags midl.compile.idl MIDLFLAGS yes : /robust ; +flags midl.compile.idl MIDLFLAGS no : /no_robust ; + +# Architecture-specific options +architecture-x86 = x86 ; +address-model-32 = 32 ; +address-model-64 = 64 ; + +flags midl.compile.idl MIDLFLAGS $(architecture-x86)/$(address-model-32) : /win32 ; +flags midl.compile.idl MIDLFLAGS $(architecture-x86)/64 : /x64 ; +flags midl.compile.idl MIDLFLAGS ia64/$(address-model-64) : /ia64 ; + + +flags midl.compile.idl DEFINES ; +flags midl.compile.idl UNDEFS ; +flags midl.compile.idl INCLUDES ; + + +generators.register-c-compiler midl.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) ; + + +# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior +# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures +# that both files will be created so bjam will not try to recreate them +# constantly. +TOUCH_FILE = [ common.file-touch-command ] ; + +actions compile.idl +{ + midl /nologo @"@($(<[1]:W).rsp:E=$(nl)"$(>:W)" $(nl)-D$(DEFINES) $(nl)"-I$(INCLUDES)" $(nl)-U$(UNDEFS) $(nl)$(MIDLFLAGS) $(nl)/tlb "$(<[1]:W)" $(nl)/h "$(<[2]:W)" $(nl)/iid "$(<[3]:W)" $(nl)/proxy "$(<[4]:W)" $(nl)/dlldata "$(<[5]:W)")" + $(TOUCH_FILE) "$(<[4]:W)" + $(TOUCH_FILE) "$(<[5]:W)" +} diff --git a/src/boost/tools/build/src/tools/midl.py b/src/boost/tools/build/src/tools/midl.py new file mode 100644 index 000000000..7619ed1bb --- /dev/null +++ b/src/boost/tools/build/src/tools/midl.py @@ -0,0 +1,134 @@ +# Copyright (c) 2005 Alexey Pakhunov. +# Copyright (c) 2011 Juraj Ivancic +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Microsoft Interface Definition Language (MIDL) related routines +from b2.build import scanner, type +from b2.build.toolset import flags +from b2.build.feature import feature +from b2.manager import get_manager +from b2.tools import builtin, common +from b2.util import regex, utility + +def init(): + pass + +type.register('IDL', ['idl']) + +# A type library (.tlb) is generated by MIDL compiler and can be included +# to resources of an application (.rc). In order to be found by a resource +# compiler its target type should be derived from 'H' - otherwise +# the property '' will be ignored. 
+type.register('MSTYPELIB', ['tlb'], 'H') + +# Register scanner for MIDL files +class MidlScanner(scanner.Scanner): + def __init__ (self, includes=[]): + scanner.Scanner.__init__(self) + self.includes = includes + + # List of quoted strings + re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ; + + # 'import' and 'importlib' directives + self.re_import = "import" + re_strings + "[ \t]*;" ; + self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;" ; + + # C preprocessor 'include' directive + self.re_include_angle = "#[ \t]*include[ \t]*<(.*)>" ; + self.re_include_quoted = "#[ \t]*include[ \t]*\"(.*)\"" ; + + def pattern(): + # Match '#include', 'import' and 'importlib' directives + return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)" + + def process(self, target, matches, binding): + included_angle = regex.transform(matches, self.re_include_angle) + included_quoted = regex.transform(matches, self.re_include_quoted) + imported = regex.transform(matches, self.re_import, [1, 3]) + imported_tlbs = regex.transform(matches, self.re_importlib, [1, 3]) + + # CONSIDER: the new scoping rule seem to defeat "on target" variables. + g = bjam.call('get-target-variable', target, 'HDRGRIST')[0] + b = os.path.normpath(os.path.dirname(binding)) + + # Attach binding of including file to included targets. + # When target is directly created from virtual target + # this extra information is unnecessary. But in other + # cases, it allows to distinguish between two headers of the + # same name included from different places. + g2 = g + "#" + b + + g = "<" + g + ">" + g2 = "<" + g2 + ">" + + included_angle = [ g + x for x in included_angle ] + included_quoted = [ g + x for x in included_quoted ] + imported = [ g + x for x in imported ] + imported_tlbs = [ g + x for x in imported_tlbs ] + + all = included_angle + included_quoted + imported + + bjam.call('INCLUDES', [target], all) + bjam.call('DEPENDS', [target], imported_tlbs) + bjam.call('NOCARE', all + imported_tlbs) + engine.set_target_variable(included_angle , 'SEARCH', [utility.get_value(inc) for inc in self.includes]) + engine.set_target_variable(included_quoted, 'SEARCH', [utility.get_value(inc) for inc in self.includes]) + engine.set_target_variable(imported , 'SEARCH', [utility.get_value(inc) for inc in self.includes]) + engine.set_target_variable(imported_tlbs , 'SEARCH', [utility.get_value(inc) for inc in self.includes]) + + get_manager().scanners().propagate(type.get_scanner('CPP', PropertySet(self.includes)), included_angle + included_quoted) + get_manager().scanners().propagate(self, imported) + +scanner.register(MidlScanner, 'include') +type.set_scanner('IDL', MidlScanner) + + +# Command line options +feature('midl-stubless-proxy', ['yes', 'no'], ['propagated'] ) +feature('midl-robust', ['yes', 'no'], ['propagated'] ) + +flags('midl.compile.idl', 'MIDLFLAGS', ['yes'], ['/Oicf' ]) +flags('midl.compile.idl', 'MIDLFLAGS', ['no' ], ['/Oic' ]) +flags('midl.compile.idl', 'MIDLFLAGS', ['yes' ], ['/robust' ]) +flags('midl.compile.idl', 'MIDLFLAGS', ['no' ], ['/no_robust']) + +# Architecture-specific options +architecture_x86 = ['' , 'x86'] +address_model_32 = ['', '32'] +address_model_64 = ['', '64'] + +flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/' + m for ar in architecture_x86 for m in address_model_32 ], ['/win32']) +flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/64' for ar in architecture_x86], ['/x64']) +flags('midl.compile.idl', 'MIDLFLAGS', ['ia64/' + m for m in address_model_64], ['/ia64']) + 
+flags('midl.compile.idl', 'DEFINES', [], ['']) +flags('midl.compile.idl', 'UNDEFS', [], ['']) +flags('midl.compile.idl', 'INCLUDES', [], ['']) + + +builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], []) + + +# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior +# depends on contents of the source IDL file. Calling TOUCH_FILE below ensures +# that both files will be created so bjam will not try to recreate them +# constantly. +get_manager().engine().register_action( + 'midl.compile.idl', + '''midl /nologo @"@($(<[1]:W).rsp:E= +"$(>:W)" +-D$(DEFINES) +"-I$(INCLUDES)" +-U$(UNDEFS) +$(MIDLFLAGS) +/tlb "$(<[1]:W)" +/h "$(<[2]:W)" +/iid "$(<[3]:W)" +/proxy "$(<[4]:W)" +/dlldata "$(<[5]:W)")" +{touch} "$(<[4]:W)" +{touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command())) diff --git a/src/boost/tools/build/src/tools/mipspro.jam b/src/boost/tools/build/src/tools/mipspro.jam new file mode 100644 index 000000000..095334e39 --- /dev/null +++ b/src/boost/tools/build/src/tools/mipspro.jam @@ -0,0 +1,148 @@ +# Copyright Noel Belcourt 2007. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import property ; +import generators ; +import os ; +import toolset : flags ; +import feature ; +import fortran ; +import type ; +import common ; + +feature.extend toolset : mipspro ; +toolset.inherit mipspro : unix ; +generators.override mipspro.prebuilt : builtin.lib-generator ; +generators.override mipspro.searched-lib-generator : searched-lib-generator ; + +# Documentation and toolchain description located +# http://www.sgi.com/products/software/irix/tools/ + +rule init ( version ? 
: command * : options * ) +{ + local condition = [ + common.check-init-parameters mipspro : version $(version) ] ; + + command = [ common.get-invocation-command mipspro : CC : $(command) ] ; + + common.handle-options mipspro : $(condition) : $(command) : $(options) ; + + command_c = $(command_c[1--2]) $(command[-1]:B=cc) ; + + toolset.flags mipspro CONFIG_C_COMMAND $(condition) : $(command_c) ; + + # fortran support + local command = [ + common.get-invocation-command mipspro : f77 : $(command) : $(install_dir) ] ; + + command_f = $(command_f[1--2]) $(command[-1]:B=f77) ; + toolset.flags mipspro CONFIG_F_COMMAND $(condition) : $(command_f) ; + + # set link flags + flags mipspro.link FINDLIBS-ST : [ + feature.get-values : $(options) ] : unchecked ; + + flags mipspro.link FINDLIBS-SA : [ + feature.get-values : $(options) ] : unchecked ; +} + +# Declare generators +generators.register-c-compiler mipspro.compile.c : C : OBJ : mipspro ; +generators.register-c-compiler mipspro.compile.c++ : CPP : OBJ : mipspro ; +generators.register-fortran-compiler mipspro.compile.fortran : FORTRAN : OBJ : mipspro ; + +cpu-arch-32 = + / + /32 ; + +cpu-arch-64 = + /64 ; + +flags mipspro.compile OPTIONS $(cpu-arch-32) : -n32 ; +flags mipspro.compile OPTIONS $(cpu-arch-64) : -64 ; + +# Declare flags and actions for compilation +flags mipspro.compile OPTIONS on : -g ; +# flags mipspro.compile OPTIONS on : -xprofile=tcov ; +flags mipspro.compile OPTIONS off : -w ; +flags mipspro.compile OPTIONS on : -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning +flags mipspro.compile OPTIONS all : -fullwarn ; +flags mipspro.compile OPTIONS extra : -fullwarn ; +flags mipspro.compile OPTIONS pedantic : -fullwarn -ansiW -diag_suppress 1429 ; # suppress long long is nonstandard warning +flags mipspro.compile OPTIONS on : -w2 ; +flags mipspro.compile OPTIONS speed : -Ofast ; +flags mipspro.compile OPTIONS space : -O2 ; +flags mipspro.compile OPTIONS : "-LANG:std" ; +flags mipspro.compile.c++ OPTIONS off : "-INLINE:none" ; +flags mipspro.compile.c++ OPTIONS ; +flags mipspro.compile DEFINES ; +flags mipspro.compile INCLUDES ; + + +flags mipspro.compile.fortran OPTIONS ; + +actions compile.c +{ + "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.fortran +{ + "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +# Declare flags and actions for linking +flags mipspro.link OPTIONS on : -g ; +# Strip the binary when no debugging is needed +# flags mipspro.link OPTIONS off : -s ; +# flags mipspro.link OPTIONS on : -xprofile=tcov ; +# flags mipspro.link OPTIONS multi : -mt ; + +flags mipspro.link OPTIONS $(cpu-arch-32) : -n32 ; +flags mipspro.link OPTIONS $(cpu-arch-64) : -64 ; + +flags mipspro.link OPTIONS speed : -Ofast ; +flags mipspro.link OPTIONS space : -O2 ; +flags mipspro.link OPTIONS ; +flags mipspro.link LINKPATH ; +flags mipspro.link FINDLIBS-ST ; +flags mipspro.link FINDLIBS-SA ; +flags mipspro.link FINDLIBS-SA multi : pthread ; +flags mipspro.link LIBRARIES ; +flags mipspro.link LINK-RUNTIME static : static ; +flags mipspro.link LINK-RUNTIME shared : dynamic ; +flags mipspro.link RPATH ; + +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" -FE:template_in_elf_section -ptused $(OPTIONS) 
-L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) -lm +} + +# Slight mods for dlls +rule link.dll ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) +} + +# Declare action for creating static libraries +actions piecemeal archive +{ + ar -cr "$(<)" "$(>)" +} diff --git a/src/boost/tools/build/src/tools/mpi.jam b/src/boost/tools/build/src/tools/mpi.jam new file mode 100644 index 000000000..77941cf11 --- /dev/null +++ b/src/boost/tools/build/src/tools/mpi.jam @@ -0,0 +1,637 @@ +# Support for the Message Passing Interface (MPI) +# +# (C) Copyright 2005, 2006 Trustees of Indiana University +# (C) Copyright 2005 Douglas Gregor +# +# Distributed under the Boost Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt.) +# +# Authors: Douglas Gregor +# Andrew Lumsdaine +# +# ==== MPI Configuration ==== +# +# For many users, MPI support can be enabled simply by adding the following +# line to your user-config.jam file: +# +# using mpi ; +# +# This should auto-detect MPI settings based on the MPI wrapper compiler in +# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or +# has a different name, you can pass the name of the wrapper compiler as the +# first argument to the mpi module: +# +# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ; +# +# If your MPI implementation does not have a wrapper compiler, or the MPI +# auto-detection code does not work with your MPI's wrapper compiler, +# you can pass MPI-related options explicitly via the second parameter to the +# mpi module: +# +# using mpi : : lammpio lammpi++ +# mpi lam +# dl ; +# +# To see the results of MPI auto-detection, pass "--debug-configuration" on +# the bjam command line. +# +# The (optional) fourth argument configures Boost.MPI for running +# regression tests. These parameters specify the executable used to +# launch jobs (default: "mpirun") followed by any necessary arguments +# to this to run tests and tell the program to expect the number of +# processors to follow (default: "-np"). With the default parameters, +# for instance, the test harness will execute, e.g., +# +# mpirun -np 4 all_gather_test +# +# ==== Linking Against the MPI Libraries === +# +# To link against the MPI libraries, import the "mpi" module and add the +# following requirement to your target: +# +# /mpi//mpi +# +# Since MPI support is not always available, you should check +# "mpi.configured" before trying to link against the MPI libraries. + +import "class" : new ; +import common ; +import feature : feature ; +import generators ; +import os ; +import project ; +import property ; +import testing ; +import toolset ; +import type ; +import path ; + +# Make this module a project +project.initialize $(__name__) ; +project mpi ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +# Assuming the first part of the command line is the given prefix +# followed by some non-empty value, remove the first argument. 
Returns +# either nothing (if there was no prefix or no value) or a pair +# +# value rest-of-cmdline +# +# This is a subroutine of cmdline_to_features +rule add_feature ( prefix name cmdline ) +{ + local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; + + # If there was no value associated with the prefix, abort + if ! $(match) { + return ; + } + + local value = $(match[1]) ; + + if [ MATCH " +" : $(value) ] { + value = "\"$(value)\"" ; + } + + return "<$(name)>$(value)" $(match[2]) ; +} + +# Strip any end-of-line characters off the given string and return the +# result. +rule strip-eol ( string ) +{ + local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ; + + if $(match) + { + return $(match[1]) ; + } + else + { + return $(string) ; + } +} + +# Split a command-line into a set of features. Certain kinds of +# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced +# with their B2 equivalents (e.g., , , +# , ). All other arguments are introduced +# using the features in the unknown-features parameter, because we +# don't know how to deal with them. For instance, if your compile and +# correct. The incoming command line should be a string starting with +# an executable (e.g., g++ -I/include/path") and may contain any +# number of command-line arguments thereafter. The result is a list of +# features corresponding to the given command line, ignoring the +# executable. +rule cmdline_to_features ( cmdline : unknown-features ? ) +{ + local executable ; + local features ; + local otherflags ; + local result ; + + unknown-features ?= ; + + # Pull the executable out of the command line. At this point, the + # executable is just thrown away. + local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; + executable = $(match[1]) ; + cmdline = $(match[2]) ; + + # List the prefix/feature pairs that we will be able to transform. + # Every kind of parameter not mentioned here will be placed in both + # cxxflags and linkflags, because we don't know where they should go. + local feature_kinds-D = "define" ; + local feature_kinds-I = "include" ; + local feature_kinds-L = "library-path" ; + local feature_kinds-l = "find-shared-library" ; + + while $(cmdline) { + + # Check for one of the feature prefixes we know about. If we + # find one (and the associated value is nonempty), convert it + # into a feature. + local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ; + local matched ; + if $(match) && $(match[2]) { + local prefix = $(match[1]) ; + if $(feature_kinds$(prefix)) { + local name = $(feature_kinds$(prefix)) ; + local add = [ add_feature $(prefix) $(name) $(cmdline) ] ; + + if $(add) { + + if $(add[1]) = pthread + { + # Uhm. It's not really nice that this MPI implementation + # uses -lpthread as opposed to -pthread. We do want to + # set multi, instead of -lpthread. + result += "multi" ; + MPI_EXTRA_REQUIREMENTS += "multi" ; + } + else + { + result += $(add[1]) ; + } + + cmdline = $(add[2]) ; + matched = yes ; + } + } + } + + # If we haven't matched a feature prefix, just grab the command-line + # argument itself. If we can map this argument to a feature + # (e.g., -pthread -> multi), then do so; otherwise, + # and add it to the list of "other" flags that we don't + # understand. + if ! 
$(matched) { + match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ; + local value = $(match[1]) ; + cmdline = $(match[2]) ; + + # Check for multithreading support + if $(value) = "-pthread" || $(value) = "-pthreads" + { + result += "multi" ; + + # DPG: This is a hack intended to work around a BBv2 bug where + # requirements propagated from libraries are not checked for + # conflicts when BBv2 determines which "common" properties to + # apply to a target. In our case, the single property + # gets propagated from the common properties to Boost.MPI + # targets, even though multi is in the usage + # requirements of /mpi//mpi. + MPI_EXTRA_REQUIREMENTS += "multi" ; + } + else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] { + otherflags += $(value) ; + } + } + } + + # If there are other flags that we don't understand, add them to the + # result as both and + if $(otherflags) { + for unknown in $(unknown-features) + { + result += "$(unknown)$(otherflags:J= )" ; + } + } + + return $(result) ; +} + +# Determine if it is safe to execute the given shell command by trying +# to execute it and determining whether the exit code is zero or +# not. Returns true for an exit code of zero, false otherwise. +local rule safe-shell-command ( cmdline ) +{ + local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ; + return [ MATCH ".*(SSCOK).*" : $(result) ] ; +} + +# Initialize the MPI module. +rule init ( mpicxx ? : options * : mpirun-with-options * ) +{ + if ! $(options) && $(.debug-configuration) + { + ECHO "===============MPI Auto-configuration===============" ; + } + + if ! $(mpicxx) && [ os.on-windows ] + { + # Paths for Microsoft MPI + local ms_mpi_path_native = "C:\\Program Files\\Microsoft MPI" ; + local ms_mpi_sdk_path_native = "C:\\Program Files (x86)\\Microsoft SDKs\\MPI" ; + + # Path for Microsoft Compute Cluster Pack + local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ; + + # Try to auto-configure Microsoft MPI + if [ GLOB $(ms_mpi_path_native)\\Bin : mpiexec.exe ] && + [ GLOB $(ms_mpi_sdk_path_native)\\Include : mpi.h ] + { + if $(.debug-configuration) + { + ECHO "Found Microsoft MPI: $(ms_mpi_path_native)" ; + ECHO "Found Microsoft MPI SDK: $(ms_mpi_sdk_path_native)" ; + } + + local ms_mpi_sdk_path = [ path.make $(ms_mpi_sdk_path_native) ] ; + + # Pick up either the 32-bit or 64-bit library, depending on which address + # model the user has selected. Default to 32-bit. + options = $(ms_mpi_sdk_path)/Include + 64:$(ms_mpi_sdk_path)/Lib/x64 + $(ms_mpi_sdk_path)/Lib/x86 + msmpi + msvc:_SECURE_SCL=0 + ; + + # Setup the "mpirun" equivalent (mpiexec) + .mpirun = "\"$(ms_mpi_path_native)\\Bin\\mpiexec.exe"\" ; + .mpirun_flags = -n ; + } + # Try to auto-configure to the Microsoft Compute Cluster Pack + else if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ] + { + if $(.debug-configuration) + { + ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ; + } + + local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ; + + # Pick up either the 32-bit or 64-bit library, depending on which address + # model the user has selected. Default to 32-bit. 
+ options = $(cluster_pack_path)/Include + 64:$(cluster_pack_path)/Lib/amd64 + $(cluster_pack_path)/Lib/i386 + msmpi + msvc:_SECURE_SCL=0 + ; + + # Setup the "mpirun" equivalent (mpiexec) + .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ; + .mpirun_flags = -n ; + } + else if $(.debug-configuration) + { + ECHO "Did not find Microsoft MPI in $(ms_mpi_path_native)" ; + ECHO " and/or Microsoft MPI SDK in $(ms_mpi_sdk_path_native)." ; + ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ; + } + } + + if ! $(options) + { + # Try to auto-detect options based on the wrapper compiler + local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ; + + if ! $(mpicxx) && ! $(command) + { + # Try "mpiCC", which is used by MPICH + command = [ common.get-invocation-command mpi : mpiCC ] ; + } + + if ! $(mpicxx) && ! $(command) + { + # Try "mpicxx", which is used by OpenMPI and MPICH2 + command = [ common.get-invocation-command mpi : mpicxx ] ; + } + + if ! $(mpicxx) && ! $(command) + { + # Try "CC", which is used by Cray + command = [ common.get-invocation-command mpi : CC ] ; + } + + local result ; + local compile_flags ; + local link_flags ; + + if ! $(command) + { + # Do nothing: we'll complain later + } + # OpenMPI and newer versions of LAM-MPI have -showme:compile and + # -showme:link. + else if [ safe-shell-command "$(command) -showme:compile" ] && + [ safe-shell-command "$(command) -showme:link" ] + { + if $(.debug-configuration) + { + ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ; + } + + compile_flags = [ SHELL "$(command) -showme:compile" ] ; + link_flags = [ SHELL "$(command) -showme:link" ] ; + + # Prepend COMPILER as the executable name, to match the format of + # other compilation commands. 
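+            # (cmdline_to_features discards the first whitespace-separated
+            # token as the executable name, hence the dummy COMPILER token
+            # added below; -DOMPI_SKIP_MPICXX keeps Open MPI's own, deprecated
+            # C++ bindings out of the build.)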
+ compile_flags = "COMPILER $(compile_flags) -DOMPI_SKIP_MPICXX " ; + link_flags = "COMPILER $(link_flags)" ; + } + # Look for LAM-MPI's -showme + else if [ safe-shell-command "$(command) -showme" ] + { + if $(.debug-configuration) + { + ECHO "Found older LAM-MPI wrapper compiler: $(command)" ; + } + + result = [ SHELL "$(command) -showme" ] ; + } + # Look for MPICH + else if [ safe-shell-command "$(command) -show" ] + { + if $(.debug-configuration) + { + ECHO "Found MPICH wrapper compiler: $(command)" ; + } + compile_flags = [ SHELL "$(command) -compile_info" ] ; + link_flags = [ SHELL "$(command) -link_info" ] ; + } + # Sun HPC and Ibm POE + else if [ SHELL "$(command) -v 2>/dev/null" ] + { + compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ; + + local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ; + if $(back) + { + # Sun HPC + if $(.debug-configuration) + { + ECHO "Found Sun MPI wrapper compiler: $(command)" ; + } + + compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ; + compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ; + link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ; + link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ; + link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ; + + # strip out -v from compile options + local front = [ MATCH "(.*)-v" : $(link_flags) ] ; + local back = [ MATCH "-v(.*)" : $(link_flags) ] ; + link_flags = "$(front) $(back)" ; + front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ; + back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ; + link_flags = "$(front) $(back)" ; + } + else + { + # Ibm POE + if $(.debug-configuration) + { + ECHO "Found IBM MPI wrapper compiler: $(command)" ; + } + + # + compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ; + compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ; + local front = [ MATCH "(.*)-v" : $(compile_flags) ] ; + local back = [ MATCH "-v(.*)" : $(compile_flags) ] ; + compile_flags = "$(front) $(back)" ; + front = [ MATCH "(.*)-c" : $(compile_flags) ] ; + back = [ MATCH "-c(.*)" : $(compile_flags) ] ; + compile_flags = "$(front) $(back)" ; + link_flags = $(compile_flags) ; + + # get location of mpif.h from mpxlf + local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ; + f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ; + front = [ MATCH "(.*)-v" : $(f_flags) ] ; + back = [ MATCH "-v(.*)" : $(f_flags) ] ; + f_flags = "$(front) $(back)" ; + f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ; + f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ; + compile_flags = [ strip-eol $(compile_flags) ] ; + compile_flags = "$(compile_flags) $(f_flags)" ; + } + } + # Cray + else if [ safe-shell-command "$(command) -v" ] + { + compile_flags = [ safe-shell-command "$(command) -###" ] ; + link_flags = [ safe-shell-command "$(command) -###" ] ; + # ECHO "Noel: compile_flags: $(compile_flags)" ; + # ECHO "Noel: link_flags: $(link_flags)" ; + result = " " ; + } + + # Prepend COMPILER as the executable name, to match the format of + + if $(result) || $(compile_flags) && $(link_flags) + { + if $(result) + { + result = [ strip-eol $(result) ] ; + options = [ cmdline_to_features $(result) ] ; + } + else + { + compile_flags = [ strip-eol $(compile_flags) ] ; + link_flags = [ strip-eol $(link_flags) ] ; + + # Separately process compilation and link features, then combine + # them at the end. 
+ local compile_features = [ cmdline_to_features $(compile_flags) + : "" ] ; + local link_features = [ cmdline_to_features $(link_flags) + : "" ] ; + options = $(compile_features) $(link_features) ; + } + + # If requested, display MPI configuration information. + if $(.debug-configuration) + { + if $(result) + { + ECHO " Wrapper compiler command line: $(result)" ; + } + else + { + local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" + : $(compile_flags) ] ; + ECHO "MPI compilation flags: $(match[2])" ; + local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" + : $(link_flags) ] ; + ECHO "MPI link flags: $(match[2])" ; + } + } + } + else + { + if $(command) + { + ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ; + } + else if $(mpicxx) + { + ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ; + } + else + { + ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ; + } + ECHO "You will need to manually configure MPI support." ; + } + + } + + # Find mpirun (or its equivalent) and its flags + if ! $(.mpirun) + { + .mpirun = + [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ; + .mpirun_flags = $(mpirun-with-options[2-]) ; + .mpirun_flags ?= -np ; + } + + if $(.debug-configuration) + { + if $(options) + { + echo "MPI build features: " ; + ECHO $(options) ; + } + + if $(.mpirun) + { + echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ; + } + + ECHO "====================================================" ; + } + + if $(options) + { + .configured = true ; + + # Set up the "mpi" alias + alias mpi : : : : $(options) ; + } +} + +# States whether MPI has bee configured +rule configured ( ) +{ + return $(.configured) ; +} + +# Returns the "extra" requirements needed to build MPI. These requirements are +# part of the /mpi//mpi library target, but they need to be added to anything +# that uses MPI directly to work around bugs in BBv2's propagation of +# requirements. +rule extra-requirements ( ) +{ + return $(MPI_EXTRA_REQUIREMENTS) ; +} + +# Support for testing; borrowed from Python +type.register RUN_MPI_OUTPUT ; +type.register RUN_MPI : : TEST ; + +class mpi-test-generator : generator +{ + import property-set ; + + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + self.composing = true ; + } + + rule run ( project name ? : property-set : sources * : multiple ? ) + { + # Generate an executable from the sources. This is the executable we will run. + local executable = + [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ; + + result = + [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ; + } +} + +# Use mpi-test-generator to generate MPI tests from sources +generators.register + [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ; + +generators.register-standard testing.expect-success + : RUN_MPI_OUTPUT : RUN_MPI ; + +# The number of processes to spawn when executing an MPI test. +feature "mpi:processes" : : free incidental ; + +# The flag settings on testing.capture-output do not +# apply to mpi.capture output at the moment. +# Redo this explicitly. 
+toolset.flags mpi.capture-output ARGS ; +toolset.uses-features mpi.capture-output : + + ; + +rule capture-output ( target : sources * : properties * ) +{ + # Use the standard capture-output rule to run the tests + testing.capture-output $(target) : $(sources[1]) : $(properties) ; + + # Determine the number of processes we should run on. + local num_processes = [ property.select : $(properties) ] ; + num_processes = $(num_processes:G=) ; + + # serialize the MPI tests to avoid overloading systems + JAM_SEMAPHORE on $(target) = mpi-run-semaphore ; + + # We launch MPI processes using the "mpirun" equivalent specified by the user. + LAUNCHER on $(target) = + [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ; +} + +# Creates a set of test cases to be run through the MPI launcher. The name, sources, +# and requirements are the same as for any other test generator. However, schedule is +# a list of numbers, which indicates how many processes each test run will use. For +# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then 7 +# 7 processes. The name provided is just the base name: the actual tests will be +# the name followed by a hyphen, then the number of processes. +rule mpi-test ( name : sources * : requirements * : schedule * ) +{ + sources ?= $(name).cpp ; + schedule ?= 1 2 3 4 7 8 13 17 ; + + local result ; + for processes in $(schedule) + { + result += [ testing.make-test + run-mpi : $(sources) /boost/mpi//boost_mpi + : $(requirements) msvc:static $(processes) : $(name)-$(processes) ] ; + } + return $(result) ; +} diff --git a/src/boost/tools/build/src/tools/msvc-config.jam b/src/boost/tools/build/src/tools/msvc-config.jam new file mode 100644 index 000000000..5fff97998 --- /dev/null +++ b/src/boost/tools/build/src/tools/msvc-config.jam @@ -0,0 +1,12 @@ +#~ Copyright 2005 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. +#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Automatic configuration for VisualStudio toolset. To use, just import this module. + +import toolset : using ; + +ECHO "warning: msvc-config.jam is deprecated. Use 'using msvc : all ;' instead." ; + +using msvc : all ; + diff --git a/src/boost/tools/build/src/tools/msvc.jam b/src/boost/tools/build/src/tools/msvc.jam new file mode 100644 index 000000000..cf6cc7a95 --- /dev/null +++ b/src/boost/tools/build/src/tools/msvc.jam @@ -0,0 +1,2217 @@ +# Copyright (c) 2003 David Abrahams +# Copyright (c) 2005 Vladimir Prus +# Copyright (c) 2005 Alexey Pakhunov +# Copyright (c) 2006 Bojan Resnik +# Copyright (c) 2006 Ilya Sokolov +# Copyright (c) 2007-2017 Rene Rivera +# Copyright (c) 2008 Jurko Gospodnetic +# Copyright (c) 2014 Microsoft Corporation +# Copyright (c) 2019 MichaÅ‚ Janiszewski +# Copyright (c) 2020 Nikita Kniazev +# +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.msvc]] += Microsoft Visual C++ + +The `msvc` module supports the +http://msdn.microsoft.com/visualc/[Microsoft Visual C++] command-line +tools on Microsoft Windows. 
The supported products and versions of +command line tools are listed below: + +* Visual Studio 2019-14.2 +* Visual Studio 2017—14.1 +* Visual Studio 2015—14.0 +* Visual Studio 2013—12.0 +* Visual Studio 2012—11.0 +* Visual Studio 2010—10.0 +* Visual Studio 2008—9.0 +* Visual Studio 2005—8.0 +* Visual Studio .NET 2003—7.1 +* Visual Studio .NET—7.0 +* Visual Studio 6.0, Service Pack 5--6.5 + +The user would then call the boost build executable with the toolset set +equal to `msvc-[version number]` for example to build with Visual Studio +2019 one could run: + +---- +.\b2 toolset=msvc-14.2 target +---- + +The `msvc` module is initialized using the following syntax: + +---- +using msvc : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. + +If the version is not explicitly specified, the most recent version +found in the registry will be used instead. If the special value `all` +is passed as the version, all versions found in the registry will be +configured. If a version is specified, but the command is not, the +compiler binary will be searched in standard installation paths for that +version, followed by PATH. + +The compiler command should be specified using forward slashes, and +quoted. + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +`assembler`:: +The command that compiles assembler sources. If not specified, `ml` +will be used. The command will be invoked after the setup script was +executed and adjusted the PATH variable. + +`compiler`:: +The command that compiles C and C++ sources. If not specified, `cl` +will be used. The command will be invoked after the setup script was +executed and adjusted the PATH variable. + +`compiler-filter`:: +Command through which to pipe the output of running the compiler. For + example to pass the output to STLfilt. + +`idl-compiler`:: +The command that compiles Microsoft COM interface definition files. If +not specified, `midl` will be used. The command will be invoked after +the setup script was executed and adjusted the PATH variable. + +`linker`:: +The command that links executables and dynamic libraries. If not +specified, `link` will be used. The command will be invoked after the +setup script was executed and adjusted the PATH variable. + +`mc-compiler`:: +The command that compiles Microsoft message catalog files. If not +specified, `mc` will be used. The command will be invoked after the +setup script was executed and adjusted the PATH variable. + +`resource-compiler`:: +The command that compiles resource files. If not specified, `rc` will +be used. The command will be invoked after the setup script was +executed and adjusted the PATH variable. + +`setup`:: +The filename of the global environment setup script to run before +invoking any of the tools defined in this toolset. Will not be used in +case a target platform specific script has been explicitly specified +for the current target platform. 
The setup script that is used will be passed the
+target platform identifier (x86, x86_amd64, x86_ia64, amd64 or ia64)
+as a parameter. If not specified, a default script is chosen based on
+the compiler binary being used, e.g. `vcvars32.bat` or `vsvars32.bat`.
+
+`setup-amd64`; `setup-i386`; `setup-ia64`::
+The filename of the target platform specific environment setup script
+to run before invoking any of the tools defined in this toolset. If
+not specified, the global environment setup script is used.
+
+[[bbv2.reference.tools.compiler.msvc.64]]
+== 64-bit support
+
+Starting with version 8.0, Microsoft Visual Studio can generate binaries
+for 64-bit processors: both the 64-bit flavour of x86 (codenamed
+AMD64/EM64T) and Itanium (codenamed IA64). In addition, compilers that
+themselves run in 64-bit mode, for better performance, are provided. The
+complete list of compiler configurations is as follows (we abbreviate
+AMD64/EM64T to just AMD64):
+
+* 32-bit x86 host, 32-bit x86 target
+* 32-bit x86 host, 64-bit AMD64 target
+* 32-bit x86 host, 64-bit IA64 target
+* 64-bit AMD64 host, 64-bit AMD64 target
+* 64-bit IA64 host, 64-bit IA64 target
+
+The 32-bit host compilers can always be used, even on 64-bit Windows.
+In contrast, the 64-bit host compilers require both a 64-bit host
+processor and 64-bit Windows, but can be faster. By default, only the
+32-bit host, 32-bit target compiler is installed, and additional
+compilers need to be installed explicitly.
+
+To use 64-bit compilation you should:
+
+1. Configure your compiler as usual. If you provide a path to the
+compiler explicitly, provide the path to the 32-bit compiler. If you try
+to specify the path to any of the 64-bit compilers, configuration will
+not work.
+2. When compiling, use `address-model=64` to generate AMD64 code.
+3. To generate IA64 code, use `architecture=ia64`.
+
+The (AMD64 host, AMD64 target) compiler will be used automatically when
+you are generating AMD64 code and are running 64-bit Windows on AMD64.
+The (IA64 host, IA64 target) compiler will never be used, since nobody
+has an IA64 machine to test on.
+
+It is believed that AMD64 and EM64T targets are essentially compatible.
+The compiler options `/favor:AMD64` and `/favor:EM64T`, which are
+accepted only by AMD64 targeting compilers, cause the generated code to
+be tuned to a specific flavor of 64-bit x86. B2 will make use of those
+options depending on the value of the `instruction-set` feature.
+
+[[bbv2.reference.tools.compiler.msvc.winrt]]
+== Windows Runtime support
+
+Starting with version 11.0, Microsoft Visual Studio can produce binaries
+for Windows Store and Phone in addition to traditional Win32 desktop. To
+specify which Windows API set to target, use the `windows-api` feature.
+Available options are `desktop`, `store`, or `phone`. If not specified,
+`desktop` will be used.
+
+When using `store` or `phone`, the specified toolset determines what
+Windows version is targeted. The following options are available:
+
+* Windows 8.0: toolset=msvc-11.0 windows-api=store
+* Windows 8.1: toolset=msvc-12.0 windows-api=store
+* Windows Phone 8.0: toolset=msvc-11.0 windows-api=phone
+* Windows Phone 8.1: toolset=msvc-12.0 windows-api=phone
+
+For example, use the following to build for Windows Store 8.1 with the
+ARM architecture:
+
+----
+.\b2 toolset=msvc-12.0 windows-api=store architecture=arm
+----
+
+Note that when targeting Windows Phone 8.1, version 12.0 did not include
+the vcvars phone setup scripts.
They can be separately downloaded from +http://blogs.msdn.com/b/vcblog/archive/2014/07/18/using-boost-libraries-in-windows-store-and-phone-applications.aspx[here]. + +|# # end::doc[] + + +################################################################################ +# +# MSVC Boost Build toolset module. +# -------------------------------- +# +# All toolset versions need to have their location either auto-detected or +# explicitly specified except for the special 'default' version that expects the +# environment to find the needed tools or report an error. +# +################################################################################ + +import "class" : new ; +import common ; +import feature ; +import generators ; +import mc ; +import midl ; +import os ; +import path ; +import pch ; +import project ; +import property ; +import property-set ; +import rc ; +import sequence ; +import set ; +import testing ; +import toolset ; +import type ; +import virtual-target ; +import version ; + + +type.register MANIFEST : manifest ; + +#| tag::embed-doc[] + +[[bbv2.builtin.features.embed-manifest]]`embed-manifest`:: +*Allowed values:* `on`, `off`. ++ +This feature is specific to the `msvc` toolset (see <>), +and controls whether the manifest files should be embedded inside executables +and shared libraries, or placed alongside them. This feature corresponds to the +IDE option found in the project settings dialog, under Configuration Properties +-> Manifest Tool -> Input and Output -> Embed manifest. + +|# # end::embed-doc[] + +feature.feature embed-manifest : on off : incidental propagated ; + +#| tag::embed-doc[] + +[[bbv2.builtin.features.embed-manifest-file]]`embed-manifest-file`:: +This feature is specific to the `msvc` toolset (see <>), +and controls which manifest files should be embedded inside executables and +shared libraries. This feature corresponds to the IDE option found in the +project settings dialog, under Configuration Properties -> Manifest Tool -> +Input and Output -> Additional Manifest Files. + +|# # end::embed-doc[] + +feature.feature embed-manifest-file : : free dependency ; + +#| tag::embed-doc[] + +[[bbv2.builtin.features.embed-manifest-via]]`embed-manifest-via`:: +This feature is specific to the `msvc` toolset (see <>), +and controls whether a manifest should be embedded via linker or manifest tool. + +|# # end::embed-doc[] + +feature.feature embed-manifest-via : mt linker : incidental propagated ; + +type.register PDB : pdb ; + + +################################################################################ +# +# Public rules. +# +################################################################################ + +# Initialize a specific toolset version configuration. As the result, path to +# compiler and, possible, program names are set up, and will be used when that +# version of compiler is requested. For example, you might have: +# +# using msvc : 6.5 : cl.exe ; +# using msvc : 7.0 : Y:/foo/bar/cl.exe ; +# +# The version parameter may be omitted: +# +# using msvc : : Z:/foo/bar/cl.exe ; +# +# The following keywords have special meanings when specified as versions: +# - all - all detected but not yet used versions will be marked as used +# with their default options. +# - default - this is an equivalent to an empty version. +# +# Depending on a supplied version, detected configurations and presence 'cl.exe' +# in the path different results may be achieved. 
The following table describes +# the possible scenarios: +# +# Nothing "x.y" +# Passed Nothing "x.y" detected, detected, +# version detected detected cl.exe in path cl.exe in path +# +# default Error Use "x.y" Create "default" Use "x.y" +# all None Use all None Use all +# x.y - Use "x.y" - Use "x.y" +# a.b Error Error Create "a.b" Create "a.b" +# +# "x.y" - refers to a detected version; +# "a.b" - refers to an undetected version. +# +# FIXME: Currently the command parameter and the property parameter +# seem to overlap in duties. Remove this duplication. This seems to be related +# to why someone started preparing to replace init with configure rules. +# +rule init ( + # The msvc version being configured. When omitted the tools invoked when no + # explicit version is given will be configured. + version ? + + # The command used to invoke the compiler. If not specified: + # - if version is given, default location for that version will be + # searched + # + # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0 + # and 6.* will be searched + # + # - if compiler is not found in the default locations, PATH will be + # searched. + : command * + + # Options may include: + # + # All options shared by multiple toolset types as handled by the + # common.handle-options() rule, e.g. , , , + # & . + # + # + # + # + # + # + # + # Exact tool names to be used by this msvc toolset configuration. + # + # + # Command through which to pipe the output of running the compiler. + # For example to pass the output to STLfilt. + # + # + # Global setup command to invoke before running any of the msvc tools. + # It will be passed additional option parameters depending on the actual + # target platform. + # + # + # + # + # + # + # + # Platform specific setup command to invoke before running any of the + # msvc tools used when builing a target for a specific platform, e.g. + # when building a 32 or 64 bit executable. + # + # + # Whether to rewrite setup scripts. New scripts will be output in + # build tree and will be used instead of originals in build actions. + # Possible values: + # * on - rewrite scripts, if they do not already exist (default) + # * always - always rewrite scripts, even if they already exist + # * off - use original setup scripts + : options * +) +{ + if $(command) + { + options += $(command) ; + } + configure $(version) : $(options) ; +} + + +# 'configure' is a newer version of 'init'. The parameter 'command' is passed as +# a part of the 'options' list. See the 'init' rule comment for more detailed +# information. +# +rule configure ( version ? : options * ) +{ + switch $(version) + { + case "all" : + if $(options) + { + import errors ; + errors.error "MSVC toolset configuration: options should be" + "empty when '$(version)' is specified." ; + } + + # Configure (i.e. mark as used) all registered versions. + local all-versions = [ $(.versions).all ] ; + if ! $(all-versions) + { + if $(.debug-configuration) + { + ECHO "notice: [msvc-cfg] Asked to configure all registered" + "msvc toolset versions when there are none currently" + "registered." ; + } + } + else + { + for local v in $(all-versions) + { + # Note that there is no need to skip already configured + # versions here as this will request configure-really rule + # to configure the version using default options which will + # in turn cause it to simply do nothing in case the version + # has already been configured. 
+ configure-really $(v) ; + } + } + + case "default" : + configure-really : $(options) ; + + case * : + configure-really $(version) : $(options) ; + } +} + + +# Sets up flag definitions dependent on the compiler version used. +# - 'version' is the version of compiler in N.M format. +# - 'conditions' is the property set to be used as flag conditions. +# - 'toolset' is the toolset for which flag settings are to be defined. +# This makes the rule reusable for other msvc-option-compatible compilers. +# +rule configure-version-specific ( toolset : version : conditions ) +{ + toolset.push-checking-for-flags-module unchecked ; + # Starting with versions 7.0, the msvc compiler have the /Zc:forScope and + # /Zc:wchar_t options that improve C++ standard conformance, but those + # options are off by default. If we are sure that the msvc version is at + # 7.*, add those options explicitly. We can be sure either if user specified + # version 7.* explicitly or if we auto-detected the version ourselves. + if ! [ MATCH ^(6\\.) : $(version) ] + { + toolset.flags $(toolset).compile OPTIONS $(conditions) : "/Zc:forScope" "/Zc:wchar_t" ; + toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ; + + # Explicitly disable the 'function is deprecated' warning. Some msvc + # versions have a bug, causing them to emit the deprecation warning even + # with /W0. + toolset.flags $(toolset).compile OPTIONS $(conditions)/off : /wd4996 ; + + if [ MATCH "^([78]\\.)" : $(version) ] + { + # 64-bit compatibility warning deprecated since 9.0, see + # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx + toolset.flags $(toolset).compile OPTIONS $(conditions)/all : /Wp64 ; + } + } + + # 12.0 (VS2013 Update 2) introduced /Zc:inline opt-in standard conformance + # compiler flag that also similar to linker /opt:ref removes unreferenced + # variables and functions that have internal linkage + if ! [ version.version-less [ SPLIT_BY_CHARACTERS [ MATCH "^([0123456789.]+)" : $(version) ] : . ] : 12 ] + { + toolset.flags $(toolset).compile OPTIONS $(conditions) : "/Zc:inline" ; + + # /Gy analog for variables: https://devblogs.microsoft.com/cppblog/introducing-gw-compiler-switch/ + toolset.flags $(toolset).compile OPTIONS $(conditions)/speed $(conditions)/space : /Gw ; + } + + # 14.0 introduced /Zc:throwingNew opt-in flag that disables a workaround + # for not throwing operator new in VC up to 6.0 + if ! [ version.version-less [ SPLIT_BY_CHARACTERS [ MATCH "^([0123456789.]+)" : $(version) ] : . ] : 14 ] + { + toolset.flags $(toolset).compile C++FLAGS $(conditions) : "/Zc:throwingNew" ; + } + + # 14.27 (VS2019 Version 16.7) introduced support for ASAN on x86 and x64 CPUs + # This check however now only tests for 14.2 (which is 16.0) as msvc.jam doesn't distinguish between minor versions (e.g. 14.21..14.28 etc) + if ! [ version.version-less [ SPLIT_BY_CHARACTERS [ MATCH "^([0123456789.]+)" : $(version) ] : . ] : 14 2 ] + { + # General asan compile and link options. + toolset.flags $(toolset).compile OPTIONS + $(conditions)/on + : /fsanitize=address /FS ; + toolset.flags $(toolset).link LINKFLAGS + $(conditions)/on + : -incremental\:no ; + + # The various 64 bit runtime asan support libraries and related flags. 
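+        # (The clang_rt.asan* libraries below ship with MSVC's
+        # /fsanitize=address support; which variants are linked depends on
+        # the runtime link (shared or static) and on the kind of target, and
+        # the matching /wholearchive options force them to be fully included.)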
+ toolset.flags $(toolset).link FINDLIBS_SA + $(conditions)/on//shared + $(conditions)/on/64/shared + : clang_rt.asan_dynamic-x86_64 + clang_rt.asan_dynamic_runtime_thunk-x86_64 ; + toolset.flags $(toolset).link LINKFLAGS + $(conditions)/on//shared + $(conditions)/on/64/shared + : /wholearchive\:"clang_rt.asan_dynamic-x86_64.lib" + /wholearchive\:"clang_rt.asan_dynamic_runtime_thunk-x86_64.lib" ; + toolset.flags $(toolset).link FINDLIBS_SA + $(conditions)/on//static/EXE + $(conditions)/on//static/UNIT_TEST + $(conditions)/on/64/static/EXE + $(conditions)/on/64/static/UNIT_TEST + : clang_rt.asan-x86_64 + clang_rt.asan_cxx-x86_64 ; + toolset.flags $(toolset).link LINKFLAGS + $(conditions)/on//static/EXE + $(conditions)/on//static/UNIT_TEST + $(conditions)/on/64/static/EXE + $(conditions)/on/64/static/UNIT_TEST + : /wholearchive\:"clang_rt.asan-x86_64.lib" + /wholearchive\:"clang_rt.asan_cxx-x86_64.lib" ; + toolset.flags $(toolset).link.dll FINDLIBS_SA + $(conditions)/on//static + $(conditions)/on/64/static + : clang_rt.asan_dll_thunk-x86_64 ; + toolset.flags $(toolset).link.dll LINKFLAGS + $(conditions)/on//static + $(conditions)/on/64/static + : /wholearchive\:"clang_rt.asan_dll_thunk-x86_64.lib" ; + + # The various 32 bit runtime asan support libraries and related flags. + toolset.flags $(toolset).link FINDLIBS_SA + $(conditions)/on/32/shared + : clang_rt.asan_dynamic-i386 clang_rt.asan_dynamic_runtime_thunk-i386 ; + toolset.flags $(toolset).link LINKFLAGS + $(conditions)/on/32/shared + : /wholearchive\:"clang_rt.asan_dynamic-i386.lib" + /wholearchive\:"clang_rt.asan_dynamic_runtime_thunk-i386.lib" ; + toolset.flags $(toolset).link FINDLIBS_SA + $(conditions)/on/32/static/EXE + $(conditions)/on/32/static/UNIT_TEST + : clang_rt.asan-i386 clang_rt.asan_cxx-i386 ; + toolset.flags $(toolset).link LINKFLAGS + $(conditions)/on/32/static/EXE + $(conditions)/on/32/static/UNIT_TEST + : /wholearchive\:"clang_rt.asan-i386.lib" + /wholearchive\:"clang_rt.asan_cxx-i386.lib" ; + toolset.flags $(toolset).link.dll FINDLIBS_SA + $(conditions)/on/32/static/LIB/shared + : clang_rt.asan_dll_thunk-i386 ; + toolset.flags $(toolset).link.dll LINKFLAGS + $(conditions)/on/32/static/LIB/shared + : /wholearchive\:"clang_rt.asan_dll_thunk-i386.lib" ; + } + + # + # Processor-specific optimization. + # + + if [ MATCH "^([67])" : $(version) ] + { + # 8.0 deprecates some of the options. + toolset.flags $(toolset).compile OPTIONS $(conditions)/speed $(conditions)/space : /Ogiy /Gs ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/speed : /Ot ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/space : /Os ; + + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-i386)/ : /GB ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-i386)/i486 : /G4 ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-i386)/$(.cpu-type-g5) : /G5 ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-i386)/$(.cpu-type-g6) : /G6 ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-i386)/$(.cpu-type-g7) : /G7 ; + + # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math" + # tests will fail. + toolset.flags $(toolset).compile OPTIONS $(conditions) : /Op ; + + # 7.1 and below have single-threaded static RTL. + toolset.flags $(toolset).compile OPTIONS $(conditions)/off/static/single : /ML ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/on/static/single : /MLd ; + } + else + { + # 8.0 and above adds some more options. 
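+        # (The /favor options tune generated 64-bit code for a particular
+        # x86-64 flavour; they are selected through the instruction-set
+        # feature, as noted in the documentation block at the top of this
+        # file.)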
+ toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-amd64)/ : "/favor:blend" ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-amd64)/$(.cpu-type-em64t) : "/favor:EM64T" ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/$(.cpu-arch-amd64)/$(.cpu-type-amd64) : "/favor:AMD64" ; + + # 8.0 and above only has multi-threaded static RTL. + toolset.flags $(toolset).compile OPTIONS $(conditions)/off/static/single : /MT ; + toolset.flags $(toolset).compile OPTIONS $(conditions)/on/static/single : /MTd ; + + # Specify target machine type so the linker will not need to guess. + toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : "/MACHINE:X64" ; + toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : "/MACHINE:X86" ; + toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : "/MACHINE:IA64" ; + toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-arm) : "/MACHINE:ARM" ; + toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-arm64) : "/MACHINE:ARM64" ; + + if [ version.version-less [ SPLIT_BY_CHARACTERS [ MATCH "^([0123456789.]+)" : $(version) ] : . ] : 11 ] + { + # Make sure that manifest will be generated even if there is no + # dependencies to put there. + toolset.flags $(toolset).link LINKFLAGS $(conditions) : /MANIFEST ; + } + else + { + toolset.flags $(toolset).link LINKFLAGS $(conditions)/mt : /MANIFEST ; + toolset.flags $(toolset).link LINKFLAGS $(conditions)/linker/off : /MANIFEST ; + toolset.flags $(toolset).link LINKFLAGS $(conditions)/linker/on : "/MANIFEST:EMBED" ; + + local conditionx = [ feature.split $(conditions) ] ; + toolset.add-defaults $(conditionx:J=,)\:linker ; + } + } + + toolset.pop-checking-for-flags-module ; +} + +# Feature for handling targeting different Windows API sets. +feature.feature windows-api : desktop store phone : propagated composite link-incompatible ; +feature.compose store : WINAPI_FAMILY=WINAPI_FAMILY_APP _WIN32_WINNT=0x0602 + /APPCONTAINER ; +feature.compose phone : WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP _WIN32_WINNT=0x0602 + /APPCONTAINER "/NODEFAULTLIB:ole32.lib" "/NODEFAULTLIB:kernel32.lib" WindowsPhoneCore.lib ; +feature.set-default windows-api : desktop ; + + +# Registers this toolset including all of its flags, features & generators. Does +# nothing on repeated calls. +# +rule register-toolset ( ) +{ + if ! msvc in [ feature.values toolset ] + { + register-toolset-really ; + } +} + +rule resolve-possible-msvc-version-alias ( version ) +{ + if $(.version-alias-$(version)) + { + version = $(.version-alias-$(version)) ; + } + return $(version) ; +} + + +# Declare action for creating static libraries. If library exists, remove it +# before adding files. See +# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale. +if [ os.name ] in NT +{ + # The 'DEL' command would issue a message to stdout if the file does not + # exist, so need a check. 
+ actions archive + { + if exist "$(<[1])" DEL "$(<[1])" + $(.SETUP) $(.LD) $(AROPTIONS) /out:"$(<[1])" @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>)" $(LIBRARIES_MENTIONED_BY_FILE) "$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" "$(LIBRARY_OPTION)$(FINDLIBS_SA).lib") + } +} +else +{ + actions archive + { + $(.RM) "$(<[1])" + $(.SETUP) $(.LD) $(AROPTIONS) /out:"$(<[1])" @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>)" $(LIBRARIES_MENTIONED_BY_FILE) "$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" "$(LIBRARY_OPTION)$(FINDLIBS_SA).lib") + } +} + +rule compile.asm ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; +} + +actions compile.asm +{ + $(.SETUP) $(.ASM) -D$(ASMDEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) $(.ASM_OUTPUT) "$(<:W)" "$(>:W)" +} + + +rule compile.c ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + get-rspline $(targets) : -TC CFLAGS ; + compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; +} + + +rule compile.c.preprocess ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + get-rspline $(targets) : -TC CFLAGS ; + preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; +} + + +rule compile.c.pch ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + get-rspline $(targets[1]) : -TC CFLAGS ; + get-rspline $(targets[2]) : -TC CFLAGS ; + local pch-source = [ on $(<) return $(PCH_SOURCE) ] ; + if $(pch-source) + { + DEPENDS $(<) : $(pch-source) ; + compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ; + } + else + { + compile-c-c++-pch $(targets) : $(sources) ; + } +} + +toolset.flags msvc YLOPTION : "-Yl" ; + +# Action for running the C/C++ compiler without using precompiled headers. +# +# WARNING: Synchronize any changes this in action with intel-win +# +# Notes regarding PDB generation, for when we use +# on/database: +# +# 1. PDB_CFLAG is only set for on/database, ensuring +# that the /Fd flag is dropped if PDB_CFLAG is empty. +# +# 2. When compiling executables's source files, PDB_NAME is set on a per-source +# file basis by rule compile-c-c++. The linker will pull these into the +# executable's PDB. +# +# 3. When compiling library's source files, PDB_NAME is updated to .pdb +# for each source file by rule archive, as in this case compiler must be used +# to create a single PDB for our library. +# +actions compile-c-c++ bind PDB_NAME PCH_HEADER +{ + $(.SETUP) $(.CC) @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>[1]:W)" -c -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -FI"$(PCH_HEADER)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE)) $(.CC.FILTER) +} + +actions preprocess-c-c++ bind PDB_NAME PCH_HEADER +{ + $(.SETUP) $(.CC) @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>[1]:W)" -P -Fi"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -FI"$(PCH_HEADER)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE)) +} + +rule compile-c-c++ ( targets + : sources * ) +{ + DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ; + DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ; + PDB_NAME on $(<) = $(<[1]:S=.pdb) ; + LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ; +} + +rule preprocess-c-c++ ( targets + : sources * ) +{ + DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ; + DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ; + PDB_NAME on $(<) = $(<:S=.pdb) ; + LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ; +} + +# Action for running the C/C++ compiler using precompiled headers. 
In addition +# to whatever else it needs to compile, this action also adds a temporary source +# .cpp file used to compile the precompiled headers themselves. +# +# The global .escaped-double-quote variable is used to avoid messing up Emacs +# syntax highlighting in the messy N-quoted code below. +actions compile-c-c++-pch +{ + $(.SETUP) $(.CC) @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>[2]:W)" -c -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE)) @($(<[1]:W).cpp:<=":>=":E=$(.hash)include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl)) $(.CC.FILTER) +} + + +# Action for running the C/C++ compiler using precompiled headers. An already +# built source file for compiling the precompiled headers is expected to be +# given as one of the source parameters. +actions compile-c-c++-pch-s +{ + $(.SETUP) $(.CC) @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>[2]:W)" -c -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE)) $(.CC.FILTER) +} + + +rule compile.c++ ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + get-rspline $(targets) : -TP C++FLAGS ; + compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; +} + +rule compile.c++.preprocess ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + get-rspline $(targets) : -TP C++FLAGS ; + preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ; +} + + +rule compile.c++.pch ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + get-rspline $(targets[1]) : -TP C++FLAGS ; + get-rspline $(targets[2]) : -TP C++FLAGS ; + local pch-source = [ on $(<) return $(PCH_SOURCE) ] ; + if $(pch-source) + { + DEPENDS $(<) : $(pch-source) ; + compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ; + } + else + { + compile-c-c++-pch $(targets) : $(sources) ; + } +} + +rule compile.idl ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; +} + +# See midl.jam for details. 
+# +actions compile.idl +{ + $(.SETUP) $(.IDL) /nologo @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>:W)" -D$(DEFINES) "-I$(INCLUDES:W)" -U$(UNDEFS) $(MIDLFLAGS) /tlb "$(<[1]:W)" /h "$(<[2]:W)" /iid "$(<[3]:W)" /proxy "$(<[4]:W)" /dlldata "$(<[5]:W)") + $(.TOUCH_FILE) "$(<[4]:W)" + $(.TOUCH_FILE) "$(<[5]:W)" +} + +rule compile.mc ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; +} + +actions compile.mc +{ + $(.SETUP) $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)" +} + + +rule compile.rc ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; +} + +actions compile.rc +{ + $(.SETUP) $(.RC) /nologo -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)" +} + +toolset.uses-features msvc.link : ; + +rule link ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + if on in $(properties) && mt in $(properties) + { + if [ feature.get-values : $(properties) ] + { + DEPENDS $(<) : [ on $(<) return $(EMBED_MANIFEST_FILE) ] ; + msvc.manifest.user $(targets) $(EMBED_MANIFEST_FILE) : $(sources) : $(properties) ; + } + else + { + msvc.manifest $(targets) : $(sources) : $(properties) ; + } + } +} + +rule link.dll ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ; + local import-lib ; + if ! true in $(properties) + { + import-lib = $(targets[2]) ; + IMPORT_LIB on $(targets) = $(import-lib) ; + } + # On msvc-14.1, the linker might not touch the import library + # if the exports do not change. (Apparently this could also + # happen for incremental linking, which is why we disable it, + # but that no longer seems to be enough). + # Therefore, don't update the import library just because + # it's out-dated. It will be force updated, when the dll + # is updated. Also, make it so that anything that depends + # on it depends on the dll as well. + NOUPDATE $(import-lib) ; + INCLUDES $(import-lib) : $(targets[1]) ; + if on in $(properties) && mt in $(properties) + { + if [ feature.get-values : $(properties) ] + { + DEPENDS $(<) : [ on $(<) return $(EMBED_MANIFEST_FILE) ] ; + msvc.manifest.dll.user $(targets) $(EMBED_MANIFEST_FILE) : $(sources) : $(properties) ; + } + else + { + msvc.manifest.dll $(targets) : $(sources) : $(properties) ; + } + } +} + +# Incremental linking a DLL causes no end of problems: if the actual exports do +# not change, the import .lib file is never updated. Therefore, the .lib is +# always out-of-date and gets rebuilt every time. I am not sure that incremental +# linking is such a great idea in general, but in this case I am sure we do not +# want it. + +# Windows manifest is a new way to specify dependencies on managed DotNet +# assemblies and Windows native DLLs. The manifests are embedded as resources +# and are useful in any PE target (both DLL and EXE). 
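+#
+# For illustration only (target and file names are hypothetical), a project
+# can control manifest handling through the features declared earlier in this
+# module, for example:
+#
+#   exe app  : app.cpp  : <embed-manifest>off ;
+#   exe app2 : app2.cpp : <embed-manifest-via>mt
+#                         <embed-manifest-file>extra.manifest ;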
+ +{ + actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE MANIFEST_FILE + { + $(.SETUP) $(.LD) @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>)" $(LIBRARIES_MENTIONED_BY_FILE) $(LIBRARIES) "$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" "$(LIBRARY_OPTION)$(FINDLIBS_SA).lib") $(LINKOPT) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" /MANIFESTINPUT:"$(MANIFEST_FILE)" + } + + actions manifest + { + $(.SETUP) $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1" + } + + actions manifest.user bind EMBED_MANIFEST_FILE + { + $(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);1" + } + + actions link.dll bind IMPORT_LIB DEF_FILE LIBRARIES_MENTIONED_BY_FILE MANIFEST_FILE + { + $(.SETUP) $(.LD) @($(<[1]:W).rsp:O=FC:<=@":>=":E="$(>)" $(LIBRARIES_MENTIONED_BY_FILE) $(LIBRARIES) "$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" "$(LIBRARY_OPTION)$(FINDLIBS_SA).lib") $(LINKOPT) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" /MANIFESTINPUT:"$(MANIFEST_FILE)" /DLL /IMPLIB:"$(IMPORT_LIB:W)" /def:"$(DEF_FILE)" + } + + actions manifest.dll + { + $(.SETUP) $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2" + } + + actions manifest.dll.user bind EMBED_MANIFEST_FILE + { + $(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2" + } +} + +# This rule sets up the pdb file that will be used when generating static +# libraries and the debug-store option is database, so that the compiler puts +# all the debug info into a single .pdb file named after the library. +# +# Poking at source targets this way is probably not clean, but it is the +# easiest approach. +# +rule archive ( targets + : sources * : properties * ) +{ + set-setup-command $(targets) : $(properties) ; + PDB_NAME on $(>) = $(<[1]:S=.pdb) ; + LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ; +} + + +################################################################################ +# +# Classes. +# +################################################################################ + +class msvc-pch-generator : pch-generator +{ + import property-set ; + + rule run-pch ( project name ? : property-set : sources * ) + { + # Searching for the header and source file in the sources. + local pch-header ; + local pch-source ; + for local s in $(sources) + { + if [ type.is-derived [ $(s).type ] H ] + { + pch-header = $(s) ; + } + else if + [ type.is-derived [ $(s).type ] CPP ] || + [ type.is-derived [ $(s).type ] C ] + { + pch-source = $(s) ; + } + } + + if ! $(pch-header) + { + import errors : user-error : errors.user-error ; + errors.user-error "can not build pch without pch-header" ; + } + + # If we do not have the PCH source - that is fine. We will just create a + # temporary .cpp file in the action. + local pch-header-dir = [ $(pch-header).path ] ; + local generated = [ generator.run $(project) $(name) + : [ property-set.create + # Passing of is a dirty trick, needed because + # non-composing generators with multiple inputs are subtly + # broken. For more detailed information see: + # https://zigzag.cs.msu.su:7813/boost.build/ticket/111 + $(pch-source) "$(pch-header-dir)" + [ $(property-set).raw ] ] + : $(pch-header) ] ; + + local pch-file ; + for local g in $(generated[2-]) + { + if [ type.is-derived [ $(g).type ] PCH ] + { + pch-file = $(g) ; + } + } + + return [ $(generated[1]).add-raw $(pch-header) + $(pch-file) ] $(generated[2-]) ; + } +} + + +################################################################################ +# +# Local rules. 
+# +################################################################################ + +# Detects versions listed as '.known-versions' by checking registry information, +# environment variables & default paths. Supports both native Windows and +# Cygwin. +# +local rule auto-detect-toolset-versions ( ) +{ + if [ os.name ] in NT CYGWIN + { + # Get installation paths from the registry. + for local i in $(.known-versions) + { + if $(.version-$(i)-reg) + { + local vc-path ; + for local x in "" "Wow6432Node\\" + { + vc-path += [ W32_GETREG + "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg) + : "ProductDir" ] ; + } + + if $(vc-path) + { + vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ; + register-configuration $(i) : [ path.native $(vc-path[1]) ] ; + } + } + } + } + + # Check environment and default installation paths. + for local i in $(.known-versions) + { + if ! $(i) in [ $(.versions).all ] + { + register-configuration $(i) : [ default-path $(i) ] ; + } + } +} + +actions write-setup-script +{ + @($(STDOUT):E=$(FILE_CONTENTS:J=$(.nl))) > "$(<)" +} + +if [ os.name ] = NT +{ + local rule call-batch-script ( command ) + { + return "call $(command) >nul$(.nl)" ; + } +} +else +{ + # On cygwin, we need to run both the batch script + # and the following command in the same instance + # of cmd.exe. + local rule call-batch-script ( command ) + { + return "cmd.exe /S /C call $(command) \">nul\" \"&&\" " ; + } +} + +# Local helper rule to create the vcvars setup command for given architecture +# and options. +# +local rule generate-setup-cmd ( version : command : parent : options * : cpu : global-setup ? : default-global-setup-options : default-setup ) +{ + local setup-options ; + local setup = [ feature.get-values : $(options) ] ; + + if ! $(setup)-is-defined + { + if $(global-setup)-is-defined + { + setup = $(global-setup) ; + + # If needed we can easily add using configuration flags + # here for overriding which options get passed to the + # global setup command for which target platform: + # setup-options = [ feature.get-values : $(options) ] ; + setup-options ?= $(default-global-setup-options) ; + } + else + { + if [ MATCH "(14.3)" : $(version) ] + { + if $(.debug-configuration) + { + ECHO "notice: [generate-setup-cmd] $(version) is 14.3" ; + } + parent = [ path.native [ path.join $(parent) "..\\..\\..\\..\\..\\Auxiliary\\Build" ] ] ; + } + else if [ MATCH "(14.2)" : $(version) ] + { + if $(.debug-configuration) + { + ECHO "notice: [generate-setup-cmd] $(version) is 14.2" ; + } + parent = [ path.native [ path.join $(parent) "..\\..\\..\\..\\..\\Auxiliary\\Build" ] ] ; + } + else if [ MATCH "(14.1)" : $(version) ] + { + if $(.debug-configuration) + { + ECHO "notice: [generate-setup-cmd] $(version) is 14.1" ; + } + parent = [ path.native [ path.join $(parent) "..\\..\\..\\..\\..\\Auxiliary\\Build" ] ] ; + } + setup = [ locate-default-setup $(command) : $(parent) : $(default-setup) ] ; + setup ?= [ path.join $(parent) "vcvarsall.bat" ] ; + } + } + + return $(setup) "$(setup-options:J= )" ; +} + +# Worker for set-setup-command. Usable in a virtual-target.action. 
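+# The msvc-setup.bat file it produces records just the environment changes
+# made by the original vcvars script (computed in rewrite-setup below), so
+# subsequent actions can apply them without re-running the slow vendor script.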
+rule adjust-setup-command ( new-setup : setup : properties * ) +{ + local internal = $(new-setup:S=.read) ; + NOTFILE $(internal) ; + local setup-options = [ property.select : $(properties) ] ; + setup-options = $(setup-options:G=:E=) ; + DEPENDS $(internal) : $(setup) ; + DEPENDS $(new-setup) : $(internal) ; + REBUILDS $(new-setup) : $(internal) ; + msvc.read-setup $(internal) : $(setup) ; + msvc.write-setup-script $(new-setup) : $(setup) ; + __ACTION_RULE__ on $(internal) = msvc.rewrite-setup $(setup) $(setup-options) $(new-setup) ; +} + +# This doesn't actually do anything. It's merely +# used as a trigger for __ACTION_RULE__. +actions quietly read-setup { } + +# Calculates the changes to the environment make by setup-script +# Should be used as a callback for __ACTION_RULE__ +local rule rewrite-setup ( setup-script setup-options new-setup : target : * ) +{ + local setup-path = [ on $(setup-script) return $(LOCATE) $(SEARCH) ] ; + setup-path = $(setup-path[1]) ; + local command = "\"$(setup-script:G=:R=$(setup-path))\" $(setup-options)" ; + local original-vars = [ SPLIT_BY_CHARACTERS [ SHELL set ] : "\n" ] ; + local new-vars = [ SPLIT_BY_CHARACTERS [ SHELL "$(command) >nul && set" ] : "\n" ] ; + local diff-vars = [ set.difference $(new-vars) : $(original-vars) ] ; + if $(diff-vars) + { + FILE_CONTENTS on $(new-setup) = "REM $(command)" "SET "$(diff-vars) ; + } +} + +IMPORT msvc : rewrite-setup : : msvc.rewrite-setup ; + +# Helper rule to generate a faster alternative to MSVC setup scripts. +# We used to call MSVC setup scripts directly in every action, however in +# newer MSVC versions (10.0+) they make long-lasting registry queries +# which have a significant impact on build time. +local rule set-setup-command ( targets * : properties * ) +{ + if ! [ on $(targets) return $(.SETUP) ] + { + local setup-script = [ on $(targets) return $(.SETUP-SCRIPT) ] ; + # If no setup script was given, then we don't need to do anything. + if ! $(setup-script) + { + return ; + } + local setup-options = [ on $(targets) return $(.SETUP-OPTIONS) ] ; + local key = .setup-command-$(setup-script:E=)-$(setup-options:E=) ; + if ! $($(key)) + { + properties = [ feature.expand $(properties) ] ; + properties = [ property.select : $(properties) ] ; + local ps = [ property-set.create $(properties) $(setup-options) ] ; + local original = [ virtual-target.from-file $(setup-script) : [ path.pwd ] : $(.project) ] ; + local action = [ new non-scanning-action $(original) : msvc.adjust-setup-command : $(ps) ] ; + local new-setup = [ virtual-target.register [ new file-target msvc-setup.bat exact : : $(.project) : $(action) ] ] ; + local command = [ $(new-setup).actualize ] ; + local path = [ on $(command) return $(LOCATE) ] ; + local block-update = $(command:S=.nup) ; + NOUPDATE $(block-update) ; + NOTFILE $(block-update) ; + DEPENDS $(block-update) : $(command) ; + if [ on $(targets) return $(.REWRITE-SETUP) ] + { + ALWAYS $(command) ; + } + $(key) = [ call-batch-script "\"$(command:WG=:R=$(path))\" $(setup-options:E=)" ] $(block-update) ; + } + DEPENDS $(targets) : $($(key)[2]) ; + .SETUP on $(targets) = $($(key)[1]) ; + } +} + +# Worker rule for toolset version configuration. Takes an explicit version id or +# nothing in case it should configure the default toolset version (the first +# registered one or a new 'default' one in case no toolset versions have been +# registered yet). +# +local rule configure-really ( version ? : options * ) +{ + local command = [ feature.get-values : $(options) ] ; + + if ! $(version) && ! 
$(command) + { + # We were given neither a command, nor a version. + # Take the best registered (i.e. auto-detected) version. + # FIXME: consider whether an explicitly specified setup script + # should disable this logic. We already won't get here if + # there is a user specified command. + version = [ $(.versions).all ] ; + for local known in $(.known-versions) + { + if $(known) in $(version) + { + version = $(known) ; + break ; + } + } + # version might still have multiple elements if no versions + # were auto-detected, but an unknown version was configured + # manually. + version = $(version[1]) ; + } + + # Handle a user-provided command, and deduce the version if necessary. + # If the user-requested version was not autodetected and no command + # was given, attempt to find it in PATH + if $(command) || ! ( $(version:E=default) in [ $(.versions).all ] ) + { + local found-command = [ common.get-invocation-command-nodefault msvc : cl.exe : $(command) ] ; + + if $(found-command) + { + command = $(found-command) ; + if ! $(command:D) + { + local path = [ common.get-absolute-tool-path $(command) ] ; + command = $(command:R=$(path)) ; + } + } + else + { + # If we still failed to find cl.exe, bail out. + ECHO ; + ECHO warning\: + "Did not find command for MSVC toolset." + "If you have Visual Studio 2017 installed you will need to" + "specify the full path to the command," + "set VS150COMNTOOLS for your installation," + "or" + "build from the 'Visual Studio Command Prompt for VS 2017'." + ; + ECHO ; + command ?= cl.exe ; + } + + if ! $(version) + { + # Even if version is not explicitly specified, try to detect the + # version from the path. + # FIXME: We currently detect both Microsoft Visual Studio 9.0 and + # 9.0express as 9.0 here. + if [ MATCH "(MSVC\\\\14.3)" : $(command) ] + { + version = 14.3 ; + } + else if [ MATCH "(MSVC\\\\14.2)" : $(command) ] + { + version = 14.2 ; + } + else if [ MATCH "(MSVC\\\\14.1)" : $(command) ] + { + version = 14.1 ; + } + else if [ MATCH "(Microsoft Visual Studio 14)" : $(command) ] + { + version = 14.0 ; + } + else if [ MATCH "(Microsoft Visual Studio 12)" : $(command) ] + { + version = 12.0 ; + } + else if [ MATCH "(Microsoft Visual Studio 11)" : $(command) ] + { + version = 11.0 ; + } + else if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ] + { + version = 10.0 ; + } + else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ] + { + version = 9.0 ; + } + else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ] + { + version = 8.0 ; + } + else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ] + { + version = 7.1 ; + } + else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" : + $(command) ] + { + version = 7.1toolkit ; + } + else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ] + { + version = 7.0 ; + } + else + { + version = 6.0 ; + } + } + } + + # Version alias -> real version number. + version = [ resolve-possible-msvc-version-alias $(version) ] ; + + # Check whether the selected configuration is already in use. + if $(version) in [ $(.versions).used ] + { + # Allow multiple 'toolset.using' calls for the same configuration if the + # identical sets of options are used. + if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] ) + { + import errors ; + errors.user-error "MSVC toolset configuration: Toolset version" + "'$(version)' already configured." ; + } + } + else + { + # Register a new configuration. 
+ $(.versions).register $(version) ; + $(.versions).set $(version) : options : $(options) ; + + # Mark the configuration as 'used'. + $(.versions).use $(version) ; + + # Generate conditions and save them. + local conditions = [ common.check-init-parameters msvc : version $(version) ] ; + + $(.versions).set $(version) : conditions : $(conditions) ; + + command ?= [ $(.versions).get $(version) : default-command ] ; + + # For 14.1+ we need the exact version as MS is planning rolling updates + # that will cause our `setup-cmd` to become invalid + exact-version = [ MATCH "(14\.[1-9][0-9]\.[0-9\.]+)" : $(command) ] ; + + common.handle-options msvc : $(conditions) : $(command) : $(options) ; + + # Generate and register setup command. + + local below-8.0 = [ MATCH "^([67]\\.)" : $(version) ] ; + local below-11.0 = [ MATCH "^([6789]\\.|10\\.)" : $(version) ] ; + + local cpu = i386 amd64 ia64 arm arm64 ; + if $(below-8.0) + { + cpu = i386 ; + } + else if $(below-11.0) + { + cpu = i386 amd64 ia64 ; + } + + local setup-amd64 ; + local setup-i386 ; + local setup-ia64 ; + local setup-arm ; + local setup-arm64 ; + local setup-phone-i386 ; + local setup-phone-arm ; + + if $(command) + { + # TODO: Note that if we specify a non-existant toolset version then + # this rule may find and use a corresponding compiler executable + # belonging to an incorrect toolset version. For example, if you + # have only MSVC 7.1 installed, have its executable on the path and + # specify you want Boost Build to use MSVC 9.0, then you want Boost + # Build to report an error but this may cause it to silently use the + # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0 + # toolset version. + command = [ common.get-absolute-tool-path $(command[-1]) ] ; + } + + if $(command) + { + local parent = [ path.make $(command) ] ; + parent = [ path.parent $(parent) ] ; + parent = [ path.native $(parent) ] ; + + # Setup will be used if the command name has been specified. If + # setup is not specified explicitly then a default setup script will + # be used instead. Setup scripts may be global or architecture/ + # /platform/cpu specific. Setup options are used only in case of + # global setup scripts. + + # Default setup scripts provided with different VC distributions: + # + # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386 + # builds. It was located in the bin folder for the regular version + # and in the root folder for the free VC 7.1 tools. + # + # Later 8.0 & 9.0 versions introduce separate platform specific + # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium) + # located in or under the bin folder. Most also include a global + # vcvarsall.bat helper script located in the root folder which runs + # one of the aforementioned vcvars*.bat scripts based on the options + # passed to it. So far only the version coming with some PlatformSDK + # distributions does not include this top level script but to + # support those we need to fall back to using the worker scripts + # directly in case the top level script can not be found. + + local global-setup = [ feature.get-values : $(options) ] ; + global-setup = $(global-setup[1]) ; + local global-setup-phone = $(global-setup) ; + if ! 
$(below-8.0) + { + global-setup ?= [ locate-default-setup $(command) : $(parent) : + vcvarsall.bat ] ; + } + + local default-setup-amd64 = vcvars64.bat ; + local default-setup-i386 = vcvars32.bat ; + local default-setup-ia64 = vcvarsx86_ia64.bat ; + local default-setup-arm = vcvarsx86_arm.bat ; + local default-setup-arm64 = vcvarsx86_arm64.bat ; + local default-setup-phone-i386 = vcvarsphonex86.bat ; + local default-setup-phone-arm = vcvarsphonex86_arm.bat ; + + # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and + # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx + # mention an x86_IPF option, that seems to be a documentation bug + # and x86_ia64 is the correct option. + local default-global-setup-options-amd64 = x86_amd64 ; + local default-global-setup-options-i386 = x86 ; + local default-global-setup-options-ia64 = x86_ia64 ; + local default-global-setup-options-arm = x86_arm ; + local default-global-setup-options-arm64 = x86_arm64 ; + + # When using 64-bit Windows, and targeting 64-bit, it is possible to + # use a native 64-bit compiler, selected by the "amd64" & "ia64" + # parameters to vcvarsall.bat. There are two variables we can use -- + # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is + # 'x86' when running 32-bit Windows, no matter which processor is + # used, and 'AMD64' on 64-bit windows on x86 (either AMD64 or EM64T) + # Windows. + # + if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ] + { + default-global-setup-options-amd64 = amd64 ; + } + # When B2 itself is running as a 32-bit process on 64-bit + # Windows, the above test will fail (since WOW64 simulates a 32-bit + # environment, including environment values). So check the WOW64 + # variable PROCESSOR_ARCHITEW6432 as well. + if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITEW6432 ] ] + { + default-global-setup-options-amd64 = amd64 ; + } + # TODO: The same 'native compiler usage' should be implemented for + # the Itanium platform by using the "ia64" parameter. For this + # though we need someone with access to this platform who can find + # out how to correctly detect this case. + else if $(somehow-detect-the-itanium-platform) + { + default-global-setup-options-ia64 = ia64 ; + } + + for local c in $(cpu) + { + exact-version ?= $(version) ; + setup-$(c) = [ generate-setup-cmd $(exact-version) : $(command) : $(parent) : $(options) : $(c) : $(global-setup) : $(default-global-setup-options-$(c)) : $(default-setup-$(c)) ] ; + } + + # Windows phone has different setup scripts, located in a different directory hierarchy. + # The 11.0 toolset can target Windows Phone 8.0 and the 12.0 toolset can target Windows Phone 8.1, + # each of which have a different directory for their vcvars setup scripts. + local phone-parent = [ path.native [ path.join $(parent) WPSDK ] ] ; + local phone-directory = $(phone-parent) ; + if [ MATCH "(11.0)" : $(version) ] + { + phone-directory = [ path.native [ path.join $(phone-directory) WP80 ] ] ; + } + else if [ MATCH "(12.0)" : $(version) ] + { + phone-directory = [ path.native [ path.join $(phone-directory) WP81 ] ] ; + } + global-setup-phone ?= [ locate-default-setup $(phone-directory) : $(phone-parent) : vcvarsphoneall.bat ] ; + + # If can't locate default phone setup script then this VS version doesn't support Windows Phone. + if $(global-setup-phone)-is-defined + { + # i386 CPU is for the Windows Phone emulator in Visual Studio. 
+ local phone-cpu = i386 arm ; + for local c in $(phone-cpu) + { + setup-phone-$(c) = [ generate-setup-cmd $(version) : $(phone-directory) : $(phone-parent) : $(options) : $(c) : $(global-setup-phone) : $(default-global-setup-options-$(c)) : $(default-setup-phone-$(c)) ] ; + } + } + } + + # Get tool names (if any) and finish setup. + + compiler = [ feature.get-values : $(options) ] ; + compiler ?= cl ; + + linker = [ feature.get-values : $(options) ] ; + linker ?= link ; + + resource-compiler = [ feature.get-values : $(options) ] ; + resource-compiler ?= rc ; + + # Turn on some options for i386 assembler + # -coff generate COFF format object file (compatible with cl.exe output) + local default-assembler-amd64 = ml64 ; + local default-assembler-i386 = "ml -coff" ; + local default-assembler-ia64 = ias ; + local default-assembler-arm = armasm ; + local default-assembler-arm64 = armasm64 ; + + # For the assembler the following options are turned on by default: + # + # -Zp4 align structures to 4 bytes + # -Cp preserve case of user identifiers + # -Cx preserve case in publics, externs + # + local assembler-flags-amd64 = "-c -Zp4 -Cp -Cx" ; + local assembler-flags-i386 = "-c -Zp4 -Cp -Cx" ; + local assembler-flags-ia64 = "-c -Zp4 -Cp -Cx" ; + local assembler-flags-arm = "" ; + local assembler-flags-arm64 = "" ; + + local assembler-output-flag-amd64 = -Fo ; + local assembler-output-flag-i386 = -Fo ; + local assembler-output-flag-ia64 = -Fo ; + local assembler-output-flag-arm = -o ; + local assembler-output-flag-arm64 = -o ; + + assembler = [ feature.get-values : $(options) ] ; + + idl-compiler = [ feature.get-values : $(options) ] ; + idl-compiler ?= midl ; + + mc-compiler = [ feature.get-values : $(options) ] ; + mc-compiler ?= mc ; + + manifest-tool = [ feature.get-values : $(options) ] ; + manifest-tool ?= mt ; + + local cc-filter = [ feature.get-values : $(options) ] + ; + + for local c in $(cpu) + { + # Setup script is not required in some configurations. + setup-$(c) ?= "" ; + + local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ; + + if $(.debug-configuration) + { + for local cpu-condition in $(cpu-conditions) + { + ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c):J= )'" ; + } + } + + local cpu-assembler = $(assembler) ; + cpu-assembler ?= $(default-assembler-$(c)) ; + local assembler-flags = $(assembler-flags-$(c)) ; + local assembler-output-flag = $(assembler-output-flag-$(c)) ; + + for local api in desktop store phone + { + local setup-script = $(setup-$(c)) ; + if $(api) = phone + { + setup-script = $(setup-phone-$(c)) ; + } + + if always in $(options) + { + toolset.flags msvc .REWRITE-SETUP $(api)/$(cpu-conditions) : true ; + } + + if ! $(setup-script) + { + # Should we try to set up some error handling or fallbacks here? 
+ } + else if off in $(options) || [ os.name ] != NT + { + toolset.flags msvc .SETUP $(api)/$(cpu-conditions) : [ call-batch-script "\"$(setup-script[1]:W)\" $(setup-script[2-]:E=)" ] ; + } + else + { + toolset.flags msvc .SETUP-SCRIPT $(api)/$(cpu-conditions) : $(setup-script[1]) ; + toolset.flags msvc .SETUP-OPTIONS $(api)/$(cpu-conditions) : $(setup-script[2-]) ; + } + + toolset.flags msvc.compile .RC $(api)/$(cpu-conditions) : $(resource-compiler) ; + toolset.flags msvc.compile .IDL $(api)/$(cpu-conditions) : $(idl-compiler) ; + toolset.flags msvc.compile .MC $(api)/$(cpu-conditions) : $(mc-compiler) ; + toolset.flags msvc.link .MT $(api)/$(cpu-conditions) : $(manifest-tool) -nologo ; + + if $(api) = desktop + { + toolset.flags msvc.compile .CC $(api)/$(cpu-conditions) : $(compiler) /Zm800 -nologo ; + } + else + { + toolset.flags msvc.compile .CC $(api)/$(cpu-conditions) : $(compiler) /Zm800 /ZW /EHsc -nologo ; + } + toolset.flags msvc.compile .ASM $(api)/$(cpu-conditions) : $(cpu-assembler) $(assembler-flags) -nologo ; + toolset.flags msvc.compile .ASM_OUTPUT $(api)/$(cpu-conditions) : $(assembler-output-flag) ; + toolset.flags msvc.link .LD $(api)/$(cpu-conditions) : $(linker) /NOLOGO "/INCREMENTAL:NO" ; + toolset.flags msvc.archive .LD $(api)/$(cpu-conditions) : $(linker) /lib /NOLOGO ; + } + + if $(cc-filter) + { + toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ; + } + } + + # Starting with Visual Studio 2013 the CRT is split into a desktop and app dll. + # If targeting WinRT and 12.0 set lib path to link against app CRT. + if [ MATCH "(12)" : $(version) ] + { + local storeLibPath = [ path.join $(parent) "lib/store" ] ; + toolset.flags msvc.link LINKPATH $(conditions)/store/$(.cpu-arch-i386) : [ path.native $(storeLibPath) ] ; + toolset.flags msvc.link LINKPATH $(conditions)/store/$(.cpu-arch-amd64) : [ path.native [ path.join $(storeLibPath) "amd64" ] ] ; + toolset.flags msvc.link LINKPATH $(conditions)/store/$(.cpu-arch-arm) : [ path.native [ path.join $(storeLibPath) "arm" ] ] ; + } + + # LTO + toolset.flags msvc.compile OPTIONS $(conditions)/on : /GL ; + toolset.flags msvc.link LINKFLAGS $(conditions)/on : /LTCG ; + + # Set version-specific flags. + configure-version-specific msvc : $(version) : $(conditions) ; + } +} + + +# Returns the default installation path for the given version. +# +local rule default-path ( version ) +{ + local result ; + { + # try to use vswhere + local pseudo_env_VSCOMNTOOLS ; + local all-env-paths ; + local root = [ os.environ "ProgramFiles(x86)" ] ; + if ( ! $(root) ) + { + root = [ os.environ "ProgramFiles" ] ; + } + if ( ! $(root) ) && [ os.name ] in CYGWIN + { + # We probably are in an 'env -i' Cygwin session, where the user + # was unable restore the "ProgramFiles(x86)" environment variable, + # because it is an invalid environment variable name in Cygwin. + # However, we can try to query cygpath instead. + root = [ SHELL "cygpath -w -F 42" : strip-eol ] ; # CSIDL_PROGRAM_FILESX86 + if ( ! $(root) ) + { + root = [ SHELL "cygpath -w -F 38" : strip-eol ] ; # CSIDL_PROGRAM_FILES + } + } + # When we are a Cygwin build, [ SHELL ] does execute using "/bin/sh -c". + # When /bin/sh does find a forward slash, no PATH search is performed, + # causing [ SHELL "C:\\...\\Installer/vswhere.exe" ] to succeed. + # And fortunately, forward slashes do also work in native Windows. + local vswhere = "$(root)/Microsoft Visual Studio/Installer/vswhere.exe" ; + # The check for $(root) is to avoid a segmentation fault if not found. 
+ if $(version) in 14.1 14.2 14.3 default && $(root) && [ path.exists $(vswhere) ] + { + local req = "-requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64" ; + local prop = "-property installationPath" ; + local limit ; + + if $(version) = 14.3 + { + limit = "-version \"[17.0,18.0)\" -prerelease" ; + } + else if $(version) = 14.2 || $(version) = "default" + { + limit = "-version \"[16.0,17.0)\"" ; + } + else if $(version) = 14.1 + { + limit = "-version \"[15.0,16.0)\"" ; + } + + # Quoting the "*" is for when we are a Cygwin build, to bypass /bin/sh. + local vswhere_cmd = "\"$(vswhere)\" -latest -products \"*\" $(req) $(prop) $(limit)" ; + # The split character "\r" is for when we are a Cygwin build. + local shell_ret = [ SPLIT_BY_CHARACTERS [ SHELL $(vswhere_cmd) ] : "\r\n" ] ; + pseudo_env_VSCOMNTOOLS = [ path.native [ path.join $(shell_ret) "\\Common7\\Tools" ] ] ; + if ! [ path.exists $(pseudo_env_VSCOMNTOOLS) ] + { + return ; # Not found. If we have vswhere, assume that it works. + } + all-env-paths = $(pseudo_env_VSCOMNTOOLS) ; + } + else + { + all-env-paths = [ sequence.transform os.environ + : $(.version-$(version)-env) ] ; + } + + # Check environment or previous path_VS150 + for local env-path in $(all-env-paths) + { + if $(env-path) && $(.version-$(version)-path) + { + for local bin-path in $(.version-$(version)-path) + { + result = [ path.glob [ path.make $(env-path) ] : $(bin-path) ] ; + if $(result) + { + result = [ path.native $(result[1]) ] ; + break ; + } + } + } + if $(result) + { + break ; + } + } + } + + return $(result) ; +} + + + +rule get-rspline ( target : lang-opt lang-flags ) +{ + CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS) + $($(lang-flags)) $(OPTIONS) $(USER_COMPILEFLAGS) $(USER_$(lang-flags)) -D$(DEFINES) + \"-I$(INCLUDES:W)\" \"-FI$(FORCE_INCLUDES:W)\" ] ; +} + +class msvc-linking-generator : linking-generator +{ + # Calls the base version. If necessary, also create a target for the + # manifest file.specifying source's name as the name of the created + # target. As result, the PCH will be named whatever.hpp.gch, and not + # whatever.gch. + rule generated-targets ( sources + : property-set : project name ? ) + { + local result = [ linking-generator.generated-targets $(sources) + : $(property-set) : $(project) $(name) ] ; + + if $(result) + { + local name-main = [ $(result[1]).name ] ; + local action = [ $(result[1]).action ] ; + + if [ $(property-set).get ] = "on" + { + # We force the exact name on PDB. The reason is tagging -- the + # tag rule may reasonably special case some target types, like + # SHARED_LIB. The tag rule will not catch PDBs, and it cannot + # even easily figure out if a PDB is paired with a SHARED_LIB, + # EXE or something else. Because PDBs always get the same name + # as the main target, with .pdb as extension, just force it. + local target = [ class.new file-target $(name-main:S=.pdb) exact + : PDB : $(project) : $(action) ] ; + local registered-target = [ virtual-target.register $(target) ] + ; + if $(target) != $(registered-target) + { + $(action).replace-targets $(target) : $(registered-target) ; + } + result += $(registered-target) ; + } + + if [ $(property-set).get ] = "off" + { + # Manifest is an evil target. It has .manifest appened to the + # name of the main target, including extension, e.g. + # a.exe.manifest. We use the 'exact' name to achieve this + # effect. 
+ local target = [ class.new file-target $(name-main).manifest + exact : MANIFEST : $(project) : $(action) ] ; + local registered-target = [ virtual-target.register $(target) ] + ; + if $(target) != $(registered-target) + { + $(action).replace-targets $(target) : $(registered-target) ; + } + result += $(registered-target) ; + } + } + return $(result) ; + } +} + + +# Unsafe worker rule for the register-toolset() rule. Must not be called +# multiple times. +# +local rule register-toolset-really ( ) +{ + feature.extend toolset : msvc ; + + # Intel and msvc supposedly have link-compatible objects. + feature.subfeature toolset msvc : vendor : intel : propagated optional ; + + # Inherit MIDL flags. + toolset.inherit-flags msvc : midl ; + + # Inherit MC flags. + toolset.inherit-flags msvc : mc ; + + # Dynamic runtime comes only in MT flavour. + toolset.add-requirements + msvc,shared:multi ; + + # Declare msvc toolset specific features. + { + feature.feature debug-store : object database : propagated ; + feature.feature pch-source : : dependency free ; + } + + # Declare generators. + { + # TODO: Is it possible to combine these? Make the generators + # non-composing so that they do not convert each source into a separate + # .rsp file. + generators.register [ new msvc-linking-generator msvc.link : + OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : msvc ] ; + generators.register [ new msvc-linking-generator msvc.link.dll : + OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : + msvc false ] ; + generators.register [ new msvc-linking-generator msvc.link.dll : + OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB : + msvc true ] ; + + generators.register-archiver msvc.archive : OBJ : STATIC_LIB : msvc ; + generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : msvc ; + generators.register-c-compiler msvc.compile.c : C : OBJ : msvc ; + generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : msvc ; + generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : msvc ; + + # Using 'register-c-compiler' adds the build directory to INCLUDES. + generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : msvc ; + generators.override msvc.compile.rc : rc.compile.resource ; + generators.register-standard msvc.compile.asm : ASM : OBJ : msvc ; + + generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : msvc ; + generators.override msvc.compile.idl : midl.compile.idl ; + + generators.register-standard msvc.compile.mc : MC : H RC : msvc ; + generators.override msvc.compile.mc : mc.compile ; + + # Note: the 'H' source type will catch both '.h' and '.hpp' headers as + # the latter have their HPP type derived from H. The type of compilation + # is determined entirely by the destination type. + generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : on msvc ] ; + generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : on msvc ] ; + + generators.override msvc.compile.c.pch : pch.default-c-pch-generator ; + generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ; + } + + toolset.flags msvc.compile PCH_FILE on : ; + toolset.flags msvc.compile PCH_SOURCE on : ; + toolset.flags msvc.compile PCH_HEADER on : ; + + # + # Declare flags for compilation. 
+ # + + toolset.flags msvc.compile OPTIONS speed : /O2 ; + toolset.flags msvc.compile OPTIONS space : /O1 ; + + toolset.flags msvc.compile OPTIONS $(.cpu-arch-ia64)/$(.cpu-type-itanium) : /G1 ; + toolset.flags msvc.compile OPTIONS $(.cpu-arch-ia64)/$(.cpu-type-itanium2) : /G2 ; + + toolset.flags msvc.compile OPTIONS on/object : /Z7 ; + toolset.flags msvc.compile OPTIONS on/database : /Zi ; + toolset.flags msvc.compile OPTIONS off : /Od ; + toolset.flags msvc.compile OPTIONS off : /Ob0 ; + toolset.flags msvc.compile OPTIONS on : /Ob1 ; + toolset.flags msvc.compile OPTIONS full : /Ob2 ; + + toolset.flags msvc.compile OPTIONS on : /W3 ; + toolset.flags msvc.compile OPTIONS off : /W0 ; + toolset.flags msvc.compile OPTIONS all : /W4 ; + toolset.flags msvc.compile OPTIONS extra : /W4 ; + toolset.flags msvc.compile OPTIONS pedantic : /W4 ; + toolset.flags msvc.compile OPTIONS on : /WX ; + + toolset.flags msvc.compile C++FLAGS on/off/off : /EHs ; + toolset.flags msvc.compile C++FLAGS on/off/on : /EHsc ; + toolset.flags msvc.compile C++FLAGS on/on/off : /EHa ; + toolset.flags msvc.compile C++FLAGS on/on/on : /EHac ; + + toolset.flags msvc.compile C++FLAGS 14 : "/std:c++14" ; + toolset.flags msvc.compile C++FLAGS 17 : "/std:c++17" ; + toolset.flags msvc.compile C++FLAGS 20 : "/std:c++20" ; + toolset.flags msvc.compile C++FLAGS latest : "/std:c++latest" ; + + # By default 8.0 enables rtti support while prior versions disabled it. We + # simply enable or disable it explicitly so we do not have to depend on this + # default behaviour. + toolset.flags msvc.compile C++FLAGS on : /GR ; + toolset.flags msvc.compile C++FLAGS off : /GR- ; + toolset.flags msvc.compile OPTIONS off/shared : /MD ; + toolset.flags msvc.compile OPTIONS on/shared : /MDd ; + + toolset.flags msvc.compile OPTIONS off/static/multi : /MT ; + toolset.flags msvc.compile OPTIONS on/static/multi : /MTd ; + + toolset.flags msvc.compile USER_CFLAGS : ; + toolset.flags msvc.compile.c++ USER_C++FLAGS : ; + toolset.flags msvc.compile.c++ USER_COMPILEFLAGS : ; + + toolset.flags msvc.compile PDB_CFLAG on/database : /Fd ; + + toolset.flags msvc.compile DEFINES ; + toolset.flags msvc.compile UNDEFS ; + toolset.flags msvc.compile INCLUDES ; + toolset.flags msvc.compile FORCE_INCLUDES ; + + # Declare flags for the assembler. + toolset.flags msvc.compile.asm USER_ASMFLAGS ; + + toolset.flags msvc.compile.asm ASMFLAGS x86/on : "/Zi /Zd" ; + + toolset.flags msvc.compile.asm ASMFLAGS x86/on : /W3 ; + toolset.flags msvc.compile.asm ASMFLAGS x86/off : /W0 ; + toolset.flags msvc.compile.asm ASMFLAGS x86/all : /W4 ; + toolset.flags msvc.compile.asm ASMFLAGS x86/on : /WX ; + + toolset.flags msvc.compile.asm ASMDEFINES x86 : ; + + # Declare flags for linking. + { + toolset.flags msvc.link PDB_LINKFLAG on/database : "/PDB:" ; # not used yet + toolset.flags msvc.link LINKFLAGS on : /DEBUG ; + toolset.flags msvc.link DEF_FILE ; + toolset.flags msvc.link MANIFEST_FILE linker : ; + + # The linker disables the default optimizations when using /DEBUG so we + # have to enable them manually for release builds with debug symbols. 
+ toolset.flags msvc LINKFLAGS on/off : "/OPT:REF,ICF" ; + + toolset.flags msvc LINKFLAGS console : "/subsystem:console" ; + toolset.flags msvc LINKFLAGS gui : "/subsystem:windows" ; + toolset.flags msvc LINKFLAGS wince : "/subsystem:windowsce" ; + toolset.flags msvc LINKFLAGS native : "/subsystem:native" ; + toolset.flags msvc LINKFLAGS auto : "/subsystem:posix" ; + + toolset.flags msvc.link LINKFLAGS ; + toolset.flags msvc.link LINKPATH ; + + toolset.flags msvc.link FINDLIBS_ST ; + toolset.flags msvc.link FINDLIBS_SA ; + toolset.flags msvc.link LIBRARY_OPTION msvc : "" : unchecked ; + toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : ; + } + + toolset.flags msvc.archive AROPTIONS ; + + # Enable response file control + toolset.flags msvc RESPONSE_FILE_SUB auto : a ; + toolset.flags msvc RESPONSE_FILE_SUB file : f ; + toolset.flags msvc RESPONSE_FILE_SUB contents : c ; + + # Create a project to allow building the setup scripts + project.initialize $(__name__) ; + .project = [ project.current ] ; + project msvc ; + + feature.feature msvc.setup-options : : free ; +} + + +# Locates the requested setup script under the given folder and returns its full +# path or nothing in case the script can not be found. In case multiple scripts +# are found only the first one is returned. +# +# TODO: There used to exist a code comment for the msvc.init rule stating that +# we do not correctly detect the location of the vcvars32.bat setup script for +# the free VC7.1 tools in case user explicitly provides a path. This should be +# tested or simply remove this whole comment in case this toolset version is no +# longer important. +# +local rule locate-default-setup ( command : parent : setup-name ) +{ + local result = [ GLOB $(command) $(parent) : $(setup-name) ] ; + if $(result[1]) + { + return $(result[1]) ; + } +} + + +# Validates given path, registers found configuration and prints debug +# information about it. +# +local rule register-configuration ( version : path ? ) +{ + if $(path) + { + local command = [ GLOB $(path) : cl.exe ] ; + + if $(command) + { + if $(.debug-configuration) + { + ECHO notice\: "[msvc-cfg]" msvc-$(version) detected, command\: + '$(command)' ; + } + + $(.versions).register $(version) ; + $(.versions).set $(version) : default-command : $(command) ; + } + } +} + + +################################################################################ +# +# Startup code executed when loading this module. +# +################################################################################ + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +# Miscellaneous constants. +.RM = [ common.rm-command ] ; +.nl = " +" ; +_ = " " ; +.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ; +.escaped-double-quote = "\"" ; +.hash = "\#" ; +.TOUCH_FILE = [ common.file-touch-command ] ; + +# List of all registered configurations. +.versions = [ new configurations ] ; + +# Supported CPU architectures. 
+.cpu-arch-info-i386 = x86 32 ; +.cpu-arch-info-amd64 = x86 64 ; +.cpu-arch-info-ia64 = ia64 64 ; +.cpu-arch-info-arm = arm 32 ; +.cpu-arch-info-arm64 = arm 64 ; + +# Fill explicit architecture and address model values +for local cpu in [ MATCH "^\\.cpu-arch-info-(.*)" : [ VARNAMES $(__name__) ] ] +{ + local arch = $(.cpu-arch-info-$(cpu)[1]) ; + .cpus-on-$(arch) += $(cpu) ; + .cpu-arch-$(cpu) = $(arch)/$(.cpu-arch-info-$(cpu)[2]) ; +} + +# Match implicit architecture and address model based on the current platform +.default-cpu-arch = [ os.environ PROCESSOR_ARCHITEW6432 ] ; +.default-cpu-arch ?= [ os.environ PROCESSOR_ARCHITECTURE ] ; +.default-cpu-arch = $(.default-cpu-arch:L) ; +switch $(.default-cpu-arch) +{ + case x86 : .default-cpu-arch = i386 ; + case em64t : .default-cpu-arch = amd64 ; +} + +for local cpu in $(.cpus-on-$(.cpu-arch-info-$(.default-cpu-arch)[1])) +{ + .cpu-arch-$(cpu) += /$(.cpu-arch-info-$(cpu)[2]) ; +} + +.cpu-arch-$(.default-cpu-arch) += $(.cpu-arch-info-$(.default-cpu-arch)[1])/ ; +.cpu-arch-$(.default-cpu-arch) += / ; + +# If there is only one address model for an architecture we allow to ommit it +for local arch in [ MATCH "^\\.cpus-on-(.*)" : [ VARNAMES $(__name__) ] ] +{ + if ! $(.cpus-on-$(arch)[2-]) && $(.cpus-on-$(arch)[1]) != $(.default-cpu-arch) + { + .cpu-arch-$(.cpus-on-$(arch)) += $(arch)/ ; + } +} + + +# Supported CPU types (only Itanium optimization options are supported from +# VC++ 2005 on). See +# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more +# detailed information. +.cpu-type-g5 = i586 pentium pentium-mmx ; +.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6 + k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 c7 ; +.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i + conroe conroe-xe conroe-l allendale merom + merom-xe kentsfield kentsfield-xe penryn wolfdale + yorksfield nehalem sandy-bridge ivy-bridge haswell + broadwell skylake skylake-avx512 cannonlake icelake-client + icelake-server cascadelake cooperlake tigerlake ; +.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3 + athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 + bdver4 btver1 btver2 znver1 znver2 ; +.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp + athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ; +.cpu-type-itanium = itanium itanium1 merced ; +.cpu-type-itanium2 = itanium2 mckinley ; +.cpu-type-arm = armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312 + armv7 armv7s ; + +# Known toolset versions, in order of preference. +.known-versions = 14.3 14.2 14.1 14.0 12.0 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 + 7.1toolkit 7.0 6.0 ; + +# Version aliases. +.version-alias-6 = 6.0 ; +.version-alias-6.5 = 6.0 ; +.version-alias-7 = 7.0 ; +.version-alias-8 = 8.0 ; +.version-alias-9 = 9.0 ; +.version-alias-10 = 10.0 ; +.version-alias-11 = 11.0 ; +.version-alias-12 = 12.0 ; +.version-alias-14 = 14.0 ; + +# Names of registry keys containing the Visual C++ installation path (relative +# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft"). 
+.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ; +.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ; +.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ; +.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ; +.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ; +.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ; +.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ; +.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ; +.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ; +.version-11.0-reg = "VisualStudio\\11.0\\Setup\\VC" ; +.version-12.0-reg = "VisualStudio\\12.0\\Setup\\VC" ; +.version-14.0-reg = "VisualStudio\\14.0\\Setup\\VC" ; + +# Visual C++ Toolkit 2003 does not store its installation path in the registry. +# The environment variable 'VCToolkitInstallDir' and the default installation +# path will be checked instead. +.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003/bin" ; +.version-7.1toolkit-env = VCToolkitInstallDir ; +# Visual Studio 2017 doesn't use a registry at all. And the suggested methods +# of discovery involve having a compiled program. So as a fallback we search +# paths for VS2017 (aka msvc >= 14.1). +.version-14.1-path = + "../../VC/Tools/MSVC/*/bin/Host*/*" + "Microsoft Visual Studio/2017/*/VC/Tools/MSVC/*/bin/Host*/*" + ; +.version-14.1-env = VS150COMNTOOLS ProgramFiles ProgramFiles(x86) ; +.version-14.2-path = + "../../VC/Tools/MSVC/*/bin/Host*/*" + "Microsoft Visual Studio/2019/*/VC/Tools/MSVC/*/bin/Host*/*" + ; +.version-14.2-env = VS160COMNTOOLS ProgramFiles ProgramFiles(x86) ; +.version-14.3-path = + "../../VC/Tools/MSVC/*/bin/Host*/*" + "Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*/bin/Host*/*" + ; +.version-14.3-env = VS170COMNTOOLS ProgramFiles ProgramFiles(x86) ; + +# Auto-detect all the available msvc installations on the system. +auto-detect-toolset-versions ; + + +# And finally trigger the actual Boost Build toolset registration. +register-toolset ; diff --git a/src/boost/tools/build/src/tools/msvc.py b/src/boost/tools/build/src/tools/msvc.py new file mode 100644 index 000000000..7c1bdf0f1 --- /dev/null +++ b/src/boost/tools/build/src/tools/msvc.py @@ -0,0 +1,1314 @@ +# Copyright (c) 2003 David Abrahams. +# Copyright (c) 2005 Vladimir Prus. +# Copyright (c) 2005 Alexey Pakhunov. +# Copyright (c) 2006 Bojan Resnik. +# Copyright (c) 2006 Ilya Sokolov. +# Copyright (c) 2007 Rene Rivera +# Copyright (c) 2008 Jurko Gospodnetic +# Copyright (c) 2011 Juraj Ivancic +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +################################################################################ +# +# MSVC Boost Build toolset module. +# -------------------------------- +# +# All toolset versions need to have their location either auto-detected or +# explicitly specified except for the special 'default' version that expects the +# environment to find the needed tools or report an error. 
+# +################################################################################ + +from os import environ +import os.path +import re +import _winreg + +import bjam + +from b2.tools import common, rc, pch, builtin, mc, midl +from b2.build import feature, type, toolset, generators, property_set +from b2.build.property import Property +from b2.util import path +from b2.manager import get_manager +from b2.build.generators import Generator +from b2.build.toolset import flags +from b2.util.utility import to_seq, on_windows +from b2.tools.common import Configurations + +__debug = None + +def debug(): + global __debug + if __debug is None: + __debug = "--debug-configuration" in bjam.variable("ARGV") + return __debug + + +# It is not yet clear what to do with Cygwin on python port. +def on_cygwin(): + return False + + +type.register('MANIFEST', ['manifest']) +feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated']) ; + +type.register('PDB',['pdb']) + +################################################################################ +# +# Public rules. +# +################################################################################ + +# Initialize a specific toolset version configuration. As the result, path to +# compiler and, possible, program names are set up, and will be used when that +# version of compiler is requested. For example, you might have: +# +# using msvc : 6.5 : cl.exe ; +# using msvc : 7.0 : Y:/foo/bar/cl.exe ; +# +# The version parameter may be omitted: +# +# using msvc : : Z:/foo/bar/cl.exe ; +# +# The following keywords have special meanings when specified as versions: +# - all - all detected but not yet used versions will be marked as used +# with their default options. +# - default - this is an equivalent to an empty version. +# +# Depending on a supplied version, detected configurations and presence 'cl.exe' +# in the path different results may be achieved. The following table describes +# the possible scenarios: +# +# Nothing "x.y" +# Passed Nothing "x.y" detected, detected, +# version detected detected cl.exe in path cl.exe in path +# +# default Error Use "x.y" Create "default" Use "x.y" +# all None Use all None Use all +# x.y - Use "x.y" - Use "x.y" +# a.b Error Error Create "a.b" Create "a.b" +# +# "x.y" - refers to a detected version; +# "a.b" - refers to an undetected version. +# +# FIXME: Currently the command parameter and the property parameter +# seem to overlap in duties. Remove this duplication. This seems to be related +# to why someone started preparing to replace init with configure rules. + +def init(version = None, command = None, options = None): + # When initialized from + # using msvc : x.0 ; + # we get version as a single element list i.e. ['x.0'], + # but when specified from the command line we get a string i.e. 'x.0'. + # We want to work with a string, so unpack the list if needed. + is_single_element_list = (isinstance(version,list) and len(version) == 1) + assert(version==None or isinstance(version,str) or is_single_element_list) + if is_single_element_list: + version = version[0] + + options = to_seq(options) + command = to_seq(command) + + if command: + options.extend(""+cmd for cmd in command) + configure(version,options) + +def configure(version=None, options=None): + if version == "all": + if options: + raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version)) + + # Configure (i.e. mark as used) all registered versions. 
+ all_versions = __versions.all() + if not all_versions: + if debug(): + print "notice: [msvc-cfg] Asked to configure all registered" \ + "msvc toolset versions when there are none currently" \ + "registered." ; + else: + for v in all_versions: + # Note that there is no need to skip already configured + # versions here as this will request configure-really rule + # to configure the version using default options which will + # in turn cause it to simply do nothing in case the version + # has already been configured. + configure_really(v) + elif version == "default": + configure_really(None,options) + else: + configure_really(version, options) + +def extend_conditions(conditions,exts): + return [ cond + '/' + ext for cond in conditions for ext in exts ] + +def configure_version_specific(toolset_arg, version, conditions): + # Starting with versions 7.0, the msvc compiler have the /Zc:forScope and + # /Zc:wchar_t options that improve C++ standard conformance, but those + # options are off by default. If we are sure that the msvc version is at + # 7.*, add those options explicitly. We can be sure either if user specified + # version 7.* explicitly or if we auto-detected the version ourselves. + if not re.search('^6\\.', version): + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',conditions, ['/Zc:forScope','/Zc:wchar_t']) + toolset.flags('{}.compile.c++'.format(toolset_arg), 'C++FLAGS',conditions, ['/wd4675']) + + # Explicitly disable the 'function is deprecated' warning. Some msvc + # versions have a bug, causing them to emit the deprecation warning even + # with /W0. + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['off']), ['/wd4996']) + if re.search('^[78]\.', version): + # 64-bit compatibility warning deprecated since 9.0, see + # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['all']), ['/Wp64']) + + # + # Processor-specific optimization. + # + if re.search('^[67]', version ): + # 8.0 deprecates some of the options. + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['speed','space']), ['/Ogiy', '/Gs']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['speed']), ['/Ot']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['space']), ['/Os']) + + cpu_arch_i386_cond = extend_conditions(conditions, __cpu_arch_i386) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['']),['/GB']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['i486']),['/G4']) + + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['' + t for t in __cpu_type_g5]), ['/G5']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['' + t for t in __cpu_type_g6]), ['/G6']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['' + t for t in __cpu_type_g7]), ['/G7']) + + # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math" + # tests will fail. + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', conditions, ['/Op']) + + # 7.1 and below have single-threaded static RTL. 
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['off/static/single']), ['/ML']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['on/static/single']), ['/MLd']) + else: + # 8.0 and above adds some more options. + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/' for a in __cpu_arch_amd64]), ['/favor:blend']) + + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/' + t for a in __cpu_arch_amd64 for t in __cpu_type_em64t]), ['/favor:EM64T']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/' + t for a in __cpu_arch_amd64 for t in __cpu_type_amd64]), ['/favor:AMD64']) + + # 8.0 and above only has multi-threaded static RTL. + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['off/static/single']), ['/MT']) + toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['on/static/single']), ['/MTd']) + + # Specify target machine type so the linker will not need to guess. + toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64']) + toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86']) + toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64']) + + # Make sure that manifest will be generated even if there is no + # dependencies to put there. + toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', conditions, ['/MANIFEST']) + + +# Registers this toolset including all of its flags, features & generators. Does +# nothing on repeated calls. + +def register_toolset(): + if not 'msvc' in feature.values('toolset'): + register_toolset_really() + + +engine = get_manager().engine() + +# this rule sets up the pdb file that will be used when generating static +# libraries and the debug-store option is database, so that the compiler +# puts all debug info into a single .pdb file named after the library +# +# Poking at source targets this way is probably not clean, but it's the +# easiest approach. +def archive(targets, sources=None, properties=None): + bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb') + +# Declare action for creating static libraries. If library exists, remove it +# before adding files. See +# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale. 
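+#
+# A note on the action bodies below (and on the link actions further down): an
+# argument written as @"@(name:E=content)" uses bjam's @() expansion, which
+# writes "content" into the file "name" when the action runs and substitutes
+# the file name on the command line. The generated .rsp files therefore act as
+# response files, keeping long object and library lists under the Windows
+# command-line length limit.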
+if not on_cygwin(): + engine.register_action( + 'msvc.archive', + '''if exist "$(<[1])" DEL "$(<[1])" + $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E= +"$(>)" +$(LIBRARIES_MENTIONED_BY_FILE) +"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" +"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''', + function=archive) +else: + engine.register_action( + 'msvc.archive', + '''{rm} "$(<[1])" + $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E= +"$(>)" +$(LIBRARIES_MENTIONED_BY_FILE) +"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" +"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()), + function=archive) + +# For the assembler the following options are turned on by default: +# +# -Zp4 align structures to 4 bytes +# -Cp preserve case of user identifiers +# -Cx preserve case in publics, externs +# +engine.register_action( + 'msvc.compile.asm', + '$(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"' ) + + +# Equivalent to [ on $(target) return $(prefix)$(var)$(suffix) ]. Note that $(var) can be a list. +def expand_target_variable(target,var,prefix=None,suffix=None): + list = bjam.call( 'get-target-variable', target, var ) + return " ".join([ ("" if prefix is None else prefix) + elem + ("" if suffix is None else suffix) for elem in list ]) + + +def get_rspline(targets, lang_opt): + result = lang_opt + '\n' + \ + expand_target_variable(targets, 'UNDEFS' , '\n-U' ) + \ + expand_target_variable(targets, 'CFLAGS' , '\n' ) + \ + expand_target_variable(targets, 'C++FLAGS', '\n' ) + \ + expand_target_variable(targets, 'OPTIONS' , '\n' ) + '\n-c' + \ + expand_target_variable(targets, 'DEFINES' , '\n-D' , '\n' ) + \ + expand_target_variable(targets, 'INCLUDES', '\n"-I', '"\n' ) + bjam.call('set-target-variable', targets, 'CC_RSPLINE', result) + +def compile_c(targets, sources = [], properties = None): + get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' ) + get_rspline(targets, '-TC') + compile_c_cpp(targets,sources) + +def compile_c_preprocess(targets, sources = [], properties = None): + get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' ) + get_rspline(targets, '-TC') + preprocess_c_cpp(targets,sources) + +def compile_c_pch(targets, sources = [], properties = []): + get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' ) + get_rspline([targets[0]], '-TC') + get_rspline([targets[1]], '-TC') + +toolset.flags( 'msvc', 'YLOPTION', [], ['-Yl'] ) + +def compile_cpp(targets,sources=[],properties=None): + get_rspline(targets,'-TP') + bjam.call('set-target-variable', targets, 'PCH_FILE', sources) + compile_c_cpp(targets,sources) + +def compile_cpp_preprocess(targets,sources=[],properties=None): + get_rspline(targets,'-TP') + preprocess_c_cpp(targets,sources) + +def compile_cpp_pch(targets,sources=[],properties=None): + get_rspline([targets[0]], '-TP') + get_rspline([targets[1]], '-TP') + + +# Action for running the C/C++ compiler without using precompiled headers. +# +# WARNING: Synchronize any changes this in action with intel-win +# +# Notes regarding PDB generation, for when we use on/database +# +# 1. PDB_CFLAG is only set for on/database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty +# +# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++. +# The linker will pull these into the executable's PDB +# +# 3. 
When compiling library's source files, PDB_NAME is updated to .pdb for each source file by rule archive, +# as in this case the compiler must be used to create a single PDB for our library. +# + +class SetupAction: + def __init__(self, setup_func, function): + self.setup_func = setup_func + self.function = function + + def __call__(self, targets, sources, property_set): + assert(callable(self.setup_func)) + # This can modify sources. + action_name = self.setup_func(targets, sources, property_set) + # Bjam actions defined from Python have only the command + # to execute, and no associated jam procedural code. So + # passing 'property_set' to it is not necessary. + bjam.call("set-update-action", action_name, targets, sources, []) + if self.function: + self.function(targets, sources, property_set) + +def register_setup_action(action_name,setup_function,function=None): + global engine + if action_name in engine.actions: + raise "Bjam action %s is already defined" % action_name + engine.actions[action_name] = SetupAction(setup_function, function) + + +engine.register_action('compile-c-c++', +'$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)''', +bound_list=['PDB_NAME']) + +def setup_compile_c_cpp_action(targets, sources, properties): + sources += bjam.call('get-target-variable',targets,'PCH_FILE') + sources += bjam.call('get-target-variable',targets,'PCH_HEADER') + return 'compile-c-c++' + + +register_setup_action( + 'msvc.compile.c', + setup_compile_c_cpp_action, + function=compile_c) + +register_setup_action( + 'msvc.compile.c++', + setup_compile_c_cpp_action, + function=compile_cpp) + + +engine.register_action('preprocess-c-c++', +'$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"', +bound_list=['PDB_NAME']) + +def setup_preprocess_c_cpp_action(targets, sources, properties): + sources += bjam.call('get-target-variable',targets,'PCH_FILE') + sources += bjam.call('get-target-variable',targets,'PCH_HEADER') + return 'preprocess-c-c++' + +register_setup_action( + 'msvc.compile.c.preprocess', + setup_preprocess_c_cpp_action, + function=compile_c_preprocess) + +register_setup_action( + 'msvc.compile.c++.preprocess', + setup_preprocess_c_cpp_action, + function=compile_cpp_preprocess) + +def compile_c_cpp(targets,sources=None): + pch_header = bjam.call('get-target-variable',targets[0],'PCH_HEADER') + pch_file = bjam.call('get-target-variable',targets[0],'PCH_FILE') + if pch_header: get_manager().engine().add_dependency(targets[0],pch_header) + if pch_file: get_manager().engine().add_dependency(targets[0],pch_file) + bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb') + +def preprocess_c_cpp(targets,sources=None): + #same as above + return compile_c_cpp(targets,sources) + +# Action for running the C/C++ compiler using precompiled headers. In addition +# to whatever else it needs to compile, this action also adds a temporary source +# .cpp file used to compile the precompiled headers themselves. 
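+#
+# The temporary source file is produced by the trailing @() construct in the
+# first action below: for a precompiled header built from, say, pch.hpp (an
+# illustrative name), it writes a .cpp file named after the pch target whose
+# only content is
+#
+#     #include "pch.hpp"
+#
+# which is all the compiler needs to create the precompiled header when no
+# user-supplied pch-source is available.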
+ + +engine.register_action('compile-c-c++-pch', +'$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include "$(>[1]:D=)"\n)" $(.CC.FILTER)') + +engine.register_action('compile-c-c++-pch-s', +'$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)') + +def setup_c_cpp_pch(targets, sources, properties): + pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE') + if pch_source: + sources += pch_source + get_manager().engine().add_dependency(targets,pch_source) + return 'compile-c-c++-pch-s' + else: + return 'compile-c-c++-pch' + +register_setup_action( + 'msvc.compile.c.pch', + setup_c_cpp_pch, + function=compile_c_pch) + +register_setup_action( + 'msvc.compile.c++.pch', + setup_c_cpp_pch, + function=compile_cpp_pch) + + +# See midl.py for details. +# +engine.register_action( + 'msvc.compile.idl', + '''$(.IDL) /nologo @"@($(<[1]:W).rsp:E= +"$(>:W)" +-D$(DEFINES) +"-I$(INCLUDES:W)" +-U$(UNDEFS) +$(MIDLFLAGS) +/tlb "$(<[1]:W)" +/h "$(<[2]:W)" +/iid "$(<[3]:W)" +/proxy "$(<[4]:W)" +/dlldata "$(<[5]:W)")" + {touch} "$(<[4]:W)" + {touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command())) + +engine.register_action( + 'msvc.compile.mc', + '$(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"') + +engine.register_action( + 'msvc.compile.rc', + '$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"') + +def link_dll(targets,sources=None,properties=None): + get_manager().engine().add_dependency(targets,bjam.call('get-target-variable',targets,'DEF_FILE')) + manifest(targets, sources, properties) + +def manifest(targets,sources=None,properties=None): + if 'on' in properties.get(''): + get_manager().engine().set_update_action('msvc.manifest', targets, sources, properties) + + +# Incremental linking a DLL causes no end of problems: if the actual exports do +# not change, the import .lib file is never updated. Therefore, the .lib is +# always out-of-date and gets rebuilt every time. I am not sure that incremental +# linking is such a great idea in general, but in this case I am sure we do not +# want it. + +# Windows manifest is a new way to specify dependencies on managed DotNet +# assemblies and Windows native DLLs. The manifests are embedded as resources +# and are useful in any PE target (both DLL and EXE). 
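+#
+# In the manifest actions below, "-outputresource:$(<[1]);1" embeds the
+# manifest under resource ID 1, the ID reserved for executables, while the DLL
+# variant uses ";2", the ID used for isolation-aware DLLs.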
+ +if not on_cygwin(): + engine.register_action( + 'msvc.link', + '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E= +"$(>)" +$(LIBRARIES_MENTIONED_BY_FILE) +$(LIBRARIES) +"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" +"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" +if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''', + function=manifest, + bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE']) + + engine.register_action( + 'msvc.manifest', + '''if exist "$(<[1]).manifest" ( + $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1" + )''') + + engine.register_action( + 'msvc.link.dll', + '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E= +"$(>)" +$(LIBRARIES_MENTIONED_BY_FILE) +$(LIBRARIES) +"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" +"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")" +if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''', + function=link_dll, + bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE']) + + engine.register_action( + 'msvc.manifest.dll', + '''if exist "$(<[1]).manifest" ( + $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2" + )''') +else: + engine.register_action( + 'msvc.link', + '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E= +"$(>)" +$(LIBRARIES_MENTIONED_BY_FILE) +$(LIBRARIES) +"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" +"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''', + function=manifest, + bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE']) + + engine.register_action( + 'msvc.manifest', + '''if test -e "$(<[1]).manifest"; then + $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1" + fi''') + + engine.register_action( + 'msvc.link.dll', + '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E= +"$(>)" +$(LIBRARIES_MENTIONED_BY_FILE) +$(LIBRARIES) +"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" +"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''', + function=link_dll, + bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE']) + + engine.register_action( + 'msvc.manifest.dll', + '''if test -e "$(<[1]).manifest"; then + $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2" + fi''') + + +################################################################################ +# +# Classes. +# +################################################################################ + +class MsvcPchGenerator(pch.PchGenerator): + + # Inherit the __init__ method + def run_pch(self, project, name, prop_set, sources): + # Find the header in sources. Ignore any CPP sources. + pch_header = None + pch_source = None + for s in sources: + if type.is_derived(s.type(), 'H'): + pch_header = s + elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'): + pch_source = s + + if not pch_header: + raise RuntimeError( "can not build pch without pch-header" ) + + # If we do not have the PCH source - that is fine. We will just create a + # temporary .cpp file in the action. + properties = prop_set.all() + # Passing of is a dirty trick, needed because + # non-composing generators with multiple inputs are subtly + # broken. 
For more detailed information see: + # https://zigzag.cs.msu.su:7813/boost.build/ticket/111 + if pch_source: + properties.append(Property('pch-source',pch_source)) + generated = Generator.run(self,project,name,property_set.create(properties),[pch_header]) + pch_file = None + for g in generated: + if type.is_derived(g.type(), 'PCH'): + pch_file = g + result_props = [] + if pch_header: + result_props.append(Property('pch-header', pch_header)) + if pch_file: + result_props.append(Property('pch-file', pch_file)) + + return property_set.PropertySet(result_props), generated + + +################################################################################ +# +# Local rules. +# +################################################################################ + +# Detects versions listed as '_known_versions' by checking registry information, +# environment variables & default paths. Supports both native Windows and +# Cygwin. +def auto_detect_toolset_versions(): + if on_windows() or on_cygwin(): + for version in _known_versions: + versionVarName = '__version_{}_reg'.format(version.replace('.','_')) + if versionVarName in globals(): + vc_path = None + for x64elt in [ '', 'Wow6432Node\\' ]: + try: + with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\{}Microsoft\\{}'.format(x64elt, globals()[versionVarName])) as reg_key: + vc_path = _winreg.QueryValueEx(reg_key, "ProductDir")[0] + except: + pass + if vc_path: + vc_path = os.path.join(vc_path,'bin') + register_configuration(version,os.path.normpath(vc_path)) + + for i in _known_versions: + if not i in __versions.all(): + register_configuration(i,default_path(i)) + + +def maybe_rewrite_setup(toolset, setup_script, setup_options, version, rewrite_setup='off'): + """ + Helper rule to generate a faster alternative to MSVC setup scripts. + + We used to call MSVC setup scripts directly in every action, however in + newer MSVC versions (10.0+) they make long-lasting registry queries + which have a significant impact on build time. + """ + result = '"{}" {}'.format(setup_script, setup_options) + + # At the moment we only know how to rewrite scripts with cmd shell. + if os.name == 'nt' and rewrite_setup != 'off': + basename = os.path.basename(setup_script) + filename, _ = os.path.splitext(basename) + setup_script_id = 'b2_{}_{}_{}'.format(toolset, version, filename) + if setup_options: + setup_script_id = '{}_{}'.format(setup_script_id, setup_options) + + tempdir = os.environ.get('TEMP') + replacement = os.path.join(tempdir, setup_script_id + '.cmd') + if rewrite_setup == 'always' or not os.path.exists(replacement): + import subprocess + # call the setup script and print the environment after doing so + p = subprocess.Popen([ + setup_script, setup_options, '>', 'nul', '&&', 'set', + ], stdout=subprocess.PIPE, shell=True + ) + stdout, _ = p.communicate() + + diff_vars = [] + for var in stdout.splitlines(): + # returns a tuple of ('var-name', '=', 'value'). + # partition is being used here (over something like .split()) + # for two reasons: + # 1) an environment variable may have a value that contains an '='; + # .partition() will still return the correct key and value pair. + # 2) if the line doesn't contain an '=' at all, then the returned + # tuple will contain only empty strings rather than raising + # an exception. + key, _, value = var.partition('=') + # os.environ handles casing differences here. Usually the + # call to "set" above will produce pascal-cased environment + # variable names, so a normal python dict can't be used here. 
+ # check for the existence of key in case the partitioning() above + # returned an empty key value pair. + if key and os.environ.get(key) != value: + diff_vars.append('SET {}={}'.format(key, value)) + + if diff_vars: + with open(replacement, 'wb') as f: + f.write(os.linesep.join(diff_vars)) + + result = '"{}"'.format(replacement) + else: + result = '"{}"'.format(replacement) + + return result + + +def generate_setup_cmd(version, command, parent, options, cpu, global_setup, + default_global_setup_options, default_setup): + setup_prefix = "call " + setup_suffix = """ >nul\n""" + if on_cygwin(): + setup_prefix = "cmd.exe /S /C call " + setup_suffix = " \">nul\" \"&&\" " + + setup_options = '' + setup_cpu = feature.get_values(''.format(cpu), options) + + if not setup_cpu: + if global_setup: + setup_cpu = global_setup + # If needed we can easily add using configuration flags + # here for overriding which options get passed to the + # global setup command for which target platform: + # setup_options = feature.get_values(''.format(cpu),options) + if not setup_options: + setup_options = default_global_setup_options[cpu] + else: + setup_cpu = locate_default_setup(command, parent, default_setup[cpu]) + else: + setup_cpu = setup_cpu[0] + + # Cygwin to Windows path translation. + # setup-$(c) = "\""$(setup-$(c):W)"\"" ; + + # Append setup options to the setup name and add the final setup + # prefix & suffix. + rewrite = feature.get_values('', options) + rewrite = rewrite[0] if rewrite else '' + setup = maybe_rewrite_setup( + 'msvc', setup_cpu, setup_options, version, rewrite) + return '{}{}{}'.format(setup_prefix, setup, setup_suffix) + + +# Worker rule for toolset version configuration. Takes an explicit version id or +# nothing in case it should configure the default toolset version (the first +# registered one or a new 'default' one in case no toolset versions have been +# registered yet). +# + +def configure_really(version=None, options=[]): + v = version + if not v: + # Take the first registered (i.e. auto-detected) version. + version = __versions.first() + v = version + + # Note: 'version' can still be empty at this point if no versions have + # been auto-detected. + if not version: + version = "default" + + # Version alias -> real version number. + version = globals().get("__version_alias_{}".format(version), version) + + # Check whether the selected configuration is already in use. + if version in __versions.used(): + # Allow multiple 'toolset.using' calls for the same configuration if the + # identical sets of options are used. + if options and options != __versions.get(version,'options'): + raise RuntimeError("MSVC toolset configuration: Toolset version '$(version)' already configured.".format(version)) + else: + # Register a new configuration. + __versions.register(version) + + # Add user-supplied to auto-detected options. + version_opts = __versions.get(version, 'options') + if (version_opts): + options = version_opts + options + + # Mark the configuration as 'used'. + __versions.use(version) + # Generate conditions and save them. + conditions = common.check_init_parameters('msvc', None, ('version', v)) + __versions.set(version, 'conditions', conditions) + command = feature.get_values('', options) + + # If version is specified, we try to search first in default paths, and + # only then in PATH. 
+ command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version)) + common.handle_options('msvc', conditions, command, options) + + if not version: + # Even if version is not explicitly specified, try to detect the + # version from the path. + # FIXME: We currently detect both Microsoft Visual Studio 9.0 and + # 9.0express as 9.0 here. + if re.search("Microsoft Visual Studio[\/\\]2017", command): + version = '15.0' + elif re.search("Microsoft Visual Studio 14", command): + version = '14.0' + elif re.search("Microsoft Visual Studio 12", command): + version = '12.0' + elif re.search("Microsoft Visual Studio 11", command): + version = '11.0' + elif re.search("Microsoft Visual Studio 10", command): + version = '10.0' + elif re.search("Microsoft Visual Studio 9", command): + version = '9.0' + elif re.search("Microsoft Visual Studio 8", command): + version = '8.0' + elif re.search("NET 2003[\/\\]VC7", command): + version = '7.1' + elif re.search("Microsoft Visual C\\+\\+ Toolkit 2003", command): + version = '7.1toolkit' + elif re.search(".NET[\/\\]VC7", command): + version = '7.0' + else: + version = '6.0' + + # Generate and register setup command. + + below_8_0 = re.search("^[67]\\.",version) != None + + if below_8_0: + cpu = ['i386'] + else: + cpu = ['i386', 'amd64', 'ia64'] + + setup_scripts = {} + + if command: + # TODO: Note that if we specify a non-existant toolset version then + # this rule may find and use a corresponding compiler executable + # belonging to an incorrect toolset version. For example, if you + # have only MSVC 7.1 installed, have its executable on the path and + # specify you want Boost Build to use MSVC 9.0, then you want Boost + # Build to report an error but this may cause it to silently use the + # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0 + # toolset version. + command = common.get_absolute_tool_path(command) + + if command: + parent = os.path.dirname(os.path.normpath(command)) + # Setup will be used if the command name has been specified. If + # setup is not specified explicitly then a default setup script will + # be used instead. Setup scripts may be global or arhitecture/ + # /platform/cpu specific. Setup options are used only in case of + # global setup scripts. + + # Default setup scripts provided with different VC distributions: + # + # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386 + # builds. It was located in the bin folder for the regular version + # and in the root folder for the free VC 7.1 tools. + # + # Later 8.0 & 9.0 versions introduce separate platform specific + # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium) + # located in or under the bin folder. Most also include a global + # vcvarsall.bat helper script located in the root folder which runs + # one of the aforementioned vcvars*.bat scripts based on the options + # passed to it. So far only the version coming with some PlatformSDK + # distributions does not include this top level script but to + # support those we need to fall back to using the worker scripts + # directly in case the top level script can not be found. 
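
# Illustrative aside: a rough sketch of how one of the vcvars setup scripts
# described above can be invoked for a single architecture and its resulting
# environment captured, using essentially the "call <script> ... >nul && set"
# trick that maybe_rewrite_setup() above relies on. The script path and the
# architecture argument below are assumptions for illustration only, not
# values taken from the configuration code.
import subprocess


def capture_vcvars_environment(setup_script, arch='x86_amd64'):
    command = 'call "{}" {} >nul && set'.format(setup_script, arch)
    output = subprocess.check_output(command, shell=True, universal_newlines=True)
    env = {}
    for line in output.splitlines():
        # partition() keeps values that themselves contain '=' intact.
        key, sep, value = line.partition('=')
        if sep:
            env[key] = value
    return env


# Example (hypothetical install location):
# env = capture_vcvars_environment(
#     r'C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat')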
+ + global_setup = feature.get_values('',options) + if global_setup: + global_setup = global_setup[0] + else: + global_setup = None + + if not below_8_0 and not global_setup: + global_setup = locate_default_setup(command,parent,'vcvarsall.bat') + + + default_setup = { + 'amd64' : 'vcvarsx86_amd64.bat', + 'i386' : 'vcvars32.bat', + 'ia64' : 'vcvarsx86_ia64.bat' } + + # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and + # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx + # mention an x86_IPF option, that seems to be a documentation bug + # and x86_ia64 is the correct option. + default_global_setup_options = { + 'amd64' : 'x86_amd64', + 'i386' : 'x86', + 'ia64' : 'x86_ia64' } + + somehow_detect_the_itanium_platform = None + # When using 64-bit Windows, and targeting 64-bit, it is possible to + # use a native 64-bit compiler, selected by the "amd64" & "ia64" + # parameters to vcvarsall.bat. There are two variables we can use -- + # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is + # 'x86' when running 32-bit Windows, no matter which processor is + # used, and 'AMD64' on 64-bit windows on x86 (either AMD64 or EM64T) + # Windows. + # + if re.search( 'AMD64', environ[ "PROCESSOR_ARCHITECTURE" ] ) != None: + default_global_setup_options[ 'amd64' ] = 'amd64' + # TODO: The same 'native compiler usage' should be implemented for + # the Itanium platform by using the "ia64" parameter. For this + # though we need someone with access to this platform who can find + # out how to correctly detect this case. + elif somehow_detect_the_itanium_platform: + default_global_setup_options[ 'ia64' ] = 'ia64' + + for c in cpu: + setup_scripts[c] = generate_setup_cmd( + version, command, parent, options, c, global_setup, + default_global_setup_options, default_setup + ) + + # Get tool names (if any) and finish setup. 
+ compiler = feature.get_values("", options) + compiler = compiler[0] if compiler else 'cl' + + linker = feature.get_values("", options) + if not linker: + linker = "link" + + resource_compiler = feature.get_values("", options) + if not resource_compiler: + resource_compiler = "rc" + + # Turn on some options for i386 assembler + # -coff generate COFF format object file (compatible with cl.exe output) + default_assembler_amd64 = 'ml64' + default_assembler_i386 = 'ml -coff' + default_assembler_ia64 = 'ias' + + assembler = feature.get_values('',options) + + idl_compiler = feature.get_values('',options) + if not idl_compiler: + idl_compiler = 'midl' + + mc_compiler = feature.get_values('',options) + if not mc_compiler: + mc_compiler = 'mc' + + manifest_tool = feature.get_values('',options) + if not manifest_tool: + manifest_tool = 'mt' + + cc_filter = feature.get_values('',options) + + for c in cpu: + cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ] + + setup_script = setup_scripts.get(c, '') + + if debug(): + for cpu_condition in cpu_conditions: + print "notice: [msvc-cfg] condition: '{}', setup: '{}'".format(cpu_condition,setup_script) + + cpu_assembler = assembler + if not cpu_assembler: + cpu_assembler = locals()['default_assembler_{}'.format(c)] + + toolset.flags('msvc.compile', '.CC' , cpu_conditions, ['{}{} /Zm800 -nologo' .format(setup_script, compiler)]) + toolset.flags('msvc.compile', '.RC' , cpu_conditions, ['{}{}' .format(setup_script, resource_compiler)]) + toolset.flags('msvc.compile', '.ASM', cpu_conditions, ['{}{} -nologo' .format(setup_script, cpu_assembler)]) + toolset.flags('msvc.link' , '.LD' , cpu_conditions, ['{}{} /NOLOGO /INCREMENTAL:NO'.format(setup_script, linker)]) + toolset.flags('msvc.archive', '.LD' , cpu_conditions, ['{}{} /lib /NOLOGO' .format(setup_script, linker)]) + toolset.flags('msvc.compile', '.IDL', cpu_conditions, ['{}{}' .format(setup_script, idl_compiler)]) + toolset.flags('msvc.compile', '.MC' , cpu_conditions, ['{}{}' .format(setup_script, mc_compiler)]) + toolset.flags('msvc.link' , '.MT' , cpu_conditions, ['{}{} -nologo' .format(setup_script, manifest_tool)]) + + if cc_filter: + toolset.flags('msvc', '.CC.FILTER', cpu_conditions, ['"|" {}'.format(cc_filter)]) + + # Set version-specific flags. + configure_version_specific('msvc', version, conditions) + + +# Returns the default installation path for the given version. +# +def default_path(version): + # Use auto-detected path if possible. 
+ options = __versions.get(version, 'options')
+ tmp_path = None
+ if options:
+ tmp_path = feature.get_values('', options)
+
+ if tmp_path:
+ tmp_path="".join(tmp_path)
+ tmp_path=os.path.dirname(tmp_path)
+ else:
+ env_var_var_name = '__version_{}_env'.format(version.replace('.','_'))
+ vc_path = None
+ if env_var_var_name in globals():
+ env_var_name = globals()[env_var_var_name]
+ if env_var_name in os.environ:
+ vc_path = environ[env_var_name]
+ if vc_path:
+ vc_path = os.path.join(vc_path,globals()['__version_{}_envpath'.format(version.replace('.','_'))])
+ tmp_path = os.path.normpath(vc_path)
+
+ var_name = '__version_{}_path'.format(version.replace('.','_'))
+ if not tmp_path and var_name in globals():
+ tmp_path = os.path.normpath(os.path.join(common.get_program_files_dir(), globals()[var_name]))
+ return tmp_path
+
+
+# Returns either the default installation path (if 'version' is not empty) or
+# the list of all known default paths (if no version is given).
+#
+def default_paths(version = None):
+ possible_paths = []
+ if version:
+ path = default_path(version)
+ if path:
+ possible_paths.append(path)
+ else:
+ for i in _known_versions:
+ path = default_path(i)
+ if path:
+ possible_paths.append(path)
+ return possible_paths
+
+
+class MsvcLinkingGenerator(builtin.LinkingGenerator):
+ # Calls the base version. If necessary, also creates extra file targets for
+ # the PDB and for the external manifest file that accompany the main
+ # linking result.
+ def generated_targets(self, sources, prop_set, project, name):
+ result = builtin.LinkingGenerator.generated_targets(self, sources, prop_set, project, name)
+ if result:
+ name_main = result[0].name()
+ action = result[0].action()
+
+ if prop_set.get('') == 'on':
+ # We force an exact name on the PDB. The reason is tagging -- the tag rule
+ # may reasonably special case some target types, like SHARED_LIB. The tag
+ # rule will not catch PDB, and it cannot even easily figure out whether the
+ # PDB is paired with SHARED_LIB or EXE or something else. Because the PDB
+ # always gets the same name as the main target, with .pdb as extension, just force it.
+ target = FileTarget(name_main.split_ext()[0]+'.pdb','PDB',project,action,True)
+ registered_target = virtual_target.register(target)
+ if target != registered_target:
+ action.replace_targets(target,registered_target)
+ result.append(registered_target)
+ if prop_set.get('') == 'off':
+ # The manifest target gets .manifest appended to the full name of the main
+ # target, including its extension, e.g. a.exe.manifest. We use an 'exact'
+ # name to achieve this effect.
+ target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True)
+ registered_target = virtual_target.register(target)
+ if target != registered_target:
+ action.replace_targets(target,registered_target)
+ result.append(registered_target)
+ return result
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+
+def register_toolset_really():
+ feature.extend('toolset', ['msvc'])
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature( 'toolset', 'msvc', 'vendor', ['intel'], ['propagated', 'optional'])
+
+ # Inherit MIDL flags.
+ toolset.inherit_flags('msvc', 'midl')
+
+ # Inherit MC flags.
+ toolset.inherit_flags('msvc','mc')
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add_requirements(['msvc,shared:multi'])
+
+ # Declare msvc toolset specific features.
+ feature.feature('debug-store', ['object', 'database'], ['propagated']) + feature.feature('pch-source', [], ['dependency', 'free']) + + # Declare generators. + + # TODO: Is it possible to combine these? Make the generators + # non-composing so that they do not convert each source into a separate + # .rsp file. + generators.register(MsvcLinkingGenerator('msvc.link', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['EXE'], ['msvc'])) + generators.register(MsvcLinkingGenerator('msvc.link.dll', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['SHARED_LIB','IMPORT_LIB'], ['msvc'])) + + builtin.register_archiver('msvc.archive', ['OBJ'], ['STATIC_LIB'], ['msvc']) + builtin.register_c_compiler('msvc.compile.c++', ['CPP'], ['OBJ'], ['msvc']) + builtin.register_c_compiler('msvc.compile.c', ['C'], ['OBJ'], ['msvc']) + builtin.register_c_compiler('msvc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['msvc']) + builtin.register_c_compiler('msvc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['msvc']) + + # Using 'register-c-compiler' adds the build directory to INCLUDES. + builtin.register_c_compiler('msvc.compile.rc', ['RC'], ['OBJ(%_res)'], ['msvc']) + generators.override('msvc.compile.rc', 'rc.compile.resource') + generators.register_standard('msvc.compile.asm', ['ASM'], ['OBJ'], ['msvc']) + + builtin.register_c_compiler('msvc.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], ['msvc']) + generators.override('msvc.compile.idl', 'midl.compile.idl') + + generators.register_standard('msvc.compile.mc', ['MC'], ['H','RC'], ['msvc']) + generators.override('msvc.compile.mc', 'mc.compile') + + # Note: the 'H' source type will catch both '.h' and '.hpp' headers as + # the latter have their HPP type derived from H. The type of compilation + # is determined entirely by the destination type. + generators.register(MsvcPchGenerator('msvc.compile.c.pch', False, ['H'], ['C_PCH','OBJ'], ['on', 'msvc'])) + generators.register(MsvcPchGenerator('msvc.compile.c++.pch', False, ['H'], ['CPP_PCH','OBJ'], ['on', 'msvc'])) + + generators.override('msvc.compile.c.pch', 'pch.default-c-pch-generator') + generators.override('msvc.compile.c++.pch', 'pch.default-cpp-pch-generator') + + toolset.flags('msvc.compile', 'PCH_FILE' , ['on'], ['' ]) + toolset.flags('msvc.compile', 'PCH_SOURCE', ['on'], ['']) + toolset.flags('msvc.compile', 'PCH_HEADER', ['on'], ['']) + + # + # Declare flags for compilation. 
+ # + toolset.flags('msvc.compile', 'CFLAGS', ['speed'], ['/O2']) + toolset.flags('msvc.compile', 'CFLAGS', ['space'], ['/O1']) + + toolset.flags('msvc.compile', 'CFLAGS', [ a + '/' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium ], ['/G1']) + toolset.flags('msvc.compile', 'CFLAGS', [ a + '/' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium2 ], ['/G2']) + + toolset.flags('msvc.compile', 'CFLAGS', ['on/object'], ['/Z7']) + toolset.flags('msvc.compile', 'CFLAGS', ['on/database'], ['/Zi']) + toolset.flags('msvc.compile', 'CFLAGS', ['off'], ['/Od']) + toolset.flags('msvc.compile', 'CFLAGS', ['off'], ['/Ob0']) + toolset.flags('msvc.compile', 'CFLAGS', ['on'], ['/Ob1']) + toolset.flags('msvc.compile', 'CFLAGS', ['full'], ['/Ob2']) + + toolset.flags('msvc.compile', 'CFLAGS', ['on'], ['/W3']) + toolset.flags('msvc.compile', 'CFLAGS', ['off'], ['/W0']) + toolset.flags('msvc.compile', 'CFLAGS', ['all'], ['/W4']) + toolset.flags('msvc.compile', 'CFLAGS', ['on'], ['/WX']) + + toolset.flags('msvc.compile', 'C++FLAGS', ['on/off/off'], ['/EHs']) + toolset.flags('msvc.compile', 'C++FLAGS', ['on/off/on'], ['/EHsc']) + toolset.flags('msvc.compile', 'C++FLAGS', ['on/on/off'], ['/EHa']) + toolset.flags('msvc.compile', 'C++FLAGS', ['on/on/on'], ['/EHac']) + + # By default 8.0 enables rtti support while prior versions disabled it. We + # simply enable or disable it explicitly so we do not have to depend on this + # default behaviour. + toolset.flags('msvc.compile', 'CFLAGS', ['on'], ['/GR']) + toolset.flags('msvc.compile', 'CFLAGS', ['off'], ['/GR-']) + toolset.flags('msvc.compile', 'CFLAGS', ['off/shared'], ['/MD']) + toolset.flags('msvc.compile', 'CFLAGS', ['on/shared'], ['/MDd']) + + toolset.flags('msvc.compile', 'CFLAGS', ['off/static/multi'], ['/MT']) + toolset.flags('msvc.compile', 'CFLAGS', ['on/static/multi'], ['/MTd']) + + toolset.flags('msvc.compile', 'OPTIONS', [], ['']) + toolset.flags('msvc.compile.c++', 'OPTIONS', [], ['']) + + toolset.flags('msvc.compile', 'PDB_CFLAG', ['on/database'],['/Fd']) + + toolset.flags('msvc.compile', 'DEFINES', [], ['']) + toolset.flags('msvc.compile', 'UNDEFS', [], ['']) + toolset.flags('msvc.compile', 'INCLUDES', [], ['']) + + # Declare flags for the assembler. + toolset.flags('msvc.compile.asm', 'USER_ASMFLAGS', [], ['']) + + toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['on'], ['/Zi', '/Zd']) + + toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['on'], ['/W3']) + toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['off'], ['/W0']) + toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['all'], ['/W4']) + toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['on'], ['/WX']) + + toolset.flags('msvc.compile.asm', 'DEFINES', [], ['']) + + # Declare flags for linking. + toolset.flags('msvc.link', 'PDB_LINKFLAG', ['on/database'], ['/PDB']) # not used yet + toolset.flags('msvc.link', 'LINKFLAGS', ['on'], ['/DEBUG']) + toolset.flags('msvc.link', 'DEF_FILE', [], ['']) + + # The linker disables the default optimizations when using /DEBUG so we + # have to enable them manually for release builds with debug symbols. 
+ toolset.flags('msvc', 'LINKFLAGS', ['on/off'], ['/OPT:REF,ICF']) + + toolset.flags('msvc', 'LINKFLAGS', ['console'], ['/subsystem:console']) + toolset.flags('msvc', 'LINKFLAGS', ['gui'], ['/subsystem:windows']) + toolset.flags('msvc', 'LINKFLAGS', ['wince'], ['/subsystem:windowsce']) + toolset.flags('msvc', 'LINKFLAGS', ['native'], ['/subsystem:native']) + toolset.flags('msvc', 'LINKFLAGS', ['auto'], ['/subsystem:posix']) + + toolset.flags('msvc.link', 'OPTIONS', [], ['']) + toolset.flags('msvc.link', 'LINKPATH', [], ['']) + + toolset.flags('msvc.link', 'FINDLIBS_ST', [], ['']) + toolset.flags('msvc.link', 'FINDLIBS_SA', [], ['']) + toolset.flags('msvc.link', 'LIBRARY_OPTION', ['msvc'], ['']) + toolset.flags('msvc.link', 'LIBRARIES_MENTIONED_BY_FILE', [], ['']) + + toolset.flags('msvc.archive', 'AROPTIONS', [], ['']) + + +# Locates the requested setup script under the given folder and returns its full +# path or nothing in case the script can not be found. In case multiple scripts +# are found only the first one is returned. +# +# TODO: There used to exist a code comment for the msvc.init rule stating that +# we do not correctly detect the location of the vcvars32.bat setup script for +# the free VC7.1 tools in case user explicitly provides a path. This should be +# tested or simply remove this whole comment in case this toolset version is no +# longer important. +# +def locate_default_setup(command, parent, setup_name): + for setup in [os.path.join(dir,setup_name) for dir in [command,parent]]: + if os.path.exists(setup): + return setup + return None + + +# Validates given path, registers found configuration and prints debug +# information about it. +# +def register_configuration(version, path=None): + if path: + command = os.path.join(path, 'cl.exe') + if os.path.exists(command): + if debug(): + print "notice: [msvc-cfg] msvc-$(version) detected, command: ''".format(version,command) + __versions.register(version) + __versions.set(version,'options',['{}'.format(command)]) + + +################################################################################ +# +# Startup code executed when loading this module. +# +################################################################################ + +# Similar to Configurations, but remembers the first registered configuration. +class MSVCConfigurations(Configurations): + def __init__(self): + Configurations.__init__(self) + self.first_ = None + + def register(self, id): + Configurations.register(self,id) + if not self.first_: + self.first_ = id + + def first(self): + return self.first_ + + +# List of all registered configurations. +__versions = MSVCConfigurations() + +# Supported CPU architectures. +__cpu_arch_i386 = [ + '/', + '/32', + 'x86/', + 'x86/32'] + +__cpu_arch_amd64 = [ + '/64', + 'x86/64'] + +__cpu_arch_ia64 = [ + 'ia64/', + 'ia64/64'] + + +# Supported CPU types (only Itanium optimization options are supported from +# VC++ 2005 on). See +# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more +# detailed information. 
+__cpu_type_g5 = ['i586', 'pentium', 'pentium-mmx' ]
+__cpu_type_g6 = ['i686', 'pentiumpro', 'pentium2', 'pentium3', 'pentium3m', 'pentium-m', 'k6',
+ 'k6-2', 'k6-3', 'winchip-c6', 'winchip2', 'c3', 'c3-2', 'c7' ]
+__cpu_type_em64t = ['prescott', 'nocona', 'core2', 'corei7', 'corei7-avx', 'core-avx-i', 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'merom',
+ 'merom-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
+ 'yorksfield', 'nehalem', 'sandy-bridge', 'ivy-bridge', 'haswell', 'broadwell', 'skylake', 'skylake-avx512', 'cannonlake',
+ 'icelake-client', 'icelake-server', 'cascadelake', 'cooperlake', 'tigerlake' ]
+__cpu_type_amd64 = ['k8', 'opteron', 'athlon64', 'athlon-fx', 'k8-sse3', 'opteron-sse3', 'athlon64-sse3', 'amdfam10', 'barcelona',
+ 'bdver1', 'bdver2', 'bdver3', 'btver1', 'btver2', 'znver1', 'znver2' ]
+__cpu_type_g7 = ['pentium4', 'pentium4m', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
+ 'athlon-mp'] + __cpu_type_em64t + __cpu_type_amd64
+__cpu_type_itanium = ['itanium', 'itanium1', 'merced']
+__cpu_type_itanium2 = ['itanium2', 'mckinley']
+
+
+# Known toolset versions, in order of preference.
+_known_versions = ['15.0', '14.0', '12.0', '11.0', '10.0', '10.0express', '9.0', '9.0express', '8.0', '8.0express', '7.1', '7.1toolkit', '7.0', '6.0']
+
+# Version aliases.
+__version_alias_6 = '6.0'
+__version_alias_6_5 = '6.0'
+__version_alias_7 = '7.0'
+__version_alias_8 = '8.0'
+__version_alias_9 = '9.0'
+__version_alias_10 = '10.0'
+__version_alias_11 = '11.0'
+__version_alias_12 = '12.0'
+__version_alias_14 = '14.0'
+__version_alias_15 = '15.0'
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+__version_6_0_reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++"
+__version_7_0_reg = "VisualStudio\\7.0\\Setup\\VC"
+__version_7_1_reg = "VisualStudio\\7.1\\Setup\\VC"
+__version_8_0_reg = "VisualStudio\\8.0\\Setup\\VC"
+__version_8_0express_reg = "VCExpress\\8.0\\Setup\\VC"
+__version_9_0_reg = "VisualStudio\\9.0\\Setup\\VC"
+__version_9_0express_reg = "VCExpress\\9.0\\Setup\\VC"
+__version_10_0_reg = "VisualStudio\\10.0\\Setup\\VC"
+__version_10_0express_reg = "VCExpress\\10.0\\Setup\\VC"
+__version_11_0_reg = "VisualStudio\\11.0\\Setup\\VC"
+__version_12_0_reg = "VisualStudio\\12.0\\Setup\\VC"
+__version_14_0_reg = "VisualStudio\\14.0\\Setup\\VC"
+__version_15_0_reg = "VisualStudio\\15.0\\Setup\\VC"
+
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+__version_7_1toolkit_path = 'Microsoft Visual C++ Toolkit 2003\\bin'
+__version_7_1toolkit_env = 'VCToolkitInstallDir'
+
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
+__version_7_1toolkit_envpath = 'bin'
+#
+#
+# Auto-detect all the available msvc installations on the system.
+auto_detect_toolset_versions()
+
+# And finally trigger the actual Boost Build toolset registration.
+register_toolset()
diff --git a/src/boost/tools/build/src/tools/notfile.jam b/src/boost/tools/build/src/tools/notfile.jam
new file mode 100644
index 000000000..678610853
--- /dev/null
+++ b/src/boost/tools/build/src/tools/notfile.jam
@@ -0,0 +1,65 @@
+# Copyright (c) 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import generators ; +import project ; +import targets ; +import toolset ; +import type ; + + +type.register NOTFILE_MAIN ; + + +class notfile-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) + : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : + $(17) : $(18) : $(19) ; + } + + rule run ( project name ? : property-set : sources * : multiple ? ) + { + local action ; + local action-name = [ $(property-set).get ] ; + local m = [ MATCH ^@(.*) : $(action-name) ] ; + if $(m) + { + action = [ new action $(sources) : $(m[1]) : $(property-set) ] ; + } + else + { + action = [ new action $(sources) : notfile.run : $(property-set) ] ; + } + local t = [ new notfile-target $(name) : $(project) : $(action) ] ; + return [ virtual-target.register $(t) ] ; + } +} + + +generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ; + + +toolset.flags notfile.run ACTION : ; + + +actions run +{ + $(ACTION) +} + + +rule notfile ( target-name : action + : sources * : requirements * : + default-build * ) +{ + targets.create-typed-target NOTFILE_MAIN : [ project.current ] : + $(target-name) : $(sources) : $(requirements) $(action) : + $(default-build) ; +} + +IMPORT $(__name__) : notfile : : notfile ; diff --git a/src/boost/tools/build/src/tools/notfile.py b/src/boost/tools/build/src/tools/notfile.py new file mode 100644 index 000000000..a650adbdf --- /dev/null +++ b/src/boost/tools/build/src/tools/notfile.py @@ -0,0 +1,51 @@ +# Status: ported. +# Base revision: 64429. +# +# Copyright (c) 2005-2010 Vladimir Prus. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. 
(See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + + +import b2.build.type as type +import b2.build.generators as generators +import b2.build.virtual_target as virtual_target +import b2.build.toolset as toolset +import b2.build.targets as targets + +from b2.manager import get_manager +from b2.util import bjam_signature + +type.register("NOTFILE_MAIN") + +class NotfileGenerator(generators.Generator): + + def run(self, project, name, ps, sources): + pass + action_name = ps.get('action')[0] + if action_name[0] == '@': + action = virtual_target.Action(get_manager(), sources, action_name[1:], ps) + else: + action = virtual_target.Action(get_manager(), sources, "notfile.run", ps) + + return [get_manager().virtual_targets().register( + virtual_target.NotFileTarget(name, project, action))] + +generators.register(NotfileGenerator("notfile.main", False, [], ["NOTFILE_MAIN"])) + +toolset.flags("notfile.run", "ACTION", [], [""]) + +get_manager().engine().register_action("notfile.run", "$(ACTION)") + +@bjam_signature((["target_name"], ["action"], ["sources", "*"], ["requirements", "*"], + ["default_build", "*"])) +def notfile(target_name, action, sources, requirements, default_build): + + requirements.append("" + action) + + return targets.create_typed_metatarget(target_name, "NOTFILE_MAIN", sources, requirements, + default_build, []) + + +get_manager().projects().add_rule("notfile", notfile) diff --git a/src/boost/tools/build/src/tools/openssl.jam b/src/boost/tools/build/src/tools/openssl.jam new file mode 100644 index 000000000..984f9100a --- /dev/null +++ b/src/boost/tools/build/src/tools/openssl.jam @@ -0,0 +1,140 @@ +# Copyright (c) 2019 Damian Jarek +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the openssl library +# +# After 'using openssl', the following targets are available: +# +# /openssl//ssl -- The SSL/TLS library +# /openssl//crypto -- The cryptography library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import os ; +import property ; +import property-set ; + +header = openssl.h ; +ssl_names = ssl ssleay32 ; +crypto_names = crypto libeay32 ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the openssl library. +# +# openssl can be configured to use pre-existing binaries. +# +# Options for configuring a prebuilt openssl:: +# +# +# The directory containing the openssl binaries. +# +# Overrides the default library name. +# +# The directory containing the openssl headers. +# +# If none of these options is specified, then the environmental +# variables OPENSSL_LIBRARY_PATH, OPENSSL_NAME, and OPENSSL_INCLUDE will +# be used instead. +# +# Examples:: +# +# # Find openssl in the default system location +# using openssl ; +# # Find openssl in /usr/local +# using openssl : 1.2.7 +# : /usr/local/include /usr/local/lib ; +# +rule init ( + version ? + # The OpenSSL version (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the openssl target + + : is-default ? + ) +{ + local caller = [ project.current ] ; + + if ! 
$(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project openssl ; + } + + local library-path = [ feature.get-values : $(options) ] ; + local include-path = [ feature.get-values : $(options) ] ; + local library-name = [ feature.get-values : $(options) ] ; + + if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name) + { + is-default = true ; + } + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [openssl] openssl is already configured" ; + } + } + else + { + errors.user-error "openssl is already configured" ; + } + return ; + } + else + { + if $(.debug) + { + ECHO "notice: [openssl] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [openssl] Condition" [ $(condition).raw ] ; + } + } + + local ssl_lib = [ new ac-library ssl : $(.project) : $(condition) : + $(include-path) : $(library-path) : ssl ] ; + $(ssl_lib).set-header openssl/ssl.h ; + $(ssl_lib).set-default-names $(ssl_names) ; + + local crypto_lib = [ new ac-library crypto : $(.project) : $(condition) : + $(include-path) : $(library-path) : crypto ] ; + $(crypto_lib).set-header openssl/crypto.h ; + $(crypto_lib).set-default-names $(crypto_names) ; + + targets.main-target-alternative $(ssl_lib) ; + targets.main-target-alternative $(crypto_lib) ; + } + .configured.$(condition) = true ; +} + diff --git a/src/boost/tools/build/src/tools/package.jam b/src/boost/tools/build/src/tools/package.jam new file mode 100644 index 000000000..75925d5e3 --- /dev/null +++ b/src/boost/tools/build/src/tools/package.jam @@ -0,0 +1,274 @@ +# Copyright (c) 2005 Vladimir Prus. +# Copyright 2006 Rene Rivera. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Provides mechanism for installing whole packages into a specific directory +# structure. This is opposed to the 'install' rule, that installs a number of +# targets to a single directory, and does not care about directory structure at +# all. + +# Example usage: +# +# package.install boost : +# : +# : +# : +# ; +# +# This will install binaries, libraries and headers to the 'proper' location, +# given by command line options --prefix, --exec-prefix, --bindir, --libdir and +# --includedir. +# +# The rule is just a convenient wrapper, avoiding the need to define several +# 'install' targets. +# +# The only install-related feature is . It will apply to +# headers only and if present, paths of headers relatively to source root will +# be retained after installing. If it is not specified, then "." is assumed, so +# relative paths in headers are always preserved. 
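
# Illustrative aside: the prefix-relative defaulting described above, sketched
# in Python rather than Jam. Every directory option falls back to a location
# under --prefix unless it was given explicitly; the option names and defaults
# mirror the ones handled by the package-paths class below, but the sketch is
# an illustration only, not part of this module.
import os


def resolve_install_paths(options):
    # 'options' maps option names to explicitly given values (or None).
    prefix = options.get('prefix') or '/usr/local'
    return {
        'prefix': prefix,
        'bindir': options.get('bindir') or os.path.join(prefix, 'bin'),
        'libdir': options.get('libdir') or os.path.join(prefix, 'lib'),
        'includedir': options.get('includedir') or os.path.join(prefix, 'include'),
        'datarootdir': options.get('datarootdir') or os.path.join(prefix, 'share'),
    }


# Example: resolve_install_paths({'prefix': '/opt/mypkg'})['libdir'] gives
# '/opt/mypkg/lib', while passing an explicit 'libdir' overrides the default.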
+ +import "class" : new ; +import option ; +import project ; +import feature ; +import path ; +import property ; +import stage ; +import targets ; +import modules ; +import os ; + +feature.feature install-default-prefix : : free incidental ; + +class package-paths +{ + import feature ; + import modules ; + import option ; + import os ; + import path ; + rule __init__ ( default-prefix ) + { + local explicit-options = [ MATCH --(prefix|bindir|libdir|includedir|datarootdir)=.* + : [ modules.peek : ARGV ] ] ; + self.has-$(explicit-options) = true ; + if prefix in $(explicit-options) + { + # If --prefix is explicitly specified on the command line, + # then we need wipe away any settings of libdir/includir that + # is specified via options in config files. + option.set bindir : ; + option.set libdir : ; + option.set includedir : ; + option.set datarootdir : ; + } + + handle-path prefix : $(default-prefix) ; + handle-path libdir : $(self.prefix)/lib ; + handle-path bindir : $(self.prefix)/bin ; + handle-path includedir : $(self.prefix)/include ; + handle-path datarootdir : $(self.prefix)/share ; + } + + local rule handle-path ( option : default-value ) + { + local opt = [ option.get $(option) ] ; + if $(opt) + { + opt = [ path.root [ path.make $(opt) ] [ path.pwd ] ] ; + } + else + { + opt = $(default-value) ; + } + self.$(option) = $(opt) ; + } + + rule prefix ( ) + { + return $(self.prefix) ; + } + + rule libdir ( ) + { + return $(self.libdir) ; + } + + rule bindir ( ) + { + return $(self.bindir) ; + } + + rule includedir ( ) + { + return $(self.includedir) ; + } + + rule datarootdir ( ) + { + return $(self.datarootdir) ; + } + + rule get ( option ) + { + if ! $(self.$(option)) + { + local info = [ modules.peek package : .options.$(option) ] ; + local default-value = $(info[1]) ; + local relative-to = $(info[2]) ; + if $(self.has-$(relative-to)) + { + option.set $(option) ; + self.has-$(option) = true ; + } + if [ MATCH --$(option)=(.*) : [ modules.peek : ARGV ] ] + { + self.has-$(option) = true ; + } + local adjusted-default = + [ path.join [ get $(relative-to) ] $(default-value) ] ; + handle-path $(option) : $(adjusted-default) ; + } + return $(self.$(option)) ; + } +} + +# Registers an additional path option. The option name +# can then be used with a package-paths object. +# +# default-path is the default path that will be used if +# the option is not set explicitly. It will be interpreted +# relative to another option. This allows options to be +# defined hierarchically with --prefix as the root. +# +# relative-to should be the name of another option. It defaults +# to prefix. +# +# Example:: +# +# package.add-path-option cmakedir : cmake : libdir ; +# cmakedir = [ $(mypaths).get cmakedir ] ; # defaults to /usr/local/lib/cmake +# +rule add-path-option ( name : default-path : relative-to ? ) +{ + local value = $(default-path) $(relative-to:E=prefix) ; + if $(.options.$(name)) && $(.options.$(name)) != $(value) + { + import errors ; + errors.error Duplicate definition of $(name) ; + } + .options.$(name) = $(value) ; +} + + +# Returns a package-paths object that can be used +# to find the various install paths. If requirements +# contains then that will be used +# as the default prefix, otherwise a platform specific +# default prefix will be used. All other properties +# in requirements are ignored. +# +rule paths ( package-name : requirements * ) +{ + local default-prefix = [ feature.get-values : $(requirements) ] ; + # Or some likely defaults if neither is given. + if ! 
$(default-prefix) + { + if [ os.name ] = NT { default-prefix = C:\\$(package-name) ; } + else { default-prefix = /usr/local ; } + } + default-prefix = [ path.make $(default-prefix) ] ; + if ! $(.package-paths.$(default-prefix)) + { + .package-paths.$(default-prefix) = [ new package-paths $(default-prefix) ] ; + } + return $(.package-paths.$(default-prefix)) ; +} + +rule install ( name package-name ? : requirements * : binaries * : libraries * : headers * ) +{ + package-name ?= $(name) ; + + # If is not specified, all headers are installed to + # prefix/include, no matter what their relative path is. Sometimes that is + # what is needed. + local install-source-root = [ property.select : + $(requirements) ] ; + install-source-root = $(install-source-root:G=) ; + requirements = [ property.change $(requirements) : ] ; + + local install-header-subdir = [ property.select : + $(requirements) ] ; + install-header-subdir = /$(install-header-subdir:G=) ; + install-header-subdir ?= "" ; + requirements = [ property.change $(requirements) : ] + ; + + # First, figure out all locations. Use the default if no prefix option + # given. + local paths = [ paths $(package-name) : $(requirements) ] ; + + # Binaries. + local bin-locate = [ $(paths).bindir ] ; + + # Object code libraries. + local lib-locate = [ $(paths).libdir ] ; + + # Source header files. + local include-locate = [ $(paths).includedir ] ; + + stage.install $(name)-bin : $(binaries) : $(requirements) + $(bin-locate) ; + alias $(name)-lib : $(name)-lib-shared $(name)-lib-static ; + + # Since the install location of shared libraries differs on universe + # and cygwin, use target alternatives to make different targets. + # We should have used indirection conditioanl requirements, but it's + # awkward to pass bin-locate and lib-locate from there to another rule. + alias $(name)-lib-shared : $(name)-lib-shared-universe ; + alias $(name)-lib-shared : $(name)-lib-shared-cygwin : cygwin ; + + # For shared libraries, we install both explicitly specified one and the + # shared libraries that the installed executables depend on. + stage.install $(name)-lib-shared-universe : $(binaries) $(libraries) : $(requirements) + $(lib-locate) on SHARED_LIB ; + stage.install $(name)-lib-shared-cygwin : $(binaries) $(libraries) : $(requirements) + $(bin-locate) on SHARED_LIB ; + + # For static libraries, we do not care about executable dependencies, since + # static libraries are already incorporated into them. + stage.install $(name)-lib-static : $(libraries) : $(requirements) + $(lib-locate) on STATIC_LIB ; + stage.install $(name)-headers : $(headers) : $(requirements) + $(include-locate)$(install-header-subdir) + $(install-source-root) ; + alias $(name) : $(name)-bin $(name)-lib $(name)-headers ; + + local c = [ project.current ] ; + modules.call-in [ $(c).project-module ] : explicit $(name) $(name)-bin + $(name)-lib $(name)-headers $(name)-lib-shared $(name)-lib-static + $(name)-lib-shared-universe $(name)-lib-shared-cygwin ; +} + +rule install-data ( target-name : package-name ? 
: data * : requirements * ) +{ + package-name ?= $(target-name) ; + + local paths = [ paths $(package-name) : $(requirements) ] ; + local datadir = [ $(paths).datarootdir ] ; + + stage.install $(target-name) + : $(data) + : $(requirements) $(datadir)/$(package-name) + ; + + local c = [ project.current ] ; + local project-module = [ $(c).project-module ] ; + module $(project-module) + { + explicit $(1) ; + } +} diff --git a/src/boost/tools/build/src/tools/package.py b/src/boost/tools/build/src/tools/package.py new file mode 100644 index 000000000..16e8a4bd6 --- /dev/null +++ b/src/boost/tools/build/src/tools/package.py @@ -0,0 +1,168 @@ +# Status: ported +# Base revision: 64488 +# +# Copyright (c) 2005, 2010 Vladimir Prus. +# Copyright 2006 Rene Rivera. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Provides mechanism for installing whole packages into a specific directory +# structure. This is opposed to the 'install' rule, that installs a number of +# targets to a single directory, and does not care about directory structure at +# all. + +# Example usage: +# +# package.install boost : +# : +# : +# : +# ; +# +# This will install binaries, libraries and headers to the 'proper' location, +# given by command line options --prefix, --exec-prefix, --bindir, --libdir and +# --includedir. +# +# The rule is just a convenient wrapper, avoiding the need to define several +# 'install' targets. +# +# The only install-related feature is . It will apply to +# headers only and if present, paths of headers relatively to source root will +# be retained after installing. If it is not specified, then "." is assumed, so +# relative paths in headers are always preserved. + +import b2.build.feature as feature +import b2.build.property as property +import b2.util.option as option +import b2.tools.stage as stage + +from b2.build.alias import alias + +from b2.manager import get_manager + +from b2.util import bjam_signature +from b2.util.utility import ungrist + + +import os + +feature.feature("install-default-prefix", [], ["free", "incidental"]) + +@bjam_signature((["name", "package_name", "?"], ["requirements", "*"], + ["binaries", "*"], ["libraries", "*"], ["headers", "*"])) +def install(name, package_name=None, requirements=[], binaries=[], libraries=[], headers=[]): + + requirements = requirements[:] + binaries = binaries[:] + libraries + + if not package_name: + package_name = name + + if option.get("prefix"): + # If --prefix is explicitly specified on the command line, + # then we need wipe away any settings of libdir/includir that + # is specified via options in config files. + option.set("bindir", None) + option.set("libdir", None) + option.set("includedir", None) + + # If is not specified, all headers are installed to + # prefix/include, no matter what their relative path is. Sometimes that is + # what is needed. + install_source_root = property.select('install-source-root', requirements) + if install_source_root: + requirements = property.change(requirements, 'install-source-root', None) + + install_header_subdir = property.select('install-header-subdir', requirements) + if install_header_subdir: + install_header_subdir = ungrist(install_header_subdir[0]) + requirements = property.change(requirements, 'install-header-subdir', None) + + # First, figure out all locations. Use the default if no prefix option + # given. 
+ prefix = get_prefix(name, requirements) + + # Architecture dependent files. + exec_locate = option.get("exec-prefix", prefix) + + # Binaries. + bin_locate = option.get("bindir", os.path.join(prefix, "bin")) + + # Object code libraries. + lib_locate = option.get("libdir", os.path.join(prefix, "lib")) + + # Source header files. + include_locate = option.get("includedir", os.path.join(prefix, "include")) + + stage.install(name + "-bin", binaries, requirements + ["" + bin_locate]) + + alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"]) + + # Since the install location of shared libraries differs on universe + # and cygwin, use target alternatives to make different targets. + # We should have used indirection conditioanl requirements, but it's + # awkward to pass bin-locate and lib-locate from there to another rule. + alias(name + "-lib-shared", [name + "-lib-shared-universe"]) + alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["cygwin"]) + + # For shared libraries, we install both explicitly specified one and the + # shared libraries that the installed executables depend on. + stage.install(name + "-lib-shared-universe", binaries + libraries, + requirements + ["" + lib_locate, "on", + "SHARED_LIB"]) + stage.install(name + "-lib-shared-cygwin", binaries + libraries, + requirements + ["" + bin_locate, "on", + "SHARED_LIB"]) + + # For static libraries, we do not care about executable dependencies, since + # static libraries are already incorporated into them. + stage.install(name + "-lib-static", libraries, requirements + + ["" + lib_locate, "on", "STATIC_LIB"]) + stage.install(name + "-headers", headers, requirements \ + + ["" + os.path.join(include_locate, s) for s in install_header_subdir] + + install_source_root) + + alias(name, [name + "-bin", name + "-lib", name + "-headers"]) + + pt = get_manager().projects().current() + + for subname in ["bin", "lib", "headers", "lib-shared", "lib-static", "lib-shared-universe", "lib-shared-cygwin"]: + pt.mark_targets_as_explicit([name + "-" + subname]) + +@bjam_signature((["target_name"], ["package_name"], ["data", "*"], ["requirements", "*"])) +def install_data(target_name, package_name, data, requirements): + if not package_name: + package_name = target_name + + if option.get("prefix"): + # If --prefix is explicitly specified on the command line, + # then we need wipe away any settings of datarootdir + option.set("datarootdir", None) + + prefix = get_prefix(package_name, requirements) + datadir = option.get("datarootdir", os.path.join(prefix, "share")) + + stage.install(target_name, data, + requirements + ["" + os.path.join(datadir, package_name)]) + + get_manager().projects().current().mark_targets_as_explicit([target_name]) + +def get_prefix(package_name, requirements): + + specified = property.select("install-default-prefix", requirements) + if specified: + specified = ungrist(specified[0]) + prefix = option.get("prefix", specified) + requirements = property.change(requirements, "install-default-prefix", None) + # Or some likely defaults if neither is given. 
+ if not prefix: + if os.name == "nt": + prefix = "C:\\" + package_name + elif os.name == "posix": + prefix = "/usr/local" + + return prefix + diff --git a/src/boost/tools/build/src/tools/pathscale.jam b/src/boost/tools/build/src/tools/pathscale.jam new file mode 100644 index 000000000..83ce395bc --- /dev/null +++ b/src/boost/tools/build/src/tools/pathscale.jam @@ -0,0 +1,180 @@ +# Copyright 2006 Noel Belcourt +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import property ; +import generators ; +import toolset : flags ; +import feature ; +import type ; +import os ; +import common ; +import fortran ; + +feature.extend toolset : pathscale ; +toolset.inherit pathscale : unix ; +generators.override pathscale.prebuilt : builtin.prebuilt ; +generators.override pathscale.searched-lib-generator : searched-lib-generator ; + +# Documentation and toolchain description located +# http://www.pathscale.com/docs.html + +rule init ( version ? : command * : options * ) +{ + command = [ common.get-invocation-command pathscale : pathCC : $(command) + : /opt/ekopath/bin ] ; + + # Determine the version + local command-string = $(command:J=" ") ; + if $(command) + { + version ?= [ MATCH "^([0-9.]+)" + : [ SHELL "$(command-string) -dumpversion" ] ] ; + } + + local condition = [ common.check-init-parameters pathscale + : version $(version) ] ; + + common.handle-options pathscale : $(condition) : $(command) : $(options) ; + + toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) : + [ feature.get-values : $(options) ] : unchecked ; + + command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ; + + toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ; + + # fortran support + local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ; + local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ; + local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ; + + toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ; + toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ; + + # always link lib rt to resolve clock_gettime() + flags pathscale.link FINDLIBS-SA : rt : unchecked ; + + switch [ os.name ] + { + case SOLARIS : + toolset.flags pathscale.link RPATH_OPTION $(condition) : -Wl,-R, -Wl, : unchecked ; + + case * : # GNU + toolset.flags pathscale.link RPATH_OPTION $(condition) : -Wl,-rpath= : unchecked ; + } +} + +# Declare generators +generators.register-c-compiler pathscale.compile.c : C : OBJ : pathscale ; +generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : pathscale ; +generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : pathscale ; +generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : pathscale ; + +# Declare flags and actions for compilation +flags pathscale.compile OPTIONS off : -O0 ; +flags pathscale.compile OPTIONS speed : -O3 ; +flags pathscale.compile OPTIONS space : -Os ; + +flags pathscale.compile OPTIONS off : -noinline ; +flags pathscale.compile OPTIONS on : -inline ; +flags pathscale.compile OPTIONS full : -inline ; + +flags pathscale.compile OPTIONS off : -woffall ; +flags pathscale.compile OPTIONS on : -Wall ; +flags pathscale.compile OPTIONS all : -Wall ; +flags pathscale.compile OPTIONS extra : -Wall -Wextra ; +flags pathscale.compile OPTIONS pedantic : -Wall -Wextra -pedantic ; +flags pathscale.compile OPTIONS on 
: -Werror ; + +flags pathscale.compile OPTIONS on : -ggdb ; +flags pathscale.compile OPTIONS on : -pg ; +flags pathscale.compile OPTIONS shared : -fPIC ; +flags pathscale.compile OPTIONS 32 : -m32 ; +flags pathscale.compile OPTIONS 64 : -m64 ; + +flags pathscale.compile USER_OPTIONS ; +flags pathscale.compile.c++ USER_OPTIONS ; +flags pathscale.compile DEFINES ; +flags pathscale.compile INCLUDES ; + +flags pathscale.compile.fortran USER_OPTIONS ; +flags pathscale.compile.fortran90 USER_OPTIONS ; + +actions compile.c +{ + "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.fortran +{ + "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +rule compile.fortran90 ( targets * : sources * : properties * ) +{ + # the space rule inserts spaces between targets and it's necessary + SPACE on $(targets) = " " ; + # Serialize execution of the compile.fortran90 action + # F90 source must be compiled in a particular order so we + # serialize the build as a parallel F90 compile might fail + JAM_SEMAPHORE on $(targets) = pathscale-f90-semaphore ; +} + +actions compile.fortran90 +{ + "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)" +} + +# Declare flags and actions for linking +flags pathscale.link OPTIONS on : -ggdb -rdynamic ; +# Strip the binary when no debugging is needed +flags pathscale.link OPTIONS off : -g0 ; +flags pathscale.link OPTIONS on : -pg ; +flags pathscale.link USER_OPTIONS ; +flags pathscale.link LINKPATH ; +flags pathscale.link FINDLIBS-ST ; +flags pathscale.link FINDLIBS-SA ; +flags pathscale.link FINDLIBS-SA multi : pthread ; +flags pathscale.link LIBRARIES ; +flags pathscale.link LINK-RUNTIME static : static ; +flags pathscale.link LINK-RUNTIME shared : dynamic ; +flags pathscale.link RPATH ; +# On gcc, there are separate options for dll path at runtime and +# link time. On Solaris, there's only one: -R, so we have to use +# it, even though it's bad idea. +flags pathscale.link RPATH ; + +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" $(RPATH_OPTION:E=-Wl,-rpath=)"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) +} + +# Slight mods for dlls +rule link.dll ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" $(RPATH_OPTION:E=-Wl,-rpath=)"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) +} + +# Declare action for creating static libraries +# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)" +actions piecemeal archive +{ + ar $(ARFLAGS) ru "$(<)" "$(>)" +} diff --git a/src/boost/tools/build/src/tools/pch.jam b/src/boost/tools/build/src/tools/pch.jam new file mode 100644 index 000000000..3d18928da --- /dev/null +++ b/src/boost/tools/build/src/tools/pch.jam @@ -0,0 +1,95 @@ +# Copyright (c) 2005 Reece H. Dunn. +# Copyright 2006 Ilya Sokolov +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. 
(See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +##### Using Precompiled Headers (Quick Guide) ##### +# +# Make precompiled mypch.hpp: +# +# import pch ; +# +# cpp-pch mypch +# : # sources +# mypch.hpp +# : # requiremnts +# msvc:mypch.cpp +# ; +# +# Add cpp-pch to sources: +# +# exe hello +# : main.cpp hello.cpp mypch +# ; + +import "class" : new ; +import type ; +import feature ; +import generators ; + +type.register PCH : pch ; + +type.register C_PCH : : PCH ; +type.register CPP_PCH : : PCH ; + +# Control precompiled header (PCH) generation. +feature.feature pch : + on + off + : propagated ; + + +feature.feature pch-header : : free dependency ; +feature.feature pch-file : : free dependency ; + +# Base PCH generator. The 'run' method has the logic to prevent this generator +# from being run unless it's being used for a top-level PCH target. +class pch-generator : generator +{ + import property-set ; + + rule action-class ( ) + { + return compile-action ; + } + + rule run ( project name ? : property-set : sources + ) + { + if ! $(name) + { + # Unless this generator is invoked as the top-most generator for a + # main target, fail. This allows using 'H' type as input type for + # this generator, while preventing B2 to try this generator + # when not explicitly asked for. + # + # One bad example is msvc, where pch generator produces both PCH + # target and OBJ target, so if there's any header generated (like by + # bison, or by msidl), we'd try to use pch generator to get OBJ from + # that H, which is completely wrong. By restricting this generator + # only to pch main target, such problem is solved. + } + else + { + local r = [ run-pch $(project) $(name) + : [ $(property-set).add-raw BOOST_BUILD_PCH_ENABLED ] + : $(sources) ] ; + return [ generators.add-usage-requirements $(r) + : BOOST_BUILD_PCH_ENABLED ] ; + } + } + + # This rule must be overridden by the derived classes. + rule run-pch ( project name ? : property-set : sources + ) + { + } +} + + +# NOTE: requirements are empty, default pch generator can be applied when +# pch=off. +generators.register + [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ; +generators.register + [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ; diff --git a/src/boost/tools/build/src/tools/pch.py b/src/boost/tools/build/src/tools/pch.py new file mode 100644 index 000000000..d27cc54d7 --- /dev/null +++ b/src/boost/tools/build/src/tools/pch.py @@ -0,0 +1,83 @@ +# Status: Being ported by Steven Watanabe +# Base revision: 47077 +# +# Copyright (c) 2005 Reece H. Dunn. +# Copyright 2006 Ilya Sokolov +# Copyright (c) 2008 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +##### Using Precompiled Headers (Quick Guide) ##### +# +# Make precompiled mypch.hpp: +# +# import pch ; +# +# cpp-pch mypch +# : # sources +# mypch.hpp +# : # requiremnts +# msvc:mypch.cpp +# ; +# +# Add cpp-pch to sources: +# +# exe hello +# : main.cpp hello.cpp mypch +# ; + +from b2.build import type, feature, generators +from b2.tools import builtin + +type.register('PCH', ['pch']) +type.register('C_PCH', [], 'PCH') +type.register('CPP_PCH', [], 'PCH') + +# Control precompiled header (PCH) generation. 
+feature.feature('pch', + ['on', 'off'], + ['propagated']) + +feature.feature('pch-header', [], ['free', 'dependency']) +feature.feature('pch-file', [], ['free', 'dependency']) + +class PchGenerator(generators.Generator): + """ + Base PCH generator. The 'run' method has the logic to prevent this generator + from being run unless it's being used for a top-level PCH target. + """ + def action_class(self): + return builtin.CompileAction + + def run(self, project, name, prop_set, sources): + if not name: + # Unless this generator is invoked as the top-most generator for a + # main target, fail. This allows using 'H' type as input type for + # this generator, while preventing Boost.Build to try this generator + # when not explicitly asked for. + # + # One bad example is msvc, where pch generator produces both PCH + # target and OBJ target, so if there's any header generated (like by + # bison, or by msidl), we'd try to use pch generator to get OBJ from + # that H, which is completely wrong. By restricting this generator + # only to pch main target, such problem is solved. + pass + else: + r = self.run_pch(project, name, + prop_set.add_raw(['BOOST_BUILD_PCH_ENABLED']), + sources) + return generators.add_usage_requirements( + r, ['BOOST_BUILD_PCH_ENABLED']) + + # This rule must be overridden by the derived classes. + def run_pch(self, project, name, prop_set, sources): + pass + +# NOTE: requirements are empty, default pch generator can be applied when +# pch=off. +generators.register(builtin.DummyGenerator( + "pch.default-c-pch-generator", False, [], ['C_PCH'], [])) +generators.register(builtin.DummyGenerator( + "pch.default-cpp-pch-generator", False, [], ['CPP_PCH'], [])) diff --git a/src/boost/tools/build/src/tools/pgi.jam b/src/boost/tools/build/src/tools/pgi.jam new file mode 100644 index 000000000..7d4994ef5 --- /dev/null +++ b/src/boost/tools/build/src/tools/pgi.jam @@ -0,0 +1,141 @@ +# Copyright Noel Belcourt 2007. +# Copyright 2017, NVIDIA CORPORATION. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import property ; +import generators ; +import os ; +import toolset : flags ; +import feature ; +import fortran ; +import type ; +import common ; +import gcc ; + +feature.extend toolset : pgi ; +toolset.inherit pgi : unix ; +generators.override pgi.prebuilt : builtin.lib-generator ; +generators.override pgi.searched-lib-generator : searched-lib-generator ; + +# Documentation and toolchain description located +# http://www.pgroup.com/resources/docs.htm + +rule init ( version ? 
: command * : options * ) +{ + local condition = [ common.check-init-parameters pgi : version $(version) ] ; + + local l_command = [ common.get-invocation-command pgi : pgc++ : $(command) ] ; + + common.handle-options pgi : $(condition) : $(l_command) : $(options) ; + + command_c = $(command_c[1--2]) $(l_command[-1]:B=pgcc) ; + + toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ; + + flags pgi.compile DEFINES $(condition) : + [ feature.get-values : $(options) ] : unchecked ; + + # set link flags + flags pgi.link FINDLIBS-ST : [ + feature.get-values : $(options) ] : unchecked ; +} + +# Declare generators +generators.register-c-compiler pgi.compile.c : C : OBJ : pgi ; +generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : pgi ; +generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : pgi ; + +# Declare flags and actions for compilation +flags pgi.compile.c++ OPTIONS 98 : -std=c++03 ; +flags pgi.compile.c++ OPTIONS 03 : -std=c++03 ; +flags pgi.compile.c++ OPTIONS 0x : -std=c++11 ; +flags pgi.compile.c++ OPTIONS 11 : -std=c++11 ; +flags pgi.compile.c++ OPTIONS 1y : -std=c++14 ; +flags pgi.compile.c++ OPTIONS 14 : -std=c++14 ; +flags pgi.compile.c++ OPTIONS 1z : -std=c++17 ; +flags pgi.compile.c++ OPTIONS 17 : -std=c++17 ; +flags pgi.compile.c++ OPTIONS 2a : -std=c++17 ; +flags pgi.compile.c++ OPTIONS 20 : -std=c++17 ; +flags pgi.compile.c++ OPTIONS latest : -std=c++17 ; + +flags pgi.compile OPTIONS shared : -fpic ; +flags pgi.compile OPTIONS on : -gopt ; +flags pgi.compile OPTIONS off : -O0 ; +flags pgi.compile OPTIONS speed : -fast ; +flags pgi.compile OPTIONS space : -fast ; + +flags pgi.compile OPTIONS off : -Minform=severe ; +flags pgi.compile OPTIONS on : -Minform=warn ; +flags pgi.compile OPTIONS all : -Minform=warn ; +flags pgi.compile OPTIONS extra : -Minform=inform ; +flags pgi.compile OPTIONS pedantic : -Minform=inform ; +flags pgi.compile OPTIONS on : -Werror ; + +flags pgi.compile.c++ OPTIONS off : --no_rtti ; +flags pgi.compile.c++ OPTIONS off : --no_exceptions ; + +flags pgi.compile OPTIONS ; +flags pgi.compile.c++ OPTIONS ; +flags pgi.compile DEFINES ; +flags pgi.compile INCLUDES ; + +flags pgi.compile.fortran OPTIONS ; + +actions compile.c +{ + "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.fortran +{ + "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +# Declare flags and actions for linking +flags pgi.link OPTIONS on : -gopt ; +# Strip the binary when no debugging is needed +flags pgi.link OPTIONS off : -s ; +flags pgi.link OPTIONS ; +flags pgi.link OPTIONS shared : -fpic ; +flags pgi.link LINKPATH ; +flags pgi.link FINDLIBS-ST ; +flags pgi.link FINDLIBS-SA ; +flags pgi.link FINDLIBS-SA multi : pthread rt ; +flags pgi.link LIBRARIES ; +flags pgi.link LINK-RUNTIME static : static ; +flags pgi.link LINK-RUNTIME shared : dynamic ; +flags pgi.link RPATH ; + +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) +} + +# Slight mods for dlls +rule link.dll ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -soname 
$(<[-1]:D=) -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) +} + +actions updated together piecemeal pgi.archive +{ + ar -rc$(ARFLAGS:E=) "$(<)" "$(>)" +} + diff --git a/src/boost/tools/build/src/tools/pkg-config.jam b/src/boost/tools/build/src/tools/pkg-config.jam new file mode 100644 index 000000000..9565f8fed --- /dev/null +++ b/src/boost/tools/build/src/tools/pkg-config.jam @@ -0,0 +1,486 @@ +#| +Copyright 2019 Dmitry Arkhipov +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + + +import "class" : new ; +import common ; +import errors ; +import feature ; +import os ; +import param ; +import project ; +import regex ; +import sequence ; +import string ; +import targets ; + + +#| tag::doc[] + += pkg-config +The *pkg-config* program is used to retrieve information about installed +libraries in the system. It retrieves information about packages from special +metadata files. These files are named after the package, and have a `.pc` +extension. The package name specified to *pkg-config* is defined to be the name +of the metadata file, minus the `.pc` extension. + +|# # end::doc[] + + +#| tag::doc[] + +== Feature: `pkg-config` + +Selects one of the initialized `pkg-config` configurations. This feature is +`propagated` to dependencies. Its use is dicussed in +section <>. + +|# # end::doc[] + +feature.feature pkg-config : : propagated ; + + +#| tag::doc[] + +== Feature: `pkg-config-define` + +This `free` feature adds a variable assignment to pkg-config invocation. For +example, + +[source, jam] +---- +pkg-config.import mypackage : requirements key=value ; +---- + +is equivalent to invoking on the comand line + +[source, shell] +---- +pkg-config --define-variable=key=value mypackage ; +---- + +|# # end::doc[] + +feature.feature pkg-config-define : : free ; + + +#| tag::doc[] + +== Rule: `import` + +Main target rule that imports a *pkg-config* package. When its consumer targets +are built, *pkg-config* command will be invoked with arguments that depend on +current property set. The features that have an effect are: + +* ``: adds a `--define-variable` argument; +* ``: adds `--static` argument when `static`; +* ``: adds `--static` argument when `static`; +* ``: specifies package name (target name is used instead if the property + is not present); +* ``: specifies package version range, can be used multiple times and + should be a dot-separated sequence of numbers optionally prefixed with `=`, + `<`, `>`, `<=` or `>=`. + +Example: + +[source, jam] +---- +pkg-config.import my-package + : requirements my_package <4 >=3.1 ; +---- + +|# # end::doc[] + + +rule import + ( target-name + : sources * + : requirements * + : default-build * + : usage-requirements * + ) +{ + param.handle-named-params + sources requirements default-build usage-requirements ; + targets.create-metatarget pkg-config-target + : [ project.current ] + : $(target-name) + : $(sources) + : $(requirements) + : $(default-build) + : $(usage-requirements) + ; +} + + +#| tag::doc[] + +[[pkg-config-init]] +== Initialization + +To use the `pkg-config` tool you need to declare it in a configuration file +with the `using` rule: + +[source, jam] +---- +using pkg-config : [config] : [command] ... : [ options ] ... ; +---- + + +* `config`: the name of initialized configuration. The name can be omitted, in + which case the configuration will become the default one. +* `command`: the command, with any extra arguments, to execute. 
If no command + is given, first `PKG_CONFIG` environment variable is checked, and if its + empty the string `pkg-config` is used. +* `options`: options that modify `pkg-config` behavior. Allowed options are: + * ``: sets `PKG_CONFIG_PATH` environment variable; + multiple occurences are allowed. + * ``: sets `PKG_CONFIG_LIBDIR` environment variable; + multiple occurences are allowed. + * ``: sets `PKG_CONFIG_ALLOW_SYSTEM_CFLAGS` + environment variable; multiple occurences are allowed. + * ``: sets `PKG_CONFIG_ALLOW_SYSTEM_LIBS` + environment variable; multiple occurences are allowed. + * ``: sets `PKG_CONFIG_SYSROOT_DIR` environment variable; + multiple occurences are allowed. + * ``: adds a variable definition argument to command invocation; + multiple occurences are allowed. + +|# # end::doc[] + +rule init ( config ? : command * : options * ) +{ + config ?= [ default-config ] ; + + local tool = [ os.environ PKG_CONFIG ] ; + tool ?= pkg-config ; + command = + [ common.get-invocation-command pkg-config : $(tool) : $(command) ] ; + + configure $(config) : $(command) : $(options) ; + $(.configs).use $(config) ; +} + + +rule run ( config ? : args * ) +{ + config ?= [ default-config ] ; + + local command = [ $(.configs).get $(config) : command ] ; + command = "$(command) $(args:J= )" ; + + local output = [ SHELL "$(command)" : exit-status ] ; + if 0 != $(output[2]) + { + errors.error "pkg-config: command '$(command)' resulted in error:" + [ common.newline-char ] $(output[1]) ; + } + + local ws = [ string.whitespace ] ; + output = [ regex.split $(output[1]) "[$(ws)]" ] ; + return [ sequence.filter non-empty : $(output) ] ; +} + + +#| tag::doc[] + +== Class `pkg-config-target` + +[source, jam] +---- +class pkg-config-target : alias-target-class { + rule construct ( name : sources * : property-set ) + rule version ( property-set ) + rule variable ( name : property-set ) +} +---- + +The class of objects returned by `import` rule. The objects themselves could be +useful in situations that require more complicated logic for consuming a +package. See <> for examples. + +. `rule construct ( name : sources * : property-set )` + Overrides `alias-target.construct`. + +. `rule version ( property-set )` + Returns the package's version, in the context of `property-set`. + +. `rule variable ( name : property-set )` + Returns the value of variable `name` in the package, in the context of + `property-set`. + + +|# # end::doc[] + +class pkg-config-target : alias-target-class +{ + import pkg-config ; + import regex ; + + rule construct ( name : sources * : property-set ) + { + local config = [ $(property-set).get ] ; + local args = [ common-arguments $(name) : $(property-set) ] ; + return + [ property-set.create + [ compile-flags $(config) $(property-set) : $(args) ] + [ link-flags $(config) $(property-set) : $(args) ] + ] ; + } + + rule version ( property-set ) + { + local config = [ $(property-set).get ] ; + local args = [ common-arguments [ name ] : $(property-set) ] ; + local version = [ pkg-config.run $(config) : --modversion $(args) ] ; + return [ regex.split $(version) "\\." 
] ; + } + + rule variable ( name : property-set ) + { + local config = [ $(property-set).get ] ; + local args = [ common-arguments [ name ] : $(property-set) ] ; + return [ pkg-config.run $(config) : --variable=$(name) $(args) ] ; + } + + local rule common-arguments ( name : property-set ) + { + local defines = [ $(property-set).get ] ; + local args = --define-variable=$(defines) ; + if [ $(property-set).get ] = static + { + args += --static ; + } + return $(args) [ get-package-request $(property-set) $(name) ] ; + } + + local rule get-package-request ( property-set name ) + { + local pkg-name = [ $(property-set).get ] ; + pkg-name ?= $(name) ; + if $(pkg-name[2]) + { + errors.error "multiple package names were specified for target " + "'$(name)': $(pkg-name)" ; + } + + local versions ; + for local version in [ $(property-set).get ] + { + local match = [ MATCH "^(<=)(.*)" : $(version) ] ; + match ?= [ MATCH "^(>=)(.*)" : $(version) ] ; + match ?= [ MATCH "^([><=])(.*)" : $(version) ] ; + if $(match) + { + version = " $(match:J= )" ; + } + else + { + version = " = $(version)" ; + } + versions += $(version) ; + } + versions ?= "" ; + + return "'$(pkg-name)"$(versions)"'" ; + } + + local rule link-flags ( config property-set : args * ) + { + local flags = [ pkg-config.run $(config) : --libs $(args) ] ; + return $(flags) ; + } + + local rule compile-flags ( config property-set : args * ) + { + local flags = [ pkg-config.run $(config) : --cflags $(args) ] ; + return $(flags) ; + } +} + + +local rule default-config ( ) +{ + return default ; +} + + +local rule configure ( config : command + : options * ) +{ + $(.configs).register $(config) ; + + local path ; + local libdir ; + local allow-system-cflags ; + local allow-system-libs ; + local sysroot ; + local defines ; + for local opt in $(options) + { + switch $(opt:G) + { + case : path += $(opt:G=) ; + case : libdir += $(opt:G=) ; + case : allow-system-cflags += $(opt:G=) ; + case : allow-system-libs += $(opt:G=) ; + case : sysroot += $(opt:G=) ; + case : defines += $(opt:G=) ; + case * : + errors.error "pkg-config: invalid property '$(opt)' was " + "specified for configuration '$(config)'." ; + } + } + + for local opt in allow-system-cflags allow-system-libs + { + if ! $($(opt)) in "on" off + { + errors.error "pkg-config: invalid value '$($(opt))' was specified " + "for option <$(opt)> of configuration '$(config)'." + [ common.newline-char ] "Available values are 'on' and 'off'" ; + } + } + + if $(sysroot[2]) + { + errors.error "pkg-config: several values were specified for option " + " of configuration '$(config)'." + [ common.newline-char ] "Only one value is allowed." 
; + } + + local sep = [ os.path-separator ] ; + path = [ envar-set-command PKG_CONFIG_PATH : $(path:J=$(sep)) ] ; + libdir = [ envar-set-command PKG_CONFIG_LIBDIR : $(libdir:J=$(sep)) ] ; + sysroot = [ envar-set-command PKG_CONFIG_SYSROOT_DIR : $(sysroot) ] ; + allow-cflags = + [ envar-set-command PKG_CONFIG_ALLOW_SYSTEM_CFLAGS + : $(allow-cflags) + : 1 + ] ; + allow-libs = + [ envar-set-command PKG_CONFIG_ALLOW_SYSTEM_LIBS + : $(allow-libs) + : 1 + ] ; + + command += --print-errors --errors-to-stdout --define-variable=$(defines) ; + $(.configs).set $(config) + : command + : "$(path)$(libdir)$(sysroot)$(allow-cflags)$(allow-libs)$(command:J= )" + ; + + feature.extend pkg-config : $(config) ; +} + + +local rule envar-set-command ( envar : value * : implied-value * ) +{ + if $(value) + { + if $(implied-value) + { + value = $(implied-value) ; + } + return [ common.path-variable-setting-command $(envar) : $(value) ] ; + } + else + { + return "" ; + } +} + + +local rule non-empty ( string ) +{ + if $(string) != "" { return true ; } +} + + +.configs = [ new configurations ] ; + + +#| tag::doc[] + +== Tips [[pkg-config-tips]] + + +=== Using several configurations + +Suppose, you have 2 collections of `.pc` files: one for platform A, and another +for platform B. You can initialize 2 configurations of `pkg-config` tool each +corresponding to specific collection: + +[source, jam] +---- +using pkg-config : A : : path/to/collection/A ; +using pkg-config : B : : path/to/collection/B ; +---- + +Then, you can specify that builds for platform A should use configuration A, +while builds for B should use configuration B: + +[source, jam] +---- +project + : requirements + A-os,A-arch:A + B-os,B-arch:B + ; +---- + +Thanks to the fact, that `project-config`, `user-config` and `site-config` +modules are parents of jamroot module, you can put it in any of those files.o + + +=== Choosing the package name based on the property set + +Since a file for a package should be named after the package suffixed with +`.pc`, some projects came up with naming schemes in order to allow simultaneous +installation of several major versions or build variants. In order to pick the +specific name corresponding to the build request you can use `` +property in requirements: + +[source, jam] +---- +pkg-config.import mypackage : requirements @infer-name ; + +rule infer-name ( properties * ) +{ + local name = mypackage ; + local variant = [ property.select : $(properties) ] ; + if $(variant) = debug + { + name += -d ; + } + return $(name) ; +} +---- + +The `common.format-name` rule can be very useful in this situation. + + +=== Modify usage requirements based on package version or variable + +Sometimes you need to apply some logic based on package's version or a +variable that it defines. For that you can use `` property in +usage requirements: + +---- +mypackage = + [ pkg-config.import mypackage : usage-requirements @define_ns + ] ; + +rule extra-props ( properties * ) +{ + local ps = [ property-set.create $(properties) ] ; + local prefix = [ $(mypackage).variable name_prefix : $(ps) ] ; + prefix += [ $(mypackage).version $(ps) ] ; + return $(prefix:J=_) ; +} +---- + +|# # end::doc[] diff --git a/src/boost/tools/build/src/tools/python-config.jam b/src/boost/tools/build/src/tools/python-config.jam new file mode 100644 index 000000000..a2ee025e7 --- /dev/null +++ b/src/boost/tools/build/src/tools/python-config.jam @@ -0,0 +1,27 @@ +#~ Copyright 2005 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. 
+#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Automatic configuration for Python tools and librries. To use, just import this module. + +import os ; +import toolset : using ; + +if [ os.name ] = NT +{ + for local R in 2.4 2.3 2.2 + { + local python-path = [ W32_GETREG + "HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore\\$(R)\\InstallPath" ] ; + local python-version = $(R) ; + + if $(python-path) + { + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO "notice:" using python ":" $(python-version) ":" $(python-path) ; + } + using python : $(python-version) : $(python-path) ; + } + } +} diff --git a/src/boost/tools/build/src/tools/python.jam b/src/boost/tools/build/src/tools/python.jam new file mode 100644 index 000000000..0dfc750a2 --- /dev/null +++ b/src/boost/tools/build/src/tools/python.jam @@ -0,0 +1,1345 @@ +# Copyright 2004 Vladimir Prus. +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Support for Python and the the Boost.Python library. +# +# This module defines +# +# - a project 'python' with a target 'python' in it, that corresponds to the +# python library +# +# - a main target rule 'python-extension' which can be used to build a python +# extension. +# +# Extensions that use Boost.Python must explicitly link to it. + +import type ; +import testing ; +import generators ; +import project ; +import errors ; +import targets ; +import "class" : new ; +import os ; +import common ; +import toolset ; +import regex ; +import numbers ; +import string ; +import property ; +import sequence ; +import path ; +import feature ; +import set ; +import builtin ; +import property-set ; + + +# Make this module a project. +project.initialize $(__name__) ; +project python ; + +# Save the project so that if 'init' is called several times we define new +# targets in the python project, not in whatever project we were called by. +.project = [ project.current ] ; + +# Dynamic linker lib. Necessary to specify it explicitly on some platforms. +lib dl ; +# This contains 'openpty' function need by python. Again, on some system need to +# pass this to linker explicitly. +lib util ; +# Python uses pthread symbols. +lib pthread : + : linux:shared + ; + +# Extra library needed by phtread on some platforms. +lib rt ; + +# The pythonpath feature specifies additional elements for the PYTHONPATH +# environment variable, set by run-pyd. For example, pythonpath can be used to +# access Python modules that are part of the product being built, but are not +# installed in the development system's default paths. +feature.feature pythonpath : : free optional path ; + +# The best configured version of Python 2 and 3. +py2-version = ; +py3-version = ; + +# Initializes the Python toolset. Note that all parameters are optional. +# +# - version -- the version of Python to use. Should be in Major.Minor format, +# for example 2.3. Do not include the subminor version. +# +# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter. +# Alternatively, the installation prefix for Python libraries and includes. If +# empty, will be guessed from the version, the platform's installation +# patterns, and the python executables that can be found in PATH. +# +# - includes: the include path to Python headers. If empty, will be guessed. +# +# - libraries: the path to Python library binaries. If empty, will be guessed. 
+# On MacOS/Darwin, you can also pass the path of the Python framework. +# +# - condition: if specified, should be a set of properties that are matched +# against the build configuration when B2 selects a Python +# configuration to use. +# +# - extension-suffix: A string to append to the name of extension modules before +# the true filename extension. Ordinarily we would just compute this based on +# the value of the feature. However ubuntu's python-dbg +# package uses the windows convention of appending _d to debug-build extension +# modules. We have no way of detecting ubuntu, or of probing python for the +# "_d" requirement, and if you configure and build python using +# --with-pydebug, you'll be using the standard *nix convention. Defaults to "" +# (or "_d" when targeting windows and is set). +# +# Example usage: +# +# using python : 2.3 ; +# using python : 2.3 : /usr/local/bin/python ; +# +rule init ( version ? : cmd-or-prefix ? : includes * : libraries ? + : condition * : extension-suffix ? ) +{ + project.push-current $(.project) ; + + debug-message Configuring python... ; + for local v in version cmd-or-prefix includes libraries condition + { + if $($(v)) + { + debug-message " user-specified $(v):" \"$($(v))\" ; + } + } + + configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ; + + project.pop-current ; +} + +# A simpler version of SHELL that grabs stderr as well as stdout, but returns +# nothing if there was an error. +# +local rule shell-cmd ( cmd ) +{ + debug-message running command '$(cmd)" 2>&1"' ; + x = [ SHELL $(cmd)" 2>&1" : exit-status ] ; + if $(x[2]) = 0 + { + return $(x[1]) ; + } + else + { + return ; + } +} + + +# Try to identify Cygwin symlinks. Invoking such a file directly as an NT +# executable from a native Windows build of bjam would be fatal to the bjam +# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove +# that those are not also symlinks. ;-) +# +# If a symlink is found returns non-empty; we try to extract the target of the +# symlink from the file and return that. +# +# Note: 1. only works on NT 2. path is a native path. +local rule is-cygwin-symlink ( path ) +{ + local is-symlink = ; + + # Look for a file with the given path having the S attribute set, as cygwin + # symlinks do. /-C means "do not use thousands separators in file sizes." + local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ; + + if $(dir-listing) + { + # Escape any special regex characters in the base part of the path. + local base-pat = [ regex.escape $(path:D=) : "].[()*+?|\\$^" : \\ ] ; + + # Extract the file's size from the directory listing. + local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ; + + # If the file has a reasonably small size, look for the special symlink + # identification text. + if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ] + { + local link = [ SHELL "FIND /OFF \"!\" \""$(path)"\" 2>&1" ] ; + if $(link[2]) != 0 + { + local nl = " + +" ; + is-symlink = [ MATCH ".*!([^"$(nl)"]*)" : $(link[1]) : 1 ] ; + if $(is-symlink) + { + is-symlink = [ *nix-path-to-native $(is-symlink) ] ; + is-symlink = $(is-symlink:R=$(path:D)) ; + } + + } + } + } + return $(is-symlink) ; +} + + +# Append ext to each member of names that does not contain '.'. 
+# +local rule default-extension ( names * : ext * ) +{ + local result ; + for local n in $(names) + { + switch $(n) + { + case *.* : result += $(n) ; + case * : result += $(n)$(ext) ; + } + } + return $(result) ; +} + + +# Tries to determine whether invoking "cmd" would actually attempt to launch a +# cygwin symlink. +# +# Note: only works on NT. +# +local rule invokes-cygwin-symlink ( cmd ) +{ + local dirs = $(cmd:D) ; + if ! $(dirs) + { + dirs = . [ os.executable-path ] ; + } + local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ; + local paths = [ GLOB $(dirs) : $(base) ] ; + if $(paths) + { + # Make sure we have not run into a Cygwin symlink. Invoking such a file + # as an NT executable would be fatal for the bjam process. + return [ is-cygwin-symlink $(paths[1]) ] ; + } +} + + +local rule debug-message ( message * ) +{ + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO "notice:" "[python-cfg]" $(message) ; + } +} + + +# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and +# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result +# found. Also accounts for the fact that on 64-bit machines, 32-bit software has +# its own area, under SOFTWARE\Wow6432node. +# +local rule software-registry-value ( path : data ? ) +{ + local result ; + for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE + { + for local x64elt in "" Wow6432node\\ # Account for 64-bit windows + { + if ! $(result) + { + result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ; + } + } + + } + return $(result) ; +} + + +.windows-drive-letter-re = "^([A-Za-z]):[\\/](.*)" ; +.cygwin-drive-letter-re = "^/cygdrive/([a-z])/(.*)" ; + +.working-directory = [ PWD ] ; +.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ; +.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ; + + +local rule windows-to-cygwin-path ( path ) +{ + # If path is rooted with a drive letter, rewrite it using the /cygdrive + # mountpoint. + local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ; + + # Else if path is rooted without a drive letter, use the working directory. + p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ; + + # Else return the path unchanged. + return $(p:E=$(path:T)) ; +} + + +# :W only works in Cygwin builds of bjam. This one works on NT builds as well. +# +local rule cygwin-to-windows-path ( path ) +{ + path = $(path:R="") ; # strip any trailing slash + + local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) "$1:/$2" ] ; + if $(drive-letter) + { + path = $(drive-letter) ; + } + else if $(path:R=/x) = $(path) # already rooted? + { + # Look for a cygwin mount that includes each head sequence in $(path). + local head = $(path) ; + local tail = "" ; + + while $(head) + { + local root = [ software-registry-value + "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ; + + if $(root) + { + path = $(tail:R=$(root)) ; + head = ; + } + tail = $(tail:R=$(head:D=)) ; + + if $(head) = / + { + head = ; + } + else + { + head = $(head:D) ; + } + } + } + return [ regex.replace $(path:R="") / \\ ] ; +} + + +# Convert a *nix path to native. +# +local rule *nix-path-to-native ( path ) +{ + if [ os.name ] = NT + { + path = [ cygwin-to-windows-path $(path) ] ; + } + return $(path) ; +} + + +# Convert an NT path to native. 
+# +local rule windows-path-to-native ( path ) +{ + if [ os.name ] = NT + { + return $(path) ; + } + else + { + return [ windows-to-cygwin-path $(path) ] ; + } +} + + +# Return nonempty if path looks like a windows path, i.e. it starts with a drive +# letter or contains backslashes. +# +local rule guess-windows-path ( path ) +{ + return [ SUBST $(path) "($(.windows-drive-letter-re)|.*([\\]).*)" $1 ] ; +} + + +local rule path-to-native ( paths * ) +{ + local result ; + + for local p in $(paths) + { + if [ guess-windows-path $(p) ] + { + result += [ windows-path-to-native $(p) ] ; + } + else + { + result += [ *nix-path-to-native $(p:T) ] ; + } + } + return $(result) ; +} + + +# Validate the version string and extract the major/minor part we care about. +# +local rule split-version ( version ) +{ + local major-minor = [ MATCH "^([0-9]+)\.([0-9]+)(.*)$" : $(version) : 1 2 3 ] ; + if ! $(major-minor[2]) || $(major-minor[3]) + { + ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ; + + # Add a zero to account for the missing digit if necessary. + major-minor += 0 ; + } + + return $(major-minor[1]) $(major-minor[2]) ; +} + + +# Build a list of versions from 3.4 down to 1.5. Because bjam can not enumerate +# registry sub-keys, we have no way of finding a version with a 2-digit minor +# version, e.g. 2.10 -- let us hope that never happens. +# +.version-countdown = ; +for local v in [ numbers.range 15 34 ] +{ + .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ; +} + + +local rule windows-installed-pythons ( version ? ) +{ + version ?= $(.version-countdown) ; + local interpreters ; + + for local v in $(version) + { + local install-path = [ + software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ; + + if $(install-path) + { + install-path = [ windows-path-to-native $(install-path) ] ; + debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ; + } + + interpreters += $(:E=python:R=$(install-path)) ; + } + return $(interpreters) ; +} + + +local rule darwin-installed-pythons ( version ? ) +{ + version ?= $(.version-countdown) ; + + local prefix + = [ GLOB /System/Library/Frameworks /Library/Frameworks + : Python.framework ] ; + + return $(prefix)/Versions/$(version)/bin/python ; +} + + +# Assume "python-cmd" invokes a python interpreter and invoke it to extract all +# the information we care about from its "sys" module. Returns void if +# unsuccessful. +# +local rule probe ( python-cmd ) +{ + # Avoid invoking a Cygwin symlink on NT. + local skip-symlink ; + if [ os.name ] = NT + { + skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ; + } + + if $(skip-symlink) + { + debug-message -------------------------------------------------------------------- ; + debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ; + debug-message causing a bjam built for Windows to hang. ; + debug-message ; + debug-message If you intend to target a Cygwin build of Python, please ; + debug-message replace the path to the link with the path to a real executable ; + debug-message "(guessing:" \"$(skip-symlink)\") "in" your 'using python' line ; + debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ; + debug-message backslashes ; + debug-message -------------------------------------------------------------------- ; + } + else + { + # Prepare a List of Python format strings and expressions that can be + # used to print the constants we want from the sys module. 
+ + # We do not really want sys.version since that is a complicated string, + # so get the information from sys.version_info instead. + local format = "version=%d.%d" ; + local exprs = "version_info[0]" "version_info[1]" ; + + for local s in $(sys-elements[2-]) + { + format += $(s)=%s ; + exprs += $(s) ; + } + + # Invoke Python and ask it for all those values. + local full-cmd = + $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ; + + local output = [ shell-cmd $(full-cmd) ] ; + if $(output) + { + # Parse the output to get all the results. + local nl = " + +" ; + for s in $(sys-elements) + { + # These variables are expected to be declared local in the + # caller, so Jam's dynamic scoping will set their values there. + sys.$(s) = [ SUBST $(output) "\\<$(s)=([^$(nl)]+)" $1 ] ; + } + } + return $(output) ; + } +} + + +# Make sure the "libraries" and "includes" variables (in an enclosing scope) +# have a value based on the information given. +# +local rule compute-default-paths ( target-os : version ? : prefix ? : + exec-prefix ? ) +{ + exec-prefix ?= $(prefix) ; + + if $(target-os) = windows + { + # The exec_prefix is where you're supposed to look for machine-specific + # libraries. + local default-library-path = $(exec-prefix)\\libs ; + local default-include-path = $(:E=Include:R=$(prefix)) ; + + # If the interpreter was found in a directory called "PCBuild" or + # "PCBuild8," assume we're looking at a Python built from the source + # distro, and go up one additional level to the default root. Otherwise, + # the default root is the directory where the interpreter was found. + + # We ask Python itself what the executable path is in case of + # intermediate symlinks or shell scripts. + local executable-dir = $(sys.executable:D) ; + + if [ MATCH ^(PCBuild) : $(executable-dir:D=) ] + { + debug-message "This Python appears to reside in a source distribution;" ; + debug-message "prepending \""$(executable-dir)"\" to default library search path" ; + + default-library-path = $(executable-dir) $(default-library-path) ; + + default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ; + + debug-message "and \""$(default-include-path[1])"\" to default #include path" ; + } + + libraries ?= $(default-library-path) ; + includes ?= $(default-include-path) ; + } + else + { + local default-include-path = $(prefix)/include/python$(version) ; + if ! [ path.exists $(default-include-path) ] && [ path.exists $(default-include-path)m ] + { + default-include-path = $(default-include-path)m ; + } + + includes ?= $(default-include-path) ; + + local lib = $(exec-prefix)/lib ; + libraries ?= $(lib)/python$(version)/config $(lib) ; + } +} + +# The version of the python interpreter to use. +feature.feature python : : propagated symmetric ; +feature.feature python.interpreter : : free ; + +toolset.flags python.capture-output PYTHON : ; + +# +# Support for Python configured --with-pydebug +# +feature.feature python-debugging : off on : propagated ; +variant debug-python : debug : on ; + + +# Return a list of candidate commands to try when looking for a Python +# interpreter. prefix is expected to be a native path. +# +local rule candidate-interpreters ( version ? : prefix ? : target-os ) +{ + local bin-path = bin ; + if $(target-os) = windows + { + # On Windows, look in the root directory itself and, to work with the + # result of a build-from-source, the PCBuild directory. 
+ bin-path = PCBuild8 PCBuild "" ; + } + + bin-path = $(bin-path:R=$(prefix)) ; + + if $(target-os) in windows darwin + { + return # Search: + $(:E=python:R=$(bin-path)) # Relative to the prefix, if any + python # In the PATH + [ $(target-os)-installed-pythons $(version) ] # Standard install locations + ; + } + else + { + # Search relative to the prefix, or if none supplied, in PATH. + local unversioned = $(:E=python:R=$(bin-path:E=)) ; + + # If a version was specified, look for a python with that specific + # version appended before looking for one called, simply, "python" + return $(unversioned)$(version) $(unversioned) ; + } +} + + +# Compute system library dependencies for targets linking with static Python +# libraries. +# +# On many systems, Python uses libraries such as pthreads or libdl. Since static +# libraries carry no library dependency information of their own that the linker +# can extract, these extra dependencies have to be given explicitly on the link +# line of the client. The information about these dependencies is packaged into +# the "python" target below. +# +# Even where Python itself uses pthreads, it never allows extension modules to +# be entered concurrently (unless they explicitly give up the interpreter lock). +# Therefore, extension modules do not need the efficiency overhead of threadsafe +# code as produced by multi, and we handle libpthread along with +# other libraries here. Note: this optimization is based on an assumption that +# the compiler generates link-compatible code in both the single- and +# multi-threaded cases, and that system libraries do not change their ABIs +# either. +# +# Returns a list of usage-requirements that link to the necessary system +# libraries. +# +local rule system-library-dependencies ( target-os ) +{ + switch $(target-os) + { + case s[uo][nl]* : # solaris, sun, sunos + # Add a librt dependency for the gcc toolset on SunOS (the sun + # toolset adds -lrt unconditionally). While this appears to + # duplicate the logic already in gcc.jam, it does not as long as + # we are not forcing multi. + + # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields + # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem + # to be the right list for extension modules. For example, on my + # installation, adding -ldl causes at least one test to fail because + # the library can not be found and removing it causes no failures. + + # Apparently, though, we need to add -lrt for gcc. + return gcc:rt ; + + case osf : return pthread gcc:rt ; + + case qnx* : return ; + case darwin : return ; + case windows : return ; + case haiku : return ; + + case hpux : return rt ; + case *bsd : return pthread gcc:util ; + + case aix : return pthread dl ; + + case * : return pthread dl + gcc:util linux:util ; + } +} + + +# Define a version suffix for libraries depending on Python. +# For example, Boost.Python built for Python 2.7 uses the suffix "27" +rule version-suffix ( version ) +{ + local major-minor = [ split-version $(version) ] ; + local suffix = $(major-minor:J="") ; + return $(suffix) ; +} + +# Declare a target to represent Python's library. +# +local rule declare-libpython-target ( version ? : requirements * ) +{ + # Compute the representation of Python version in the name of Python's + # library file. 
+ local lib-version = $(version) ; + if windows in $(requirements) + { + local major-minor = [ split-version $(version) ] ; + lib-version = $(major-minor:J="") ; + if on in $(requirements) + { + lib-version = $(lib-version)_d ; + } + } + + if ! $(lib-version) + { + ECHO *** "warning:" could not determine Python version, which will ; + ECHO *** "warning:" probably prevent us from linking with the python ; + ECHO *** "warning:" library. Consider explicitly passing the version ; + ECHO *** "warning:" to 'using python'. ; + } + + # Declare it. + lib python.lib : : python$(lib-version) $(requirements) ; +} + + +# Implementation of init. +local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? : + condition * : extension-suffix ? ) +{ + local prefix ; + local exec-prefix ; + local cmds-to-try ; + local interpreter-cmd ; + + local target-os = [ feature.get-values target-os : $(condition) ] ; + target-os ?= [ feature.defaults target-os ] ; + target-os = $(target-os:G=) ; + + if $(target-os) = windows && on in $(condition) + { + extension-suffix ?= _d ; + } + extension-suffix ?= "" ; + + local cmds-to-try ; + + if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ] + { + # If the user did not pass a command, whatever we got was a prefix. + prefix = $(cmd-or-prefix) ; + cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ; + } + else + { + # Work with the command the user gave us. + cmds-to-try = $(cmd-or-prefix) ; + + # On Windows, do not nail down the interpreter command just yet in case + # the user specified something that turns out to be a cygwin symlink, + # which could bring down bjam if we invoke it. + if $(target-os) != windows + { + interpreter-cmd = $(cmd-or-prefix) ; + } + } + + # Values to use in case we can not really find anything in the system. + local fallback-cmd = $(cmds-to-try[1]) ; + local fallback-version ; + + # Anything left to find or check? + if ! ( $(interpreter-cmd) && $(version) && $(includes) && $(libraries) ) + { + # Values to be extracted from python's sys module. These will be set by + # the probe rule, above, using Jam's dynamic scoping. + local sys-elements = version platform prefix exec_prefix executable ; + local sys.$(sys-elements) ; + + # Compute the string Python's sys.platform needs to match. If not + # targeting Windows or cygwin we will assume only native builds can + # possibly run, so we will not require a match and we leave sys.platform + # blank. + local platform ; + switch $(target-os) + { + case windows : platform = win32 ; + case cygwin : platform = cygwin ; + } + + while $(cmds-to-try) + { + # Pop top command. + local cmd = $(cmds-to-try[1]) ; + cmds-to-try = $(cmds-to-try[2-]) ; + + debug-message Checking interpreter command \"$(cmd)\"... ; + if [ probe $(cmd) ] + { + fallback-version ?= $(sys.version) ; + + # Check for version/platform validity. + for local x in version platform + { + if $($(x)) && $($(x)) != $(sys.$(x)) + { + debug-message ...$(x) "mismatch (looking for" + $($(x)) but found $(sys.$(x))")" ; + cmd = ; + } + } + + if $(cmd) + { + debug-message ...requested configuration matched! ; + + exec-prefix = $(sys.exec_prefix) ; + + compute-default-paths $(target-os) : $(sys.version) : + $(sys.prefix) : $(sys.exec_prefix) ; + + version = $(sys.version) ; + interpreter-cmd ?= $(cmd) ; + cmds-to-try = ; # All done. + } + } + else + { + debug-message ...does not invoke a working interpreter ; + } + } + } + + # Check whether configuration succeeded. + if ! 
( $(includes) && $(libraries) ) + { + debug-message Python headers and libraries not found. ; + return ; + } + + .configured = true ; + + if ! $(interpreter-cmd) + { + fallback-cmd ?= python ; + debug-message No working Python interpreter found. ; + if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ] + { + interpreter-cmd = $(fallback-cmd) ; + debug-message falling back to \"$(interpreter-cmd)\" ; + } + } + + includes = [ path-to-native $(includes) ] ; + libraries = [ path-to-native $(libraries) ] ; + + debug-message "Details of this Python configuration:" ; + debug-message " interpreter command:" \"$(interpreter-cmd:E=)\" ; + debug-message " include path:" \"$(includes:E=)\" ; + debug-message " library path:" \"$(libraries:E=)\" ; + if $(target-os) = windows + { + debug-message " DLL search path:" \"$(exec-prefix:E=)\" ; + } + + # + # Discover the presence of NumPy + # + debug-message "Checking for NumPy..." ; + local full-cmd = "import sys; sys.stderr = sys.stdout; import numpy; print(numpy.get_include())" ; + local full-cmd = $(interpreter-cmd)" -c \"$(full-cmd)\"" ; + debug-message "running command '$(full-cmd)'" ; + local result = [ SHELL $(full-cmd) : strip-eol : exit-status ] ; + if $(result[2]) = 0 + { + .numpy = true ; + .numpy-include = $(result[1]) ; + debug-message "NumPy enabled" ; + } + else + { + debug-message "NumPy disabled. Reason:" ; + debug-message " $(full-cmd) aborted with " ; + debug-message " $(result[1])" ; + } + + # + # End autoconfiguration sequence. + # + + # Normalize and dissect any version number. + local major-minor ; + if $(version) + { + major-minor = [ split-version $(version) ] ; + version = $(major-minor:J=.) ; + } + + + local target-requirements = $(condition) ; + + # Add the version, if any, to the target requirements. + if $(version) + { + if ! $(version) in [ feature.values python ] + { + feature.extend python : $(version) ; + py$(major-minor[1])-version ?= $(version) ; + if $(py$(major-minor[1])-version) < $(version) + { + py$(major-minor[1])-version = $(version) ; + } + } + target-requirements += $(version:E=default) ; + } + + target-requirements += $(target-os) ; + + # See if we can find a framework directory on darwin. + local framework-directory ; + if $(target-os) = darwin + { + # Search upward for the framework directory. + local framework-directory = $(libraries[-1]) ; + while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework + { + framework-directory = $(framework-directory:D) ; + } + + if $(framework-directory:D=) = Python.framework + { + debug-message framework directory is \"$(framework-directory)\" ; + } + else + { + debug-message "no framework directory found; using library path" ; + framework-directory = ; + } + } + + local dll-path = $(libraries) ; + + # Make sure that we can find the Python DLL on Windows. + if ( $(target-os) = windows ) && $(exec-prefix) + { + dll-path += $(exec-prefix) ; + } + + # + # Prepare usage requirements. + # + local usage-requirements = [ system-library-dependencies $(target-os) ] ; + usage-requirements += $(includes) $(interpreter-cmd) ; + if on in $(condition) + { + if $(target-os) = windows + { + # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define + # Py_DEBUG we will get multiple definition warnings. + usage-requirements += _DEBUG ; + } + else + { + usage-requirements += Py_DEBUG ; + } + } + + # In case we added duplicate requirements from what the user specified. 
+ target-requirements = [ sequence.unique $(target-requirements) ] ; + + # Global, but conditional, requirements to give access to the interpreter + # for general utilities, like other toolsets, that run Python scripts. + toolset.add-requirements + "$(target-requirements:J=,):$(interpreter-cmd)" ; + + # Register the right suffix for extensions. + register-extension-suffix $(extension-suffix) : $(target-requirements) ; + + # Make sure that the python feature is always considered + # relevant for any targets that depend on python. Without + # this, it would only be considered relevant when there are + # multiple configurations defined within the same build. + target-requirements += python ; + + # + # Declare the "python" target. This should really be called + # python_for_embedding. + # + + if $(framework-directory) + { + alias python + : + : $(target-requirements) + : + : $(usage-requirements) $(framework-directory) + ; + } + else + { + declare-libpython-target $(version) : $(target-requirements) ; + + # This is an evil hack. On, Windows, when Python is embedded, nothing + # seems to set up sys.path to include Python's standard library + # (http://article.gmane.org/gmane.comp.python.general/544986). The evil + # here, aside from the workaround necessitated by Python's bug, is that: + # + # a. we're guessing the location of the python standard library from the + # location of pythonXX.lib + # + # b. we're hijacking the property to get the + # environment variable set up, and the user may want to use it for + # something else (e.g. launch the debugger). + local set-PYTHONPATH ; + if $(target-os) = windows + { + set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH : + $(libraries:D)/Lib ] ; + } + + alias python + : + : $(target-requirements) + : + # Why python.lib must be listed here instead of along with the + # system libs is a mystery, but if we do not do it, on cygwin, + # -lpythonX.Y never appears in the command line (although it does on + # linux). + : $(usage-requirements) + $(set-PYTHONPATH) + $(libraries) $(dll-path) python.lib + ; + } + + # On *nix, we do not want to link either Boost.Python or Python extensions + # to libpython, because the Python interpreter itself provides all those + # symbols. If we linked to libpython, we would get duplicate symbols. So + # declare two targets -- one for building extensions and another for + # embedding. + if $(target-os) in windows cygwin + { + alias python_for_extensions : python : $(target-requirements) ; + } + else if $(target-os) = darwin { + alias python_for_extensions + : + : $(target-requirements) + : + : $(usage-requirements) "-undefined dynamic_lookup" + ; + } + # On AIX we need Python extensions and Boost.Python to import symbols from + # the Python interpreter. Dynamic libraries opened with dlopen() do not + # inherit the symbols from the Python interpreter. + else if $(target-os) = aix + { + alias python_for_extensions + : + : $(target-requirements) + : + : $(usage-requirements) "-Wl,-bI:$(libraries[1])/python.exp" + ; + } + else + { + alias python_for_extensions + : + : $(target-requirements) + : + : $(usage-requirements) + ; + } + + # Declare the numpy target, which contains the NumPy include directory + + alias numpy : : $(target-requirements) : : $(.numpy-include) ; +} + +# Conditional rule specification that will prevent building of a target +# if there is no matching python configuration available with the given +# required properties. 
+rule require-py ( properties * ) +{ + local py-ext-target = [ $(.project).find python_for_extensions : no-error ] ; + if ! $(py-ext-target) + { + return no ; + } + local property-set = [ property-set.create $(properties) ] ; + property-set = [ $(property-set).expand ] ; + local py-ext-alternative = [ $(py-ext-target).select-alternatives $(property-set) ] ; + if ! $(py-ext-alternative) + { + return no ; + } +} + + +rule configured ( ) +{ + return $(.configured) ; +} + +rule numpy ( ) +{ + return $(.numpy) ; +} + +rule numpy-include ( ) +{ + return $(.numpy-include) ; +} + + +type.register PYTHON_EXTENSION : : SHARED_LIB ; + + +local rule register-extension-suffix ( root : condition * ) +{ + local suffix ; + + switch [ feature.get-values target-os : $(condition) ] + { + case windows : suffix = pyd ; + case cygwin : suffix = dll ; + case hpux : + { + if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4 + { + suffix = sl ; + } + else + { + suffix = so ; + } + } + case * : suffix = so ; + } + + type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ; +} + + +# Unset 'lib' prefix for PYTHON_EXTENSION +type.set-generated-target-prefix PYTHON_EXTENSION : : "" ; + + +rule python-extension ( name : sources * : requirements * : default-build * : + usage-requirements * ) +{ + if [ configured ] + { + requirements += /python//python_for_extensions ; + } + requirements += true ; + + local project = [ project.current ] ; + + targets.main-target-alternative + [ new typed-target $(name) : $(project) : PYTHON_EXTENSION + : [ targets.main-target-sources $(sources) : $(name) ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + ] ; +} + +IMPORT python : python-extension : : python-extension ; + +# Support for testing. +type.register PY : py ; +type.register RUN_PYD_OUTPUT ; +type.register RUN_PYD : : TEST ; + + +class python-test-generator : generator +{ + import set ; + + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + self.composing = true ; + } + + rule run ( project name ? : property-set : sources * : multiple ? ) + { + local pyversion = [ $(property-set).get ] ; + local python ; + local other-pythons ; + + for local s in $(sources) + { + if [ $(s).type ] = PY + { + if ! $(python) + { + # First Python source ends up on command line. + python = $(s) ; + + } + else + { + # Other Python sources become dependencies. + other-pythons += $(s) ; + } + } + } + + local extensions ; + for local s in $(sources) + { + if [ $(s).type ] = PYTHON_EXTENSION + { + extensions += $(s) ; + } + } + + local libs ; + for local s in $(sources) + { + if [ type.is-derived [ $(s).type ] LIB ] + && ! $(s) in $(extensions) + { + libs += $(s) ; + } + } + + local new-sources ; + for local s in $(sources) + { + if [ type.is-derived [ $(s).type ] CPP ] + { + local name = [ utility.basename [ $(s).name ] ] ; + if $(name) = [ utility.basename [ $(python).name ] ] + { + name = $(name)_ext ; + } + local extension = [ generators.construct $(project) $(name) : + PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ; + + # The important part of usage requirements returned from + # PYTHON_EXTENSION generator are xdll-path properties that will + # allow us to find the python extension at runtime. + property-set = [ $(property-set).add $(extension[1]) ] ; + + # Ignore usage requirements. 
We're a top-level generator and + # nobody is going to use what we generate. + new-sources += $(extension[2-]) ; + } + } + + property-set = [ $(property-set).add-raw $(other-pythons) ] ; + + return [ construct-result $(python) $(extensions) $(new-sources) : + $(project) $(name) : $(property-set) ] ; + } +} + + +generators.register + [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ; + +generators.register-standard testing.expect-success + : RUN_PYD_OUTPUT : RUN_PYD ; + + +# There are two different ways of spelling OS names. One is used for [ os.name ] +# and the other is used for the and properties. Until that +# is remedied, this sets up a crude mapping from the latter to the former, that +# will work *for the purposes of cygwin/NT cross-builds only*. Could not think +# of a better name than "translate". +# +.translate-os-windows = NT ; +.translate-os-cygwin = CYGWIN ; +local rule translate-os ( src-os ) +{ + local x = $(.translate-os-$(src-os)) [ os.name ] ; + return $(x[1]) ; +} + + +# Extract the path to a single ".pyd" source. This is used to build the +# PYTHONPATH for running bpl tests. +# +local rule pyd-pythonpath ( source ) +{ + return [ on $(source) return $(LOCATE) $(SEARCH) ] ; +} + + +# The flag settings on testing.capture-output do not apply to python.capture +# output at the moment. Redo this explicitly. +toolset.flags python.capture-output ARGS ; +toolset.flags python.capture-output INPUT_FILES ; + +toolset.uses-features python.capture-output : + + ; + +rule capture-output ( target : sources * : properties * ) +{ + # Setup up a proper DLL search path. Here, $(sources[1]) is a python module + # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to + # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not + # consulted. Move it over explicitly. + RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ; + + PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ; + PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ; + + # After test is run, we remove the Python module, but not the Python script. + testing.capture-output $(target) : $(sources[1]) : $(properties) ; + + # PYTHONPATH is different; it will be interpreted by whichever Python is + # invoked and so must follow path rules for the target os. The only OSes + # where we can run python for other OSes currently are NT and CYGWIN so we + # only need to handle those cases. + local target-os = [ feature.get-values target-os : $(properties) ] ; + # Oddly, host-os is not in properties, so grab the default value. + local host-os = [ feature.defaults host-os ] ; + host-os = $(host-os:G=) ; + if $(target-os) != $(host-os) && $(target-os) in windows cygwin && $(host-os) in windows cygwin + { + PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path : + $(PYTHONPATH) ] ; + } + local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ; + local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH : + $(PYTHONPATH:E=:J=$(path-separator)) ] ; + LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ; +} + + +rule bpl-test ( name : sources * : requirements * ) +{ + local s ; + sources ?= $(name).py $(name).cpp ; + return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python + : $(requirements) : $(name) ] ; +} + +# The same as bpl-test but additionally require (and link to) boost_numpy. +# Masked whenever NumPy is not enabled. 
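+#
+# For illustration (test names are hypothetical), a Boost.Python test Jamfile
+# would call these wrappers roughly like:
+#
+#   bpl-test   my_feature ;        # defaults to my_feature.py and my_feature.cpp
+#   numpy-test my_numpy_feature ;  # additionally links against boost_numpy
+#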
+rule numpy-test ( name : sources * : requirements * ) +{ + numpy-include = [ python.numpy-include ] ; + # yuk ! + if ! $(.numpy) { requirements += no ; } + sources ?= $(name).py $(name).cpp ; + name = [ regex.replace $(name) "[/]" "~" ] ; + return [ testing.make-test run-pyd + : $(sources) /boost/python//boost_numpy /boost/python//boost_python + : $(requirements) $(numpy-include) + : $(name) ] ; +} + +rule py-version ( n ) +{ + return $(py$(n)-version) ; +} + +IMPORT $(__name__) : bpl-test : : bpl-test ; +IMPORT $(__name__) : numpy-test : : numpy-test ; +IMPORT $(__name__) : py-version : : py-version ; diff --git a/src/boost/tools/build/src/tools/qcc.jam b/src/boost/tools/build/src/tools/qcc.jam new file mode 100644 index 000000000..6ad406701 --- /dev/null +++ b/src/boost/tools/build/src/tools/qcc.jam @@ -0,0 +1,299 @@ +# Copyright (c) 2001 David Abrahams. +# Copyright (c) 2002-2003 Rene Rivera. +# Copyright (c) 2002-2003 Vladimir Prus. +# Copyright (c) 2020 Alexander Karzhenkov. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import "class" : new ; +import common ; +import errors ; +import feature ; +import generators ; +import os ; +import property ; +import set ; +import toolset ; +import type ; +import unix ; + +feature.extend toolset : qcc ; + +toolset.inherit-generators qcc : unix : unix.link unix.link.dll ; +toolset.inherit-flags qcc : unix ; +toolset.inherit-rules qcc : unix ; + +# Initializes the qcc toolset for the given version. If necessary, command may +# be used to specify where the compiler is located. The parameter 'options' is a +# space-delimited list of options, each one being specified as +# option-value. Valid option names are: cxxflags, linkflags and +# linker-type. Accepted values for linker-type are gnu and sun, gnu being the +# default. +# +# Example: +# using qcc : 3.4 : : foo bar sun ; +# +rule init ( version ? 
: command * : options * ) +{ + local condition = [ common.check-init-parameters qcc : version $(version) ] ; + local command = [ common.get-invocation-command qcc : QCC : $(command) ] ; + common.handle-options qcc : $(condition) : $(command) : $(options) ; +} + + +generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : qcc ; +generators.register-c-compiler qcc.compile.c : C : OBJ : qcc ; +generators.register-c-compiler qcc.compile.asm : ASM : OBJ : qcc ; + + +# Select QCC target platform + +# Configuration checks don't use explicit value of QCC-TARGET-PLATFORM +toolset.flags qcc QCC-TARGET-PLATFORM / : NA ; + +# Combinations supported by both QNX 6.5.0 and QNX 7.0.0 +toolset.flags qcc QCC-TARGET-PLATFORM x86/32 : -Vgcc_ntox86 ; +toolset.flags qcc QCC-TARGET-PLATFORM arm/32 : -Vgcc_ntoarmv7le ; + +# Combinations supported by QNX 7.0.0 +toolset.flags qcc QCC-TARGET-PLATFORM x86/64 : -Vgcc_ntox86_64 ; +toolset.flags qcc QCC-TARGET-PLATFORM arm/64 : -Vgcc_ntoaarch64le ; + +# Combinations supported by QNX 6.5.0 +toolset.flags qcc QCC-TARGET-PLATFORM power/32 : -Vgcc_ntoppcbe ; +toolset.flags qcc QCC-TARGET-PLATFORM mips/32 : -Vgcc_ntomipsle ; +toolset.flags qcc QCC-TARGET-PLATFORM mips1/32 : -Vgcc_ntomipsle ; + +# There are also excluded alternatives (supported by QNX 6.5.0) +# toolset.flags qcc QCC-TARGET-PLATFORM arm/32 : -Vgcc_ntoarmle ; +# toolset.flags qcc QCC-TARGET-PLATFORM power/32 : -Vgcc_ntoppcbespe ; +# toolset.flags qcc QCC-TARGET-PLATFORM mips/32 : -Vgcc_ntomipsbe ; +# toolset.flags qcc QCC-TARGET-PLATFORM mips1/32 : -Vgcc_ntomipsbe ; + +local rule check-target-platform +{ + local opt = [ on $(1) return $(QCC-TARGET-PLATFORM) ] ; + + if $(opt) = NA + { + # Configuration checks are performed + QCC-TARGET-PLATFORM on $(1) = ; + } + else if ! $(opt) + { + errors.user-error "Unsupported achitecture / address model" ; + } +} + +# Declare flags for compilation. +toolset.flags qcc.compile OPTIONS on : -gstabs+ ; + +# Declare flags and action for compilation. +toolset.flags qcc.compile OPTIONS off : -O0 ; +toolset.flags qcc.compile OPTIONS speed : -O3 ; +toolset.flags qcc.compile OPTIONS space : -Os ; + +toolset.flags qcc.compile OPTIONS off : -Wc,-fno-inline ; +toolset.flags qcc.compile OPTIONS on : -Wc,-Wno-inline ; +toolset.flags qcc.compile OPTIONS full : -Wc,-finline-functions -Wc,-Wno-inline ; + +toolset.flags qcc.compile OPTIONS off : -w ; +toolset.flags qcc.compile OPTIONS all : -Wc,-Wall ; +toolset.flags qcc.compile OPTIONS extra : -Wc,-Wall -Wc,-Wextra ; +toolset.flags qcc.compile OPTIONS pedantic : -Wc,-Wall -Wc,-Wextra -Wc,-pedantic ; +toolset.flags qcc.compile OPTIONS on : -Wc,-Werror ; + +toolset.flags qcc.compile OPTIONS on : -p ; + +toolset.flags qcc.compile OPTIONS hidden : -fvisibility=hidden ; +toolset.flags qcc.compile.c++ OPTIONS hidden : -fvisibility-inlines-hidden ; +toolset.flags qcc.compile OPTIONS protected : -fvisibility=protected ; +toolset.flags qcc.compile OPTIONS global : -fvisibility=default ; + +toolset.flags qcc.compile OPTIONS ; +toolset.flags qcc.compile.c++ OPTIONS ; +toolset.flags qcc.compile DEFINES ; +toolset.flags qcc.compile INCLUDES ; + +toolset.flags qcc.compile OPTIONS shared : -shared ; + +toolset.flags qcc.compile.c++ TEMPLATE_DEPTH ; + + +rule compile.c++ +{ + # Here we want to raise the template-depth parameter value to something + # higher than the default value of 17. Note that we could do this using the + # feature.set-default rule but we do not want to set the default value for + # all toolsets as well. 
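+    #
+    # (In practice the compile.c++ action then receives
+    # -Wc,-ftemplate-depth-128 unless a <c++-template-depth> requirement sets
+    # TEMPLATE_DEPTH to some other value.)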
+ # + # TODO: This 'modified default' has been inherited from some 'older Boost + # Build implementation' and has most likely been added to make some Boost + # library parts compile correctly. We should see what exactly prompted this + # and whether we can get around the problem more locally. + local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ; + if ! $(template-depth) + { + TEMPLATE_DEPTH on $(1) = 128 ; + } + + check-target-platform $(1) ; +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" $(QCC-TARGET-PLATFORM) -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +rule compile.c +{ + check-target-platform $(1) ; +} + +actions compile.c +{ + "$(CONFIG_COMMAND)" $(QCC-TARGET-PLATFORM) -lang-c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +rule compile.asm +{ + check-target-platform $(1) ; +} + +actions compile.asm +{ + "$(CONFIG_COMMAND)" $(QCC-TARGET-PLATFORM) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + + +# The class checking that we do not try to use the static property +# while creating or using a shared library, since it is not supported by qcc/ +# /libc. +# +class qcc-linking-generator : unix-linking-generator +{ + rule generated-targets ( sources + : property-set : project name ? ) + { + if static in [ $(property-set).raw ] + { + local m ; + if [ id ] = "qcc.link.dll" + { + m = "on qcc, DLL can't be build with static" ; + } + if ! $(m) + { + for local s in $(sources) + { + local type = [ $(s).type ] ; + if $(type) && [ type.is-derived $(type) SHARED_LIB ] + { + m = "on qcc, using DLLS together with the static options is not possible " ; + } + } + } + if $(m) + { + errors.user-error $(m) : "It is suggested to use" + "static together with static." ; + } + } + + return [ unix-linking-generator.generated-targets + $(sources) : $(property-set) : $(project) $(name) ] ; + } +} + +generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE + : qcc ] ; + +generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ + : SHARED_LIB : qcc ] ; + +generators.override qcc.prebuilt : builtin.prebuilt ; +generators.override qcc.searched-lib-generator : searched-lib-generator ; + + +# Declare flags for linking. +# First, the common flags. +toolset.flags qcc.link OPTIONS on : -gstabs+ ; +toolset.flags qcc.link OPTIONS on : -p ; +toolset.flags qcc.link OPTIONS ; +toolset.flags qcc.link LINKPATH ; +toolset.flags qcc.link FINDLIBS-ST ; +toolset.flags qcc.link FINDLIBS-SA ; +toolset.flags qcc.link LIBRARIES ; + +toolset.flags qcc.link FINDLIBS-SA : m ; + +# For static we made sure there are no dynamic libraries in the +# link. +toolset.flags qcc.link OPTIONS static : -static ; + +# Assuming this is just like with gcc. +toolset.flags qcc.link RPATH : : unchecked ; +toolset.flags qcc.link RPATH_LINK : : unchecked ; + + +# Declare actions for linking. +# +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; + check-target-platform $(1) ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(QCC-TARGET-PLATFORM) -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) +} + + +# Always remove archive and start again. Here is the rationale from Andre Hentz: +# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c, +# updated my Jamfiles and rebuilt. My program was crashing with absurd errors. 
+# After some debugging I traced it back to the fact that a1.o was *still* in +# liba.a +RM = [ common.rm-command ] ; +if [ os.name ] = NT +{ + RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ; +} + + +# Declare action for creating static libraries. The 'r' letter means to add +# files to the archive with replacement. Since we remove the archive, we do not +# care about replacement, but there is no option to "add without replacement". +# The 'c' letter suppresses warnings in case the archive does not exists yet. +# That warning is produced only on some platforms, for whatever reasons. +# +# Use qcc driver to create archive, see +# http://www.qnx.com/developers/docs/6.3.2/neutrino/utilities/q/qcc.html + +rule archive +{ + check-target-platform $(1) ; +} + +actions piecemeal archive +{ + $(RM) "$(<)" + "$(CONFIG_COMMAND)" $(QCC-TARGET-PLATFORM) -A "$(<)" "$(>)" +} + + +rule link.dll ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; + check-target-platform $(1) ; +} + + +# Differ from 'link' above only by -shared. +# +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(QCC-TARGET-PLATFORM) -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS) +} diff --git a/src/boost/tools/build/src/tools/qt.jam b/src/boost/tools/build/src/tools/qt.jam new file mode 100644 index 000000000..4b74e33a1 --- /dev/null +++ b/src/boost/tools/build/src/tools/qt.jam @@ -0,0 +1,17 @@ +# Copyright (c) 2006 Vladimir Prus. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Forwarning toolset file to Qt GUI library. Forwards to the toolset file +# for the current version of Qt. + +import qt4 ; + +rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * ) +{ + qt4.init $(prefix) : $(full_bin) : $(full_inc) : $(full_lib) : $(version) : $(condition) ; +} + + diff --git a/src/boost/tools/build/src/tools/qt3.jam b/src/boost/tools/build/src/tools/qt3.jam new file mode 100644 index 000000000..6cf0eef32 --- /dev/null +++ b/src/boost/tools/build/src/tools/qt3.jam @@ -0,0 +1,209 @@ +# Copyright 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Support for the Qt GUI library version 3 +# (http://www.trolltech.com/products/qt3/index.html). +# For new developments, it is recommended to use Qt4 via the qt4 B2 +# module. + +import modules ; +import feature ; +import errors ; +import type ; +import "class" : new ; +import generators ; +import project ; +import toolset : flags ; + +# Convert this module into a project, so that we can declare targets here. +project.initialize $(__name__) ; +project qt3 ; + + +# Initialized the QT support module. The 'prefix' parameter tells where QT is +# installed. When not given, environmental variable QTDIR should be set. +# +rule init ( prefix ? ) +{ + if ! $(prefix) + { + prefix = [ modules.peek : QTDIR ] ; + if ! 
$(prefix) + { + errors.error + "QT installation prefix not given and QTDIR variable is empty" ; + } + } + + if $(.initialized) + { + if $(prefix) != $(.prefix) + { + errors.error + "Attempt the reinitialize QT with different installation prefix" ; + } + } + else + { + .initialized = true ; + .prefix = $(prefix) ; + + generators.register-standard qt3.moc : H : CPP(moc_%) : qt3 ; + # Note: the OBJ target type here is fake, take a look at + # qt4.jam/uic-h-generator for explanations that apply in this case as + # well. + generators.register [ new moc-h-generator-qt3 + qt3.moc.cpp : MOCCABLE_CPP : OBJ : qt3 ] ; + + # The UI type is defined in types/qt.jam, and UIC_H is only used in + # qt.jam, but not in qt4.jam, so define it here. + type.register UIC_H : : H ; + + generators.register-standard qt3.uic-h : UI : UIC_H : qt3 ; + + # The following generator is used to convert UI files to CPP. It creates + # UIC_H from UI, and constructs CPP from UI/UIC_H. In addition, it also + # returns UIC_H target, so that it can be mocced. + class "qt::uic-cpp-generator" : generator + { + rule __init__ ( ) + { + generator.__init__ qt3.uic-cpp : UI UIC_H : CPP : qt3 ; + } + + rule run ( project name ? : properties * : sources + ) + { + # Consider this: + # obj test : test_a.cpp : off ; + # + # This generator will somehow be called in this case, and, + # will fail -- which is okay. However, if there are + # properties they will be converted to sources, so the size of + # 'sources' will be more than 1. In this case, the base generator + # will just crash -- and that's not good. Just use a quick test + # here. + + local result ; + if ! $(sources[2]) + { + # Construct CPP as usual + result = [ generator.run $(project) $(name) + : $(properties) : $(sources) ] ; + + # If OK, process UIC_H with moc. It's pretty clear that + # the object generated with UIC will have Q_OBJECT macro. + if $(result) + { + local action = [ $(result[1]).action ] ; + local sources = [ $(action).sources ] ; + local mocced = [ generators.construct $(project) $(name) + : CPP : $(properties) : $(sources[2]) ] ; + result += $(mocced[2-]) ; + } + } + + return $(result) ; + } + } + + generators.register [ new "qt::uic-cpp-generator" ] ; + + # Finally, declare prebuilt target for QT library. + local usage-requirements = + $(.prefix)/include + $(.prefix)/lib + $(.prefix)/lib + qt3 + ; + lib qt : : qt-mt multi : : $(usage-requirements) ; + lib qt : : qt single : : $(usage-requirements) ; + } +} + +class moc-h-generator-qt3 : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP + { + name = [ $(sources[1]).name ] ; + name = $(name:B) ; + + local a = [ new action $(sources[1]) : qt3.moc.cpp : + $(property-set) ] ; + + local target = [ + new file-target $(name) : MOC : $(project) : $(a) ] ; + + local r = [ virtual-target.register $(target) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. However, + # we need the target to be seen by bjam, so that the dependency from + # sources to this generated header is detected -- if Jam does not + # know about this target, it won't do anything. + DEPENDS all : [ $(r).actualize ] ; + + return $(r) ; + } + } +} + + +# Query the installation directory. This is needed in at least two scenarios. 
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt +# plugins to the Qt-Tree. +# +rule directory +{ + return $(.prefix) ; +} + +# -f forces moc to include the processed source file. Without it, it would think +# that .qpp is not a header and would not include it from the generated file. +# +actions moc +{ + $(.prefix)/bin/moc -f $(>) -o $(<) +} + +# When moccing .cpp files, we don't need -f, otherwise generated code will +# include .cpp and we'll get duplicated symbols. +# +actions moc.cpp +{ + $(.prefix)/bin/moc $(>) -o $(<) +} + + +space = " " ; + +# Sometimes it's required to make 'plugins' available during uic invocation. To +# help with this we add paths to all dependency libraries to uic commane line. +# The intention is that it's possible to write +# +# exe a : ... a.ui ... : some_plugin ; +# +# and have everything work. We'd add quite a bunch of unrelated paths but it +# won't hurt. +# +flags qt3.uic-h LIBRARY_PATH ; +actions uic-h +{ + $(.prefix)/bin/uic $(>) -o $(<) -L$(space)$(LIBRARY_PATH) +} + + +flags qt3.uic-cpp LIBRARY_PATH ; +# The second target is uic-generated header name. It's placed in build dir, but +# we want to include it using only basename. +actions uic-cpp +{ + $(.prefix)/bin/uic $(>[1]) -i $(>[2]:D=) -o $(<) -L$(space)$(LIBRARY_PATH) +} diff --git a/src/boost/tools/build/src/tools/qt4.jam b/src/boost/tools/build/src/tools/qt4.jam new file mode 100644 index 000000000..0e5c5687d --- /dev/null +++ b/src/boost/tools/build/src/tools/qt4.jam @@ -0,0 +1,755 @@ +# Copyright 2002-2006 Vladimir Prus +# Copyright 2005 Alo Sarv +# Copyright 2005-2009 Juergen Hunold +# +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Qt4 library support module +# +# The module attempts to auto-detect QT installation location from QTDIR +# environment variable; failing that, installation location can be passed as +# argument: +# +# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ; +# +# The module supports code generation from .ui and .qrc files, as well as +# running the moc preprocessor on headers. Note that you must list all your +# moc-able headers in sources. +# +# Example: +# +# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc +# /qt4//QtGui /qt4//QtNetwork ; +# +# It's also possible to run moc on cpp sources: +# +# import cast ; +# +# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ; +# +# When moccing source file myapp.cpp you need to include "myapp.moc" from +# myapp.cpp. When moccing .h files, the output of moc will be automatically +# compiled and linked in, you don't need any includes. +# +# This is consistent with Qt guidelines: +# http://qt-project.org/doc/qt-4.8/moc.html +# +# The .qrc processing utility supports various command line option (see +# http://qt-project.org/doc/qt-4.8/rcc.html for a complete list). The +# module provides default arguments for the "output file" and +# "initialization function name" options. Other options can be set through +# the build property. E.g. if you wish the compression settings +# to be more aggressive than the defaults, you can apply them too all .qrc +# files like this: +# +# project my-qt-project : +# requirements +# "-compress 9 -threshold 10" +# ; +# +# Of course, this property can also be specified on individual targets. 
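+#
+# Configuration is normally done from user-config.jam / project-config.jam.
+# The paths and version below are only examples for a distro-packaged Qt and
+# must be adjusted to the local installation:
+#
+#   using qt4 : /usr ;                                 # plain QTDIR-style prefix
+#   using qt4 : /usr : /usr/bin : /usr/include/qt4
+#             : /usr/lib : 4.8 ;                       # explicit paths and version
+#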
+ + +import modules ; +import feature ; +import errors ; +import type ; +import "class" : new ; +import generators ; +import project ; +import toolset : flags ; +import os ; +import virtual-target ; +import scanner ; + +# Qt3Support control feature +# +# Qt4 configure defaults to build Qt4 libraries with Qt3Support. +# The autodetection is missing, so we default to disable Qt3Support. +# This prevents the user from inadvertedly using a deprecated API. +# +# The Qt3Support library can be activated by adding +# "on" to requirements +# +# Use "on:QT3_SUPPORT_WARNINGS" +# to get warnings about deprecated Qt3 support functions and classes. +# Files ported by the "qt3to4" conversion tool contain _tons_ of +# warnings, so this define is not set as default. +# +# Todo: Detect Qt3Support from Qt's configure data. +# Or add more auto-configuration (like python). +feature.feature qt3support : off on : propagated link-incompatible ; + +# The Qt version used for requirements +# Valid are 4.4 or 4.5.0 +# Auto-detection via qmake sets 'major.minor.patch' +feature.feature qt : : propagated ; + +# Extra flags for rcc +feature.feature rccflags : : free ; + +project.initialize $(__name__) ; +project qt ; + +# Save the project so that we tolerate 'import + using' combo. +.project = [ project.current ] ; + +# Helper utils for easy debug output +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = TRUE ; +} + +local rule debug-message ( message * ) +{ + if $(.debug-configuration) = TRUE + { + ECHO notice\: "[qt4-cfg]" $(message) ; + } +} + +# Capture qmake output line by line +local rule read-output ( content ) +{ + local lines ; + local nl = " +" ; + local << = "([^$(nl)]*)[$(nl)](.*)" ; + local line+ = [ MATCH "$(<<)" : "$(content)" ] ; + while $(line+) + { + lines += $(line+[1]) ; + line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ; + } + return $(lines) ; +} + +# Capture Qt version from qmake +local rule check-version ( bin_prefix ) +{ + full-cmd = $(bin_prefix)"/qmake -v" ; + debug-message Running '$(full-cmd)' ; + local output = [ SHELL $(full-cmd) ] ; + for line in [ read-output $(output) ] + { + # Parse the output to get all the results. + if [ MATCH "QMake" : $(line) ] + { + # Skip first line of output + } + else + { + temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ; + } + } + return $(temp) ; +} + +# Validate the version string and extract the major/minor part we care about. +# +local rule split-version ( version ) +{ + local major-minor = [ MATCH "^([0-9]+)\.([0-9]+)(.*)$" : $(version) : 1 2 3 ] ; + if ! $(major-minor[2]) || $(major-minor[3]) + { + ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ; + + # Add a zero to account for the missing digit if necessary. + major-minor += 0 ; + } + + return $(major-minor[1]) $(major-minor[2]) ; +} + +# Initialize the QT support module. +# Parameters: +# - 'prefix' parameter tells where Qt is installed. +# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc) +# - 'full_inc' optional full path to Qt top-level include directory +# - 'full_lib' optional full path to Qt library directory +# - 'version' optional version of Qt, else autodetected via 'qmake -v' +# - 'condition' optional requirements +rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * ) +{ + project.push-current $(.project) ; + + debug-message "==== Configuring Qt ... 
====" ; + for local v in version cmd-or-prefix includes libraries condition + { + if $($(v)) + { + debug-message " user-specified $(v):" '$($(v))' ; + } + } + + # Needed as default value + .prefix = $(prefix) ; + + # pre-build paths to detect reinitializations changes + local inc_prefix lib_prefix bin_prefix ; + if $(full_inc) + { + inc_prefix = $(full_inc) ; + } + else + { + inc_prefix = $(prefix)/include ; + } + if $(full_lib) + { + lib_prefix = $(full_lib) ; + } + else + { + lib_prefix = $(prefix)/lib ; + } + if $(full_bin) + { + bin_prefix = $(full_bin) ; + } + else + { + bin_prefix = $(prefix)/bin ; + } + + # Globally needed variables + .incprefix = $(inc_prefix) ; + .libprefix = $(lib_prefix) ; + .binprefix = $(bin_prefix) ; + + if ! $(.initialized) + { + # Make sure this is initialised only once + .initialized = true ; + + # Generates cpp files from header files using "moc" tool + generators.register-standard qt4.moc : H : CPP(moc_%) : qt4 ; + + # The OBJ result type is a fake, 'H' will be really produced. See + # comments on the generator class, defined below the 'init' function. + generators.register [ new uic-generator qt4.uic : UI : OBJ : + qt4 ] ; + + # The OBJ result type is a fake here too. + generators.register [ new moc-h-generator + qt4.moc.inc : MOCCABLE_CPP : OBJ : qt4 ] ; + + generators.register [ new moc-inc-generator + qt4.moc.inc : MOCCABLE_H : OBJ : qt4 ] ; + + # Generates .cpp files from .qrc files. + generators.register-standard qt4.rcc : QRC : CPP(qrc_%) : qt4 ; + + # dependency scanner for wrapped files. + type.set-scanner QRC : qrc-scanner ; + + # Save value of first occurring prefix + .PREFIX = $(prefix) ; + } + + if $(version) + { + major-minor = [ split-version $(version) ] ; + version = $(major-minor:J=.) ; + } + else + { + version = [ check-version $(bin_prefix) ] ; + if $(version) + { + version = $(version:J=.) ; + } + debug-message Detected version '$(version)' ; + } + + local target-requirements = $(condition) ; + + # Add the version, if any, to the target requirements. + if $(version) + { + if ! $(version) in [ feature.values qt ] + { + feature.extend qt : $(version) ; + } + target-requirements += $(version:E=default) ; + } + + local target-os = [ feature.get-values target-os : $(condition) ] ; + if ! $(target-os) + { + target-os ?= [ feature.defaults target-os ] ; + target-os = $(target-os:G=) ; + target-requirements += $(target-os) ; + } + + # Build exact requirements for the tools + local tools-requirements = $(target-requirements:J=/) ; + + debug-message "Details of this Qt configuration:" ; + debug-message " prefix: " '$(prefix:E=)' ; + debug-message " binary path: " '$(bin_prefix:E=)' ; + debug-message " include path:" '$(inc_prefix:E=)' ; + debug-message " library path:" '$(lib_prefix:E=)' ; + debug-message " target requirements:" '$(target-requirements)' ; + debug-message " tool requirements: " '$(tools-requirements)' ; + + # setup the paths for the tools + toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; + toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; + toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ; + + # TODO: 2009-02-12: Better support for directories + # Most likely needed are separate getters for: include,libraries,binaries and sources. + toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ; + + # Test for a buildable Qt. 
+ if [ glob $(.prefix)/Jamroot ] + { + .bjam-qt = true + + # this will declare QtCore (and qtmain on windows) + add-shared-library QtCore ; + } + else + # Setup common pre-built Qt. + # Special setup for QtCore on which everything depends + { + local link = [ feature.get-values link : $(condition) ] ; + + local usage-requirements = + $(.incprefix) + $(.libprefix) + multi + qt4 ; + + if $(link) in shared + { + usage-requirements += $(.libprefix) ; + } + + local suffix ; + + # Since Qt-4.2, debug versions on unix have to be built + # separately and therefore have no suffix. + .suffix_version = "" ; + .suffix_debug = "" ; + + # Control flag for auto-configuration of the debug libraries. + # This setup requires Qt 'configure -debug-and-release'. + # Only available on some platforms. + # ToDo: 2009-02-12: Maybe throw this away and + # require separate setup with debug as condition. + .have_separate_debug = FALSE ; + + # Setup other platforms + if $(target-os) in windows cygwin + { + .have_separate_debug = TRUE ; + + # On NT, the shared libs have "4" suffix, and "d" suffix in debug builds. + if $(link) in shared + { + .suffix_version = "4" ; + } + .suffix_debug = "d" ; + + # On Windows we must link against the qtmain library + lib qtmain + : # sources + : # requirements + qtmain$(.suffix_debug) + debug + $(target-requirements) + ; + + lib qtmain + : # sources + : # requirements + qtmain + $(target-requirements) + ; + } + else if $(target-os) = darwin + { + # On MacOS X, both debug and release libraries are available. + .suffix_debug = "_debug" ; + + .have_separate_debug = TRUE ; + + alias qtmain ; + } + else + { + alias qtmain : : $(target-requirements) ; + } + + lib QtCore : qtmain + : # requirements + QtCore$(.suffix_version) + $(target-requirements) + : # default-build + : # usage-requirements + QT_CORE_LIB + QT_NO_DEBUG + $(.incprefix)/QtCore + $(usage-requirements) + ; + + if $(.have_separate_debug) = TRUE + { + debug-message Configure debug libraries with suffix '$(.suffix_debug)' ; + + lib QtCore : $(main) + : # requirements + QtCore$(.suffix_debug)$(.suffix_version) + debug + $(target-requirements) + : # default-build + : # usage-requirements + QT_CORE_LIB + $(.incprefix)/QtCore + $(usage-requirements) + ; + } + } + + # Initialising the remaining libraries is canonical + # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include' + # 'include' only for non-canonical include paths. + add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ; + add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ; + add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ; + add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ; + + add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql + : QT_QT3SUPPORT_LIB QT3_SUPPORT + : on $(target-requirements) ; + + # Dummy target to enable "off" and + # "/qt//Qt3Support" at the same time. This enables quick + # switching from one to the other for test/porting purposes. 
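+            # For example (hypothetical targets), a port in progress can keep
+            # the dependency in place and flip only the feature:
+            #
+            #   exe legacy_app : legacy_app.cpp /qt//Qt3Support : <qt3support>on ;
+            #   exe ported_app : ported_app.cpp /qt//Qt3Support : <qt3support>off ;
+            #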
+ alias Qt3Support : : off $(target-requirements) ; + + # OpenGl Support + add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ; + + # SVG-Support (Qt 4.1) + add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ; + + # Test-Support (Qt 4.1) + add-shared-library QtTest : QtCore : : $(target-requirements) ; + + # Qt designer library + add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ; + add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ; + + # Support for dynamic Widgets (Qt 4.1) + add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ; + + # DBus-Support (Qt 4.2) + add-shared-library QtDBus : QtXml : : $(target-requirements) ; + + # Script-Engine (Qt 4.3) + add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ; + + # Tools for the Script-Engine (Qt 4.5) + add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ; + + # WebKit (Qt 4.4) + add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ; + + # Phonon Multimedia (Qt 4.4) + add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ; + + # Multimedia engine (Qt 4.6) + add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ; + + # XmlPatterns-Engine (Qt 4.4) + add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ; + + # Help-Engine (Qt 4.4) + add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ; + add-shared-library QtCLucene : QCore QtSql QtXml : : $(target-requirements) ; + + # QML-Engine (Qt 4.7) + add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ; + + # AssistantClient Support + # Compat library removed in 4.7.0 + # Pre-4.4 help system, use QtHelp for new programs + if $(version) < "4.7" + { + add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ; + } + debug-message "==== Configured Qt-$(version) ====" ; + + project.pop-current ; +} + +rule initialized ( ) +{ + return $(.initialized) ; +} + + + +# This custom generator is needed because in QT4, UI files are translated only +# into H files, and no C++ files are created. Further, the H files need not be +# passed via MOC. The header is used only via inclusion. If we define a standard +# UI -> H generator, B2 will run MOC on H, and then compile the +# resulting cpp. It will give a warning, since output from moc will be empty. +# +# This generator is declared with a UI -> OBJ signature, so it gets invoked when +# linking generator tries to convert sources to OBJ, but it produces target of +# type H. This is non-standard, but allowed. That header won't be mocced. +# +class uic-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(name) + { + name = [ $(sources[0]).name ] ; + name = $(name:B) ; + } + + local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ; + + # The 'ui_' prefix is to match qmake's default behavior. + local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ; + + local r = [ virtual-target.register $(target) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. 
However, we + # need the target to be seen by bjam, so that dependency from sources to + # this generated header is detected -- if jam does not know about this + # target, it won't do anything. + DEPENDS all : [ $(r).actualize ] ; + + return $(r) ; + } +} + + +class moc-h-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP + { + name = [ $(sources[0]).name ] ; + name = $(name:B) ; + + local a = [ new action $(sources[1]) : qt4.moc.inc : + $(property-set) ] ; + + local target = [ new file-target $(name) : MOC : $(project) : $(a) + ] ; + + local r = [ virtual-target.register $(target) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. However, + # we need the target to be seen by bjam, so that dependency from + # sources to this generated header is detected -- if jam does not + # know about this target, it won't do anything. + DEPENDS all : [ $(r).actualize ] ; + + return $(r) ; + } + } +} + + +class moc-inc-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H + { + name = [ $(sources[0]).name ] ; + name = $(name:B) ; + + local a = [ new action $(sources[1]) : qt4.moc.inc : + $(property-set) ] ; + + local target = [ new file-target moc_$(name) : CPP : $(project) : + $(a) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. However, + # we need the target to be seen by bjam, so that dependency from + # sources to this generated header is detected -- if jam does not + # know about this target, it won't do anything. + DEPENDS all : [ $(target).actualize ] ; + + return [ virtual-target.register $(target) ] ; + } + } +} + + +# Query the installation directory. This is needed in at least two scenarios. +# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt +# plugins to the Qt-Tree. +# +rule directory +{ + return $(.PREFIX) ; +} + +# Add a shared Qt library. +rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? ) +{ + add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; +} + +# Add a static Qt library. +rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? ) +{ + add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; +} + +# Add a Qt library. +# Static libs are unversioned, whereas shared libs have the major number as suffix. +# Creates both release and debug versions on platforms where both are enabled by Qt configure. +# Flags: +# - lib-name Qt library Name +# - version Qt major number used as shared library suffix (QtCore4.so) +# - depends-on other Qt libraries +# - usage-defines those are set by qmake, so set them when using this library +# - requirements additional requirements +# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name). +rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? 
) +{ + if $(.bjam-qt) + { + # Import Qt module + # Eveything will be setup there + alias $(lib-name) + : $(.prefix)//$(lib-name) + : + : + : qt4 ; + } + else + { + local real_include ; + real_include ?= $(include) ; + real_include ?= $(lib-name) ; + + lib $(lib-name) + : # sources + $(depends-on) + : # requirements + $(lib-name)$(version) + $(requirements) + : # default-build + : # usage-requirements + $(usage-defines) + $(.incprefix)/$(real_include) + ; + + if $(.have_separate_debug) = TRUE + { + lib $(lib-name) + : # sources + $(depends-on) + : # requirements + $(lib-name)$(.suffix_debug)$(version) + $(requirements) + debug + : # default-build + : # usage-requirements + $(usage-defines) + $(.incprefix)/$(real_include) + ; + } + } + + # Make library explicit so that a simple qt4 will not bring in everything. + # And some components like QtDBus/Phonon may not be available on all platforms. + explicit $(lib-name) ; +} + +# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match. +# The exact match is the last one. + +# Get and from current toolset. +flags qt4.moc INCLUDES ; +flags qt4.moc DEFINES ; + +# need a newline for expansion of DEFINES and INCLUDES in the response file. +.nl = " +" ; + +# Processes headers to create Qt MetaObject information. Qt4-moc has its +# c++-parser, so pass INCLUDES and DEFINES. +# We use response file with one INCLUDE/DEFINE per line +# +actions moc +{ + $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" +} + +# When moccing files for include only, we don't need -f, otherwise the generated +# code will include the .cpp and we'll get duplicated symbols. +# +actions moc.inc +{ + $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" +} + + +# Get extra options for RCC +flags qt4.rcc RCC_OPTIONS ; + +# Generates source files from resource files. +# +actions rcc +{ + $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) $(RCC_OPTIONS) -o $(<) +} + + +# Generates user-interface source from .ui files. +# +actions uic +{ + $(.BINPREFIX[-1])/uic $(>) -o $(<) +} + + +# Scanner for .qrc files. Look for the CDATA section of the tag. Ignore +# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for +# detailed documentation of the Qt Resource System. +# +class qrc-scanner : common-scanner +{ + rule pattern ( ) + { + return "(.*)" ; + } +} + + +# Wrapped files are "included". +scanner.register qrc-scanner : include ; diff --git a/src/boost/tools/build/src/tools/qt5.jam b/src/boost/tools/build/src/tools/qt5.jam new file mode 100644 index 000000000..5fc41153a --- /dev/null +++ b/src/boost/tools/build/src/tools/qt5.jam @@ -0,0 +1,799 @@ +# Copyright 2002-2006 Vladimir Prus +# Copyright 2005 Alo Sarv +# Copyright 2005-2012 Juergen Hunold +# +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Qt5 library support module +# +# The module attempts to auto-detect QT installation location from QTDIR +# environment variable; failing that, installation location can be passed as +# argument: +# +# toolset.using qt5 : /usr/local/Trolltech/Qt-5.0.0 ; +# +# The module supports code generation from .ui and .qrc files, as well as +# running the moc preprocessor on headers. Note that you must list all your +# moc-able headers in sources. 
+# +# Example: +# +# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc +# /qt5//QtGui /qt5//QtNetwork ; +# +# It's also possible to run moc on cpp sources: +# +# import cast ; +# +# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt5//QtGui ; +# +# When moccing source file myapp.cpp you need to include "myapp.moc" from +# myapp.cpp. When moccing .h files, the output of moc will be automatically +# compiled and linked in, you don't need any includes. +# +# This is consistent with Qt guidelines: +# http://qt-project.org/doc/qt-5.0/moc.html + +# The .qrc processing utility supports various command line option (see +# http://qt-project.org/doc/qt-5.0/rcc.html for a complete list). The +# module provides default arguments for the "output file" and +# "initialization function name" options. Other options can be set through +# the build property. E.g. if you wish the compression settings +# to be more aggressive than the defaults, you can apply them too all .qrc +# files like this: +# +# project my-qt-project : +# requirements +# "-compress 9 -threshold 10" +# ; +# +# Of course, this property can also be specified on individual targets. + + +import modules ; +import feature ; +import errors ; +import type ; +import "class" : new ; +import generators ; +import project ; +import toolset : flags ; +import os ; +import virtual-target ; +import scanner ; + +# The Qt version used for requirements +# Valid are 5.0 or 5.1.0 +# Auto-detection via qmake sets 'major.minor.patch' +feature.feature qt5 : : propagated ; + +# Extra flags for rcc +# $TODO: figure out how to declare this only once +# feature.feature rccflags : : free ; + +project.initialize $(__name__) ; +project qt5 ; + +# Save the project so that we tolerate 'import + using' combo. +.project = [ project.current ] ; + +# Helper utils for easy debug output +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = TRUE ; +} + +local rule debug-message ( message * ) +{ + if $(.debug-configuration) = TRUE + { + ECHO notice\: "[qt5-cfg]" $(message) ; + } +} + +# Capture qmake output line by line +local rule read-output ( content ) +{ + local lines ; + local nl = " +" ; + local << = "([^$(nl)]*)[$(nl)](.*)" ; + local line+ = [ MATCH "$(<<)" : "$(content)" ] ; + while $(line+) + { + lines += $(line+[1]) ; + line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ; + } + return $(lines) ; +} + +# Capture Qt version from qmake +local rule check-version ( bin_prefix ) +{ + full-cmd = $(bin_prefix)"/qmake -v" ; + debug-message Running '$(full-cmd)' ; + local output = [ SHELL $(full-cmd) ] ; + for line in [ read-output $(output) ] + { + # Parse the output to get all the results. + if [ MATCH "QMake" : $(line) ] + { + # Skip first line of output + } + else + { + temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ; + } + } + return $(temp) ; +} + +# Validate the version string and extract the major/minor part we care about. +# +local rule split-version ( version ) +{ + local major-minor = [ MATCH "^([0-9]+)\.([0-9]+)(.*)$" : $(version) : 1 2 3 ] ; + if ! $(major-minor[2]) || $(major-minor[3]) + { + ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ; + + # Add a zero to account for the missing digit if necessary. + major-minor += 0 ; + } + + return $(major-minor[1]) $(major-minor[2]) ; +} + +# Initialize the QT support module. +# Parameters: +# - 'prefix' parameter tells where Qt is installed. 
+# - 'version' optional version of Qt, else autodetected via 'qmake -v' +# - 'condition' optional requirements +# - 'namespace' optional support for configure -qtnamespace +# - 'infix' optional support for configure -qtlibinfix +# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc) +# - 'full_inc' optional full path to Qt top-level include directory +# - 'full_lib' optional full path to Qt library directory +rule init ( prefix : version ? : condition * : namespace ? : infix ? : full_bin ? : full_inc ? : full_lib ? ) +{ + project.push-current $(.project) ; + + debug-message "==== Configuring Qt ... ====" ; + for local v in version prefix condition namespace infix full_bin full_inc full_lib + { + if $($(v)) + { + debug-message " user-specified $(v):" '$($(v))' ; + } + } + + # Needed as default value + .prefix = $(prefix) ; + + # pre-build paths to detect reinitializations changes + local inc_prefix lib_prefix bin_prefix ; + if $(full_inc) + { + inc_prefix = $(full_inc) ; + } + else + { + inc_prefix = $(prefix)/include ; + } + if $(full_lib) + { + lib_prefix = $(full_lib) ; + } + else + { + lib_prefix = $(prefix)/lib ; + } + if $(full_bin) + { + bin_prefix = $(full_bin) ; + } + else + { + bin_prefix = $(prefix)/bin ; + } + + # Globally needed variables + .incprefix = $(inc_prefix) ; + .libprefix = $(lib_prefix) ; + .binprefix = $(bin_prefix) ; + + if ! $(.initialized) + { + # Make sure this is initialised only once + .initialized = true ; + + # Generates cpp files from header files using "moc" tool + generators.register-standard qt5.moc : H : CPP(moc_%) : qt5 ; + + # The OBJ result type is a fake, 'H' will be really produced. See + # comments on the generator class, defined below the 'init' function. + generators.register [ new uic-5-generator qt5.uic : UI : OBJ : + qt5 ] ; + + # The OBJ result type is a fake here too. + generators.register [ new moc-h-5-generator + qt5.moc.inc : MOCCABLE5_CPP : OBJ : qt5 ] ; + + generators.register [ new moc-inc-5-generator + qt5.moc.inc : MOCCABLE5_H : OBJ : qt5 ] ; + + # Generates .cpp files from .qrc files. + generators.register-standard qt5.rcc : QRC : CPP(qrc_%) : qt5 ; + + # dependency scanner for wrapped files. + type.set-scanner QRC : qrc-5-scanner ; + + # Save value of first occurring prefix + .PREFIX = $(prefix) ; + } + + if $(version) + { + major-minor = [ split-version $(version) ] ; + version = $(major-minor:J=.) ; + } + else + { + version = [ check-version $(bin_prefix) ] ; + if $(version) + { + version = $(version:J=.) ; + } + debug-message Detected version '$(version)' ; + } + + local target-requirements = $(condition) ; + + # Add the version, if any, to the target requirements. + if $(version) + { + if ! $(version) in [ feature.values qt5 ] + { + feature.extend qt5 : $(version) ; + } + target-requirements += $(version:E=default) ; + } + + local target-os = [ feature.get-values target-os : $(condition) ] ; + if ! 
$(target-os) + { + target-os ?= [ feature.defaults target-os ] ; + target-os = $(target-os:G=) ; + target-requirements += $(target-os) ; + } + + # Build exact requirements for the tools + local tools-requirements = $(target-requirements:J=/) ; + + debug-message "Details of this Qt configuration:" ; + debug-message " prefix: " '$(prefix:E=)' ; + debug-message " binary path: " '$(bin_prefix:E=)' ; + debug-message " include path:" '$(inc_prefix:E=)' ; + debug-message " library path:" '$(lib_prefix:E=)' ; + debug-message " target requirements:" '$(target-requirements)' ; + debug-message " tool requirements: " '$(tools-requirements)' ; + + # setup the paths for the tools + toolset.flags qt5.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; + toolset.flags qt5.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ; + toolset.flags qt5.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ; + + # TODO: 2009-02-12: Better support for directories + # Most likely needed are separate getters for: include,libraries,binaries and sources. + toolset.flags qt5.directory .PREFIX $(tools-requirements) : $(prefix) ; + + # Test for a buildable Qt. + if [ glob $(.prefix)/Jamroot ] + { + .bjam-qt = true + + # this will declare QtCore (and qtmain on windows) + add-shared-library QtCore ; + } + else + # Setup common pre-built Qt. + # Special setup for QtCore on which everything depends + { + local link = [ feature.get-values link : $(condition) ] ; + + local usage-requirements = + $(.incprefix) + $(.libprefix) + qt5 ; + + if $(link) in shared + { + usage-requirements += $(.libprefix) ; + usage-requirements += windows:$(.binprefix) ; + } + + local suffix ; + + # debug versions on unix have to be built + # separately and therefore have no suffix. + .infix_version = "" ; + .suffix_debug = "" ; + + # Control flag for auto-configuration of the debug libraries. + # This setup requires Qt 'configure -debug-and-release'. + # Only available on some platforms. + # ToDo: 2009-02-12: Maybe throw this away and + # require separate setup with debug as condition. + .have_separate_debug = FALSE ; + + # Setup other platforms + if $(target-os) in windows cygwin + { + .have_separate_debug = TRUE ; + + # On NT, the libs have "d" suffix in debug builds. + .suffix_debug = "d" ; + + .infix_version = "5" ; + + # On Windows we must link against the qtmain library + lib qtmain + : # sources + : # requirements + qtmain$(.suffix_debug) + debug + $(target-requirements) + ; + + lib qtmain + : # sources + : # requirements + qtmain + $(target-requirements) + ; + } + else if $(target-os) = darwin + { + # On MacOS X, both debug and release libraries are available. + .suffix_debug = "_debug" ; + + .have_separate_debug = TRUE ; + + alias qtmain ; + } + else + { + alias qtmain : : $(target-requirements) ; + .infix_version = "5" ; + } + + lib QtCore : qtmain + : # requirements + Qt$(.infix_version)Core + $(target-requirements) + : # default-build + : # usage-requirements + QT_CORE_LIB + QT_NO_DEBUG + $(.incprefix)/QtCore + $(usage-requirements) + ; + + if $(.have_separate_debug) = TRUE + { + debug-message Configure debug libraries with suffix '$(.suffix_debug)' ; + + lib QtCore : $(main) + : # requirements + Qt$(.infix_version)Core$(.suffix_debug) + debug + $(target-requirements) + : # default-build + : # usage-requirements + QT_CORE_LIB + $(.incprefix)/QtCore + $(usage-requirements) + ; + } + } + + if [ glob $(.incprefix)/QtAngle ] + { + # Setup support of ANGLE builds. 
+ alias QtAngle + : # sources + : # requirements + $(target-requirements) + : # default-build + : # usage-requirements + QT_OPENGL_ES_2 + QT_OPENGL_ES_2_ANGLE + $(.incprefix)/QtAngle + $(usage-requirements) + ; + } + else + { + alias QtAngle + : # sources + : # requirements + $(target-requirements) + ; + } + + # Initialising the remaining libraries is canonical + # parameters 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include' + # 'include' only for non-canonical include paths. + add-shared-library QtGui : QtCore QtAngle : QT_GUI_LIB : $(target-requirements) ; + add-shared-library QtWidgets : QtGui : QT_WIDGETS_LIB : $(target-requirements) ; + add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ; + add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ; + add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ; + add-shared-library QtPrintSupport : QtGui : QT_PRINTSUPPORT_LIB : $(target-requirements) ; + add-shared-library QtConcurrent : QtCore : QT_CONCURRENT_LIB : $(target-requirements) ; + + add-shared-library QtPositioning : QtCore : QT_POSITIONING_LIB : $(target-requirements) ; + + add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ; + add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ; + + add-shared-library QtTest : QtCore : : $(target-requirements) ; + + # Qt designer library et. al. + add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ; + add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ; + add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ; + + # DBus-Support + add-shared-library QtDBus : QtXml : : $(target-requirements) ; + + # Script-Engine and Tools + add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ; + add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ; + + # WebKit + add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ; + add-shared-library QtWebKitWidgets : QtGui : QT_WEBKITWIDGETS_LIB : $(target-requirements) ; + + # Multimedia engine + add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ; + add-shared-library QtMultimediaWidgets : QtMultimedia : QT_MULTIMEDIAWIDGETS_LIB : $(target-requirements) ; + + # + add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ; + + # Help-Engine + add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ; + add-shared-library QtCLucene : QCore QtSql QtXml : : $(target-requirements) ; + + # QtQuick + add-shared-library QtQml : QtCore QtNetwork QtGui : QT_QML_LIB : $(target-requirements) ; + add-shared-library QtQuick : QtQml : QT_QUICK_LIB : $(target-requirements) ; + add-shared-library QtQuickParticles : QtQml : : $(target-requirements) ; + add-shared-library QtQuickTest : QtQml : : $(target-requirements) ; + + add-shared-library QtSerialPort : QtCore : QT_SERIALPORT_LIB : $(target-requirements) ; + + # QtLocation (since 5.4) + add-shared-library QtLocation : QtQuick QtPositioning : QT_LOCATION_LIB : $(target-requirements) ; + + # Webengine support (since 5.4) + add-shared-library QtWebEngine : QtGui : QT_WEBENGINE_LIB : $(target-requirements) ; + add-shared-library QtWebEngineCore : QtWebEngine : QT_WEBENGINECORE_LIB : $(target-requirements) ; + add-shared-library QtWebEngineWidgets : QtWebEngineCore QtWidgets : QT_WEBENGINEWIDGETS_LIB : 
$(target-requirements) ; + + add-shared-library QtWebChannel : QtQml : QT_WEBCHANNEL_LIB : $(target-requirements) ; + add-shared-library QtWebSockets : QtNetwork : QT_WEBSOCKETS_LIB : $(target-requirements) ; + + add-shared-library QtWebView : QtWebEngineCore QtWebChannel : QT_WEBVIEW_LIB : $(target-requirements) ; + + # Qt3d libraries (since 5.6) + add-shared-library Qt3DCore : QtGui : QT_3DCORE_LIB : $(target-requirements) ; + add-shared-library Qt3DRender : Qt3DCore QtConcurrent : QT_3DRENDER_LIB : $(target-requirements) ; + add-shared-library Qt3DLogic : Qt3DCore : QT_3DLOGIC_LIB : $(target-requirements) ; + add-shared-library Qt3DInput : Qt3DRender : QT_3DINPUT_LIB : $(target-requirements) ; + + # QtCharts (since 5.7) + add-shared-library QtCharts : QtWidgets : QT_CHARTS_LIB : $(target-requirements) ; + + # 3D data visualization (since 5.7) + add-shared-library QtDataVisualization : QtGui : QT_DATAVISUALIZATION_LIB : $(target-requirements) ; + + # In-App purchase API (since 5.7) + add-shared-library QtPurchasing : QtCore : QT_PURCHASING_LIB : $(target-requirements) ; + + # Qt Connectivity (since 5.3) + add-shared-library QtBluetooth : QtCore : QT_BLUETOOTH_LIB : $(target-requirements) ; + add-shared-library QtNfc : QtCore : QT_NFC_LIB : $(target-requirements) ; + + # Gamepad (since 5.7) + add-shared-library QtGamepad : QtCore : QT_GAMEPAD_LIB : $(target-requirements) ; + + # SCXML state machine (since 5.7) + add-shared-library QtScxml : QtCore : QT_SCXML_LIB : $(target-requirements) ; + + # Tech Preview QtQuick + # SerialBus (since 5.7) + add-shared-library QtSerialBus : QtCore : QT_SERIALBUS_LIB : $(target-requirements) ; + + # Platform dependent libraries + # Regular expression support + add-shared-library QtV8 : QtCore : : $(target-requirements) ; + + # QML-Engine version1 + add-shared-library QtDeclarative : QtXml : : $(target-requirements) ; + + debug-message "==== Configured Qt-$(version) ====" ; + + project.pop-current ; +} + +rule initialized ( ) +{ + return $(.initialized) ; +} + + + +# This custom generator is needed because in QT5, UI files are translated only +# into H files, and no C++ files are created. Further, the H files need not be +# passed via MOC. The header is used only via inclusion. If we define a standard +# UI -> H generator, B2 will run MOC on H, and then compile the +# resulting cpp. It will give a warning, since output from moc will be empty. +# +# This generator is declared with a UI -> OBJ signature, so it gets invoked when +# linking generator tries to convert sources to OBJ, but it produces target of +# type H. This is non-standard, but allowed. That header won't be mocced. +# +class uic-5-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(name) + { + name = [ $(sources[0]).name ] ; + name = $(name:B) ; + } + + local a = [ new action $(sources[1]) : qt5.uic : $(property-set) ] ; + + # The 'ui_' prefix is to match qmake's default behavior. + local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ; + + local r = [ virtual-target.register $(target) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. However, we + # need the target to be seen by bjam, so that dependency from sources to + # this generated header is detected -- if jam does not know about this + # target, it won't do anything. 
+ DEPENDS all : [ $(r).actualize ] ; + + return $(r) ; + } +} + + +class moc-h-5-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE5_CPP + { + name = [ $(sources[0]).name ] ; + name = $(name:B) ; + + local a = [ new action $(sources[1]) : qt5.moc.inc : + $(property-set) ] ; + + local target = [ new file-target $(name) : MOC : $(project) : $(a) + ] ; + + local r = [ virtual-target.register $(target) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. However, + # we need the target to be seen by bjam, so that dependency from + # sources to this generated header is detected -- if jam does not + # know about this target, it won't do anything. + DEPENDS all : [ $(r).actualize ] ; + + return $(r) ; + } + } +} + + +class moc-inc-5-generator : generator +{ + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE5_H + { + name = [ $(sources[0]).name ] ; + name = $(name:B) ; + + local a = [ new action $(sources[1]) : qt5.moc.inc : + $(property-set) ] ; + + local target = [ new file-target moc_$(name) : CPP : $(project) : + $(a) ] ; + + # Since this generator will return a H target, the linking generator + # won't use it at all, and won't set any dependency on it. However, + # we need the target to be seen by bjam, so that dependency from + # sources to this generated header is detected -- if jam does not + # know about this target, it won't do anything. + DEPENDS all : [ $(target).actualize ] ; + + return [ virtual-target.register $(target) ] ; + } + } +} + + +# Query the installation directory. This is needed in at least two scenarios. +# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt +# plugins to the Qt-Tree. +# +rule directory +{ + return $(.PREFIX) ; +} + +# Add a shared Qt library. +rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? ) +{ + add-library $(lib-name) : $(.infix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; +} + +# Add a static Qt library. +rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? ) +{ + add-library $(lib-name) : $(.infix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ; +} + +# Add a Qt library. +# Static libs are unversioned, whereas shared libs have the major number as suffix. +# Creates both release and debug versions on platforms where both are enabled by Qt configure. +# Flags: +# - lib-name Qt library Name +# - version Qt major number used as shared library suffix (QtCore5.so) +# - depends-on other Qt libraries +# - usage-defines those are set by qmake, so set them when using this library +# - requirements additional requirements +# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name). +rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? 
) +{ + if $(.bjam-qt) + { + # Import Qt module + # Eveything will be setup there + alias $(lib-name) + : $(.prefix)//$(lib-name) + : + : + : qt5 ; + } + else + { + local real_include ; + real_include ?= $(include) ; + real_include ?= $(lib-name) ; + + local real_name = [ MATCH ^Qt(.*) : $(lib-name) ] ; + + lib $(lib-name) + : # sources + $(depends-on) + : # requirements + Qt$(version)$(real_name) + $(requirements) + : # default-build + : # usage-requirements + $(usage-defines) + $(.incprefix)/$(real_include) + ; + + if $(.have_separate_debug) = TRUE + { + lib $(lib-name) + : # sources + $(depends-on) + : # requirements + Qt$(version)$(real_name)$(.suffix_debug) + $(requirements) + debug + : # default-build + : # usage-requirements + $(usage-defines) + $(.incprefix)/$(real_include) + ; + } + } + + # Make library explicit so that a simple qt5 will not bring in everything. + # And some components like QtDBus/Phonon may not be available on all platforms. + explicit $(lib-name) ; +} + +# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match. +# The exact match is the last one. + +# Get and from current toolset. +flags qt5.moc INCLUDES ; +flags qt5.moc DEFINES ; + +# need a newline for expansion of DEFINES and INCLUDES in the response file. +.nl = " +" ; + +# Processes headers to create Qt MetaObject information. Qt5-moc has its +# c++-parser, so pass INCLUDES and DEFINES. +# We use response file with one INCLUDE/DEFINE per line +# +actions moc +{ + $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" +} + +# When moccing files for include only, we don't need -f, otherwise the generated +# code will include the .cpp and we'll get duplicated symbols. +# +actions moc.inc +{ + $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))" +} + + +# Get extra options for RCC +flags qt5.rcc RCC_OPTIONS ; + +# Generates source files from resource files. +# +actions rcc +{ + $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) $(RCC_OPTIONS) -o $(<) +} + + +# Generates user-interface source from .ui files. +# +actions uic +{ + $(.BINPREFIX[-1])/uic $(>) -o $(<) +} + + +# Scanner for .qrc files. Look for the CDATA section of the tag. Ignore +# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for +# detailed documentation of the Qt Resource System. +# +class qrc-5-scanner : common-scanner +{ + rule pattern ( ) + { + return "(.*)" ; + } +} + + +# Wrapped files are "included". +scanner.register qrc-5-scanner : include ; diff --git a/src/boost/tools/build/src/tools/quickbook-config.jam b/src/boost/tools/build/src/tools/quickbook-config.jam new file mode 100644 index 000000000..c6c3e7fbc --- /dev/null +++ b/src/boost/tools/build/src/tools/quickbook-config.jam @@ -0,0 +1,44 @@ +#~ Copyright 2005 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. +#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Automatic configuration for BoostBook tools. To use, just import this module. 
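+#
+# A minimal sketch of the intended use (illustrative only, not part of the
+# module): because configuration is automatic, importing the module from a
+# configuration file is all that is needed, and passing --debug-configuration
+# to b2 makes the module report the executable it picked up via the
+# "notice:" line emitted below.
+#
+#   # user-config.jam
+#   import quickbook-config ;
+#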
+ +import os ; +import toolset : using ; + +if [ os.name ] = NT +{ + local boost-dir = ; + for local R in snapshot cvs 1.33.0 + { + boost-dir += [ W32_GETREG + "HKEY_LOCAL_MACHINE\\SOFTWARE\\Boost.org\\$(R)" + : "InstallRoot" ] ; + } + local quickbook-path = [ GLOB "$(boost-dir)\\bin" "\\Boost\\bin" : quickbook.exe ] ; + quickbook-path = $(quickbook-path[1]) ; + + if $(quickbook-path) + { + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO "notice:" using quickbook ":" $(quickbook-path) ; + } + using quickbook : $(quickbook-path) ; + } +} +else +{ + local quickbook-path = [ GLOB "/usr/local/bin" "/usr/bin" "/opt/bin" : quickbook ] ; + quickbook-path = $(quickbook-path[1]) ; + + if $(quickbook-path) + { + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO "notice:" using quickbook ":" $(quickbook-path) ; + } + using quickbook : $(quickbook-path) ; + } +} diff --git a/src/boost/tools/build/src/tools/quickbook.jam b/src/boost/tools/build/src/tools/quickbook.jam new file mode 100644 index 000000000..df53a43f5 --- /dev/null +++ b/src/boost/tools/build/src/tools/quickbook.jam @@ -0,0 +1,363 @@ +# +# Copyright (c) 2005 JoĂ£o Abecasis +# Copyright (c) 2005 Vladimir Prus +# Copyright (c) 2006 Rene Rivera +# +# Distributed under the Boost Software License, Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) +# + +# This toolset defines a generator to translate QuickBook to BoostBook. It can +# be used to generate nice (!) user documentation in different formats +# (pdf/html/...), from a single text file with simple markup. +# +# The toolset defines the QUICKBOOK type (file extension 'qbk') and +# a QUICKBOOK to XML (BOOSTBOOK) generator. +# +# +# =========================================================================== +# Q & A +# =========================================================================== +# +# If you don't know what this is all about, some Q & A will hopefully get you +# up to speed with QuickBook and this toolset. +# +# +# What is QuickBook ? +# +# QuickBook is a WikiWiki style documentation tool geared towards C++ +# documentation using simple rules and markup for simple formatting tasks. +# QuickBook extends the WikiWiki concept. Like the WikiWiki, QuickBook +# documents are simple text files. A single QuickBook document can +# generate a fully linked set of nice HTML and PostScript/PDF documents +# complete with images and syntax-colorized source code. +# +# +# Where can I get QuickBook ? +# +# Quickbook can be found in Boost's repository, under the tools/quickbook +# directory it was added there on Jan 2005, some time after the release of +# Boost v1.32.0 and has been an integral part of the Boost distribution +# since v1.33. +# +# Here's a link to the SVN repository: +# https://svn.boost.org/svn/boost/trunk/tools/quickbook +# +# And to QuickBook's QuickBook-generated docs: +# http://www.boost.org/doc/libs/release/tools/quickbook/index.html +# +# +# How do I use QuickBook and this toolset in my projects ? +# +# The minimal example is: +# +# using boostbook ; +# import quickbook ; +# +# boostbook my_docs : my_docs_source.qbk ; +# +# where my_docs is a target name and my_docs_source.qbk is a QuickBook +# file. The documentation format to be generated is determined by the +# boostbook toolset. By default html documentation should be generated, +# but you should check BoostBook's docs to be sure. +# +# +# What do I need ? +# +# You should start by setting up the BoostBook toolset. 
Please refer to +# boostbook.jam and the BoostBook documentation for information on how to +# do this. +# +# A QuickBook executable is also needed. The toolset will generate this +# executable if it can find the QuickBook sources. The following +# directories will be searched: +# +# BOOST_ROOT/tools/quickbook/ +# BOOST_BUILD_PATH/../../quickbook/ +# +# (BOOST_ROOT and BOOST_BUILD_PATH are environment variables) +# +# If QuickBook sources are not found the toolset will then try to use +# the shell command 'quickbook'. +# +# +# How do I provide a custom QuickBook executable ? +# +# You may put the following in your user-config.jam or site-config.jam: +# +# using quickbook : /path/to/quickbook ; +# +# or, if 'quickbook' can be found in your PATH, +# +# using quickbook : quickbook ; +# +# +# For convenience three alternatives are tried to get a QuickBook executable: +# +# 1. If the user points us to the a QuickBook executable, that is used. +# +# 2. Otherwise, we search for the QuickBook sources and compile QuickBook +# using the default toolset. +# +# 3. As a last resort, we rely on the shell for finding 'quickbook'. +# + +import boostbook ; +import "class" : new ; +import feature ; +import generators ; +import toolset ; +import type ; +import scanner ; +import project ; +import targets ; +import build-system ; +import path ; +import common ; +import errors ; + +# The one and only QUICKBOOK type! +type.register QUICKBOOK : qbk ; + +# shell command to run QuickBook +# targets to build QuickBook from sources. +feature.feature : : free ; +feature.feature : : free dependency ; +feature.feature : : free ; +feature.feature : : free ; +feature.feature : : free ; +feature.feature : : free ; + + +# quickbook-binary-generator handles generation of the QuickBook executable, by +# marking it as a dependency for QuickBook docs. +# +# If the user supplied the QuickBook command that will be used. +# +# Otherwise we search some sensible places for the QuickBook sources and compile +# from scratch using the default toolset. +# +# As a last resort we rely on the shell to find 'quickbook'. +# +class quickbook-binary-generator : generator +{ + import modules path targets quickbook ; + + rule run ( project name ? : property-set : sources * : multiple ? ) + { + quickbook.freeze-config ; + # QuickBook invocation command and dependencies. + local quickbook-binary = [ modules.peek quickbook : .quickbook-binary ] ; + local quickbook-binary-dependencies ; + + if ! $(quickbook-binary) + { + # If the QuickBook source directory was found, mark its main target + # as a dependency for the current project. Otherwise, try to find + # 'quickbook' in user's PATH + local quickbook-dir = [ modules.peek quickbook : .quickbook-dir ] ; + if $(quickbook-dir) + { + # Get the main-target in QuickBook directory. + local quickbook-main-target = [ targets.resolve-reference $(quickbook-dir) : $(project) ] ; + + # The first element are actual targets, the second are + # properties found in target-id. We do not care about these + # since we have passed the id ourselves. + quickbook-main-target = + [ $(quickbook-main-target[1]).main-target quickbook ] ; + + quickbook-binary-dependencies = + [ $(quickbook-main-target).generate [ $(property-set).propagated ] ] ; + + # Ignore usage-requirements returned as first element. + quickbook-binary-dependencies = $(quickbook-binary-dependencies[2-]) ; + + # Some toolsets generate extra targets (e.g. RSP). 
We must mark + # all targets as dependencies for the project, but we will only + # use the EXE target for quickbook-to-boostbook translation. + for local target in $(quickbook-binary-dependencies) + { + if [ $(target).type ] = EXE + { + quickbook-binary = + [ path.native + [ path.join + [ $(target).path ] + [ $(target).name ] + ] + ] ; + } + } + } + } + + # Add $(quickbook-binary-dependencies) as a dependency of the current + # project and set it as the feature for the + # quickbook-to-boostbook rule, below. + property-set = [ $(property-set).add-raw + $(quickbook-binary-dependencies) + $(quickbook-binary) + $(quickbook-binary-dependencies) + ] ; + + return [ generator.run $(project) $(name) : $(property-set) : $(sources) : $(multiple) ] ; + } +} + + +# Define a scanner for tracking QBK include dependencies. +# +class qbk-scanner : common-scanner +{ + rule pattern ( ) + { + return "\\[[ ]*include[ ]+([^]]+)\\]" + "\\[[ ]*include:[a-zA-Z0-9_]+[ ]+([^]]+)\\]" + "\\[[ ]*import[ ]+([^]]+)\\]" ; + } +} + + +scanner.register qbk-scanner : include ; + +type.set-scanner QUICKBOOK : qbk-scanner ; + + +# Initialization of toolset. +# +# Parameters: +# command ? -> path to QuickBook executable. +# +# When command is not supplied toolset will search for QuickBook directory and +# compile the executable from source. If that fails we still search the path for +# 'quickbook'. +# +rule init ( + command ? # path to the QuickBook executable. + ) +{ + if $(command) + { + if $(.config-frozen) + { + errors.user-error "quickbook: configuration cannot be changed after it has been used." ; + } + .command = $(command) ; + } +} + +rule freeze-config ( ) +{ + if ! $(.config-frozen) + { + .config-frozen = true ; + + # QuickBook invocation command and dependencies. + + .quickbook-binary = $(.command) ; + + if $(.quickbook-binary) + { + # Use user-supplied command. + .quickbook-binary = [ common.get-invocation-command quickbook : quickbook : $(.quickbook-binary) ] ; + } + else + { + # Search for QuickBook sources in sensible places, like + # $(BOOST_ROOT)/tools/quickbook + # $(BOOST_BUILD_PATH)/../../quickbook + + # And build quickbook executable from sources. + + local boost-root = [ modules.peek : BOOST_ROOT ] ; + local boost-build-path = [ build-system.location ] ; + + if $(boost-root) + { + .quickbook-dir += [ path.join $(boost-root) tools ] ; + } + + if $(boost-build-path) + { + .quickbook-dir += $(boost-build-path)/../.. ; + } + + .quickbook-dir = [ path.glob $(.quickbook-dir) : quickbook ] ; + + # If the QuickBook source directory was found, mark its main target + # as a dependency for the current project. Otherwise, try to find + # 'quickbook' in user's PATH + if $(.quickbook-dir) + { + .quickbook-dir = [ path.make $(.quickbook-dir[1]) ] ; + } + else + { + ECHO "QuickBook warning: The path to the quickbook executable was" ; + ECHO " not provided. Additionally, couldn't find QuickBook" ; + ECHO " sources searching in" ; + ECHO " * BOOST_ROOT/tools/quickbook" ; + ECHO " * BOOST_BUILD_PATH/../../quickbook" ; + ECHO " Will now try to find a precompiled executable by searching" ; + ECHO " the PATH for 'quickbook'." ; + ECHO " To disable this warning in the future, or to completely" ; + ECHO " avoid compilation of quickbook, you can explicitly set the" ; + ECHO " path to a quickbook executable command in user-config.jam" ; + ECHO " or site-config.jam with the call" ; + ECHO " using quickbook : /path/to/quickbook ;" ; + + # As a last resort, search for 'quickbook' command in path. 
Note + # that even if the 'quickbook' command is not found, + # get-invocation-command will still return 'quickbook' and might + # generate an error while generating the virtual-target. + + .quickbook-binary = [ common.get-invocation-command quickbook : quickbook ] ; + } + } + } +} + + +generators.register [ new quickbook-binary-generator quickbook.quickbook-to-boostbook : QUICKBOOK : XML ] ; + + +# shell command to run QuickBook +# targets to build QuickBook from sources. +toolset.flags quickbook.quickbook-to-boostbook QB-COMMAND ; +toolset.flags quickbook.quickbook-to-boostbook QB-DEPENDENCIES ; +toolset.flags quickbook.quickbook-to-boostbook INCLUDES ; +toolset.flags quickbook.quickbook-to-boostbook QB-DEFINES ; +toolset.flags quickbook.quickbook-to-boostbook QB-INDENT ; +toolset.flags quickbook.quickbook-to-boostbook QB-LINE-WIDTH ; +toolset.flags quickbook.quickbook-to-boostbook QB-OPTIONS on : --strict ; + + +rule quickbook-to-boostbook ( target : source : properties * ) +{ + # Signal dependency of quickbook sources on + # upon invocation of quickbook-to-boostbook. + DEPENDS $(target) : [ on $(target) return $(QB-DEPENDENCIES) ] ; +} + + +actions quickbook-to-boostbook +{ + "$(QB-COMMAND)" -I"$(INCLUDES)" -D"$(QB-DEFINES)" --indent="$(QB-INDENT)" --linewidth="$(QB-LINE-WIDTH)" $(QB-OPTIONS) --output-file="$(1)" "$(2)" +} + + +# Declare a main target to convert a quickbook source into a boostbook XML file. +# +rule to-boostbook ( target-name : sources * : requirements * : default-build * ) +{ + local project = [ project.current ] ; + + targets.main-target-alternative + [ new typed-target $(target-name) : $(project) : XML + : [ targets.main-target-sources $(sources) : $(target-name) ] + : [ targets.main-target-requirements $(requirements) : $(project) ] + : [ targets.main-target-default-build $(default-build) : $(project) ] + ] ; +} diff --git a/src/boost/tools/build/src/tools/rc.jam b/src/boost/tools/build/src/tools/rc.jam new file mode 100644 index 000000000..ce94b9b82 --- /dev/null +++ b/src/boost/tools/build/src/tools/rc.jam @@ -0,0 +1,155 @@ +# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. +# +# Copyright (c) 2006 Rene Rivera. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import generators ; +import feature ; +import scanner ; +import toolset : flags ; +import type ; + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +type.register RC : rc ; + +rule init ( ) +{ +} + +# Configures a new resource compilation command specific to a condition, +# usually a toolset selection condition. The possible options are: +# +# * (rc|windres) - Indicates the type of options the command +# accepts. +# +# Even though the arguments are all optional, only when a command, condition, +# and at minimum the rc-type option are given will the command be configured. +# This is so that callers don't have to check auto-configuration values before +# calling this. And still get the functionality of build failures when the +# resource compiler can not be found. +# +rule configure ( command ? : condition ? 
: options * ) +{ + local rc-type = [ feature.get-values : $(options) ] ; + + if $(command) && $(condition) && $(rc-type) + { + flags rc.compile.resource .RC $(condition) : $(command) ; + flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ; + flags rc.compile.resource DEFINES ; + flags rc.compile.resource INCLUDES ; + if $(.debug-configuration) + { + ECHO "notice:" using rc compiler "::" $(condition) "::" $(command) ; + } + } +} + +rule compile.resource ( target : sources * : properties * ) +{ + local rc-type = [ on $(target) return $(.RC_TYPE) ] ; + rc-type ?= null ; + compile.resource.$(rc-type) $(target) : $(sources[1]) ; +} + +actions compile.resource.rc +{ + "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)" +} + +actions compile.resource.windres +{ + "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)" +} + +actions quietly compile.resource.null +{ + as /dev/null -o "$(<)" +} + +# Since it is common practice to write +# exe hello : hello.cpp hello.rc +# we change the name of object created from RC file, to avoid conflict with +# hello.cpp. The reason we generate OBJ and not RES, is that gcc does not seem +# to like RES files, but works OK with OBJ (see +# http://article.gmane.org/gmane.comp.lib.boost.build/5643). +# +# Using 'register-c-compiler' adds the build directory to INCLUDES +generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ; + +# Register scanner for resources +class res-scanner : scanner +{ + import path ; + import regex ; + import scanner ; + import virtual-target ; + + rule __init__ ( includes * ) + { + scanner.__init__ ; + self.includes = $(includes) ; + } + + rule pattern ( ) + { + return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ; + } + + rule process ( target : matches * : binding ) + { + local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ; + local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ; + local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ; + + # Icons and other includes may be referenced as + # + # IDR_MAINFRAME ICON "res\\icon.ico" + # + # so we have to replace double backslashes with single ones. + res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ; + + # CONSIDER: the new scoping rules seem to defeat "on target" variables. + local g = [ on $(target) return $(HDRGRIST) ] ; + local b = [ NORMALIZE_PATH $(binding:D) ] ; + + # Attach binding of including file to included targets. When a target is + # directly created from a virtual target this extra information is + # unnecessary. But in other cases, it allows us to distinguish between + # two headers of the same name included from different places. We do not + # need this extra information for angle includes, since they should not + # depend on the including file (we can not get literal "." in the + # include path). + local g2 = $(g)"#"$(b) ; + + angle = $(angle:G=$(g)) ; + quoted = $(quoted:G=$(g2)) ; + res = $(res:G=$(g2)) ; + + local all = $(angle) $(quoted) $(res) ; + + INCLUDES $(target) : $(all) ; + NOCARE $(all) ; + SEARCH on $(angle) = $(self.includes:G=) ; + SEARCH on $(quoted) $(res) = $(b) $(self.includes:G=) ; + + # Just propagate the current scanner to includes, in hope that includes + # do not change scanners. 
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ; + + ISFILE $(all) ; + } +} + +scanner.register res-scanner : include ; +type.set-scanner RC : res-scanner ; diff --git a/src/boost/tools/build/src/tools/rc.py b/src/boost/tools/build/src/tools/rc.py new file mode 100644 index 000000000..f4e057104 --- /dev/null +++ b/src/boost/tools/build/src/tools/rc.py @@ -0,0 +1,197 @@ +# Status: being ported by Steven Watanabe +# Base revision: 47077 +# +# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. +# +# Copyright (c) 2006 Rene Rivera. +# +# Copyright (c) 2008 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +##import type ; +##import generators ; +##import feature ; +##import errors ; +##import scanner ; +##import toolset : flags ; + +import os.path +import re + +import bjam + +from b2.build import type, toolset, generators, scanner, feature +from b2.exceptions import AlreadyDefined +from b2.tools import builtin +from b2.util import regex +from b2.build.toolset import flags +from b2.manager import get_manager +from b2.util import utility + +__debug = None + +def debug(): + global __debug + if __debug is None: + __debug = "--debug-configuration" in bjam.variable("ARGV") + return __debug + +type.register('RC', ['rc']) + +def init(): + pass + +def configure (command = None, condition = None, options = None): + """ + Configures a new resource compilation command specific to a condition, + usually a toolset selection condition. The possible options are: + + * (rc|windres) - Indicates the type of options the command + accepts. + + Even though the arguments are all optional, only when a command, condition, + and at minimum the rc-type option are given will the command be configured. + This is so that callers don't have to check auto-configuration values + before calling this. And still get the functionality of build failures when + the resource compiler can't be found. + """ + rc_type = feature.get_values('', options) + if rc_type: + assert(len(rc_type) == 1) + rc_type = rc_type[0] + + if command and condition and rc_type: + flags('rc.compile.resource', '.RC', condition, command) + flags('rc.compile.resource', '.RC_TYPE', condition, [rc_type.lower()]) + flags('rc.compile.resource', 'DEFINES', [], ['']) + flags('rc.compile.resource', 'INCLUDES', [], ['']) + if debug(): + print 'notice: using rc compiler ::', condition, '::', command + +engine = get_manager().engine() + +class RCAction: + """Class representing bjam action defined from Python. + The function must register the action to execute.""" + + def __init__(self, action_name, function): + self.action_name = action_name + self.function = function + + def __call__(self, targets, sources, property_set): + if self.function: + self.function(targets, sources, property_set) + +# FIXME: What is the proper way to dispatch actions? 
+def rc_register_action(action_name, function = None): + global engine + if action_name in engine.actions: + raise AlreadyDefined("Bjam action %s is already defined" % action_name) + engine.actions[action_name] = RCAction(action_name, function) + +def rc_compile_resource(targets, sources, properties): + rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE') + rc_type = rc_type[0] if rc_type else '' + global engine + engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties) + +rc_register_action('rc.compile.resource', rc_compile_resource) + + +engine.register_action( + 'rc.compile.resource.rc', + '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"') + +engine.register_action( + 'rc.compile.resource.windres', + '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"') + +# FIXME: this was originally declared quietly +engine.register_action( + 'compile.resource.null', + 'as /dev/null -o "$(<)"') + +# Since it's a common practice to write +# exe hello : hello.cpp hello.rc +# we change the name of object created from RC file, to +# avoid conflict with hello.cpp. +# The reason we generate OBJ and not RES, is that gcc does not +# seem to like RES files, but works OK with OBJ. +# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/ +# +# Using 'register-c-compiler' adds the build directory to INCLUDES +# FIXME: switch to generators +builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], []) + +__angle_include_re = "#include[ ]*<([^<]+)>" + +# Register scanner for resources +class ResScanner(scanner.Scanner): + + def __init__(self, includes): + scanner.__init__ ; + self.includes = includes + + def pattern(self): + return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ + "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ; + + def process(self, target, matches, binding): + binding = binding[0] + angle = regex.transform(matches, "#include[ ]*<([^<]+)>") + quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"") + res = regex.transform(matches, + "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ + "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4]) + + # Icons and other includes may referenced as + # + # IDR_MAINFRAME ICON "res\\icon.ico" + # + # so we have to replace double backslashes to single ones. + res = [ re.sub(r'\\\\', '/', match) for match in res if match is not None ] + + # CONSIDER: the new scoping rule seem to defeat "on target" variables. + g = bjam.call('get-target-variable', target, 'HDRGRIST')[0] + b = os.path.normpath(os.path.dirname(binding)) + + # Attach binding of including file to included targets. + # When target is directly created from virtual target + # this extra information is unnecessary. But in other + # cases, it allows to distinguish between two headers of the + # same name included from different places. + # We don't need this extra information for angle includes, + # since they should not depend on including file (we can't + # get literal "." in include path). 
+ g2 = g + "#" + b + + g = "<" + g + ">" + g2 = "<" + g2 + ">" + angle = [g + x for x in angle] + quoted = [g2 + x for x in quoted] + res = [g2 + x for x in res] + + all = angle + quoted + + bjam.call('mark-included', target, all) + + engine = get_manager().engine() + + engine.add_dependency(target, res) + bjam.call('NOCARE', all + res) + engine.set_target_variable(angle, 'SEARCH', [utility.get_value(inc) for inc in self.includes]) + engine.set_target_variable(quoted, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes]) + engine.set_target_variable(res, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes]) + + # Just propagate current scanner to includes, in a hope + # that includes do not change scanners. + get_manager().scanners().propagate(self, angle + quoted) + +scanner.register(ResScanner, 'include') +type.set_scanner('RC', ResScanner) diff --git a/src/boost/tools/build/src/tools/sass.jam b/src/boost/tools/build/src/tools/sass.jam new file mode 100644 index 000000000..ef3b931ab --- /dev/null +++ b/src/boost/tools/build/src/tools/sass.jam @@ -0,0 +1,193 @@ +#| +Copyright 2017 Dmitry Arkhipov +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +import common ; +import feature ; +import generators ; +import modules ; +import sequence ; +import toolset ; +import "class" : new ; + +#| tag::doc[] + += Sass + +This tool converts SASS and SCSS files into CSS. This tool explicitly supports +both the version written in C (sassc) and the original Ruby implementation +(scss) but other variants might also work. In addition to tool-specific +features, described in this section, the tool recognizes features `` +and ``. + +|# # end::doc[] + +feature.feature sass : : implicit propagated symmetric ; + +#| tag::doc[] + +== Feature: `sass-style` + +Sets the output style. Available values are + +* `nested`: each property is put on its own line, rules are indented based on + how deeply they are nested; +* `expanded`: each property is put on its own line, rules are not indented; +* `compact`: each rule is put on a single line, nested rules occupy adjacent + lines, while groups of unrelated rules are separated by newlines; +* `compressed`: takes minimum amount of space: all unnecessary whitespace is + removed, property values are compressed to have minimal representation. + +The feature is `optional` and is not `propagated` to dependent targets. If no +style is specified, then, if property set contains property `on`, +`compressed` style is selected. Otherwise, `nested` style is selected. + +|# # end::doc[] + +feature.subfeature sass + : style + : nested expanded compact compressed + : optional + ; + +#| tag::doc[] + +== Feature: `sass-line-numbers` + +Enables emitting comments showing original line numbers for rules. This can be +useful for debugging a stylesheet. Available values are `on` and `off`. The +feature is `optional` and is not `propagated` to dependent targets. If no value +for this feature is specified, then one is copied from the feature +`debug-symbols`. + +|# # end::doc[] + +feature.subfeature sass : line-numbers : on off : optional ; + +#| tag::doc[] + +== Initialization + +To use the `sass` tool you need to declare it in a configuration file with the +`using` rule. The initialization takes the following arguments: + +* `command`: the command, with any extra arguments, to execute. 
+ +For example you could insert the following in your `user-config.jam`: + +``` +using sass : /usr/local/bin/psass -p2 ; # Perl libsass-based version +``` + +If no `command` is given, `sassc` is tried, after which `scss` is tried. + +|# # end::doc[] + +rule init ( command * ) +{ + if ! $(.initialized) + { + # Setup only if we were called via "using .. ;" + .initialized = true ; + + # Register generators + generators.register [ new sass-generator sass.convert : SASS : CSS ] ; + } + + # Setting up command + if ! $(command) + { + # If none was specified by the user, first try sassc, then scss + SASS = [ common.find-tool sassc ] ; + SASS ?= [ common.find-tool scss ] ; + } + else + { + # Otherwise we attempt to resolve each component of the command to + # account for script interpreter wrappers. + SASS = [ sequence.transform maybe-find-tool : $(command) ] ; + } +} + +class sass-generator : generator +{ + import property-set ; + + rule run ( project name ? : property-set : sources + ) + { + local style = [ $(property-set).get ] ; + local line-numbers = [ $(property-set).get ] ; + + # Only one source file is sensible; we accept only sass and scss files + if ( ! $(sources[2]) ) && ( [ $(sources[1]).type ] in SASS ) + { + # If no output name was given, guess it from sources + if ! $(name) + { + name = [ generator.determine-output-name $(sources) ] ; + } + + # If output style was not given, then it is determined by + # feature + if ! $(style) + { + switch [ $(property-set).get ] + { + case "off" : style = nested ; + case * : style = compressed ; + } + } + + # If line-numbers feature wasn't specified, copy it from + # + line-numbers ?= [ $(property-set).get ] ; + } + + # We build a reduced property set so that we are not toolset dependent. + local raw-set + = $(style) + $(line-numbers) + ; + raw-set += + [ sequence.filter recognized-feature : [ $(property-set).raw ] ] ; + raw-set = [ feature.expand-composites $(raw-set) ] ; + raw-set += [ $(property-set).incidental ] ; + property-set = [ property-set.create $(raw-set) ] ; + return + [ generator.run $(project) $(name) + : $(property-set) + : $(sources) + ] ; + } + + local rule recognized-feature ( feature ) + { + local result ; + if $(feature:G) in + { + result = true ; + } + return $(result) ; + } +} + +_ = " " ; +toolset.flags sass STYLE : ; +toolset.flags sass LINE_NUMBERS on : --line-numbers ; +toolset.flags sass INCLUDES : ; +toolset.flags sass FLAGS : ; + +actions convert +{ + "$(SASS)" -t$(_)"$(STYLE)" $(LINE_NUMBERS) -I$(_)"$(INCLUDES)" $(FLAGS) "$(>)" $(_)"$(<)" +} + +local rule maybe-find-tool ( command ) +{ + local tool = [ common.find-tool $(command) ] ; + tool ?= $(command) ; + return $(tool) ; +} diff --git a/src/boost/tools/build/src/tools/saxonhe.jam b/src/boost/tools/build/src/tools/saxonhe.jam new file mode 100644 index 000000000..69ad16df2 --- /dev/null +++ b/src/boost/tools/build/src/tools/saxonhe.jam @@ -0,0 +1,53 @@ +# +# Copyright (c) 2018 Damian Jarek (damian dot jarek93 at gmail dot com) +# Copyright (c) 2019 Richard Hodges (hodges dot r at gmail dot com) +# +# Distributed under the Boost Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +# + +import common ; +import os ; + +rule init ( saxonhe_jar ? : java_exe ? 
) +{ + .java_exe = [ common.get-invocation-command saxonhe : java : $(java_exe) : ] ; + if $(saxonhe_jar) + { + .saxonhe_jar = $(saxonhe_jar) ; + } + else + { + local jar = [ GLOB "/usr/share/java/saxon/" "/usr/share/java/" : Saxon-HE.jar ] ; + .saxonhe_jar = $(jar[1]) ; + } +} + +# +# execute the saxonhe jar file passing files as inputs and outputs. +# +actions saxonhe +{ + "$(.java_exe)" -jar "$(.saxonhe_jar)" -o:"$(<)" -s:"$(>[1])" -xsl:"$(>[2])" +} + +# +# execute the saxonhe jar file passing directories as inputs and outputs. +# saxonhe requires that the output directory already exists +# +if [ os.on-windows ] +{ + actions saxonhe_dir + { + if not exist "$(<)\\" mkdir "$(<)" + "$(.java_exe)" -jar "$(.saxonhe_jar)" -o:"$(<)" -s:"$(>[1])" -xsl:"$(>[2])" + } +} +else +{ + actions saxonhe_dir + { + mkdir -p "$(<)" + "$(.java_exe)" -jar "$(.saxonhe_jar)" -o:"$(<)" -s:"$(>[1])" -xsl:"$(>[2])" + } +} diff --git a/src/boost/tools/build/src/tools/stage.jam b/src/boost/tools/build/src/tools/stage.jam new file mode 100644 index 000000000..325129dc8 --- /dev/null +++ b/src/boost/tools/build/src/tools/stage.jam @@ -0,0 +1,905 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2005, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Copyright 2020 Dmitry Arkhipov +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.modules.stage]] += stage +This module defines the `install` rule, used to copy a set of targets to a +single location. + +|# # end::doc[] + + +import "class" : new ; +import feature ; +import generators ; +import option ; +import path ; +import project ; +import property ; +import targets ; +import type ; +import types/register ; +import virtual-target ; + + +feature.feature : off on : incidental ; +feature.feature : : free incidental ; +feature.feature : : free path ; +feature.feature : : free incidental ; + +# If 'on', version symlinks for shared libraries will not be created. Affects +# Unix builds only. +feature.feature : on : optional incidental ; + + +#| tag::features-install-package-doc[] + +[[bbv2.builtin.features.install-package]]`install-package`:: +Specifies the name of the package to which installed files belong. This is +used for default installation prefix on certain platforms. + +|# # end::features-install-package-doc[] + +feature.feature install-package : : free ; + + +#| tag::doc[] + +[[bbv2.reference.modules.stage.add-install-dir]] +. `rule add-install-dir ( name : suffix ? : parent ? : options * )` ++ +Defines a named installation directory. ++ +For example, `add-install-dir foo : bar : baz ;` creates feature +<`>> and adds support for +named directory `(foo)` to `install` rule. The rule will try to use the value +of `` property if present, otherwise will fallback to `(baz)/bar`. ++ +Arguments: ++ +* `name`: the name of the directory. +* `suffix`: the path suffix appended to the parent named directory. +* `parent`: the optional name of parent named directory. +* `options`: special options that modify treatment of the directory. + Allowed options: ++ +** `package-suffix`: append the package name to the default value. For example: ++ +[source] +---- +add-install-dir foo : bar : baz : package-suffix ; +install (foo) : a : xyz ; +---- ++ +installs `a` into `(baz)/bar/xyz`. + +|# # end::doc[] + +.dirs = ; +rule add-install-dir ( name : suffix ? : parent ? 
: options * ) +{ + suffix ?= "" ; + if $(name) in $(.dirs) + { + import errors ; + errors.error Directory name $(name) is already registered. ; + } + feature.feature install-$(name) : : free ; + .dirs += $(name) ; + .dir.$(name) = $(suffix) $(parent) ; + .dir.$(name).options = $(options) ; +} + + +#| tag::doc[] + +. `rule install-dir-names ( )` ++ +Returns names of all registered installation directories. + +|# # end::doc[] + +rule install-dir-names ( ) +{ + return $(.dirs) ; +} + + +#| tag::features-install-prefix-doc[] + +[[bbv2.builtin.features.install-prefix]]`install-`:: +Specifies installation prefix for <> targets. +These named installation prefixes are registered by default: ++ +* `prefix`: `C:\` if `windows` is in the property set, + `/usr/local` otherwise +* `exec-prefix`: `(prefix)` +* `bindir`: `(exec-prefix)/bin` +* `sbindir`: `(exec-prefix)/sbin` +* `libexecdir`: `(exec-prefix)/libexec` +* `libdir`: `(exec-prefix)/lib` +* `datarootdir`: `(prefix)/share` +* `datadir`: `(datarootdir)` +* `sysconfdir`: `(prefix)/etc` +* `sharedstatedir`: `(prefix)/com` +* `localstatedir`: `(prefix)/var` +* `runstatedir`: `(localstatedir)/run` +* `includedir`: `(prefix)/include` +* `oldincludedir`: `/usr/include` +* `docdir`: `(datarootdir)/doc/` +* `infodir`: `(datarootdir)/info` +* `htmldir`: `(docdir)` +* `dvidir` : `(docdir)` +* `pdfdir` : `(docdir)` +* `psdir` : `(docdir)` +* `lispdir`: `(datarootdir)/emacs/site-lisp` +* `localedir`: `(datarootdir)/locale` +* `mandir`: `(datarootdir)/man` + +If more are necessary, they could be added with +<>. + +|# # end::features-install-prefix-doc[] + +feature.feature install-prefix : : free path ; +add-install-dir exec-prefix : "" : prefix ; +add-install-dir bindir : bin : exec-prefix ; +add-install-dir sbindir : sbin : exec-prefix ; +add-install-dir libexecdir : libexec : exec-prefix ; +add-install-dir libdir : lib : exec-prefix ; +add-install-dir datarootdir : share : prefix ; +add-install-dir datadir : "" : datarootdir ; +add-install-dir sysconfdir : etc : prefix ; +add-install-dir sharedstatedir : com : prefix ; +add-install-dir localstatedir : var : prefix ; +add-install-dir runstatedir : run : localstatedir ; +add-install-dir includedir : "include" : prefix ; +add-install-dir oldincludedir : /usr/include ; +add-install-dir docdir : doc : datarootdir : package-suffix ; +add-install-dir infodir : info : datarootdir ; +add-install-dir htmldir : "" : docdir ; +add-install-dir dvidir : "" : docdir ; +add-install-dir pdfdir : "" : docdir ; +add-install-dir psdir : "" : docdir ; +add-install-dir lispdir : emacs/site-lisp : datarootdir ; +add-install-dir localedir : locale : datarootdir ; +add-install-dir mandir : man : datarootdir ; + + +#| tag::features-staging-prefix-doc[] + +[[bbv2.builtin.features.staging-prefix]]`staging-prefix`:: +Specifies staging prefix for <> targets. +If present, it will be used instead of the path to named directory `prefix`. +Example: ++ +[source] +---- +project : requirements x/y/z ; +install a1 : a : (bindir) ; # installs into x/y/z/bin +install a2 : a : (bindir) q ; # installs into q/bin +---- +The feature is useful when you cannot (or don't want to) put build artfiacts +into their intented locations during the build (such as when cross-compiling), +but still need to communicate those intended locations to the build system, +e.g. to generate configuration files. 
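+
+For instance (a sketch; the target name `hello` and the directory values are
+only illustrative), a cross build might declare the intended final prefix
+while the actual copies go into a local staging tree:
++
+[source]
+----
+project : requirements <install-prefix>/usr <staging-prefix>stage ;
+install install-bin : hello : <location>(bindir) ; # staged into stage/bin
+----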
+ +|# # end::features-staging-prefix-doc[] + +feature.feature staging-prefix : : free path ; + + +class install-target-class : basic-target +{ + import "class" : new ; + import feature ; + import generators ; + import path ; + import project ; + import property ; + import property-set ; + import stage ; + import type ; + + rule __init__ ( name-and-dir : project : sources * : requirements * : + default-build * : usage-requirements * ) + { + # The usage-requirements specified here are ignored but are taken as a + # parameter to have this metatarget class have the same standard + # instantiation interface as all the other Boost Build metatarget + # classes. + basic-target.__init__ $(name-and-dir) : $(project) : $(sources) : + $(requirements) : $(default-build) ; + } + + # If is not set, sets it based on the project data. + # Either way, expands installation prefixes. + rule update-location ( property-set ) + { + local location = [ $(property-set).get ] ; + local project-location = [ $(self.project).get location ] ; + + local prefix ; + local suffix = $(location) ; + if $(suffix) + { + local rel = [ path.relative $(suffix) $(project-location) + : no-error ] ; + if not-a-child != $(rel) + { + suffix = $(rel) ; + } + } + suffix ?= $(self.name) ; + local matches = [ MATCH "^\\((.+)\\)(/(.*))?$" : $(suffix) ] ; + + # if location can be split into named directory and optional trailing + # path, do the split and expand the name into path + if $(matches) + { + suffix = $(matches[3]) ; + suffix ?= "" ; + local package-name = [ stage.get-package-name $(property-set) + : [ $(self.project).project-module ] ] ; + prefix = [ stage.get-dir $(matches[1]) : $(property-set) + : $(package-name) : staged ] ; + } + # prefix location with the project's path + else if ! $(location) + { + prefix = $(project-location) ; + } + + # only modify location if it's necessary + if $(prefix) + { + suffix = [ path.root $(suffix) $(prefix) ] ; + local properties = [ $(property-set).raw ] ; + properties = [ property.change $(properties) : ] ; + property-set = [ property-set.create $(properties) $(suffix) ] ; + } + + return $(property-set) ; + } + + # Takes a target that is installed and a property set which is used when + # installing. + # + rule adjust-properties ( target : build-property-set ) + { + local ps-raw ; + local a = [ $(target).action ] ; + if $(a) + { + local ps = [ $(a).properties ] ; + ps-raw = [ $(ps).raw ] ; + + # Unless true is in properties, which can happen + # only if the user has explicitly requested it, nuke all + # properties. + if [ $(build-property-set).get ] != true + { + ps-raw = [ property.change $(ps-raw) : ] ; + } + + # If any properties were specified for installing, add + # them. + local l = [ $(build-property-set).get ] ; + ps-raw += $(l:G=) ; + + # Also copy feature from current build set, to be used + # for relinking. + local l = [ $(build-property-set).get ] ; + ps-raw += $(l:G=) ; + + # Remove the feature on original targets. + ps-raw = [ property.change $(ps-raw) : ] ; + + # And . If stage target has another stage target in + # sources, then we shall get virtual targets with the + # property set. 
+ ps-raw = [ property.change $(ps-raw) : ] ; + } + + local d = [ $(build-property-set).get ] ; + ps-raw += $(d:G=) ; + + local d = [ $(build-property-set).get ] ; + ps-raw += $(d:G=) ; + + local ns = [ $(build-property-set).get ] ; + ps-raw += $(ns:G=) ; + + local d = [ $(build-property-set).get ] ; + # Make the path absolute: we shall use it to compute relative paths and + # making the path absolute will help. + if $(d) + { + d = [ path.root $(d) [ path.pwd ] ] ; + ps-raw += $(d:G=) ; + } + + if $(ps-raw) + { + return [ property-set.create $(ps-raw) ] ; + } + else + { + return [ property-set.empty ] ; + } + } + + rule construct ( name : source-targets * : property-set ) + { + source-targets = [ targets-to-stage $(source-targets) : + $(property-set) ] ; + + property-set = [ update-location $(property-set) ] ; + + local ename = [ $(property-set).get ] ; + + if $(ename) && $(source-targets[2]) + { + import errors : error : $(__name__) : errors.error ; + errors.error When property is used "in" 'install', only one + source is allowed. ; + } + + local result ; + for local i in $(source-targets) + { + local staged-targets ; + + local new-properties = [ adjust-properties $(i) : + $(property-set) ] ; + + # See if something special should be done when staging this type. It + # is indicated by the presence of a special "INSTALLED_" type. + local t = [ $(i).type ] ; + if $(t) && [ type.registered INSTALLED_$(t) ] + { + if $(ename) + { + import errors : error : $(__name__) : errors.error ; + errors.error In "'install':" property specified with + target that requires relinking. ; + } + else + { + local targets = [ generators.construct $(self.project) + $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ; + staged-targets += $(targets[2-]) ; + } + } + else + { + staged-targets = [ stage.copy-file $(self.project) $(ename) : + $(i) : $(new-properties) ] ; + } + + if ! $(staged-targets) + { + import errors : error : $(__name__) : errors.error ; + errors.error Unable to generate staged version of + [ $(source).str ] ; + } + + for t in $(staged-targets) + { + result += [ virtual-target.register $(t) ] ; + } + } + + return [ property-set.empty ] $(result) ; + } + + # Given the list of source targets explicitly passed to 'stage', returns the + # list of targets which must be staged. + # + rule targets-to-stage ( source-targets * : property-set ) + { + local result ; + + # Traverse the dependencies, if needed. + if [ $(property-set).get ] = "on" + { + source-targets = [ collect-targets $(source-targets) ] ; + } + + # Filter the target types, if needed. + local included-types = [ $(property-set).get ] ; + for local r in $(source-targets) + { + local ty = [ $(r).type ] ; + if $(ty) + { + # Do not stage searched libs. + if $(ty) != SEARCHED_LIB + { + if $(included-types) + { + if [ include-type $(ty) : $(included-types) ] + { + result += $(r) ; + } + } + else + { + result += $(r) ; + } + } + } + else if ! $(included-types) + { + # Do not install typeless targets if there is an explicit list + # of allowed types. + result += $(r) ; + } + } + + return $(result) ; + } + + # CONSIDER: figure out why we can not use virtual-target.traverse here. 
+ # + rule collect-targets ( targets * ) + { + # Find subvariants + local s ; + for local t in $(targets) + { + s += [ $(t).creating-subvariant ] ; + } + s = [ sequence.unique $(s) ] ; + + local result = [ new set ] ; + $(result).add $(targets) ; + + for local i in $(s) + { + $(i).all-referenced-targets $(result) ; + } + local result2 ; + for local r in [ $(result).list ] + { + if $(r:G) != + { + result2 += $(r:G=) ; + } + } + DELETE_MODULE $(result) ; + return [ sequence.unique $(result2) ] ; + } + + rule skip-from-usage-requirements ( ) + { + } + + # Returns true iff 'type' is subtype of some element of 'types-to-include'. + # + local rule include-type ( type : types-to-include * ) + { + local found ; + while $(types-to-include) && ! $(found) + { + if [ type.is-subtype $(type) $(types-to-include[1]) ] + { + found = true ; + } + types-to-include = $(types-to-include[2-]) ; + } + + return $(found) ; + } +} + + +# Creates a copy of target 'source'. The 'properties' object should have a +# property which specifies where the target must be placed. +# +rule copy-file ( project name ? : source : properties ) +{ + name ?= [ $(source).name ] ; + local relative ; + + local new-a = [ new non-scanning-action $(source) : common.copy : + $(properties) ] ; + local source-root = [ $(properties).get ] ; + if $(source-root) + { + # Get the real path of the target. We probably need to strip relative + # path from the target name at construction. + local path = [ $(source).path ] ; + path = [ path.root $(name:D) $(path) ] ; + # Make the path absolute. Otherwise, it would be hard to compute the + # relative path. The 'source-root' is already absolute, see the + # 'adjust-properties' method above. + path = [ path.root $(path) [ path.pwd ] ] ; + + relative = [ path.relative-to $(source-root) $(path) ] ; + } + + # Note: Using $(name:D=$(relative)) might be faster here, but then we would + # need to explicitly check that relative is not ".", otherwise we might get + # paths like '/boost/.', try to create it and mkdir would obviously + # fail. + name = [ path.join $(relative) $(name:D=) ] ; + + return [ new file-target $(name) exact : [ $(source).type ] : $(project) : + $(new-a) ] ; +} + + +rule symlink ( name : project : source : properties ) +{ + local a = [ new action $(source) : symlink.ln : $(properties) ] ; + local t = [ new file-target $(name) exact : [ $(source).type ] : $(project) + : $(a) ] ; + return [ virtual-target.register $(t) ] ; +} + + +rule relink-file ( project : source : property-set ) +{ + local action = [ $(source).action ] ; + local cloned-action = [ virtual-target.clone-action $(action) : $(project) : + "" : $(property-set) ] ; + return [ $(cloned-action).targets ] ; +} + + +# Declare installed version of the EXE type. Generator for this type will cause +# relinking to the new location. +type.register INSTALLED_EXE : : EXE ; + + +class installed-exe-generator : generator +{ + import type ; + import property-set ; + import modules ; + import stage ; + + rule __init__ ( ) + { + generator.__init__ install-exe : EXE : INSTALLED_EXE ; + } + + rule run ( project name ? : property-set : source : multiple ? ) + { + local stage-rule = stage.copy-file ; + + if ! [ $(property-set).get ] in NT CYGWIN && + ! [ $(property-set).get ] in windows cygwin + { + # If dll-path properties have been changed for the stage target, + # relink instead of copying. 
+ local a = [ $(source).action ] ; + local p = [ $(a).properties ] ; + local original = [ $(p).get ] ; + local current = [ $(property-set).get ] ; + + if $(current) != $(original) + { + stage-rule = stage.relink-file ; + } + } + + return [ $(stage-rule) $(project) : $(source) : $(property-set) ] ; + } +} + + +generators.register [ new installed-exe-generator ] ; + + +# Installing a shared link on Unix might cause a creation of versioned symbolic +# links. +type.register INSTALLED_SHARED_LIB : : SHARED_LIB ; + + +class installed-shared-lib-generator : generator +{ + import type ; + import property-set ; + import modules ; + import stage ; + + rule __init__ ( ) + { + generator.__init__ install-shared-lib : SHARED_LIB : + INSTALLED_SHARED_LIB ; + } + + rule run ( project name ? : property-set : source : multiple ? ) + { + if [ $(property-set).get ] in NT CYGWIN || + [ $(property-set).get ] in windows cygwin + { + local copied = [ stage.copy-file $(project) : $(source) : + $(property-set) ] ; + return [ virtual-target.register $(copied) ] ; + } + else + { + local a = [ $(source).action ] ; + local copied ; + if ! $(a) + { + # Non-derived file, just copy. + copied = [ stage.copy-file $(project) : $(source) : + $(property-set) ] ; + } + else + { + local cp = [ $(a).properties ] ; + local current-dll-path = [ $(cp).get ] ; + local new-dll-path = [ $(property-set).get ] ; + + if $(current-dll-path) != $(new-dll-path) + { + # Rpath changed, need to relink. + copied = [ stage.relink-file $(project) : $(source) : + $(property-set) ] ; + } + else + { + copied = [ stage.copy-file $(project) : $(source) : + $(property-set) ] ; + } + } + + copied = [ virtual-target.register $(copied) ] ; + + local result = $(copied) ; + # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and + # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY + # symbolic links. + local m = [ MATCH + "(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$" : + [ $(copied).name ] ] ; + if $(m) + { + # Symlink without version at all is used to make + # -lsome_library work. + result += [ stage.symlink $(m[1]) : $(project) : $(copied) : + $(property-set) ] ; + + # Symlinks of some libfoo.N and libfoo.N.M are used so that + # library can found at runtime, if libfoo.N.M.X has soname of + # libfoo.N. That happens when the library makes some binary + # compatibility guarantees. If not, it is possible to skip those + # symlinks. + local suppress = [ $(property-set).get + ] ; + + if $(suppress) != "on" + { + result += [ stage.symlink $(m[1]).$(m[2]) : $(project) : + $(copied) : $(property-set) ] ; + result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : + $(project) : $(copied) : $(property-set) ] ; + } + } + + return $(result) ; + } + } +} + +generators.register [ new installed-shared-lib-generator ] ; + + +#| tag::doc[] + +. `rule get-dir ( name : property-set : package-name : flags * )` ++ +Returns the path to a named installation directory. For a given `name=xyz` the +rule uses the value of `` property if it is present in +`property-set`. Otherwise it tries to construct the default value of the path +recursively getting the path to ``name``'s registered base named directory and +relative path. 
For example: ++ +[source] +---- +stage.add-install-dir foo : bar : baz ; + +local ps = [ property-set.create x/y/z ] ; +echo [ stage.get-dir foo : $(ps) : $(__name__) ] ; # outputs x/y/z + +ps = [ property-set.create a/b/c/d ] ; +echo [ stage.get-dir foo : $(ps) : $(__name__) ] ; # outputs a/b/c/d/bar +---- ++ +The argument `package-name` is used to construct the path for named directories +that were registered with `package-suffix` option and also to construct +`install-prefix` when targeting Windows. ++ +Available `flags`: ++ +* `staged`: take <> into + account. +* `relative`: return the path to `name` relative to its base directory. + +|# # end::doc[] + +rule get-dir ( name : property-set : package-name : flags * ) +{ + local result ; + + # We treat the 'prefix' directory in a special way, because it's default + # is based on the value of property. + if $(name) = prefix + { + result = [ get-install-prefix $(property-set) : $(package-name) + : $(flags) ] ; + } + else + { + # First, try getting the path for requested directory from properties. + result = [ $(property-set).get ] ; + local info = [ get-dir-info $(name) : $(package-name) ] ; + # Otherwise, use the default path. In both cases, it could be a + # relative path. + result ?= $(info[1]) ; + if $(result) + { + result = [ path.make $(result) ] ; + } + + # If there is a base directory, we may need to modify result further. + if $(info[2]) + { + local base = [ get-dir $(info[2]) : $(property-set) + : $(package-name) : $(flags) ] ; + if relative in $(flags) + { + local rel = [ path.relative $(result) $(base) : no-error ] ; + if not-a-child != $(rel) + { + result = $(rel) ; + } + } + else + { + result = [ path.root $(result) $(base) ] ; + } + } + } + + return $(result) ; +} + + +# For a given named directory returns its parent directory and relative path +local rule get-dir-info ( name : package-name ) { + local path = $(.dir.$(name)[1]) ; + if ! x$(path) + { + import errors ; + errors.error $(name) is not an installation directory name. ; + } + + if package-suffix in $(.dir.$(name).options) + { + path = [ path.join $(path) $(package-name) ] ; + } + + return $(path) $(.dir.$(name)[2]) ; +} + + +local rule get-install-prefix ( property-set : package-name : flags * ) +{ + local prefix ; + if staged in $(flags) + { + prefix = [ $(property-set).get ] ; + } + prefix ?= [ $(property-set).get ] ; + prefix = $(prefix[0]) ; + prefix ?= [ option.get prefix ] ; + if ! $(prefix) + { + if windows = [ $(property-set).get ] + { + prefix = C:\\$(package-name) ; + } + else + { + prefix = /usr/local ; + } + } + return [ path.make $(prefix) ] ; +} + + +#| tag::doc[] + +. `rule get-package-name ( property-set : project-module ? )` ++ +Returns the package name that will be used for `install` targets when +constructing installation location. The rule uses the value of +<`>> property if it's +present in `property-set`. Otherwise it deduces the package name using +``project-module``'s attributes. It traverses the project hierarchy up to the +root searching for the first project with an id. If none is found, the base +name of the root project's location is used. If `project-module` is empty, the +caller module is used (this allows invoking just `[ get-package-name $(ps) ]` +in project jam files). + +|# # end::doc[] + +rule get-package-name ( property-set : project-module ? ) +{ + local package = [ $(property-set).get ] ; + if ! 
$(package) + { + project-module ?= [ CALLER_MODULE 1 ] ; + + local m = $(project-module) ; + package = [ project.attribute $(m) id ] ; + while ! $(package) + { + m = [ project.attribute $(m) parent-module ] ; + if ! $(m) { break ; } + + package = [ project.attribute $(m) id ] ; + } + + if ! $(package) + { + local root = [ project.attribute $(project-module) project-root ] ; + package = [ path.root $(root) [ path.pwd ] ] ; + } + + package = $(package:B) ; + } + return $(package) ; +} + + +rule stage-translate-path ( feature value : properties * : project-id : project-location ) +{ + if $(feature) = && [ MATCH "^\\((.+)\\)(/(.*))?$" : $(value) ] + { + return $(value) ; + } +} + + +# Main target rule for 'install'. +# +rule install ( name : sources * : requirements * : default-build * ) +{ + local project = [ project.current ] ; + + # Unless the user has explicitly asked us to hardcode dll paths, add + # false in requirements, to override default value. + if ! true in $(requirements) + { + requirements += false ; + } + + if in $(requirements:G) + { + import errors ; + errors.user-error The property is not allowed for the 'install' + rule. ; + } + + targets.create-metatarget install-target-class : $(project) : $(name) : + $(sources) : $(requirements) @stage-translate-path : $(default-build) ; +} + + +IMPORT $(__name__) : install : : install ; +IMPORT $(__name__) : install : : stage ; +IMPORT $(__name__) : stage-translate-path : : stage-translate-path ; diff --git a/src/boost/tools/build/src/tools/stage.py b/src/boost/tools/build/src/tools/stage.py new file mode 100644 index 000000000..1fb6ae046 --- /dev/null +++ b/src/boost/tools/build/src/tools/stage.py @@ -0,0 +1,350 @@ +# Status: ported. +# Base revision 64444. +# +# Copyright 2003 Dave Abrahams +# Copyright 2005, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module defines the 'install' rule, used to copy a set of targets to a +# single location. + +import b2.build.feature as feature +import b2.build.targets as targets +import b2.build.property as property +import b2.build.property_set as property_set +import b2.build.generators as generators +import b2.build.virtual_target as virtual_target + +from b2.manager import get_manager +from b2.util.sequence import unique +from b2.util import bjam_signature + +import b2.build.type + +import os.path +import re +import types + +feature.feature('install-dependencies', ['off', 'on'], ['incidental']) +feature.feature('install-type', [], ['free', 'incidental']) +feature.feature('install-source-root', [], ['free', 'path']) +feature.feature('so-version', [], ['free', 'incidental']) + +# If 'on', version symlinks for shared libraries will not be created. Affects +# Unix builds only. +feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental']) + +class InstallTargetClass(targets.BasicTarget): + + def update_location(self, ps): + """If is not set, sets it based on the project data.""" + + loc = ps.get('location') + if not loc: + loc = os.path.join(self.project().get('location'), self.name()) + ps = ps.add_raw(["" + loc]) + + return ps + + def adjust_properties(self, target, build_ps): + a = target.action() + properties = [] + if a: + ps = a.properties() + properties = ps.all() + + # Unless true is in properties, which can happen + # only if the user has explicitly requested it, nuke all + # properties. 
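The fallback chain in `get-package-name` above (explicit `<install-package>`, then the nearest project id walking up the hierarchy, then the base name of the project root) can be sketched without B2's project machinery. The `Project` class below is a hypothetical stand-in, not a real B2 type.

[source,python]
----
import os.path

class Project(object):
    # Hypothetical stand-in for a B2 project module.
    def __init__(self, id=None, parent=None, root="."):
        self.id, self.parent, self.root = id, parent, root

def package_name(explicit, project):
    """Mirror get-package-name's fallback chain."""
    if explicit:
        return explicit
    p = project
    while p and not p.id:
        p = p.parent
    name = p.id if p and p.id else os.path.abspath(project.root)
    return os.path.basename(name)

root = Project(id="/mylib", root="/src/mylib")
tests = Project(parent=root, root="/src/mylib")
print(package_name(None, tests))   # -> "mylib"
----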
+ + if build_ps.get('hardcode-dll-paths') != ['true']: + properties = [p for p in properties if p.feature.name != 'dll-path'] + + # If any properties were specified for installing, add + # them. + properties.extend(build_ps.get_properties('dll-path')) + + # Also copy feature from current build set, to be used + # for relinking. + properties.extend(build_ps.get_properties('linkflags')) + + # Remove the feature on original targets. + # And . If stage target has another stage target in + # sources, then we shall get virtual targets with the + # property set. + properties = [p for p in properties + if not p.feature.name in ['tag', 'location']] + + properties.extend(build_ps.get_properties('dependency')) + + properties.extend(build_ps.get_properties('location')) + + + properties.extend(build_ps.get_properties('install-no-version-symlinks')) + + d = build_ps.get_properties('install-source-root') + + # Make the path absolute: we shall use it to compute relative paths and + # making the path absolute will help. + if d: + p = d[0] + properties.append(property.Property(p.feature, os.path.abspath(p.value))) + + return property_set.create(properties) + + + def construct(self, name, source_targets, ps): + + source_targets = self.targets_to_stage(source_targets, ps) + ps = self.update_location(ps) + + ename = ps.get('name') + if ename: + ename = ename[0] + if ename and len(source_targets) > 1: + get_manager().errors()("When property is used in 'install', only one source is allowed") + + result = [] + + for i in source_targets: + + staged_targets = [] + new_ps = self.adjust_properties(i, ps) + + # See if something special should be done when staging this type. It + # is indicated by the presence of a special "INSTALLED_" type. + t = i.type() + if t and b2.build.type.registered("INSTALLED_" + t): + + if ename: + get_manager().errors()("In 'install': property specified with target that requires relinking.") + else: + (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t, + new_ps, [i]) + assert isinstance(r, property_set.PropertySet) + staged_targets.extend(targets) + + else: + staged_targets.append(copy_file(self.project(), ename, i, new_ps)) + + if not staged_targets: + get_manager().errors()("Unable to generate staged version of " + i) + + result.extend(get_manager().virtual_targets().register(t) for t in staged_targets) + + return (property_set.empty(), result) + + def targets_to_stage(self, source_targets, ps): + """Given the list of source targets explicitly passed to 'stage', returns the + list of targets which must be staged.""" + + result = [] + + # Traverse the dependencies, if needed. + if ps.get('install-dependencies') == ['on']: + source_targets = self.collect_targets(source_targets) + + # Filter the target types, if needed. + included_types = ps.get('install-type') + for r in source_targets: + ty = r.type() + if ty: + # Do not stage searched libs. + if ty != "SEARCHED_LIB": + if included_types: + if self.include_type(ty, included_types): + result.append(r) + else: + result.append(r) + elif not included_types: + # Don't install typeless target if there is an explicit list of + # allowed types. + result.append(r) + + return result + + # CONSIDER: figure out why we can not use virtual-target.traverse here. 
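The filtering rules implemented by `targets_to_stage` above are compact but easy to misread. The sketch below restates the decision with plain `(name, type)` tuples; it is illustrative only and does not use B2's virtual-target objects.

[source,python]
----
def should_stage(target_type, included_types):
    """Mirror targets_to_stage: skip SEARCHED_LIB, honour <install-type>,
    and drop typeless targets only when an explicit type list is given."""
    if target_type is None:
        return not included_types
    if target_type == "SEARCHED_LIB":
        return False
    if included_types:
        # The real code uses type.is-subtype; plain membership is a
        # simplification for this sketch.
        return target_type in included_types
    return True

targets = [("app", "EXE"), ("boost_system", "SEARCHED_LIB"), ("readme", None)]
print([name for name, t in targets if should_stage(t, ["EXE"])])   # ['app']
----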
+ # + def collect_targets(self, targets): + + s = [t.creating_subvariant() for t in targets] + s = unique(filter(lambda l: l != None,s)) + + result = set(targets) + for i in s: + i.all_referenced_targets(result) + + result2 = [] + for r in result: + if isinstance(r, property.Property): + + if r.feature.name != 'use': + result2.append(r.value) + else: + result2.append(r) + result2 = unique(result2) + return result2 + + # Returns true iff 'type' is subtype of some element of 'types-to-include'. + # + def include_type(self, type, types_to_include): + return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include) + +# Creates a copy of target 'source'. The 'properties' object should have a +# property which specifies where the target must be placed. +# +def copy_file(project, name, source, ps): + + if not name: + name = source.name() + + relative = "" + + new_a = virtual_target.NonScanningAction([source], "common.copy", ps) + source_root = ps.get('install-source-root') + if source_root: + source_root = source_root[0] + # Get the real path of the target. We probably need to strip relative + # path from the target name at construction. + path = os.path.join(source.path(), os.path.dirname(name)) + # Make the path absolute. Otherwise, it would be hard to compute the + # relative path. The 'source-root' is already absolute, see the + # 'adjust-properties' method above. + path = os.path.abspath(path) + + relative = os.path.relpath(path, source_root) + + name = os.path.join(relative, os.path.basename(name)) + return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True) + +def symlink(name, project, source, ps): + a = virtual_target.Action([source], "symlink.ln", ps) + return virtual_target.FileTarget(name, source.type(), project, a, exact=True) + +def relink_file(project, source, ps): + action = source[0].action() + cloned_action = virtual_target.clone_action(action, project, "", ps) + targets = cloned_action.targets() + # We relink only on Unix, where exe or shared lib is always a single file. + assert len(targets) == 1 + return targets[0] + + +# Declare installed version of the EXE type. Generator for this type will cause +# relinking to the new location. +b2.build.type.register('INSTALLED_EXE', [], 'EXE') + +class InstalledExeGenerator(generators.Generator): + + def __init__(self): + generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE']) + + def run(self, project, name, ps, source): + + need_relink = False; + + if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']: + # Never relink + pass + else: + # See if the dll-path properties are not changed during + # install. If so, copy, don't relink. + need_relink = source[0].action() and ps.get('dll-path') != source[0].action().properties().get('dll-path') + + if need_relink: + return [relink_file(project, source, ps)] + else: + return [copy_file(project, None, source[0], ps)] + +generators.register(InstalledExeGenerator()) + + +# Installing a shared link on Unix might cause a creation of versioned symbolic +# links. 
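The copy-versus-relink decision made by the installed-EXE generator above boils down to two checks: never relink on Windows-style platforms, and relink only when the effective `<dll-path>` set changed between build and install. A minimal sketch, assuming plain lists of rpath entries rather than B2 property sets:

[source,python]
----
def need_relink(host_os, target_os, built_dll_paths, install_dll_paths,
                has_action=True):
    """True when an installed EXE must be relinked rather than copied."""
    if host_os in ("NT", "CYGWIN") or target_os in ("windows", "cygwin"):
        return False                 # never relink on Windows-style platforms
    if not has_action:
        return False                 # prebuilt file: nothing to relink
    return list(built_dll_paths) != list(install_dll_paths)

print(need_relink("LINUX", "linux", ["/build/lib"], ["/opt/app/lib"]))  # True
print(need_relink("NT", "windows", ["/build/lib"], ["/opt/app/lib"]))   # False
----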
+b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB') + +class InstalledSharedLibGenerator(generators.Generator): + + def __init__(self): + generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB']) + + def run(self, project, name, ps, source): + + source = source[0] + if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']: + copied = copy_file(project, None, source, ps) + return [get_manager().virtual_targets().register(copied)] + else: + a = source.action() + if not a: + # Non-derived file, just copy. + copied = copy_file(project, None, source, ps) + else: + + need_relink = ps.get('dll-path') != source.action().properties().get('dll-path') + + if need_relink: + # Rpath changed, need to relink. + copied = relink_file(project, source, ps) + else: + copied = copy_file(project, None, source, ps) + + result = [get_manager().virtual_targets().register(copied)] + # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and + # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY + # symbolic links. + m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$", + copied.name()); + if m: + # Symlink without version at all is used to make + # -lsome_library work. + result.append(symlink(m.group(1), project, copied, ps)) + + # Symlinks of some libfoo.N and libfoo.N.M are used so that + # library can found at runtime, if libfoo.N.M.X has soname of + # libfoo.N. That happens when the library makes some binary + # compatibility guarantees. If not, it is possible to skip those + # symlinks. + if ps.get('install-no-version-symlinks') != ['on']: + + result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps)) + result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3), + project, copied, ps)) + + return result + +generators.register(InstalledSharedLibGenerator()) + + +# Main target rule for 'install'. +# +@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"], + ["default_build", "*"], ["usage_requirements", "*"])) +def install(name, sources, requirements=[], default_build=[], usage_requirements=[]): + + requirements = requirements[:] + # Unless the user has explicitly asked us to hardcode dll paths, add + # false in requirements, to override default value. + if not 'true' in requirements: + requirements.append('false') + + if any(r.startswith('') for r in requirements): + get_manager().errors()("The property is not allowed for the 'install' rule") + + from b2.manager import get_manager + t = get_manager().targets() + + project = get_manager().projects().current() + + return t.main_target_alternative( + InstallTargetClass(name, project, + t.main_target_sources(sources, name), + t.main_target_requirements(requirements, project), + t.main_target_default_build(default_build, project), + t.main_target_usage_requirements(usage_requirements, project))) + +get_manager().projects().add_rule("install", install) +get_manager().projects().add_rule("stage", install) + diff --git a/src/boost/tools/build/src/tools/stlport.jam b/src/boost/tools/build/src/tools/stlport.jam new file mode 100644 index 000000000..4b9dabfab --- /dev/null +++ b/src/boost/tools/build/src/tools/stlport.jam @@ -0,0 +1,312 @@ +# Copyright Gennadiy Rozental +# Copyright 2006 Rene Rivera +# Copyright 2003, 2004, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# The STLPort is usable by means of 'stdlib' feature. When +# stdlib=stlport is specified, default version of STLPort will be used, +# while stdlib=stlport-4.5 will use specific version. +# The subfeature value 'hostios' means to use host compiler's iostreams. +# +# The specific version of stlport is selected by features: +# The feature selects between static and shared library +# The on selects STLPort with debug symbols +# and stl debugging. +# There's no way to use STLPort with debug symbols but without +# stl debugging. + +# TODO: must implement selection of different STLPort installations based +# on used toolset. +# Also, finish various flags: +# +# This is copied from V1 toolset, "+" means "implemented" +#+flags $(CURR_TOOLSET) DEFINES off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ; +#+flags $(CURR_TOOLSET) DEFINES off : _STLP_NO_EXTENSIONS=1 ; +# flags $(CURR_TOOLSET) DEFINES off : _STLP_NO_ANACHRONISMS=1 ; +# flags $(CURR_TOOLSET) DEFINES global : _STLP_VENDOR_GLOBAL_CSTD=1 ; +# flags $(CURR_TOOLSET) DEFINES off : _STLP_NO_EXCEPTIONS=1 ; +# flags $(CURR_TOOLSET) DEFINES on : _STLP_DEBUG_ALLOC=1 ; +#+flags $(CURR_TOOLSET) DEFINES debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ; +#+flags $(CURR_TOOLSET) DEFINES dynamic : _STLP_USE_DYNAMIC_LIB=1 ; + + +import feature : feature subfeature ; +import project ; +import "class" : new ; +import targets ; +import property-set ; +import common ; +import type ; + +# Make this module into a project. +project.initialize $(__name__) ; +project stlport ; + +# The problem: how to request to use host compiler's iostreams? +# +# Solution 1: Global 'stlport-iostream' feature. +# That's ugly. Subfeature make more sense for stlport-specific thing. +# Solution 2: Use subfeature with two values, one of which ("use STLPort iostream") +# is default. +# The problem is that such subfeature will appear in target paths, and that's ugly +# Solution 3: Use optional subfeature with only one value. + +feature.extend stdlib : stlport ; +feature.compose stlport : /stlport//stlport ; + +# STLport iostreams or native iostreams +subfeature stdlib stlport : iostream : hostios : optional propagated ; + +# STLport extensions +subfeature stdlib stlport : extensions : noext : optional propagated ; + +# STLport anachronisms -- NOT YET SUPPORTED +# subfeature stdlib stlport : anachronisms : on off ; + +# STLport debug allocation -- NOT YET SUPPORTED +#subfeature stdlib stlport : debug-alloc : off on ; + +# Declare a special target class to handle the creation of search-lib-target +# instances for STLport. We need a special class, because otherwise we'll have +# - declare prebuilt targets for all possible toolsets. And by the time 'init' +# is called we don't even know the list of toolsets that are registered +# - when host iostreams are used, we really should produce nothing. It would +# be hard/impossible to achieve this using prebuilt targets. + +class stlport-target-class : basic-target +{ + import feature project type errors generators ; + import set : difference ; + + rule __init__ ( project : headers ? : libraries * : version ? 
) + { + basic-target.__init__ stlport : $(project) ; + self.headers = $(headers) ; + self.libraries = $(libraries) ; + self.version = $(version) ; + self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ; + + local requirements ; + requirements += $(self.version) ; + requirements += runtime-debugging ; + requirements += toolset ; + requirements += runtime-link ; + self.requirements = [ property-set.create $(requirements) ] ; + } + + rule generate ( property-set ) + { + # Since this target is built with stlport, it will also + # have /stlport//stlport in requirements, which will + # cause a loop in main target references. Remove that property + # manually. + + property-set = [ property-set.create + [ difference + [ $(property-set).raw ] : + /stlport//stlport + stlport + ] + ] ; + return [ basic-target.generate $(property-set) ] ; + } + + rule construct ( name : source-targets * : property-set ) + { + # Deduce the name of stlport library, based on toolset and + # debug setting. + local raw = [ $(property-set).raw ] ; + local hostios = [ feature.get-values : $(raw) ] ; + local toolset = [ feature.get-values : $(raw) ] ; + + if $(self.version.5) + { + # Version 5.x + + # STLport host IO streams no longer supported. So we always + # need libraries. + + # name: stlport(stl)?[dg]?(_static)?.M.R + local name = stlport ; + if [ feature.get-values : $(raw) ] = "on" + { + name += stl ; + switch $(toolset) + { + case gcc* : name += g ; + case darwin* : name += g ; + case * : name += d ; + } + } + + if [ feature.get-values : $(raw) ] = "static" + { + name += _static ; + } + + # Starting with version 5.2.0, the STLport static libraries no + # longer include a version number in their name + local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ; + if $(version.pre.5.2) || [ feature.get-values : + $(raw) ] != "static" + { + name += .$(self.version.5) ; + } + + name = $(name:J=) ; + + if [ feature.get-values : $(raw) ] = "on" + { + #~ Allow explicitly asking to install the STLport lib by + #~ referring to it directly: + #~ /stlport//stlport/on + #~ This allows for install packaging of all libs one might need + #~ for a standalone distribution. + import path : make : path-make ; + local runtime-link + = [ feature.get-values : $(raw) ] ; + local lib-file.props + = [ property-set.create $(raw) $(runtime-link) ] ; + local lib-file.prefix + = [ type.generated-target-prefix $(runtime-link:U)_LIB : + $(lib-file.props) ] ; + local lib-file.suffix + = [ type.generated-target-suffix $(runtime-link:U)_LIB : + $(lib-file.props) ] ; + lib-file.prefix + ?= "" "lib" ; + lib-file.suffix + ?= "" ; + local lib-file + = [ GLOB $(self.libraries) [ modules.peek : PATH ] : + $(lib-file.prefix)$(name).$(lib-file.suffix) ] ; + lib-file + = [ new file-reference [ path-make $(lib-file[1]) ] : + $(self.project) ] ; + lib-file + = [ $(lib-file).generate "" ] ; + local lib-file.requirements + = [ targets.main-target-requirements + [ $(lib-file.props).raw ] $(lib-file[-1]) + : $(self.project) ] ; + return [ generators.construct $(self.project) $(name) : LIB : + $(lib-file.requirements) ] ; + } + else + { + #~ Otherwise, it is just regular library usage. + return [ generators.construct + $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ; + } + } + else if ! $(hostios) && $(toolset) != msvc + { + # We don't need libraries if host istreams are used. For + # msvc, automatic library selection will be used. + + # name: stlport_(_stldebug)? 
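The STLport 5.x library name assembled in `construct` above follows the pattern `stlport(stl)?(g|d)?(_static)?(.M.m)?`. A hedged Python restatement of that naming logic, with toolset and property values passed as plain strings:

[source,python]
----
import re

def stlport5_lib_name(version, toolset="gcc", runtime_debugging=False,
                      runtime_link="shared"):
    """Compose the STLport 5.x library base name the way construct does."""
    name = "stlport"
    if runtime_debugging:
        # gcc and darwin toolsets use the 'g' debug flavour, others 'd'.
        name += "stl" + ("g" if toolset.startswith(("gcc", "darwin")) else "d")
    if runtime_link == "static":
        name += "_static"
    five_x = re.match(r"(5\.[0-9]+)", version)
    pre_5_2 = re.match(r"5\.[01]", version)
    # From 5.2.0 on, static libraries no longer carry a version suffix.
    if five_x and (pre_5_2 or runtime_link != "static"):
        name += "." + five_x.group(1)
    return name

print(stlport5_lib_name("5.2.1"))                          # stlport.5.2
print(stlport5_lib_name("5.2.1", runtime_link="static"))   # stlport_static
print(stlport5_lib_name("5.2.1", runtime_debugging=True))  # stlportstlg.5.2
----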
+ local name = stlport ; + name = $(name)_$(toolset) ; + if [ feature.get-values : $(raw) ] = "on" + { + name = $(name)_stldebug ; + } + + return [ generators.construct + $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ; + } + else + { + return [ property-set.empty ] ; + } + } + + rule compute-usage-requirements ( subvariant ) + { + local usage-requirements = + $(self.headers) + $(self.libraries) + $(self.libraries) + ; + + local rproperties = [ $(subvariant).build-properties ] ; + # CONSIDER: should this "if" sequence be replaced with + # some use of 'property-map' class? + if [ $(rproperties).get ] = "on" + { + usage-requirements += + _STLP_DEBUG=1 + _STLP_DEBUG_UNINITIALIZED=1 ; + } + if [ $(rproperties).get ] = "shared" + { + usage-requirements += + _STLP_USE_DYNAMIC_LIB=1 ; + } + if [ $(rproperties).get ] = noext + { + usage-requirements += + _STLP_NO_EXTENSIONS=1 ; + } + if [ $(rproperties).get ] = hostios + { + usage-requirements += + _STLP_NO_OWN_IOSTREAMS=1 + _STLP_HAS_NO_NEW_IOSTREAMS=1 ; + } + if $(self.version.5) + { + # Version 5.x + if [ $(rproperties).get ] = "single" + { + # Since STLport5 doesn't normally support single-thread + # we force STLport5 into the multi-thread mode. Hence + # getting what other libs provide of single-thread code + # linking against a multi-thread lib. + usage-requirements += + _STLP_THREADS=1 ; + } + } + + return [ property-set.create $(usage-requirements) ] ; + } +} + +rule stlport-target ( headers ? : libraries * : version ? ) +{ + local project = [ project.current ] ; + + targets.main-target-alternative + [ new stlport-target-class $(project) : $(headers) : $(libraries) + : $(version) + ] ; +} + +local .version-subfeature-defined ; + +# Initialize stlport support. +rule init ( + version ? : + headers : # Location of header files + libraries * # Location of libraries, lib and bin subdirs of STLport. + ) +{ + # FIXME: need to use common.check-init-parameters here. + # At the moment, that rule always tries to define subfeature + # of the 'toolset' feature, while we need to define subfeature + # of stlport, so tweaks to check-init-parameters are needed. + if $(version) + { + if ! $(.version-subfeature-defined) + { + feature.subfeature stdlib stlport : version : : propagated ; + .version-subfeature-defined = true ; + } + feature.extend-subfeature stdlib stlport : version : $(version) ; + } + + # Declare the main target for this STLPort version. + stlport-target $(headers) : $(libraries) : $(version) ; +} + diff --git a/src/boost/tools/build/src/tools/sun.jam b/src/boost/tools/build/src/tools/sun.jam new file mode 100644 index 000000000..a5a19089d --- /dev/null +++ b/src/boost/tools/build/src/tools/sun.jam @@ -0,0 +1,226 @@ +# Copyright (C) Christopher Currie 2003. Permission to copy, use, +# modify, sell and distribute this software is granted provided this +# copyright notice appears in all copies. This software is provided +# "as is" without express or implied warranty, and with no claim as +# to its suitability for any purpose. + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.sun]] += Sun Studio + +The `sun` module supports the +http://developers.sun.com/sunstudio/index.jsp[Sun Studio] C++ compilers +for the Solaris OS. + +The module is initialized using the following syntax: + +---- +using sun : [version] : [c++-compile-command] : [compiler options] ; +---- + +This statement may be repeated several times, if you want to configure +several versions of the compiler. 
+ +If the command is not specified, B2 will search for a binary +named `CC` in `/opt/SUNWspro/bin` and in PATH. + +When using this compiler on complex C++ code, such as the +http://boost.org[Boost C++ library], it is recommended to specify the +following options when initializing the `sun` module: + +---- +-library=stlport4 -features=tmplife -features=tmplrefstatic +---- + +See the http://blogs.sun.com/sga/entry/command_line_options[Sun C++ +Frontend Tales] for details. + +The following options can be provided, using +_`option-value syntax`_: + +`cflags`:: +Specifies additional compiler flags that will be used when compiling C +sources. + +`cxxflags`:: +Specifies additional compiler flags that will be used when compiling C++ +sources. + +`compileflags`:: +Specifies additional compiler flags that will be used when compiling both C +and C++ sources. + +`linkflags`:: +Specifies additional command line options that will be passed to the linker. + +Starting with Sun Studio 12, you can create 64-bit applications by using +the `address-model=64` property. + +|# # end::doc[] + +import property ; +import generators ; +import os ; +import toolset : flags ; +import feature ; +import type ; +import common ; + +feature.extend toolset : sun ; +toolset.inherit sun : unix ; +generators.override sun.prebuilt : builtin.lib-generator ; +generators.override sun.prebuilt : builtin.prebuilt ; +generators.override sun.searched-lib-generator : searched-lib-generator ; + + +rule init ( version ? : command * : options * ) +{ + local condition = [ + common.check-init-parameters sun : version $(version) ] ; + + command = [ common.get-invocation-command sun : CC + : $(command) : "/opt/SUNWspro/bin" ] ; + + # Even if the real compiler is not found, put CC to + # command line so that user see command line that would have being executed. + command ?= CC ; + + common.handle-options sun : $(condition) : $(command) : $(options) ; + + command_c = $(command[1--2]) $(command[-1]:B=cc) ; + + toolset.flags sun CONFIG_C_COMMAND $(condition) : $(command_c) ; +} + +# Declare generators +generators.register-c-compiler sun.compile.c : C : OBJ : sun ; +generators.register-c-compiler sun.compile.c++ : CPP : OBJ : sun ; + +# Declare flags and actions for compilation +flags sun.compile OPTIONS on : -g ; +flags sun.compile OPTIONS on : -xprofile=tcov ; +flags sun.compile OPTIONS speed : -xO4 ; +flags sun.compile OPTIONS space : -xO2 -xspace ; +flags sun.compile OPTIONS multi : -mt ; +flags sun.compile OPTIONS off : -erroff ; +flags sun.compile OPTIONS on : -erroff=%none ; +flags sun.compile OPTIONS all : -erroff=%none ; +flags sun.compile OPTIONS extra : -erroff=%none ; +flags sun.compile OPTIONS pedantic : -erroff=%none ; +flags sun.compile OPTIONS on : -errwarn ; + +flags sun.compile OPTIONS hidden : -xldscope=hidden ; +flags sun.compile OPTIONS protected : -xldscope=symbolic ; +flags sun.compile OPTIONS global : -xldscope=global ; + +flags sun.compile.c++ OPTIONS off : +d ; + +# There are no less than 5 standard library options: +# 1) The default, which uses an old version of the Rogue Wave std lib, +# also available via -std=sun03. +# 2) C++03 mode + STLport, selected via the -library option. +# 3) C++03 mode plus the Apache std lib, selected via the -library option. +# 4) C++03 or C++11 in g++ compatibility mode, and GNU libstdc++3, selected via -std=c++03/11. 
+# +# Note that the -std, -library and -compat compiler switches appear to be largely mutually +# incompatible, and that going forward the -std switch seems to be the preferred one. +# +# See http://docs.oracle.com/cd/E37069_01/html/E37075/bkamw.html#OSSCPgnaof +# + +flags sun.compile.c++ OPTIONS sun-stlport : -library=stlport4 -compat=5 -features=zla ; +flags sun.link OPTIONS sun-stlport : -library=stlport4 -compat=5 ; + +flags sun.compile.c++ OPTIONS apache : -library=stdcxx4 -compat=5 -features=zla ; +flags sun.link OPTIONS apache : -library=stdcxx4 -compat=5 ; + +flags sun.compile.c++ OPTIONS gnu : -std=c++03 ; +flags sun.compile.c++ DEFINES gnu : _GLIBCXX_USE_CXX11_ABI=0 ; +flags sun.link OPTIONS gnu : -std=c++03 ; + +flags sun.compile.c++ OPTIONS gnu11 : -std=c++11 ; +flags sun.compile.c++ DEFINES gnu11 : _GLIBCXX_USE_CXX11_ABI=1 ; +flags sun.link OPTIONS gnu11 : -std=c++11 ; + +# The -m32 and -m64 options are supported starting +# with Sun Studio 12. On earlier compilers, the +# 'address-model' feature is not supported and should not +# be used. Instead, use -xarch=generic64 command line +# option. +# See http://svn.boost.org/trac/boost/ticket/1186 +# for details. +flags sun OPTIONS 32 : -m32 ; +flags sun OPTIONS 64 : -m64 ; +# On sparc, there's a difference between -Kpic +# and -KPIC. The first is slightly more efficient, +# but has the limits on the size of GOT table. +# For minimal fuss on user side, we use -KPIC here. +# See http://svn.boost.org/trac/boost/ticket/1186#comment:6 +# for detailed explanation. +flags sun OPTIONS shared : -KPIC ; + +flags sun.compile OPTIONS ; +flags sun.compile.c++ OPTIONS ; +flags sun.compile DEFINES ; +flags sun.compile INCLUDES ; + +actions compile.c +{ + "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.c++ +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +# Declare flags and actions for linking +flags sun.link OPTIONS on : -g ; +# Strip the binary when no debugging is needed +flags sun.link OPTIONS off : -s ; +flags sun.link OPTIONS on : -xprofile=tcov ; +flags sun.link OPTIONS multi : -mt ; +flags sun.link OPTIONS ; +flags sun.link LINKPATH ; +flags sun.link FINDLIBS-ST ; +flags sun.link FINDLIBS-SA ; +flags sun.link LIBRARIES ; +flags sun.link LINK-RUNTIME static : static ; +flags sun.link LINK-RUNTIME shared : dynamic ; +flags sun.link RPATH ; +# On gcc, there are separate options for dll path at runtime and +# link time. On Solaris, there's only one: -R, so we have to use +# it, even though it's bad idea. +flags sun.link RPATH ; + +# The POSIX real-time library is always needed (nanosleep, clock_gettime etc.) 
+flags sun.link FINDLIBS-SA : rt ; + +rule link ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) +} + +# Slight mods for dlls +rule link.dll ( targets * : sources * : properties * ) +{ + SPACE on $(targets) = " " ; +} + +actions link.dll bind LIBRARIES +{ + "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME) +} + +# Declare action for creating static libraries +actions piecemeal archive +{ + "$(CONFIG_COMMAND)" -xar -o "$(<)" "$(>)" +} + diff --git a/src/boost/tools/build/src/tools/symlink.jam b/src/boost/tools/build/src/tools/symlink.jam new file mode 100644 index 000000000..b1256d747 --- /dev/null +++ b/src/boost/tools/build/src/tools/symlink.jam @@ -0,0 +1,140 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003 Rene Rivera +# Copyright 2002, 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Defines the "symlink" special target. 'symlink' targets make symbolic links +# to the sources. + +import targets modules path class os feature project property-set ; + +.count = 0 ; + +feature.feature symlink-location : project-relative build-relative : incidental ; + +# The class representing "symlink" targets. +# +class symlink-targets : basic-target +{ + import numbers modules class property project path ; + + rule __init__ ( + project + : targets * + : sources * + ) + { + # Generate a fake name for now. Need unnamed targets eventually. + local c = [ modules.peek symlink : .count ] ; + modules.poke symlink : .count : [ numbers.increment $(c) ] ; + local fake-name = symlink#$(c) ; + + basic-target.__init__ $(fake-name) : $(project) : $(sources) ; + + # Remember the targets to map the sources onto. Pad or truncate + # to fit the sources given. + self.targets = ; + for local source in $(sources) + { + if $(targets) + { + self.targets += $(targets[1]) ; + targets = $(targets[2-]) ; + } + else + { + self.targets += $(source) ; + } + } + + # The virtual targets corresponding to the given targets. + self.virtual-targets = ; + } + + rule construct ( name : source-targets * : property-set ) + { + local i = 1 ; + for local t in $(source-targets) + { + local s = $(self.targets[$(i)]) ; + local a = [ class.new action $(t) : symlink.ln : $(property-set) ] ; + local vt = [ class.new file-target $(s:D=) + : [ $(t).type ] : $(self.project) : $(a) ] ; + + # Place the symlink in the directory relative to the project + # location, instead of placing it in the build directory. + if [ property.select : [ $(property-set).raw ] ] = project-relative + { + $(vt).set-path [ path.root $(s:D) [ $(self.project).get location ] ] ; + } + + self.virtual-targets += $(vt) ; + i = [ numbers.increment $(i) ] ; + } + return [ property-set.empty ] $(self.virtual-targets) ; + } +} + +# Creates a symbolic link from a set of targets to a set of sources. +# The targets and sources map one to one. The symlinks generated are +# limited to be the ones given as the sources. That is, the targets +# are either padded or trimmed to equate to the sources. The padding +# is done with the name of the corresponding source. 
For example:: +# +# symlink : one two ; +# +# Is equal to:: +# +# symlink one two : one two ; +# +# Names for symlink are relative to the project location. They cannot +# include ".." path components. +rule symlink ( + targets * + : sources * + ) +{ + local project = [ project.current ] ; + + return [ targets.main-target-alternative + [ class.new symlink-targets $(project) : $(targets) : + # Note: inline targets are not supported for symlink, intentionally, + # since it's used to linking existing non-local targets. + $(sources) ] ] ; +} + +rule ln +{ + local os ; + if [ modules.peek : UNIX ] { os = UNIX ; } + else { os ?= [ os.name ] ; } + # Remember the path to make the link relative to where the symlink is located. + local path-to-source = [ path.relative-to + [ path.make [ on $(<) return $(LOCATE) ] ] + [ path.make [ on $(>) return $(LOCATE) ] ] ] ; + if $(path-to-source) = . + { + PATH_TO_SOURCE on $(<) = "" ; + } + else + { + PATH_TO_SOURCE on $(<) = [ path.native $(path-to-source) ] ; + } + ln-$(os) $(<) : $(>) ; +} + +actions ln-UNIX +{ + ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)' +} + +# there is a way to do this; we fall back to a copy for now +actions ln-NT +{ + echo "NT symlinks not supported yet, making copy" + del /f /q "$(<)" 2>nul >nul + copy "$(>)" "$(<)" $(NULL_OUT) +} + +IMPORT $(__name__) : symlink : : symlink ; diff --git a/src/boost/tools/build/src/tools/symlink.py b/src/boost/tools/build/src/tools/symlink.py new file mode 100644 index 000000000..e2ce54684 --- /dev/null +++ b/src/boost/tools/build/src/tools/symlink.py @@ -0,0 +1,112 @@ +# Status: ported. +# Base revision: 64488. + +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003 Rene Rivera +# Copyright 2002, 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Defines the "symlink" special target. 'symlink' targets make symbolic links +# to the sources. + +import b2.build.feature as feature +import b2.build.targets as targets +import b2.build.property_set as property_set +import b2.build.virtual_target as virtual_target +import b2.build.targets + +from b2.manager import get_manager + +import bjam + +import os + + +feature.feature("symlink-location", ["project-relative", "build-relative"], ["incidental"]) + +class SymlinkTarget(targets.BasicTarget): + + _count = 0 + + def __init__(self, project, targets, sources): + + # Generate a fake name for now. Need unnamed targets eventually. + fake_name = "symlink#%s" % SymlinkTarget._count + SymlinkTarget._count = SymlinkTarget._count + 1 + + b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources) + + # Remember the targets to map the sources onto. Pad or truncate + # to fit the sources given. + assert len(targets) <= len(sources) + self.targets = targets[:] + sources[len(targets):] + + # The virtual targets corresponding to the given targets. + self.virtual_targets = [] + + def construct(self, name, source_targets, ps): + i = 0 + for t in source_targets: + s = self.targets[i] + a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps) + vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a) + + # Place the symlink in the directory relative to the project + # location, instead of placing it in the build directory. 
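Two details of the symlink target above are worth restating plainly: the requested link names are padded or truncated to match the sources (padding reuses the source name), and with `<symlink-location>project-relative` the link is placed next to the project rather than in the build directory. A small illustrative sketch, independent of B2's classes and ignoring directory components in the link names:

[source,python]
----
import os.path

def plan_symlinks(link_names, sources, project_dir, build_dir,
                  project_relative=False):
    """Return (link_path, source) pairs the way the symlink target maps them."""
    # Pad or truncate the requested names to match the sources.
    names = list(link_names)[:len(sources)]
    names += sources[len(names):]
    base = project_dir if project_relative else build_dir
    return [(os.path.join(base, os.path.basename(n)), s)
            for n, s in zip(names, sources)]

print(plan_symlinks(["libz.so"], ["libz.so.1.2.8", "libbz2.so.1.0"],
                    "/src/proj", "/src/proj/bin"))
# [('/src/proj/bin/libz.so', 'libz.so.1.2.8'),
#  ('/src/proj/bin/libbz2.so.1.0', 'libbz2.so.1.0')]
----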
+ if not ps.get('symlink-location') == "project-relative": + vt.set_path(os.path.join(self.project().get('location'), os.path.dirname(s))) + + vt = get_manager().virtual_targets().register(vt) + self.virtual_targets.append(vt) + i = i + 1 + + return (property_set.empty(), self.virtual_targets) + +# Creates a symbolic link from a set of targets to a set of sources. +# The targets and sources map one to one. The symlinks generated are +# limited to be the ones given as the sources. That is, the targets +# are either padded or trimmed to equate to the sources. The padding +# is done with the name of the corresponding source. For example:: +# +# symlink : one two ; +# +# Is equal to:: +# +# symlink one two : one two ; +# +# Names for symlink are relative to the project location. They cannot +# include ".." path components. +def symlink(targets, sources): + + from b2.manager import get_manager + t = get_manager().targets() + p = get_manager().projects().current() + + return t.main_target_alternative( + SymlinkTarget(p, targets, + # Note: inline targets are not supported for symlink, intentionally, + # since it's used to linking existing non-local targets. + sources)) + + +def setup_ln(targets, sources, ps): + + source_path = bjam.call("get-target-variable", sources[0], "LOCATE")[0] + target_path = bjam.call("get-target-variable", targets[0], "LOCATE")[0] + rel = os.path.relpath(source_path, target_path) + if rel == ".": + bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", "") + else: + bjam.call("set-target-variable", targets, "PATH_TO_SOURCE", rel) + +if os.name == 'nt': + ln_action = """echo "NT symlinks not supported yet, making copy" +del /f /q "$(<)" 2>nul >nul +copy "$(>)" "$(<)" $(NULL_OUT)""" +else: + ln_action = "ln -f -s '$(>:D=:R=$(PATH_TO_SOURCE))' '$(<)'" + +get_manager().engine().register_action("symlink.ln", ln_action, function=setup_ln) + +get_manager().projects().add_rule("symlink", symlink) diff --git a/src/boost/tools/build/src/tools/testing-aux.jam b/src/boost/tools/build/src/tools/testing-aux.jam new file mode 100644 index 000000000..30309fbb8 --- /dev/null +++ b/src/boost/tools/build/src/tools/testing-aux.jam @@ -0,0 +1,344 @@ +import feature ; + +# This module is imported by testing.py. The definitions here are +# too tricky to do in Python + +# Causes the 'target' to exist after bjam invocation if and only if all the +# dependencies were successfully built. +# +rule expect-success ( target : dependency + : requirements * ) +{ + **passed** $(target) : $(sources) ; +} +IMPORT testing : expect-success : : testing.expect-success ; + +# Causes the 'target' to exist after bjam invocation if and only if all some of +# the dependencies were not successfully built. +# +rule expect-failure ( target : dependency + : properties * ) +{ + local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ; + local marker = $(dependency:G=$(grist)*fail) ; + (failed-as-expected) $(marker) ; + FAIL_EXPECTED $(dependency) ; + LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ; + RMOLD $(marker) ; + DEPENDS $(marker) : $(dependency) ; + DEPENDS $(target) : $(marker) ; + **passed** $(target) : $(marker) ; +} +IMPORT testing : expect-failure : : testing.expect-failure ; + +# The rule/action combination used to report successful passing of a test. +# +rule **passed** +{ + # Force deletion of the target, in case any dependencies failed to build. + RMOLD $(<) ; +} + + +# Used to create test files signifying passed tests. 
+# +actions **passed** +{ + echo passed > "$(<)" +} + + +# Used to create replacement object files that do not get created during tests +# that are expected to fail. +# +actions (failed-as-expected) +{ + echo failed as expected > "$(<)" +} + + +if [ os.name ] = VMS +{ + actions **passed** + { + PIPE WRITE SYS$OUTPUT "passed" > $(<:W) + } + + actions (failed-as-expected) + { + PIPE WRITE SYS$OUTPUT "failed as expected" > $(<:W) + } +} + + +# Runs executable 'sources' and stores stdout in file 'target'. Unless +# --preserve-test-targets command line option has been specified, removes the +# executable. The 'target-to-remove' parameter controls what should be removed: +# - if 'none', does not remove anything, ever +# - if empty, removes 'source' +# - if non-empty and not 'none', contains a list of sources to remove. +# +rule capture-output ( target : source : properties * : targets-to-remove * ) +{ + output-file on $(target) = $(target:S=.output) ; + LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ; + + # The INCLUDES kill a warning about independent target... + INCLUDES $(target) : $(target:S=.output) ; + # but it also puts .output into dependency graph, so we must tell jam it is + # OK if it cannot find the target or updating rule. + NOCARE $(target:S=.output) ; + + # This has two-fold effect. First it adds input files to the dependency + # graph, preventing a warning. Second, it causes input files to be bound + # before target is created. Therefore, they are bound using SEARCH setting + # on them and not LOCATE setting of $(target), as in other case (due to jam + # bug). + DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ; + + if $(targets-to-remove) = none + { + targets-to-remove = ; + } + else if ! $(targets-to-remove) + { + targets-to-remove = $(source) ; + } + + if [ on $(target) return $(REMOVE_TEST_TARGETS) ] + { + TEMPORARY $(targets-to-remove) ; + # Set a second action on target that will be executed after capture + # output action. The 'RmTemps' rule has the 'ignore' modifier so it is + # always considered succeeded. This is needed for 'run-fail' test. For + # that test the target will be marked with FAIL_EXPECTED, and without + # 'ignore' successful execution will be negated and be reported as + # failure. With 'ignore' we do not detect a case where removing files + # fails, but it is not likely to happen. + RmTemps $(target) : $(targets-to-remove) ; + } + + if ! [ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } +} + + +if [ os.name ] = NT +{ + .STATUS = %status% ; + .SET_STATUS = "set status=%ERRORLEVEL%" ; + .RUN_OUTPUT_NL = "echo." ; + .THEN = "(" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "%status% EQU 0 $(.THEN)" ; + .STATUS_NOT_0 = "%status% NEQ 0 $(.THEN)" ; + .VERBOSE = "%verbose% EQU 1 $(.THEN)" ; + .ENDIF = ")" ; + .SHELL_SET = "set " ; + .CATENATE = type ; + .CP = copy ; + .NULLIN = ; +} +else if [ os.name ] = VMS +{ + local nl = " +" ; + + .STATUS = "''status'" ; + .SET_STATUS = "status=$STATUS" ; + .SAY = "pipe write sys$output" ; ## not really echo + .RUN_OUTPUT_NL = "$(.SAY) \"\"" ; + .THEN = "$(nl)then" ; + .EXIT_SUCCESS = "1" ; + .SUCCESS = "status .eq. $(.EXIT_SUCCESS) $(.THEN)" ; + .STATUS_0 = "status .eq. 0 $(.THEN)" ; + .STATUS_NOT_0 = "status .ne. 0 $(.THEN)" ; + .VERBOSE = "verbose .eq. 
1 $(.THEN)" ; + .ENDIF = "endif" ; + .SHELL_SET = "" ; + .CATENATE = type ; + .CP = copy ; + .NULLIN = ; +} +else +{ + .STATUS = "$status" ; + .SET_STATUS = "status=$?" ; + .RUN_OUTPUT_NL = "echo" ; + .THEN = "; then" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "test $status -eq 0 $(.THEN)" ; + .STATUS_NOT_0 = "test $status -ne 0 $(.THEN)" ; + .VERBOSE = "test $verbose -eq 1 $(.THEN)" ; + .ENDIF = "fi" ; + .SHELL_SET = "" ; + .CATENATE = cat ; + .CP = cp ; + .NULLIN = "<" "/dev/null" ; +} + + +.VERBOSE_TEST = 0 ; +if --verbose-test in [ modules.peek : ARGV ] +{ + .VERBOSE_TEST = 1 ; +} + + +.RM = [ common.rm-command ] ; + + +actions capture-output bind INPUT_FILES output-file +{ + $(PATH_SETUP) + $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 + $(.SET_STATUS) + $(.RUN_OUTPUT_NL) >> "$(output-file)" + echo EXIT STATUS: $(.STATUS) >> "$(output-file)" + if $(.STATUS_0) + $(.CP) "$(output-file)" "$(<)" + $(.ENDIF) + $(.SHELL_SET)verbose=$(.VERBOSE_TEST) + if $(.STATUS_NOT_0) + $(.SHELL_SET)verbose=1 + $(.ENDIF) + if $(.VERBOSE) + echo ====== BEGIN OUTPUT ====== + $(.CATENATE) "$(output-file)" + echo ====== END OUTPUT ====== + $(.ENDIF) + exit $(.STATUS) +} + +IMPORT testing : capture-output : : testing.capture-output ; + + +actions quietly updated ignore piecemeal together RmTemps +{ + $(.RM) "$(>)" +} + + +if [ os.name ] = VMS +{ + actions capture-output bind INPUT_FILES output-file + { + $(PATH_SETUP) + !! Execute twice - first for status, second for output + set noon + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) 2>NL: >NL: + $(.SET_STATUS) + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) | type sys$input /out=$(output-file:W) + set on + !! Harmonize VMS success status with POSIX + if $(.SUCCESS) + $(.SHELL_SET)status="0" + $(.ENDIF) + $(.RUN_OUTPUT_NL) | append /new sys$input $(output-file:W) + $(.SAY) "EXIT STATUS: $(.STATUS)" | append /new sys$input $(output-file:W) + if $(.STATUS_0) + $(.CP) $(output-file:W) $(<:W) + $(.ENDIF) + $(.SHELL_SET)verbose=$(.VERBOSE_TEST) + if $(.STATUS_NOT_0) + $(.SHELL_SET)verbose=1 + $(.ENDIF) + if $(.VERBOSE) + $(.SAY) "====== BEGIN OUTPUT ======" + $(.CATENATE) $(output-file:W) + $(.SAY) "====== END OUTPUT ======" + $(.ENDIF) + !! Harmonize VMS success status with POSIX on exit + if $(.STATUS_0) + $(.SHELL_SET)status="$(.EXIT_SUCCESS)" + $(.ENDIF) + exit "$(.STATUS)" + } + + actions quietly updated ignore piecemeal together RmTemps + { + $(.RM) $(>:WJ=;*,);* + } +} + + +.MAKE_FILE = [ common.file-creation-command ] ; + + +rule unit-test ( target : source : properties * ) +{ + if ! [ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } +} + +actions unit-test +{ + $(PATH_SETUP) + $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)" +} + +if [ os.name ] = VMS +{ + actions unit-test + { + $(PATH_SETUP) + pipe $(LAUNCHER) $(>:W) $(ARGS) && $(.MAKE_FILE) $(<:W) + } +} + +# Note that this rule may be called multiple times for a single target in case +# there are multiple actions operating on the same target in sequence. One such +# example are msvc exe targets first created by a linker action and then updated +# with an embedded manifest file by a separate action. 
+rule record-time ( target : source : start end user system ) +{ + local src-string = [$(source:G=:J=",")"] " ; + USER_TIME on $(target) += $(src-string)$(user) ; + SYSTEM_TIME on $(target) += $(src-string)$(system) ; + + # We need the following variables because attempting to perform such + # variable expansion in actions would not work due to quotes getting treated + # as regular characters. + USER_TIME_SECONDS on $(target) += $(src-string)$(user)" seconds" ; + SYSTEM_TIME_SECONDS on $(target) += $(src-string)$(system)" seconds" ; +} + +# Calling this rule requests that Boost Build time how long it takes to build +# the 'source' target and display the results both on the standard output and in +# the 'target' file. +# +rule time ( target : sources + : properties * ) +{ + # Set up rule for recording timing information. + __TIMING_RULE__ on $(sources) = testing.record-time $(target) ; + + # Make sure the sources get rebuilt any time we need to retrieve that + # information. + REBUILDS $(target) : $(sources) ; +} + + +actions time +{ + echo user: $(USER_TIME) + echo system: $(SYSTEM_TIME) + + echo user: $(USER_TIME_SECONDS) > "$(<)" + echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)" +} + +if [ os.name ] = VMS +{ + actions time + { + WRITE SYS$OUTPUT "user: ", "$(USER_TIME)" + WRITE SYS$OUTPUT "system: ", "(SYSTEM_TIME)" + + PIPE WRITE SYS$OUTPUT "user: ", "$(USER_TIME_SECONDS)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W) + } +} diff --git a/src/boost/tools/build/src/tools/testing.jam b/src/boost/tools/build/src/tools/testing.jam new file mode 100644 index 000000000..ed24912e4 --- /dev/null +++ b/src/boost/tools/build/src/tools/testing.jam @@ -0,0 +1,847 @@ +# Copyright 2005 Dave Abrahams +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Copyright 2014-2015 Rene Rivera +# Copyright 2014 Microsoft Corporation +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module implements regression testing framework. It declares a number of +# main target rules which perform some action and, if the results are OK, +# creates an output file. +# +# The exact list of rules is: +# 'compile' -- creates .test file if compilation of sources was +# successful. +# 'compile-fail' -- creates .test file if compilation of sources failed. +# 'run' -- creates .test file is running of executable produced from +# sources was successful. Also leaves behind .output file +# with the output from program run. +# 'run-fail' -- same as above, but .test file is created if running fails. +# +# In all cases, presence of .test file is an indication that the test passed. +# For more convenient reporting, you might want to use C++ Boost regression +# testing utilities (see http://www.boost.org/more/regression.html). +# +# For historical reason, a 'unit-test' rule is available which has the same +# syntax as 'exe' and behaves just like 'run'. + +# Things to do: +# - Teach compiler_status handle Jamfile.v2. +# Notes: +# - is not implemented, since it is Como-specific, and it is not +# clear how to implement it +# - std::locale-support is not implemented (it is used in one test). 
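The contract described above, where a `.test` file exists after the build if and only if the check came out as expected, is easy to model outside of B2. The sketch below is a hedged approximation of what the `run` / `run-fail` pair amounts to, using `subprocess` directly instead of B2's generators and the `capture-output` action:

[source,python]
----
import subprocess

def run_check(command, output_file, test_file, expect_failure=False):
    """Run 'command', keep its output, and create the .test marker only
    when the exit status matches the expectation (run vs. run-fail)."""
    proc = subprocess.run(command, stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    with open(output_file, "wb") as out:
        out.write(proc.stdout)
        out.write(("\nEXIT STATUS: %d\n" % proc.returncode).encode())
    passed = (proc.returncode != 0) if expect_failure else (proc.returncode == 0)
    if passed:
        with open(test_file, "w") as marker:
            marker.write("passed\n")
    return passed

# run_check(["./hello"], "hello.output", "hello.test")         # like 'run'
# run_check(["./crash"], "crash.output", "crash.test", True)   # like 'run-fail'
----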
+ + +import alias ; +import build-system ; +import "class" ; +import common ; +import errors ; +import feature ; +import generators ; +import os ; +import param ; +import path ; +import project ; +import property ; +import property-set ; +import regex ; +import sequence ; +import targets ; +import toolset ; +import type ; +import virtual-target ; + + +rule init ( ) +{ +} + + +# Feature controlling the command used to launch test programs. +feature.feature testing.launcher : : free optional ; + +feature.feature test-info : : free incidental ; +feature.feature testing.arg : : free incidental ; +feature.feature testing.input-file : : free dependency ; + +feature.feature preserve-test-targets : on off : incidental propagated ; + +# Feature to control whether executable binaries are run as part of test. +# This can be used to just compile test cases in cross compilation situations. +feature.feature testing.execute : on off : incidental propagated ; +feature.set-default testing.execute : on ; + +# Register target types. +type.register TEST : test ; +type.register COMPILE : : TEST ; +type.register COMPILE_FAIL : : TEST ; +type.register RUN_OUTPUT : run ; +type.register RUN : : TEST ; +type.register RUN_FAIL : : TEST ; +type.register LINK_FAIL : : TEST ; +type.register LINK : : TEST ; +type.register UNIT_TEST : passed : TEST ; + + +# Suffix to denote test target directory +# +.TEST-DIR-SUFFIX = ".test" ; +if [ os.name ] = VMS +{ + .TEST-DIR-SUFFIX = "$test" ; +} + +# Declare the rules which create main targets. While the 'type' module already +# creates rules with the same names for us, we need extra convenience: default +# name of main target, so write our own versions. + +# Helper rule. Create a test target, using basename of first source if no target +# name is explicitly passed. Remembers the created target in a global variable. +# +rule make-test ( target-type : sources + : requirements * : target-name ? ) +{ + target-name ?= $(sources[1]:D=:S=) ; + + # Having periods (".") in the target name is problematic because the typed + # generator will strip the suffix and use the bare name for the file + # targets. Even though the location-prefix averts problems most times it + # does not prevent ambiguity issues when referring to the test targets. For + # example when using the XML log output. So we rename the target to remove + # the periods, and provide an alias for users. + local real-name = [ regex.replace $(target-name) "[.]" "~" ] ; + + local project = [ project.current ] ; + # The forces the build system for generate paths in the + # form '$build_dir/array1$(.TEST-DIR-SUFFIX)/gcc/debug'. This is necessary + # to allow post-processing tools to work. + local t = [ targets.create-typed-target [ type.type-from-rule-name + $(target-type) ] : $(project) : $(real-name) : $(sources) : + $(requirements) $(real-name)$(.TEST-DIR-SUFFIX) + toolset ] ; + + # The alias to the real target, per period replacement above. + if $(real-name) != $(target-name) + { + alias $(target-name) : $(t) ; + } + + # Remember the test (for --dump-tests). A good way would be to collect all + # given a project. This has some technical problems: e.g. we can not call + # this dump from a Jamfile since projects referred by 'build-project' are + # not available until the whole Jamfile has been loaded. + .all-tests += $(t) ; + return $(t) ; +} + + +# Note: passing more that one cpp file here is known to fail. Passing a cpp file +# and a library target works. +# +rule compile ( sources + : requirements * : target-name ? 
) +{ + param.handle-named-params sources requirements target-name ; + return [ make-test compile : $(sources) : $(requirements) : $(target-name) ] + ; +} + + +rule compile-fail ( sources + : requirements * : target-name ? ) +{ + param.handle-named-params sources requirements target-name ; + return [ make-test compile-fail : $(sources) : $(requirements) : + $(target-name) ] ; +} + + +rule link ( sources + : requirements * : target-name ? ) +{ + param.handle-named-params sources requirements target-name ; + return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ; +} + + +rule link-fail ( sources + : requirements * : target-name ? ) +{ + param.handle-named-params sources requirements target-name ; + return [ make-test link-fail : $(sources) : $(requirements) : $(target-name) + ] ; +} + + +rule handle-input-files ( input-files * ) +{ + if $(input-files[2]) + { + # Check that sorting made when creating property-set instance will not + # change the ordering. + if [ sequence.insertion-sort $(input-files) ] != $(input-files) + { + errors.user-error "Names of input files must be sorted alphabetically" + : "due to internal limitations" ; + } + } + return $(input-files) ; +} + + +rule run ( sources + : args * : input-files * : requirements * : target-name ? : + default-build * ) +{ + param.handle-named-params sources args input-files requirements + target-name default-build ; + requirements += $(args:J=" ") ; + requirements += [ handle-input-files $(input-files) ] ; + return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ; +} + + +rule run-fail ( sources + : args * : input-files * : requirements * : + target-name ? : default-build * ) +{ + param.handle-named-params sources args input-files requirements + target-name default-build ; + requirements += $(args:J=" ") ; + requirements += [ handle-input-files $(input-files) ] ; + return [ make-test run-fail : $(sources) : $(requirements) : $(target-name) + ] ; +} + + +# Use 'test-suite' as a synonym for 'alias', for backward compatibility. +IMPORT : alias : : test-suite ; + + +# For all main targets in 'project-module', which are typed targets with type +# derived from 'TEST', produce some interesting information. +# +rule dump-tests +{ + for local t in $(.all-tests) + { + dump-test $(t) ; + } +} + +if ( --dump-tests in [ modules.peek : ARGV ] ) +{ + IMPORT testing : dump-tests : : testing.dump-tests ; + build-system.add-pre-build-hook testing.dump-tests ; +} + +# Given a project location in normalized form (slashes are forward), compute the +# name of the Boost library. +# +local rule get-library-name ( path ) +{ + # Path is in normalized form, so all slashes are forward. + local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ; + local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ; + local match3 = [ MATCH (/status$) : $(path) ] ; + + if $(match1) { return $(match1[2]) ; } + else if $(match2) { return $(match2[2]) ; } + else if $(match3) { return "" ; } + else if --dump-tests in [ modules.peek : ARGV ] + { + # The 'run' rule and others might be used outside boost. In that case, + # just return the path, since the 'library name' makes no sense. + return $(path) ; + } +} + + +# Was an XML dump requested? +.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ; + + +# Takes a target (instance of 'basic-target') and prints +# - its type +# - its name +# - comments specified via the property +# - relative location of all source from the project root. 
+# +rule dump-test ( target ) +{ + local type = [ $(target).type ] ; + local name = [ $(target).name ] ; + local project = [ $(target).project ] ; + + local project-root = [ $(project).get project-root ] ; + local library = [ get-library-name [ path.root [ $(project).get location ] + [ path.pwd ] ] ] ; + if $(library) + { + name = $(library)/$(name) ; + } + + local sources = [ $(target).sources ] ; + local source-files ; + for local s in $(sources) + { + if [ class.is-a $(s) : file-reference ] + { + local location = [ path.root [ path.root [ $(s).name ] + [ $(s).location ] ] [ path.pwd ] ] ; + + source-files += [ path.relative-to [ path.root $(project-root) + [ path.pwd ] ] $(location) ] ; + } + } + + local target-name = + [ $(project).get location ] // [ $(target).name ] $(.TEST-DIR-SUFFIX) ; + target-name = $(target-name:J=) ; + + local r = [ $(target).requirements ] ; + # Extract values of the feature. + local test-info = [ $(r).get ] ; + + # If the user requested XML output on the command-line, add the test info to + # that XML file rather than dumping them to stdout. + if $(.out-xml) + { + local nl = " +" ; + .contents on $(.out-xml) += + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + "$(nl) " + ; + } + else + { + # Format them into a single string of quoted strings. + test-info = \"$(test-info:J=\"\ \")\" ; + + ECHO boost-test($(type)) \"$(name)\" "[$(test-info)]" ":" + \"$(source-files)\" ; + } +} + +class testing.expect-failure-generator : generator +{ + rule generated-targets ( sources + : property-set : project name ? ) + { + for local s in $(sources) + { + local a = [ $(s).action ] ; + if $(a) + { + for local t in [ $(a).targets ] + { + $(t).fail-expected ; + } + } + } + return [ generator.generated-targets $(sources) + : $(property-set) : $(project) $(name) ] ; + } +} + +local rule register-fail-expected ( source-type : test-type ) +{ + generators.register [ class.new testing.expect-failure-generator + testing.expect-failure : $(source-type) : $(test-type) ] ; +} + +# Register generators. Depending on target type, either 'expect-success' or +# 'expect-failure' rule will be used. +generators.register-standard testing.expect-success : OBJ : COMPILE ; +register-fail-expected OBJ : COMPILE_FAIL ; +generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ; +register-fail-expected RUN_OUTPUT : RUN_FAIL ; +generators.register-standard testing.expect-success : EXE : LINK ; +register-fail-expected EXE : LINK_FAIL ; + +# Generator which runs an EXE and captures output. +generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ; + +# Generator which creates a target if sources run successfully. Differs from RUN +# in that run output is not captured. The reason why it exists is that the 'run' +# rule is much better for automated testing, but is not user-friendly (see +# http://article.gmane.org/gmane.comp.lib.boost.build/6353). +generators.register-standard testing.unit-test : EXE : UNIT_TEST ; + +toolset.uses-features testing.expect-success : ; +toolset.uses-features testing.expect-failure : ; + +# The action rules called by generators. + +# Causes the 'target' to exist after bjam invocation if and only if all the +# dependencies were successfully built. +# +rule expect-success ( target : dependency + : requirements * ) +{ + **passed** $(target) : $(dependency) : $(requirements) ; +} + + +# Causes the 'target' to exist after bjam invocation if and only if all some of +# the dependencies were not successfully built. 
+# +rule expect-failure ( target : dependency + : properties * ) +{ + local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ; + local marker = $(dependency:G=$(grist)*fail) ; + (failed-as-expected) $(marker) ; + LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ; + RMOLD $(marker) ; + DEPENDS $(marker) : $(dependency) ; + DEPENDS $(target) : $(marker) ; + **passed** $(target) : $(marker) : $(properties) ; +} + + +# The rule/action combination used to report successful passing of a test. +# +rule **passed** ( target : sources * : properties * ) +{ + if [ feature.get-values preserve-test-targets : $(properties) ] = off + { + remove-test-targets $(<) ; + } + # Force deletion of the target, in case any dependencies failed to build. + RMOLD $(<) ; +} + + + +# Used to create test files signifying passed tests. +# +actions **passed** +{ + echo passed > "$(<)" +} + +# Used to create replacement object files that do not get created during tests +# that are expected to fail. +# +actions (failed-as-expected) +{ + echo failed as expected > "$(<)" +} + + +if [ os.name ] = VMS +{ + actions **passed** + { + PIPE WRITE SYS$OUTPUT "passed" > $(<:W) + } + + actions (failed-as-expected) + { + PIPE WRITE SYS$OUTPUT "failed as expected" > $(<:W) + } +} + +rule run-path-setup ( target : source : properties * ) +{ + # For testing, we need to make sure that all dynamic libraries needed by the + # test are found. So, we collect all paths from dependency libraries (via + # xdll-path property) and add whatever explicit dll-path user has specified. + # The resulting paths are added to the environment on each test invocation. + local target-os = [ feature.get-values : $(properties) ] ; + local dll-paths = [ feature.get-values : $(properties) ] ; + dll-paths += [ feature.get-values : $(properties) ] ; + if $(target-os) != vxworks + { + dll-paths += [ on $(source) return $(RUN_PATH) ] ; + } + dll-paths = [ sequence.unique $(dll-paths) ] ; + if $(dll-paths) + { + translate-to-os = path.native ; + if [ os.name ] = VMS + { + translate-to-os = path.to-VMS ; + } + if $(target-os) = vxworks + { + # map paths to paths + local save-os = [ modules.peek os : .name ] ; + modules.poke os : .name : VXWORKS ; + local parent = [ os.environ PKG_SRC_BUILD_DIR ] ; + local prefix = [ os.environ LAYER_SRC_PATH ] ; + local target-dll-paths ; + for local e in $(dll-paths) + { + target-dll-paths += [ path.join $(prefix) [ path.relative $(e) $(parent) : noerror ] ] ; + } + PATH_SETUP on $(target) = [ common.prepend-path-variable-command + [ os.shared-library-path-variable ] : $(target-dll-paths) ] ; + modules.poke os : .name : $(save-os) ; + } + else + { + dll-paths = [ sequence.transform $(translate-to-os) : $(dll-paths) ] ; + PATH_SETUP on $(target) = [ common.prepend-path-variable-command + [ os.shared-library-path-variable ] : $(dll-paths) ] ; + } + } +} + + +local argv = [ modules.peek : ARGV ] ; + +toolset.flags testing.capture-output ARGS ; +toolset.flags testing.capture-output INPUT_FILES ; +toolset.flags testing.capture-output LAUNCHER ; + +toolset.uses-features testing.capture-output : + ; + +if --remove-test-targets in [ modules.peek : ARGV ] +{ + feature.set-default preserve-test-targets : off ; +} + + +# Runs executable 'sources' and stores stdout in file 'target'. Unless +# --preserve-test-targets command line option has been specified, removes the +# executable. 
+# +rule capture-output ( target : source : properties * ) +{ + output-file on $(target) = $(target:S=.output) ; + LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ; + + # The INCLUDES kill a warning about independent target... + INCLUDES $(target) : $(target:S=.output) ; + # but it also puts .output into dependency graph, so we must tell jam it is + # OK if it cannot find the target or updating rule. + NOCARE $(target:S=.output) ; + + # This has two-fold effect. First it adds input files to the dependency + # graph, preventing a warning. Second, it causes input files to be bound + # before target is created. Therefore, they are bound using SEARCH setting + # on them and not LOCATE setting of $(target), as in other case (due to jam + # bug). + DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ; + + run-path-setup $(target) : $(source) : $(properties) ; + + DISABLE_TEST_EXECUTION on $(target) = 0 ; + if [ feature.get-values testing.execute : $(properties) ] = off + { + DISABLE_TEST_EXECUTION on $(target) = 1 ; + } + + if ! [ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } +} + +.types-to-remove = EXE OBJ ; + +local rule remove-test-targets ( target ) +{ + local action = [ on $(target) return $(.action) ] ; + local associated-targets = [ virtual-target.traverse [ $(action).targets ] ] ; + local targets-to-remove ; + for local t in [ sequence.unique $(associated-targets) ] + { + if [ $(t).type ] in $(.types-to-remove) + { + targets-to-remove += [ $(t).actual-name ] ; + } + } + rmtemp-sources $(target) : $(targets-to-remove) ; +} + +local rule rmtemp-sources ( target : sources * ) +{ + if $(sources) + { + TEMPORARY $(sources) ; + # Set a second action on target that will be executed after capture + # output action. The 'RmTemps' rule has the 'ignore' modifier so it is + # always considered succeeded. This is needed for 'run-fail' test. For + # that test the target will be marked with FAIL_EXPECTED, and without + # 'ignore' successful execution will be negated and be reported as + # failure. With 'ignore' we do not detect a case where removing files + # fails, but it is not likely to happen. + RmTemps $(target) : $(sources) ; + } +} + + +if [ os.name ] = NT +{ + .STATUS = %status% ; + .SET_STATUS = "set status=%ERRORLEVEL%" ; + .RUN_OUTPUT_NL = "echo." ; + .THEN = "(" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "%status% EQU 0 $(.THEN)" ; + .STATUS_NOT_0 = "%status% NEQ 0 $(.THEN)" ; + .VERBOSE = "%verbose% EQU 1 $(.THEN)" ; + .ENDIF = ")" ; + .SHELL_SET = "set " ; + .CATENATE = type ; + .CP = copy ; + .NULLIN = ; +} +else if [ os.name ] = VMS +{ + local nl = " +" ; + + .STATUS = "''status'" ; + .SET_STATUS = "status=$STATUS" ; + .SAY = "pipe write sys$output" ; ## not really echo + .RUN_OUTPUT_NL = "$(.SAY) \"\"" ; + .THEN = "$(nl)then" ; + .EXIT_SUCCESS = "1" ; + .SUCCESS = "status .eq. $(.EXIT_SUCCESS) $(.THEN)" ; + .STATUS_0 = "status .eq. 0 $(.THEN)" ; + .STATUS_NOT_0 = "status .ne. 0 $(.THEN)" ; + .VERBOSE = "verbose .eq. 1 $(.THEN)" ; + .ENDIF = "endif" ; + .SHELL_SET = "" ; + .CATENATE = type ; + .CP = copy ; + .NULLIN = ; +} +else +{ + .STATUS = "$status" ; + .SET_STATUS = "status=$?" 
; + .RUN_OUTPUT_NL = "echo" ; + .THEN = "; then" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "test $status -eq 0 $(.THEN)" ; + .STATUS_NOT_0 = "test $status -ne 0 $(.THEN)" ; + .VERBOSE = "test $verbose -eq 1 $(.THEN)" ; + .ENDIF = "fi" ; + .SHELL_SET = "" ; + .CATENATE = cat ; + .CP = cp ; + .NULLIN = "<" "/dev/null" ; +} + + +.VERBOSE_TEST = 0 ; +if --verbose-test in [ modules.peek : ARGV ] +{ + .VERBOSE_TEST = 1 ; +} + + +.RM = [ common.rm-command ] ; + + +actions capture-output bind INPUT_FILES output-file +{ + $(PATH_SETUP) + $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION) + if $(.STATUS_NOT_0) + echo Skipping test execution due to testing.execute=off + exit $(.EXIT_SUCCESS) + $(.ENDIF) + $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 $(.NULLIN) + $(.SET_STATUS) + $(.RUN_OUTPUT_NL) >> "$(output-file)" + echo EXIT STATUS: $(.STATUS) >> "$(output-file)" + if $(.STATUS_0) + $(.CP) "$(output-file)" "$(<)" + $(.ENDIF) + $(.SHELL_SET)verbose=$(.VERBOSE_TEST) + if $(.STATUS_NOT_0) + $(.SHELL_SET)verbose=1 + $(.ENDIF) + if $(.VERBOSE) + echo ====== BEGIN OUTPUT ====== + $(.CATENATE) "$(output-file)" + echo ====== END OUTPUT ====== + $(.ENDIF) + exit $(.STATUS) +} + + +actions quietly updated ignore piecemeal together RmTemps +{ + $(.RM) "$(>)" +} + +if [ os.name ] = VMS +{ + actions capture-output bind INPUT_FILES output-file + { + $(PATH_SETUP) + $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION) + if $(.STATUS_NOT_0) + $(.SAY) "Skipping test execution due to testing.execute=off" + exit "$(.EXIT_SUCCESS)" + $(.ENDIF) + !! Execute twice - first for status, second for output + set noon + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) 2>NL: >NL: + $(.SET_STATUS) + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) | type sys$input /out=$(output-file:W) + set on + !! Harmonize VMS success status with POSIX + if $(.SUCCESS) + $(.SHELL_SET)status="0" + $(.ENDIF) + $(.RUN_OUTPUT_NL) | append /new sys$input $(output-file:W) + $(.SAY) "EXIT STATUS: $(.STATUS)" | append /new sys$input $(output-file:W) + if $(.STATUS_0) + $(.CP) $(output-file:W) $(<:W) + $(.ENDIF) + $(.SHELL_SET)verbose=$(.VERBOSE_TEST) + if $(.STATUS_NOT_0) + $(.SHELL_SET)verbose=1 + $(.ENDIF) + if $(.VERBOSE) + $(.SAY) "====== BEGIN OUTPUT ======" + $(.CATENATE) $(output-file:W) + $(.SAY) "====== END OUTPUT ======" + $(.ENDIF) + !! Harmonize VMS success status with POSIX on exit + if $(.STATUS_0) + $(.SHELL_SET)status="$(.EXIT_SUCCESS)" + $(.ENDIF) + exit "$(.STATUS)" + } + + actions quietly updated ignore piecemeal together RmTemps + { + $(.RM) $(>:WJ=;*,);* + } +} + +.MAKE_FILE = [ common.file-creation-command ] ; + +toolset.flags testing.unit-test LAUNCHER ; +toolset.flags testing.unit-test ARGS ; + + +rule unit-test ( target : source : properties * ) +{ + run-path-setup $(target) : $(source) : $(properties) ; + + if ! [ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } +} + + +actions unit-test +{ + $(PATH_SETUP) + $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)" +} + +if [ os.name ] = VMS +{ + actions unit-test + { + $(PATH_SETUP) + pipe $(LAUNCHER) $(>:W) $(ARGS) && $(.MAKE_FILE) $(<:W) + } +} + +IMPORT $(__name__) : compile compile-fail run run-fail link link-fail + : : compile compile-fail run run-fail link link-fail ; + + +# This is a composing generator to support cases where a generator for the +# specified target constructs other targets as well. 
One such example is msvc's +# exe generator that constructs both EXE and PDB targets. +type.register TIME : time ; +generators.register-composing testing.time : : TIME ; + + +# Note that this rule may be called multiple times for a single target in case +# there are multiple actions operating on the same target in sequence. One such +# example are msvc exe targets first created by a linker action and then updated +# with an embedded manifest file by a separate action. +rule record-time ( target : source : start end user system clock ) +{ + local src-string = "[$(source:G=:J=,)] " ; + USER_TIME on $(target) += $(src-string)$(user) ; + SYSTEM_TIME on $(target) += $(src-string)$(system) ; + CLOCK_TIME on $(target) += $(src-string)$(clock) ; + + # We need the following variables because attempting to perform such + # variable expansion in actions would not work due to quotes getting treated + # as regular characters. + USER_TIME_SECONDS on $(target) += $(src-string)$(user)" seconds" ; + SYSTEM_TIME_SECONDS on $(target) += $(src-string)$(system)" seconds" ; + CLOCK_TIME_SECONDS on $(target) += $(src-string)$(clock)" seconds" ; +} + + +# Support for generating timing information for any main target. To use +# declare a custom make target that uses the testing.time generator rule +# specified here. For example: +# +# make main.cpp : main_cpp.pro : @do-something ; +# time main.time : main.cpp ; +# actions do-something +# { +# sleep 2 && echo "$(<)" > "$(<)" +# } +# +# The above will generate a "main.time", and echo to output, timing +# information for the action of source "main.cpp". + + +IMPORT testing : record-time : : testing.record-time ; + + +# Calling this rule requests that Boost Build time how long it takes to build +# the 'source' target and display the results both on the standard output and in +# the 'target' file. +# +rule time ( target : sources + : properties * ) +{ + # Set up rule for recording timing information. + local action = [ on $(target) return $(.action) ] ; + for local action.source in [ $(action).sources ] + { + # Yes, this uses the private "actual-name" of the target action. + # But it's the only way to get at the real name of the sources + # given the context of header scanners. + __TIMING_RULE__ on [ $(action.source).actual-name ] = testing.record-time $(target) ; + } + + # Make sure the sources get rebuilt any time we need to retrieve that + # information. 
+ REBUILDS $(target) : $(sources) ; +} + + +actions time +{ + echo user: $(USER_TIME) + echo system: $(SYSTEM_TIME) + echo clock: $(CLOCK_TIME) + + echo user: $(USER_TIME_SECONDS) > "$(<)" + echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)" + echo clock: $(CLOCK_TIME_SECONDS) >> "$(<)" +} + +if [ os.name ] = VMS +{ + actions time + { + WRITE SYS$OUTPUT "user: ", "$(USER_TIME)" + WRITE SYS$OUTPUT "system: ", "(SYSTEM_TIME)" + WRITE SYS$OUTPUT "clock: ", "(CLOCK_TIME)" + + PIPE WRITE SYS$OUTPUT "user: ", "$(USER_TIME_SECONDS)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W) + PIPE WRITE SYS$OUTPUT "clock: ", "$(CLOCK_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W) + } +} diff --git a/src/boost/tools/build/src/tools/testing.py b/src/boost/tools/build/src/tools/testing.py new file mode 100644 index 000000000..3565a61da --- /dev/null +++ b/src/boost/tools/build/src/tools/testing.py @@ -0,0 +1,359 @@ +# Status: ported, except for --out-xml +# Base revision: 64488 +# +# Copyright 2005 Dave Abrahams +# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module implements regression testing framework. It declares a number of +# main target rules which perform some action and, if the results are OK, +# creates an output file. +# +# The exact list of rules is: +# 'compile' -- creates .test file if compilation of sources was +# successful. +# 'compile-fail' -- creates .test file if compilation of sources failed. +# 'run' -- creates .test file is running of executable produced from +# sources was successful. Also leaves behind .output file +# with the output from program run. +# 'run-fail' -- same as above, but .test file is created if running fails. +# +# In all cases, presence of .test file is an indication that the test passed. +# For more convenient reporting, you might want to use C++ Boost regression +# testing utilities (see http://www.boost.org/more/regression.html). +# +# For historical reason, a 'unit-test' rule is available which has the same +# syntax as 'exe' and behaves just like 'run'. + +# Things to do: +# - Teach compiler_status handle Jamfile.v2. +# Notes: +# - is not implemented, since it is Como-specific, and it is not +# clear how to implement it +# - std::locale-support is not implemented (it is used in one test). + +import b2.build.feature as feature +import b2.build.type as type +import b2.build.targets as targets +import b2.build.generators as generators +import b2.build.toolset as toolset +import b2.tools.common as common +import b2.util.option as option +import b2.build_system as build_system + + + +from b2.manager import get_manager +from b2.util import stem, bjam_signature, is_iterable_typed +from b2.util.sequence import unique + +import bjam + +import re +import os.path +import sys + +def init(): + pass + +# Feature controlling the command used to lanch test programs. +feature.feature("testing.launcher", [], ["free", "optional"]) + +feature.feature("test-info", [], ["free", "incidental"]) +feature.feature("testing.arg", [], ["free", "incidental"]) +feature.feature("testing.input-file", [], ["free", "dependency"]) + +feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"]) + +# Register target types. 
+type.register("TEST", ["test"]) +type.register("COMPILE", [], "TEST") +type.register("COMPILE_FAIL", [], "TEST") + +type.register("RUN_OUTPUT", ["run"]) +type.register("RUN", [], "TEST") +type.register("RUN_FAIL", [], "TEST") + +type.register("LINK", [], "TEST") +type.register("LINK_FAIL", [], "TEST") +type.register("UNIT_TEST", ["passed"], "TEST") + +__all_tests = [] + +# Declare the rules which create main targets. While the 'type' module already +# creates rules with the same names for us, we need extra convenience: default +# name of main target, so write our own versions. + +# Helper rule. Create a test target, using basename of first source if no target +# name is explicitly passed. Remembers the created target in a global variable. +def make_test(target_type, sources, requirements, target_name=None): + assert isinstance(target_type, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert isinstance(target_type, basestring) or target_type is None + if not target_name: + target_name = stem(os.path.basename(sources[0])) + + # Having periods (".") in the target name is problematic because the typed + # generator will strip the suffix and use the bare name for the file + # targets. Even though the location-prefix averts problems most times it + # does not prevent ambiguity issues when referring to the test targets. For + # example when using the XML log output. So we rename the target to remove + # the periods, and provide an alias for users. + real_name = target_name.replace(".", "~") + + project = get_manager().projects().current() + # The forces the build system for generate paths in the + # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow + # post-processing tools to work. + t = get_manager().targets().create_typed_target( + type.type_from_rule_name(target_type), project, real_name, sources, + requirements + ["" + real_name + ".test"], [], []) + + # The alias to the real target, per period replacement above. + if real_name != target_name: + get_manager().projects().project_rules().rules["alias"]( + target_name, [t]) + + # Remember the test (for --dump-tests). A good way would be to collect all + # given a project. This has some technical problems: e.g. we can not call + # this dump from a Jamfile since projects referred by 'build-project' are + # not available until the whole Jamfile has been loaded. + __all_tests.append(t) + return t + + +# Note: passing more that one cpp file here is known to fail. Passing a cpp file +# and a library target works. 
+# +@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) +def compile(sources, requirements, target_name=None): + return make_test("compile", sources, requirements, target_name) + +@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) +def compile_fail(sources, requirements, target_name=None): + return make_test("compile-fail", sources, requirements, target_name) + +@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) +def link(sources, requirements, target_name=None): + return make_test("link", sources, requirements, target_name) + +@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) +def link_fail(sources, requirements, target_name=None): + return make_test("link-fail", sources, requirements, target_name) + +def handle_input_files(input_files): + if len(input_files) > 1: + # Check that sorting made when creating property-set instance will not + # change the ordering. + if sorted(input_files) != input_files: + get_manager().errors()("Names of input files must be sorted alphabetically\n" + + "due to internal limitations") + return ["" + f for f in input_files] + +@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], + ["requirements", "*"], ["target_name", "?"], + ["default_build", "*"])) +def run(sources, args, input_files, requirements, target_name=None, default_build=[]): + if args: + requirements.append("" + " ".join(args)) + requirements.extend(handle_input_files(input_files)) + return make_test("run", sources, requirements, target_name) + +@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], + ["requirements", "*"], ["target_name", "?"], + ["default_build", "*"])) +def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]): + if args: + requirements.append("" + " ".join(args)) + requirements.extend(handle_input_files(input_files)) + return make_test("run-fail", sources, requirements, target_name) + +# Register all the rules +for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]: + get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_"))) + +# Use 'test-suite' as a synonym for 'alias', for backward compatibility. +from b2.build.alias import alias +get_manager().projects().add_rule("test-suite", alias) + +# For all main targets in 'project-module', which are typed targets with type +# derived from 'TEST', produce some interesting information. +# +def dump_tests(): + for t in __all_tests: + dump_test(t) + +# Given a project location in normalized form (slashes are forward), compute the +# name of the Boost library. +# +__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)") +__ln2 = re.compile("/(tools|libs)/(.*)$") +__ln3 = re.compile("(/status$)") +def get_library_name(path): + assert isinstance(path, basestring) + + path = path.replace("\\", "/") + match1 = __ln1.match(path) + match2 = __ln2.match(path) + match3 = __ln3.match(path) + + if match1: + return match1.group(2) + elif match2: + return match2.group(2) + elif match3: + return "" + elif option.get("dump-tests", False, True): + # The 'run' rule and others might be used outside boost. In that case, + # just return the path, since the 'library name' makes no sense. + return path + +# Was an XML dump requested? 
+__out_xml = option.get("out-xml", False, True) + +# Takes a target (instance of 'basic-target') and prints +# - its type +# - its name +# - comments specified via the property +# - relative location of all source from the project root. +# +def dump_test(target): + assert isinstance(target, targets.AbstractTarget) + type = target.type() + name = target.name() + project = target.project() + + project_root = project.get('project-root') + library = get_library_name(os.path.abspath(project.get('location'))) + if library: + name = library + "/" + name + + sources = target.sources() + source_files = [] + for s in sources: + if isinstance(s, targets.FileReference): + location = os.path.abspath(os.path.join(s.location(), s.name())) + source_files.append(os.path.relpath(location, os.path.abspath(project_root))) + + target_name = project.get('location') + "//" + target.name() + ".test" + + test_info = target.requirements().get('test-info') + test_info = " ".join('"' + ti + '"' for ti in test_info) + + # If the user requested XML output on the command-line, add the test info to + # that XML file rather than dumping them to stdout. + #if $(.out-xml) + #{ +# local nl = " +#" ; +# .contents on $(.out-xml) += +# "$(nl) " +# "$(nl) " +# "$(nl) " +# "$(nl) " +# "$(nl) " +# ; +# } +# else + + source_files = " ".join('"' + s + '"' for s in source_files) + if test_info: + print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files) + else: + print 'boost-test(%s) "%s" : %s' % (type, name, source_files) + +# Register generators. Depending on target type, either 'expect-success' or +# 'expect-failure' rule will be used. +generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"]) +generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"]) +generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"]) +generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"]) +generators.register_standard("testing.expect-success", ["EXE"], ["LINK"]) +generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"]) + +# Generator which runs an EXE and captures output. +generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"]) + +# Generator which creates a target if sources run successfully. Differs from RUN +# in that run output is not captured. The reason why it exists is that the 'run' +# rule is much better for automated testing, but is not user-friendly (see +# http://article.gmane.org/gmane.comp.lib.boost.build/6353). +generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"]) + +# FIXME: if those calls are after bjam.call, then bjam will crash +# when toolset.flags calls bjam.caller. +toolset.flags("testing.capture-output", "ARGS", [], [""]) +toolset.flags("testing.capture-output", "INPUT_FILES", [], [""]) +toolset.flags("testing.capture-output", "LAUNCHER", [], [""]) + +toolset.flags("testing.unit-test", "LAUNCHER", [], [""]) +toolset.flags("testing.unit-test", "ARGS", [], [""]) + +# This is a composing generator to support cases where a generator for the +# specified target constructs other targets as well. One such example is msvc's +# exe generator that constructs both EXE and PDB targets. +type.register("TIME", ["time"]) +generators.register_composing("testing.time", [], ["TIME"]) + + +# The following code sets up actions for this module. 
It's pretty convoluted, +# but the basic points is that we most of actions are defined by Jam code +# contained in testing-aux.jam, which we load into Jam module named 'testing' + +def run_path_setup(target, sources, ps): + if __debug__: + from ..build.property_set import PropertySet + assert is_iterable_typed(target, basestring) or isinstance(target, basestring) + assert is_iterable_typed(sources, basestring) + assert isinstance(ps, PropertySet) + # For testing, we need to make sure that all dynamic libraries needed by the + # test are found. So, we collect all paths from dependency libraries (via + # xdll-path property) and add whatever explicit dll-path user has specified. + # The resulting paths are added to the environment on each test invocation. + dll_paths = ps.get('dll-path') + dll_paths.extend(ps.get('xdll-path')) + dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH")) + dll_paths = unique(dll_paths) + if dll_paths: + bjam.call("set-target-variable", target, "PATH_SETUP", + common.prepend_path_variable_command( + common.shared_library_path_variable(), dll_paths)) + +def capture_output_setup(target, sources, ps): + if __debug__: + from ..build.property_set import PropertySet + assert is_iterable_typed(target, basestring) + assert is_iterable_typed(sources, basestring) + assert isinstance(ps, PropertySet) + run_path_setup(target[0], sources, ps) + + if ps.get('preserve-test-targets') == ['off']: + bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1") + +get_manager().engine().register_bjam_action("testing.capture-output", + capture_output_setup) + + +path = os.path.dirname(__file__) +import b2.util.os_j +get_manager().projects().project_rules()._import_rule("testing", "os.name", + b2.util.os_j.name) +import b2.tools.common +get_manager().projects().project_rules()._import_rule("testing", "common.rm-command", + b2.tools.common.rm_command) +get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command", + b2.tools.common.file_creation_command) + +bjam.call("load", "testing", os.path.join(path, "testing-aux.jam")) + + +for name in ["expect-success", "expect-failure", "time"]: + get_manager().engine().register_bjam_action("testing." + name) + +get_manager().engine().register_bjam_action("testing.unit-test", + run_path_setup) + +if option.get("dump-tests", False, True): + build_system.add_pre_build_hook(dump_tests) diff --git a/src/boost/tools/build/src/tools/types/__init__.py b/src/boost/tools/build/src/tools/types/__init__.py new file mode 100644 index 000000000..9ee31d13a --- /dev/null +++ b/src/boost/tools/build/src/tools/types/__init__.py @@ -0,0 +1,19 @@ +__all__ = [ + 'asm', + 'cpp', + 'exe', + 'html', + 'lib', + 'obj', + 'preprocessed', + 'rsp', +] + +def register_all (): + for i in __all__: + m = __import__ (__name__ + '.' + i) + reg = i + '.register ()' + #exec (reg) + +# TODO: (PF) I thought these would be imported automatically. Anyone knows why they aren't? +register_all () diff --git a/src/boost/tools/build/src/tools/types/adoc.jam b/src/boost/tools/build/src/tools/types/adoc.jam new file mode 100644 index 000000000..278a32bd4 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/adoc.jam @@ -0,0 +1,26 @@ +#| +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. 
(See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +import scanner ; +import type ; + +type ASCIIDOC : adoc asciidoc ; + +class asciidoc-scanner : common-scanner +{ + rule pattern ( ) + { + return + "include::([^[]+)" + "image::([^[]+)" + "image:([^[]+)" + ; + } +} + +scanner.register asciidoc-scanner : include ; +type.set-scanner ASCIIDOC : asciidoc-scanner ; diff --git a/src/boost/tools/build/src/tools/types/asm.jam b/src/boost/tools/build/src/tools/types/asm.jam new file mode 100644 index 000000000..d2b233aca --- /dev/null +++ b/src/boost/tools/build/src/tools/types/asm.jam @@ -0,0 +1,4 @@ +# Copyright Craig Rodrigues 2005. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +type ASM : s S asm ; diff --git a/src/boost/tools/build/src/tools/types/asm.py b/src/boost/tools/build/src/tools/types/asm.py new file mode 100644 index 000000000..6317bfa75 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/asm.py @@ -0,0 +1,33 @@ +# Copyright Craig Rodrigues 2005. +# Copyright (c) 2008 Steven Watanabe +# +# Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +from b2.build import type as type_ +from b2.manager import get_manager +from b2.tools.cast import cast +from b2.util import bjam_signature + + +MANAGER = get_manager() +PROJECT_REGISTRY = MANAGER.projects() + +# maps project.name() + type to type +_project_types = {} + +type_.register_type('ASM', ['s', 'S', 'asm']) + + +@bjam_signature((['type_'], ['sources', '*'], ['name', '?'])) +def set_asm_type(type_, sources, name=''): + project = PROJECT_REGISTRY.current() + _project_types[project.name() + type_] = _project_types.get( + project.name() + type_, type_) + '_' + + name = name if name else _project_types[project.name() + type_] + type_ += '.asm' + return cast(name, type_.upper(), sources, [], [], []) + + +PROJECT_REGISTRY.add_rule("set-asm-type", set_asm_type) diff --git a/src/boost/tools/build/src/tools/types/cpp.jam b/src/boost/tools/build/src/tools/types/cpp.jam new file mode 100644 index 000000000..27046aa09 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/cpp.jam @@ -0,0 +1,93 @@ +# Copyright 2004 David Abrahams +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Copyright 2010 Rene Rivera +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import scanner ; +import type ; + + +class c-scanner : scanner +{ + import path ; + import regex ; + import scanner ; + import sequence ; + import toolset ; + import virtual-target ; + + rule __init__ ( includes * ) + { + scanner.__init__ ; + + # toolset.handle-flag-value is a bit of overkill, but it + # does correctly handle the topological sort of && separated + # include paths + self.includes = [ toolset.handle-flag-value : $(includes) ] ; + } + + rule pattern ( ) + { + return "#[ \t]*include[ \t]*(<(.*)>|\"(.*)\")" ; + } + + rule process ( target : matches * : binding ) + { + local angle = [ regex.transform $(matches) : "<(.*)>" ] ; + angle = [ sequence.transform path.native : $(angle) ] ; + local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ; + quoted = [ sequence.transform path.native : $(quoted) ] ; + + # CONSIDER: the new scoping rules seem to defeat "on target" variables. 
+ local g = [ on $(target) return $(HDRGRIST) ] ; + local b = [ NORMALIZE_PATH $(binding:D) ] ; + + # Attach binding of including file to included targets. When a target is + # directly created from a virtual target this extra information is + # unnecessary. But in other cases, it allows us to distinguish between + # two headers of the same name included from different places. We do not + # need this extra information for angle includes, since they should not + # depend on the including file (we can not get literal "." in the + # include path). + local g2 = $(g)"#"$(b) ; + + angle = $(angle:G=$(g)) ; + quoted = $(quoted:G=$(g2)) ; + + local all = $(angle) $(quoted) ; + + INCLUDES $(target) : $(all) ; + NOCARE $(all) ; + SEARCH on $(angle) = $(self.includes:G=) ; + SEARCH on $(quoted) = $(b) $(self.includes:G=) ; + + # Just propagate the current scanner to includes, in hope that includes + # do not change scanners. + scanner.propagate $(__name__) : $(all) : $(target) ; + + ISFILE $(all) ; + } +} + +scanner.register c-scanner : include ; + +type.register CPP : cpp cxx cc ; +type.register H : h ; +type.register HPP : hpp : H ; +type.register IPP : ipp : HPP ; +type.register C : c ; + +# It most cases where a CPP file or a H file is a source of some action, we +# should rebuild the result if any of files included by CPP/H are changed. One +# case when this is not needed is installation, which is handled specifically. +type.set-scanner CPP : c-scanner ; +type.set-scanner C : c-scanner ; +# One case where scanning of H/HPP files is necessary is PCH generation -- if +# any header included by HPP being precompiled changes, we need to recompile the +# header. +type.set-scanner H : c-scanner ; +type.set-scanner HPP : c-scanner ; +# Private implementation files need scanning too. +type.set-scanner IPP : c-scanner ; diff --git a/src/boost/tools/build/src/tools/types/cpp.py b/src/boost/tools/build/src/tools/types/cpp.py new file mode 100644 index 000000000..5c5290394 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/cpp.py @@ -0,0 +1,11 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +from b2.build import type as type_ + + +type_.register_type('CPP', ['cpp', 'cxx', 'cc']) +type_.register_type('H', ['h']) +type_.register_type('HPP', ['hpp'], 'H') +type_.register_type('IPP', ['ipp'], 'HPP') +type_.register_type('C', ['c']) diff --git a/src/boost/tools/build/src/tools/types/css.jam b/src/boost/tools/build/src/tools/types/css.jam new file mode 100644 index 000000000..e78f3b8f0 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/css.jam @@ -0,0 +1,10 @@ +#| +Copyright 2017 Dmitry Arkhipov +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +import type ; + +type CSS : css ; diff --git a/src/boost/tools/build/src/tools/types/docbook.jam b/src/boost/tools/build/src/tools/types/docbook.jam new file mode 100644 index 000000000..479e964c0 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/docbook.jam @@ -0,0 +1,10 @@ +#| +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. 
(See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +import xml ; # must import to have registered XML type + +type DOCBOOK : docbook : XML ; diff --git a/src/boost/tools/build/src/tools/types/exe.jam b/src/boost/tools/build/src/tools/types/exe.jam new file mode 100644 index 000000000..6b068cacb --- /dev/null +++ b/src/boost/tools/build/src/tools/types/exe.jam @@ -0,0 +1,9 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import type ; + +type.register EXE ; +type.set-generated-target-suffix EXE : windows : "exe" ; +type.set-generated-target-suffix EXE : cygwin : "exe" ; diff --git a/src/boost/tools/build/src/tools/types/exe.py b/src/boost/tools/build/src/tools/types/exe.py new file mode 100644 index 000000000..ac83ea856 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/exe.py @@ -0,0 +1,11 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +from b2.build import type + +def register (): + type.register_type ('EXE', ['exe'], None, ['NT', 'CYGWIN']) + type.register_type ('EXE', [], None, []) + +register () diff --git a/src/boost/tools/build/src/tools/types/html.jam b/src/boost/tools/build/src/tools/types/html.jam new file mode 100644 index 000000000..04e6cab6e --- /dev/null +++ b/src/boost/tools/build/src/tools/types/html.jam @@ -0,0 +1,4 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +type HTML : html ; diff --git a/src/boost/tools/build/src/tools/types/html.py b/src/boost/tools/build/src/tools/types/html.py new file mode 100644 index 000000000..0393d322b --- /dev/null +++ b/src/boost/tools/build/src/tools/types/html.py @@ -0,0 +1,10 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +from b2.build import type + +def register (): + type.register_type ('HTML', ['html']) + +register () diff --git a/src/boost/tools/build/src/tools/types/lib.jam b/src/boost/tools/build/src/tools/types/lib.jam new file mode 100644 index 000000000..dc16aba4b --- /dev/null +++ b/src/boost/tools/build/src/tools/types/lib.jam @@ -0,0 +1,74 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import type ; # for set-generated-target-suffix +import os ; + +# The following naming scheme is used for libraries. +# +# On *nix: +# libxxx.a static library +# libxxx.so shared library +# +# On windows (msvc) +# libxxx.lib static library +# xxx.dll DLL +# xxx.lib import library +# +# On windows (mingw): +# libxxx.a static library +# libxxx.dll DLL +# libxxx.dll.a import library +# +# On cygwin i.e. cygwin +# libxxx.a static library +# cygxxx.dll DLL +# libxxx.dll.a import library +# + +type.register LIB ; + +# FIXME: should not register both extensions on both platforms. 
+type.register STATIC_LIB : a lib : LIB ; + +# The 'lib' prefix is used everywhere +type.set-generated-target-prefix STATIC_LIB : : lib ; + +# Use '.lib' suffix for windows +type.set-generated-target-suffix STATIC_LIB : windows : lib ; + +# Except with gcc. +type.set-generated-target-suffix STATIC_LIB : gcc windows : a ; + +# Use xxx.lib for import libs +type IMPORT_LIB : : STATIC_LIB ; +type.set-generated-target-prefix IMPORT_LIB : : "" ; +type.set-generated-target-suffix IMPORT_LIB : : lib ; + +# Except with gcc (mingw or cygwin), where use libxxx.dll.a +type.set-generated-target-prefix IMPORT_LIB : gcc : lib ; +type.set-generated-target-suffix IMPORT_LIB : gcc : dll.a ; + +type.register SHARED_LIB : so dll dylib : LIB ; + +# Both mingw and cygwin use libxxx.dll naming scheme. +# On Linux, use "lib" prefix +type.set-generated-target-prefix SHARED_LIB : : lib ; +# But don't use it on windows +type.set-generated-target-prefix SHARED_LIB : windows : "" ; +# But use it again on mingw +type.set-generated-target-prefix SHARED_LIB : gcc windows : lib ; +# And use 'cyg' on cygwin +type.set-generated-target-prefix SHARED_LIB : cygwin : cyg ; + + +type.set-generated-target-suffix SHARED_LIB : windows : dll ; +type.set-generated-target-suffix SHARED_LIB : cygwin : dll ; +type.set-generated-target-suffix SHARED_LIB : darwin : dylib ; + +type SEARCHED_LIB : : LIB ; +# This is needed so that when we create a target of SEARCHED_LIB +# type, there's no prefix or suffix automatically added. +type.set-generated-target-prefix SEARCHED_LIB : : "" ; +type.set-generated-target-suffix SEARCHED_LIB : : "" ; diff --git a/src/boost/tools/build/src/tools/types/lib.py b/src/boost/tools/build/src/tools/types/lib.py new file mode 100644 index 000000000..6cc4dd1a2 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/lib.py @@ -0,0 +1,77 @@ +# Status: ported +# Base revision: 64456. +# Copyright David Abrahams 2004. +# Copyright Vladimir Prus 2010. +# Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import b2.build.type as type + +# The following naming scheme is used for libraries. +# +# On *nix: +# libxxx.a static library +# libxxx.so shared library +# +# On windows (msvc) +# libxxx.lib static library +# xxx.dll DLL +# xxx.lib import library +# +# On windows (mingw): +# libxxx.a static library +# libxxx.dll DLL +# libxxx.dll.a import library +# +# On cygwin i.e. cygwin +# libxxx.a static library +# cygxxx.dll DLL +# libxxx.dll.a import library +# + +type.register('LIB') + +# FIXME: should not register both extensions on both platforms. +type.register('STATIC_LIB', ['a', 'lib'], 'LIB') + +# The 'lib' prefix is used everywhere +type.set_generated_target_prefix('STATIC_LIB', [], 'lib') + +# Use '.lib' suffix for windows +type.set_generated_target_suffix('STATIC_LIB', ['windows'], 'lib') + +# Except with gcc. +type.set_generated_target_suffix('STATIC_LIB', ['gcc', 'windows'], 'a') + +# Use xxx.lib for import libs +type.register('IMPORT_LIB', [], 'STATIC_LIB') +type.set_generated_target_prefix('IMPORT_LIB', [], '') +type.set_generated_target_suffix('IMPORT_LIB', [], 'lib') + +# Except with gcc (mingw or cygwin), where use libxxx.dll.a +type.set_generated_target_prefix('IMPORT_LIB', ['gcc'], 'lib') +type.set_generated_target_suffix('IMPORT_LIB', ['gcc'], 'dll.a') + +type.register('SHARED_LIB', ['so', 'dll', 'dylib'], 'LIB') + +# Both mingw and cygwin use libxxx.dll naming scheme. 
+# On Linux, use "lib" prefix +type.set_generated_target_prefix('SHARED_LIB', [], 'lib') +# But don't use it on windows +type.set_generated_target_prefix('SHARED_LIB', ['windows'], '') +# But use it again on mingw +type.set_generated_target_prefix('SHARED_LIB', ['gcc', 'windows'], 'lib') +# And use 'cyg' on cygwin +type.set_generated_target_prefix('SHARED_LIB', ['cygwin'], 'cyg') + + +type.set_generated_target_suffix('SHARED_LIB', ['windows'], 'dll') +type.set_generated_target_suffix('SHARED_LIB', ['cygwin'], 'dll') +type.set_generated_target_suffix('SHARED_LIB', ['darwin'], 'dylib') + +type.register('SEARCHED_LIB', [], 'LIB') +# This is needed so that when we create a target of SEARCHED_LIB +# type, there's no prefix or suffix automatically added. +type.set_generated_target_prefix('SEARCHED_LIB', [], '') +type.set_generated_target_suffix('SEARCHED_LIB', [], '') diff --git a/src/boost/tools/build/src/tools/types/man.jam b/src/boost/tools/build/src/tools/types/man.jam new file mode 100644 index 000000000..7fed263d5 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/man.jam @@ -0,0 +1,8 @@ +#| +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +type MANPAGE : man 1M n p x ; diff --git a/src/boost/tools/build/src/tools/types/markdown.jam b/src/boost/tools/build/src/tools/types/markdown.jam new file mode 100644 index 000000000..1ba18bb04 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/markdown.jam @@ -0,0 +1,4 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +type MARKDOWN : md markdown ; diff --git a/src/boost/tools/build/src/tools/types/markdown.py b/src/boost/tools/build/src/tools/types/markdown.py new file mode 100644 index 000000000..0d271a6d3 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/markdown.py @@ -0,0 +1,10 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +from b2.build import type + +def register (): + type.register_type ('MARKDOWN', ['markdown', 'md']) + +register () diff --git a/src/boost/tools/build/src/tools/types/obj.jam b/src/boost/tools/build/src/tools/types/obj.jam new file mode 100644 index 000000000..d369a936a --- /dev/null +++ b/src/boost/tools/build/src/tools/types/obj.jam @@ -0,0 +1,9 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import type ; + +type.register OBJ : o obj ; +type.set-generated-target-suffix OBJ : windows : obj ; +type.set-generated-target-suffix OBJ : cygwin : obj ; diff --git a/src/boost/tools/build/src/tools/types/obj.py b/src/boost/tools/build/src/tools/types/obj.py new file mode 100644 index 000000000..6aa73c2ad --- /dev/null +++ b/src/boost/tools/build/src/tools/types/obj.py @@ -0,0 +1,11 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. 
(See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +from b2.build import type + +def register (): + type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN']) + type.register_type ('OBJ', ['o']) + +register () diff --git a/src/boost/tools/build/src/tools/types/objc.jam b/src/boost/tools/build/src/tools/types/objc.jam new file mode 100644 index 000000000..2762d9791 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/objc.jam @@ -0,0 +1,26 @@ +# Copyright Rene Rivera 2008, 2010. +# Distributed under the Boost Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +import type ; +import scanner ; +import types/cpp ; + +class objc-scanner : c-scanner +{ + rule __init__ ( includes * ) + { + c-scanner.__init__ $(includes) ; + } + + rule pattern ( ) + { + return "#[ \t]*include|import[ ]*(<(.*)>|\"(.*)\")" ; + } +} + +scanner.register objc-scanner : include ; + +type.register OBJECTIVE_C : m ; +type.register OBJECTIVE_CPP : mm ; +type.set-scanner OBJECTIVE_C : objc-scanner ; +type.set-scanner OBJECTIVE_CPP : objc-scanner ; diff --git a/src/boost/tools/build/src/tools/types/pdf.jam b/src/boost/tools/build/src/tools/types/pdf.jam new file mode 100644 index 000000000..ae01d6f0e --- /dev/null +++ b/src/boost/tools/build/src/tools/types/pdf.jam @@ -0,0 +1,8 @@ +#| +Copyright 2017 Rene Rivera +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +type PDF : pdf ; diff --git a/src/boost/tools/build/src/tools/types/preprocessed.jam b/src/boost/tools/build/src/tools/types/preprocessed.jam new file mode 100644 index 000000000..514f0e8a3 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/preprocessed.jam @@ -0,0 +1,10 @@ +# Copyright Steven Watanabe 2011 +# Distributed under the Boost Software License Version 1.0. (See +# accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import type ; +import cpp ; # must import to have registered C CPP types + +type.register PREPROCESSED_C : i : C ; +type.register PREPROCESSED_CPP : ii : CPP ; diff --git a/src/boost/tools/build/src/tools/types/preprocessed.py b/src/boost/tools/build/src/tools/types/preprocessed.py new file mode 100644 index 000000000..058fe2a5b --- /dev/null +++ b/src/boost/tools/build/src/tools/types/preprocessed.py @@ -0,0 +1,11 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +from b2.build import type + +def register (): + type.register_type('PREPROCESSED_C', ['i'], 'C') + type.register_type('PREPROCESSED_CPP', ['ii'], 'CPP') + +register () diff --git a/src/boost/tools/build/src/tools/types/qt.jam b/src/boost/tools/build/src/tools/types/qt.jam new file mode 100644 index 000000000..60665a0c3 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/qt.jam @@ -0,0 +1,14 @@ +# Copyright Vladimir Prus 2005. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +import cpp ; # must import to have registered H type + +type UI : ui ; +type QRC : qrc ; +type MOCCABLE_CPP ; +type MOCCABLE_H ; +type MOCCABLE5_CPP ; +type MOCCABLE5_H ; +# Result of running moc. 
+type MOC : moc : H ; diff --git a/src/boost/tools/build/src/tools/types/register.jam b/src/boost/tools/build/src/tools/types/register.jam new file mode 100644 index 000000000..51a687546 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/register.jam @@ -0,0 +1,39 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module's job is to automatically import all the type +# registration modules in its directory. +import type os path modules ; + +# Register the given type on the specified OSes, or on remaining OSes +# if os is not specified. This rule is injected into each of the type +# modules for the sake of convenience. +local rule type ( type : suffixes * : base-type ? : os * ) +{ + if ! [ type.registered $(type) ] + { + if ( ! $(os) ) || [ os.name ] in $(os) + { + type.register $(type) : $(suffixes) : $(base-type) ; + } + } +} + +.this-module's-file = [ modules.binding $(__name__) ] ; +.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ; +.sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ; +.sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ; + +# A loop over all modules in this directory +for m in $(.sibling-modules) +{ + m = [ path.basename $(m) ] ; + m = types/$(m) ; + + # Inject the type rule into the new module + IMPORT $(__name__) : type : $(m:B) : type ; + import $(m) ; +} + + diff --git a/src/boost/tools/build/src/tools/types/rsp.jam b/src/boost/tools/build/src/tools/types/rsp.jam new file mode 100644 index 000000000..ac9a303a9 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/rsp.jam @@ -0,0 +1,4 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) +type RSP : rsp ; diff --git a/src/boost/tools/build/src/tools/types/rsp.py b/src/boost/tools/build/src/tools/types/rsp.py new file mode 100644 index 000000000..228070aa2 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/rsp.py @@ -0,0 +1,10 @@ +# Copyright David Abrahams 2004. Distributed under the Boost +# Software License, Version 1.0. (See accompanying +# file LICENSE.txt or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +from b2.build import type + +def register (): + type.register_type ('RSP', ['rsp']) + +register () diff --git a/src/boost/tools/build/src/tools/types/sass-type.jam b/src/boost/tools/build/src/tools/types/sass-type.jam new file mode 100644 index 000000000..0dd5483dc --- /dev/null +++ b/src/boost/tools/build/src/tools/types/sass-type.jam @@ -0,0 +1,49 @@ +#| +Copyright 2017 Dmitry Arkhipov +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +import scanner ; +import type ; + +class sass-scanner : common-scanner +{ + import sequence ; + + local rule import-to-file ( import ) + { + if ! 
( $(import:S) in .sass .scss ) + { + return $(import).sass $(import).scss ; + } + else + { + return $(import) ; + } + } + + rule pattern ( ) + { + return + "@import[ \t]+\"([^\"]+)\"" + "@import[ \t]+\'([^\']+)\'" + ; + } + + rule process ( target : matches * : binding ) + { + common-scanner.process + $(target) + : [ sequence.transform import-to-file : $(matches) ] + : $(binding) + ; + } +} + +scanner.register sass-scanner : include ; + +type SASS : sass scss ; + +type.set-scanner SASS : sass-scanner ; diff --git a/src/boost/tools/build/src/tools/types/xml.jam b/src/boost/tools/build/src/tools/types/xml.jam new file mode 100644 index 000000000..cb44395e1 --- /dev/null +++ b/src/boost/tools/build/src/tools/types/xml.jam @@ -0,0 +1,49 @@ +#| +Copyright 2017 Rene Rivera +Copyright 2003, 2004, 2005 Dave Abrahams +Copyright 2003, 2004, 2005 Douglas Gregor +Copyright 2005, 2006, 2007 Rene Rivera +Copyright 2003, 2004, 2005 Vladimir Prus +Distributed under the Boost Software License, Version 1.0. (See +accompanying file LICENSE.txt or copy at +https://www.bfgroup.xyz/b2/LICENSE.txt) +|# + +import scanner ; +import type ; + +type.register XML : xml ; + +# XInclude scanner. Mostly stolen from c-scanner. :) +# Note that this assumes an "xi" prefix for XIncludes. This is not always the +# case for XML documents, but we assume it is true for anything we encounter. +# +class xinclude-scanner : scanner +{ + import scanner ; + + rule __init__ ( includes * ) + { + scanner.__init__ ; + self.includes = $(includes) ; + } + + rule pattern ( ) + { + return "xi:include[ ]*href=\"([^\"]*)\"" ; + } + + rule process ( target : matches * : binding ) + { + local target_path = [ NORMALIZE_PATH $(binding:D) ] ; + + NOCARE $(matches) ; + INCLUDES $(target) : $(matches) ; + SEARCH on $(matches) = $(target_path) $(self.includes:G=) ; + + scanner.propagate $(__name__) : $(matches) : $(target) ; + } +} + +scanner.register xinclude-scanner : "xsl:path" ; +type.set-scanner XML : xinclude-scanner ; diff --git a/src/boost/tools/build/src/tools/unix.jam b/src/boost/tools/build/src/tools/unix.jam new file mode 100644 index 000000000..c86f089de --- /dev/null +++ b/src/boost/tools/build/src/tools/unix.jam @@ -0,0 +1,223 @@ +# Copyright (c) 2004 Vladimir Prus. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This file implements linking semantic common to all unixes. On unix, static +# libraries must be specified in a fixed order on the linker command line. Generators +# declared there store information about the order and use it property. + +import feature ; +import "class" : new ; +import generators ; +import type ; +import set ; +import order ; +import builtin ; + +class unix-linking-generator : linking-generator +{ + import property-set ; + import type ; + import unix ; + + rule __init__ ( id + composing ? : # Specify if generator is composing. The generator will be + # composing if non-empty string is passed, or parameter is + # not given. To make generator non-composing, pass empty + # string ("") + source-types + : target-types + : + requirements * ) + { + composing ?= true ; + generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) : + $(requirements) ; + } + + rule run ( project name ? 
: property-set : sources + ) + { + local result = [ linking-generator.run $(project) $(name) : $(property-set) + : $(sources) ] ; + + unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; + + return $(result) ; + } + + rule generated-targets ( sources + : property-set : project name ? ) + { + local sources2 ; + local libraries ; + for local l in $(sources) + { + if [ type.is-derived [ $(l).type ] LIB ] + { + libraries += $(l) ; + } + else + { + sources2 += $(l) ; + } + } + + sources = $(sources2) [ unix.order-libraries $(libraries) ] ; + + return [ linking-generator.generated-targets $(sources) : $(property-set) + : $(project) $(name) ] ; + } + +} + +class unix-archive-generator : archive-generator +{ + import unix ; + + rule __init__ ( id composing ? : source-types + : target-types + : + requirements * ) + { + composing ?= true ; + archive-generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) : + $(requirements) ; + } + + rule run ( project name ? : property-set : sources + ) + { + local result = [ archive-generator.run $(project) $(name) : $(property-set) + : $(sources) ] ; + + unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; + + return $(result) ; + + } +} + +class unix-searched-lib-generator : searched-lib-generator +{ + import unix ; + rule __init__ ( * : * ) + { + generator.__init__ + $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule optional-properties ( ) + { + return $(self.requirements) ; + } + + rule run ( project name ? : property-set : sources * ) + { + local result = [ searched-lib-generator.run $(project) $(name) + : $(property-set) : $(sources) ] ; + + unix.set-library-order $(sources) : $(property-set) : $(result[2-]) ; + + return $(result) ; + } +} + +class unix-prebuilt-lib-generator : generator +{ + import unix ; + rule __init__ ( * : * ) + { + generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ; + } + + rule run ( project name ? : property-set : sources * ) + { + local f = [ $(property-set).get ] ; + unix.set-library-order-aux $(f) : $(sources) ; + return $(f) $(sources) ; + } +} + +generators.register + [ new unix-prebuilt-lib-generator unix.prebuilt : : LIB + : unix ] ; + +generators.override unix.prebuilt : builtin.lib-generator ; + + +# Declare generators +generators.register [ new unix-linking-generator unix.link : LIB OBJ : EXE + : unix ] ; + +generators.register [ new unix-archive-generator unix.archive : OBJ : STATIC_LIB + : unix ] ; + +generators.register [ new unix-linking-generator unix.link.dll : LIB OBJ : SHARED_LIB + : unix ] ; + +generators.register [ new unix-searched-lib-generator + unix.searched-lib-generator : : SEARCHED_LIB : unix ] ; + + +# The derived toolset must specify their own actions. 
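The empty action bodies that follow are deliberate placeholders: `unix.jam` only encodes the library-ordering logic, and a concrete toolset derived from it supplies the real command lines. As a purely illustrative sketch (the `mytool`, `my-cc` and `my-ar` names are hypothetical), such a derived module could look like the following; the real examples are the `vacpp` and `xlcpp` modules added later in this patch.

----
# mytool.jam -- hypothetical toolset derived from unix (sketch only)
import feature ;
import toolset ;

feature.extend toolset : mytool ;
toolset.inherit mytool : unix ;   # reuse the unix generators and library ordering

# Real command lines replace the empty placeholders declared by unix.jam.
actions link
{
    my-cc -o "$(<)" "$(>)"
}

actions archive
{
    my-ar rc "$(<)" "$(>)"
}
----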
+actions link { +} + +actions link.dll { +} + +actions archive { +} + +actions searched-lib-generator { +} + +actions prebuilt { +} + + + + + +.order = [ new order ] ; + +rule set-library-order-aux ( from * : to * ) +{ + for local f in $(from) + { + for local t in $(to) + { + if $(f) != $(t) + { + $(.order).add-pair $(f) $(t) ; + } + } + } +} + +rule set-library-order ( sources * : property-set : result * ) +{ + local used-libraries ; + local deps = [ $(property-set).dependency ] ; + for local l in $(sources) $(deps:G=) + { + if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ] + { + used-libraries += $(l) ; + } + } + + local created-libraries ; + for local l in $(result) + { + if [ $(l).type ] && [ type.is-derived [ $(l).type ] LIB ] + { + created-libraries += $(l) ; + } + } + + created-libraries = [ set.difference $(created-libraries) : $(used-libraries) ] ; + set-library-order-aux $(created-libraries) : $(used-libraries) ; +} + +rule order-libraries ( libraries * ) +{ + local r = [ $(.order).order $(libraries) ] ; + return $(r) ; +} diff --git a/src/boost/tools/build/src/tools/unix.py b/src/boost/tools/build/src/tools/unix.py new file mode 100644 index 000000000..307bda247 --- /dev/null +++ b/src/boost/tools/build/src/tools/unix.py @@ -0,0 +1,155 @@ +# Copyright (c) 2004 Vladimir Prus. +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +""" This file implements linking semantics common to all unixes. On unix, static + libraries must be specified in a fixed order on the linker command line. Generators + declared there store information about the order and use it properly. +""" + +import builtin +from b2.build import generators, type +from b2.util.utility import * +from b2.util import set, sequence + +class UnixLinkingGenerator (builtin.LinkingGenerator): + + def __init__ (self, id, composing, source_types, target_types, requirements): + builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements) + + def run (self, project, name, prop_set, sources): + result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources) + if result: + set_library_order (project.manager (), sources, prop_set, result [1]) + + return result + + def generated_targets (self, sources, prop_set, project, name): + sources2 = [] + libraries = [] + for l in sources: + if type.is_derived (l.type (), 'LIB'): + libraries.append (l) + + else: + sources2.append (l) + + sources = sources2 + order_libraries (libraries) + + return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name) + + +class UnixArchiveGenerator (builtin.ArchiveGenerator): + def __init__ (self, id, composing, source_types, target_types_and_names, requirements): + builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) + + def run (self, project, name, prop_set, sources): + from b2.build.property_set import PropertySet + result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources) + if result and isinstance(result[0], PropertySet): + _, targets = result + else: + targets = result + set_library_order(project.manager(), sources, prop_set, targets) + return result + +class UnixSearchedLibGenerator (builtin.SearchedLibGenerator): + + def __init__ (self): + builtin.SearchedLibGenerator.__init__ (self) + + def optional_properties (self): + return self.requirements () + + def run 
(self, project, name, prop_set, sources): + result = SearchedLibGenerator.run (project, name, prop_set, sources) + + set_library_order (sources, prop_set, result) + + return result + +class UnixPrebuiltLibGenerator (generators.Generator): + def __init__ (self, id, composing, source_types, target_types_and_names, requirements): + generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) + + def run (self, project, name, prop_set, sources): + f = prop_set.get ('') + set_library_order_aux (f, sources) + return f + sources + +### # The derived toolset must specify their own rules and actions. +# FIXME: restore? +# action.register ('unix.prebuilt', None, None) + + +generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['', 'unix'])) + + + + + +### # Declare generators +### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE +### : unix ] ; +generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['unix'])) + +### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB +### : unix ] ; +### +### generators.register [ new UnixSearchedLibGenerator +### unix.SearchedLibGenerator : : SEARCHED_LIB : unix ] ; +### +### +### # The derived toolset must specify their own actions. +### actions link { +### } +### +### actions link.dll { +### } + +def unix_archive (manager, targets, sources, properties): + pass + +# FIXME: restore? +#action.register ('unix.archive', unix_archive, ['']) + +### actions searched-lib-generator { +### } +### +### actions prebuilt { +### } + + +from b2.util.order import Order +__order = Order () + +def set_library_order_aux (from_libs, to_libs): + for f in from_libs: + for t in to_libs: + if f != t: + __order.add_pair (f, t) + +def set_library_order (manager, sources, prop_set, result): + used_libraries = [] + deps = prop_set.dependency () + + sources.extend(d.value for d in deps) + sources = sequence.unique(sources) + + for l in sources: + if l.type () and type.is_derived (l.type (), 'LIB'): + used_libraries.append (l) + + created_libraries = [] + for l in result: + if l.type () and type.is_derived (l.type (), 'LIB'): + created_libraries.append (l) + + created_libraries = set.difference (created_libraries, used_libraries) + set_library_order_aux (created_libraries, used_libraries) + +def order_libraries (libraries): + return __order.order (libraries) + diff --git a/src/boost/tools/build/src/tools/vacpp.jam b/src/boost/tools/build/src/tools/vacpp.jam new file mode 100644 index 000000000..b7d2e2de7 --- /dev/null +++ b/src/boost/tools/build/src/tools/vacpp.jam @@ -0,0 +1,173 @@ +# Copyright Vladimir Prus 2004. +# Copyright Toon Knapen 2004. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +#| tag::doc[] + +[[bbv2.reference.tools.compiler.vacpp]] += IBM Visual Age + +The `vacpp` module supports the http://www.ibm.com/software/ad/vacpp[IBM +Visual Age] C++ Compiler, for the AIX operating system. Versions 7.1 and +8.0 are known to work. + +The module is initialized using the following syntax: + +---- +using vacpp ; +---- + +The module does not accept any initialization options. The compiler +should be installed in the `/usr/vacpp/bin` directory. + +Later versions of Visual Age are known as XL C/C++. They were not tested +with the the `vacpp` module. 
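A purely illustrative example of use (the project and file names are hypothetical): the toolset needs no setup beyond the `using` statement above, and is then selected on the `b2` command line with `toolset=vacpp`.

----
# jamroot.jam
exe hello : hello.cpp ;
----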
+ +|# # end::doc[] + +# +# B2 V2 toolset for the IBM XL C++ compiler +# + +import toolset : flags ; +import feature ; +import common ; +import generators ; +import os ; + +feature.extend toolset : vacpp ; +toolset.inherit vacpp : unix ; +generators.override vacpp.prebuilt : builtin.prebuilt ; +generators.override vacpp.searched-lib-generator : searched-lib-generator ; + +# Configure the vacpp toolset +rule init ( version ? : command * : options * ) +{ + local condition = [ + common.check-init-parameters vacpp : version $(version) ] ; + + command = [ common.get-invocation-command vacpp : xlC + : $(command) : "/usr/vacpp/bin/xlC" ] ; + + common.handle-options vacpp : $(condition) : $(command) : $(options) ; +} + +# Declare generators +generators.register-c-compiler vacpp.compile.c : C : OBJ : vacpp ; +generators.register-c-compiler vacpp.compile.c++ : CPP : OBJ : vacpp ; + +# Allow C++ style comments in C files +flags vacpp CFLAGS : -qcpluscmt ; + +# Declare flags +flags vacpp CFLAGS off : -qNOOPTimize ; +flags vacpp CFLAGS speed : -O3 -qstrict ; +flags vacpp CFLAGS space : -O2 -qcompact ; + +# Discretionary inlining (not recommended) +flags vacpp CFLAGS off : -qnoinline ; +flags vacpp CFLAGS on : -qinline ; +#flags vacpp CFLAGS full : -qinline ; +flags vacpp CFLAGS full : ; + +# Exception handling +flags vacpp C++FLAGS off : -qnoeh ; +flags vacpp C++FLAGS on : -qeh ; + +# Run-time Type Identification +flags vacpp C++FLAGS off : -qnortti ; +flags vacpp C++FLAGS on : -qrtti ; + +# Enable 64-bit memory addressing model +flags vacpp CFLAGS 64 : -q64 ; +flags vacpp LINKFLAGS 64 : -q64 ; +flags vacpp ARFLAGS aix/64 : -X 64 ; + +# Use absolute path when generating debug information +flags vacpp CFLAGS on : -g -qfullpath ; +flags vacpp LINKFLAGS on : -g -qfullpath ; +flags vacpp LINKFLAGS off : -s ; + +if [ os.name ] = AIX +{ + flags vacpp.compile C++FLAGS : -qfuncsect ; + + # The -bnoipath strips the prepending (relative) path of libraries from + # the loader section in the target library or executable. Hence, during + # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded + # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without + # this option, the prepending (relative) path + library name is + # hard-coded in the loader section, causing *only* this path to be + # searched during load-time. Note that the AIX linker does not have an + # -soname equivalent, this is as close as it gets. + # + # The above options are definitely for AIX 5.x, and most likely also for + # AIX 4.x and AIX 6.x. For details about the AIX linker see: + # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf + # + flags vacpp.link LINKFLAGS shared : -bnoipath ; + + # Run-time linking + flags vacpp.link EXE-LINKFLAGS shared : -brtl ; +} +else +{ + # Linux PPC + flags vacpp.compile CFLAGS shared : -qpic=large ; + flags vacpp FINDLIBS : rt ; +} + +# Profiling +flags vacpp CFLAGS on : -pg ; +flags vacpp LINKFLAGS on : -pg ; + +flags vacpp.compile OPTIONS ; +flags vacpp.compile.c++ OPTIONS ; +flags vacpp DEFINES ; +flags vacpp UNDEFS ; +flags vacpp HDRS ; +flags vacpp STDHDRS ; +flags vacpp.link OPTIONS ; +flags vacpp ARFLAGS ; + +flags vacpp LIBPATH ; +flags vacpp NEEDLIBS ; +flags vacpp FINDLIBS ; +flags vacpp FINDLIBS ; + +# Select the compiler name according to the threading model. 
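The flag declarations immediately below map the `threading` feature to the appropriate compiler driver (`xlc`/`xlC` for single-threaded builds, `xlc_r`/`xlC_r` for multi-threaded ones), so thread-safe compilation is requested purely through properties, either as `b2 toolset=vacpp threading=multi` or as a target requirement. A hedged sketch with a hypothetical target:

----
# jamroot.jam (illustrative)
exe server : server.cpp : <threading>multi ;   # linked with xlC_r under vacpp
----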
+flags vacpp VA_C_COMPILER single : xlc ; +flags vacpp VA_C_COMPILER multi : xlc_r ; +flags vacpp VA_CXX_COMPILER single : xlC ; +flags vacpp VA_CXX_COMPILER multi : xlC_r ; + +SPACE = " " ; + +flags vacpp.link.dll HAVE_SONAME linux : "" ; + +actions vacpp.link bind NEEDLIBS +{ + $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) +} + +actions vacpp.link.dll bind NEEDLIBS +{ + xlC_r -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) +} + +actions vacpp.compile.c +{ + $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" +} + +actions vacpp.compile.c++ +{ + $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" +} + +actions updated together piecemeal vacpp.archive +{ + ar $(ARFLAGS) ru "$(<)" "$(>)" +} diff --git a/src/boost/tools/build/src/tools/vmsdecc.jam b/src/boost/tools/build/src/tools/vmsdecc.jam new file mode 100644 index 000000000..a4007f722 --- /dev/null +++ b/src/boost/tools/build/src/tools/vmsdecc.jam @@ -0,0 +1,580 @@ +# Copyright (c) 2015 Artur Shepilko +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Implements OpenVMS-based HP DECC/C++ toolset. +# Relies on POSIX-style path handling bjam/B2 implementation for VMS. + +import "class" : new ; +import property ; +import generators ; +import os ; +import toolset : flags ; +import feature ; +import type ; +import common ; +import unix ; +import path ; + + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +feature.extend toolset : vmsdecc ; + +toolset.inherit-generators vmsdecc : unix : unix.link unix.link.dll ; +toolset.inherit-flags vmsdecc : unix ; +toolset.inherit-rules vmsdecc : unix ; + +generators.override vmsdecc.archive-generator : builtin.archive-generator ; +generators.override vmsdecc.prebuilt : builtin.prebuilt ; +generators.override vmsdecc.searched-lib-generator : searched-lib-generator ; + +type.set-generated-target-suffix EXE : vmsdecc vms : exe ; +type.set-generated-target-suffix OBJ : vmsdecc vms : obj ; +type.set-generated-target-suffix PREPROCESSED_C : vmsdecc vms : i ; +type.set-generated-target-suffix PREPROCESSED_CPP : vmsdecc vms : ixx ; +type.set-generated-target-suffix STATIC_LIB : vmsdecc vms : olb ; ## xxx.olb + +type.register-suffixes exe : SHARED_LIB ; +type.set-generated-target-prefix SHARED_LIB : vmsdecc vms : shr ; ## shrxxx.exe +type.set-generated-target-suffix SHARED_LIB : vmsdecc vms : exe ; ## shrxxx.exe + +.OBJ = .obj ; ## suffix +.nl = " +" ; + +rule init ( version ? 
: command * : options * ) +{ + local argv = [ modules.peek : ARGV ] ; + + local condition = [ + common.check-init-parameters vmsdecc : version $(version) ] ; + + # CC and CXX are CLI commands, so no need to search for the executables + command = CXX ; + toolset.flags vmsdecc .CXX $(condition) : CXX ; + common.handle-options vmsdecc : $(condition) : $(command) : $(options) ; + + local command_c = $(command[1--2]) $(command[-1]:B=CC) ; + toolset.flags vmsdecc .CC $(condition) : $(command_c) ; + + local linker = [ feature.get-values : $(options) ] ; + linker ?= CXXLINK ; + toolset.flags vmsdecc.link .LD $(condition) : $(linker) ; + if $(.debug-configuration) + { + ECHO notice\: using linker "::" $(condition) "::" $(linker[1]) ; + } + + local archiver = LIB ; + toolset.flags vmsdecc.archive .AR $(condition) : $(archiver) ; + + local b2 = $(argv[1]) ; + toolset.flags vmsdecc .B2 $(condition) : $(b2) ; +} + +# Declare generators +generators.register-c-compiler vmsdecc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : vmsdecc ; +generators.register-c-compiler vmsdecc.compile.c.preprocess : C : PREPROCESSED_C : vmsdecc ; +generators.register-c-compiler vmsdecc.compile.c : C : OBJ : vmsdecc ; +generators.register-c-compiler vmsdecc.compile.c++ : CPP : OBJ : vmsdecc ; + +# Declare flags and actions for compilation +flags vmsdecc.compile OPTIONS on : /DEBUG ; +flags vmsdecc.compile OPTIONS on : /DEBUG ; ## needs PCA link options +flags vmsdecc.compile OPTIONS off : /NOOPT ; +flags vmsdecc.compile OPTIONS speed : /OPT=INLINE=SPEED/OPT=NOINLINE ; +flags vmsdecc.compile OPTIONS space : /OPT=INLINE=SIZE/OPT=NOINLINE ; +flags vmsdecc.compile OPTIONS off : /NOWARN ; +flags vmsdecc.compile OPTIONS on : /WARN ; +flags vmsdecc.compile OPTIONS all : /WARN=ENABLE=ALL ; +flags vmsdecc.compile OPTIONS extra : /WARN=ENABLE=ALL ; +flags vmsdecc.compile OPTIONS pedantic : /WARN=ENABLE=ALL ; + +flags vmsdecc.compile.c++ OPTIONS off : /OPT=NOINLINE ; + +flags vmsdecc OPTIONS 32 : /POINTER=32 ; +flags vmsdecc OPTIONS 64 : /POINTER=64 ; ## /POINTER=64=ARGV argv-64 + +flags vmsdecc.compile OPTIONS ; +flags vmsdecc.compile.c++ OPTIONS ; +flags vmsdecc.compile DEFINES ; +flags vmsdecc.compile UNDEFS ; +flags vmsdecc.compile INCLUDES ; +flags vmsdecc.compile.c++ TEMPLATE_DEPTH ; + +feature.feature cxx-repository : : free path ; #order-sensitive ; +flags vmsdecc CXX-REPOS ; + + +local rule get-includes ( sources * : includes * ) +{ + local result ; + + ## Expect POSIX-style path, quote in double-quotes + for local d in $(sources:D) $(includes) + { + if $(d) + { + local QUOTE = \" ; + local SEP = / ; + + local enquote = false ; + local addsep = false ; + + s = [ SPLIT_BY_CHARACTERS $(d) : $(QUOTE) ] ; + + if $(s) = $(d) { enquote = true ; } + if [ SPLIT_BY_CHARACTERS $(s) : $(SEP) ] = $(s) { addsep = true ; } + + if $(addsep) + { + d = $(s)$(SEP) ; + enquote = true ; + } + + if $(enquote) + { + d = $(QUOTE)$(d)$(QUOTE) ; + } + + if ! 
$(d) in $(result) + { + result += $(d) ; + } + } + } + + return $(result) ; +} + +CXX-REPO-NAME = cxx_repository ; + +local rule get-target-cxx-repo ( target ) +{ + return [ path.join $(target) $(CXX-REPO-NAME) ] ; +} + +rule compile.c++ ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(SOURCE-INCLUDES) ] ; + DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ; + + DEFINES on $(targets) = [ on $(targets) return "__USE_STD_IOSTREAM" $(DEFINES) ] ; + + INCLUDES on $(targets) = [ on $(targets) get-includes $(sources) : $(INCLUDES) ] ; + + TARGET-CXX-REPO on $(targets) = [ on $(targets[1]) get-target-cxx-repo $(LOCATE) ] ; + CXX-REPOS on $(targets) = [ on $(targets) return $(TARGET-CXX-REPO) $(CXX-REPOS) ] ; +} + + +rule compile.c ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(SOURCE-INCLUDES) ] ; + + INCLUDES on $(targets) = [ on $(targets) get-includes $(sources) : $(INCLUDES) ] ; +} + +actions compile.c +{ + $(.CC) $(OPTIONS) /DEF=("$(DEFINES:J=",")") /UNDEF=("$(UNDEFS:J=",")") /INC=($(INCLUDES:J=,)) /OBJ=$(<:W) $(>:W) +} + +actions compile.c++ +{ + $(.CXX) $(OPTIONS) /DEF=("$(DEFINES:J=",")") /UNDEF=("$(UNDEFS:J=",")") /INC=($(INCLUDES:J=,)) /REPO=($(CXX-REPOS:WJ=,)) /OBJ=$(<:W) $(>:W) +} + + + +# Custom linking generator to separate dependency libraries and optfiles from +# the list of sources. The objfiles, libraries, and optfiles are then referenced +# via properties. This allows separate qualification of object-files and libraries +# on linker command line. +# +class vmsdecc-linking-generator : linking-generator +{ + rule run ( project name ? : property-set : sources + ) + { + local result = [ linking-generator.run $(project) $(name) : $(property-set) + : $(sources) ] ; + + return $(result) ; + } + + rule generated-targets ( sources + : property-set : project name ? ) + { + local sources2 ; # Sources to pass to inherited rule. + local properties2 ; # Properties to pass to inherited rule. + local objfiles ; # Object files. + local libraries ; # Library sources. 
+ + properties2 = [ $(property-set).raw ] ; + + for local s in $(sources) + { + if [ type.is-derived [ $(s).type ] OBJ ] + { + objfiles += $(s) ; + properties2 += $(s) ; + } + else if [ type.is-derived [ $(s).type ] STATIC_LIB ] + { + libraries += $(s) ; + properties2 += $(s) ; + } + else if [ type.is-derived [ $(s).type ] SHARED_LIB ] + { + libraries += $(s) ; + properties2 += $(s) ; + } + } + + + return [ linking-generator.generated-targets $(sources) + : [ property-set.create $(properties2) ] : $(project) $(name) ] ; + } +} + + +generators.register [ new vmsdecc-linking-generator vmsdecc.link : + OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : EXE : vmsdecc ] ; + +generators.register [ new vmsdecc-linking-generator vmsdecc.link.dll : + OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : SHARED_LIB : vmsdecc ] ; + + + +# Declare flags and actions for linking +flags vmsdecc.link OPTIONS on : /DEBUG ; +# Strip the binary when no debugging is needed +flags vmsdecc.link OPTIONS off : /NODEBUG ; +flags vmsdecc.link OPTIONS on : /DEBUG ; ## need "DEFINE LIB$DEBUG PCA$COLLECTOR" +flags vmsdecc.link OPTIONS ; +flags vmsdecc.link LINKPATH ; +flags vmsdecc.link FINDLIBS-ST ; +flags vmsdecc.link FINDLIBS-SA ; +flags vmsdecc.link LIBRARIES ; +flags vmsdecc.link LINK-RUNTIME static : static ; +flags vmsdecc.link LINK-RUNTIME shared : dynamic ; +flags vmsdecc.link RPATH ; +flags vmsdecc.link FINDLIBS-SA ; + +feature.feature "link-objfile" : : free dependency path incidental ; +flags vmsdecc.link LINK-OBJS ; + +feature.feature "link-libmodule" : : free dependency incidental ; +flags vmsdecc.link LINK-LIBMODULES ; + +feature.feature "link-staticlib" : : free dependency path incidental ; +flags vmsdecc.link LINK-LIBS ; + +feature.feature "link-sharedlib" : : free dependency path incidental ; +flags vmsdecc.link LINK-SHAREDLIBS ; + +feature.feature "link-optfile" : : free dependency path incidental ; +flags vmsdecc.link LINK-OPTS ; + + +local rule export-target-var-contents ( var-name : values * ) +{ + local result ; + local nl = " +" ; + local locate ; + + if $(var-name) + { + result += + "$(nl)$(var-name) =" ; + for local f in $(values) + { + locate = [ on $(f) return $(LOCATE) ] ; + result += + "$(nl)\"$(f:TG=:R=$(locate))\"" ; + } + result += "$(nl) ;" ; + } + + return $(result) ; +} + +# VMS linker usually expects an explicit object module that contains main(). +# Yet on *NIX, the main module can be automatically resolved from a library -- +# this may arguably be convenient with dynamic linking, and is also used with +# Boost.Test. +# To handle such cases on VMS, one needs first to locate the library module +# containing main(), then include it in sources for the link command. +# GLOB_ARCHIVE built-in can locate the module name (globbing by symbol MAIN). +# To be able to use its result during jam-parsing stage, we need to execute it +# from a separate jam-file that produces a pre-defined option file for link. +# + +actions write-jam-file-contents +{ + SET FILE /VER=1 @($(<:W):E= $(>) ) +} + + +local rule mainmod-link-opt.generate ( jam-file : opt-file : objs * : libs * : sharedlibs * ) +{ + local nl = " +" ; + local $ = $ ; + local @ = @ ; + + if $(jam-file) && $(opt-file) + { + local .contents on $(jam-file) = + "# This file was auto-generated by $(__name__)." 
; + + .contents on $(jam-file) += + "$(nl)OPT-FILE = $(opt-file) ;" ; + + .contents on $(jam-file) += [ on $(jam-file) + export-target-var-contents "OBJS" : $(objs) ] ; + + .contents on $(jam-file) += [ on $(jam-file) + export-target-var-contents "LIBS" : $(libs) ] ; + + .contents on $(jam-file) += [ on $(jam-file) + export-target-var-contents "SHAREDLIBS" : $(sharedlibs) ] ; + + .contents on $(jam-file) += + "$(nl).nl = \"$(nl)\" ;" + ; + .contents on $(jam-file) += + "$(nl)local rule get-main-members ( libs * : symbol-main ? )" + "$(nl){" + "$(nl) local result ;" + "$(nl) symbol-main ?= \"MAIN\" ;" + "$(nl) for local libfile in $($)(libs)" + "$(nl) {" + "$(nl) local main = [ GLOB_ARCHIVE $($)(libfile) : : : $($)(symbol-main) ] ;" + "$(nl) if $($)(main)" + "$(nl) {" + "$(nl) result += $($)(main) ;" + "$(nl) }" + "$(nl) }" + "$(nl) return $($)(result) ;" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)local rule get-libmods ( members * )" + "$(nl){" + "$(nl) local result ;" + "$(nl) for local m in $($)(members)" + "$(nl) {" + "$(nl) local lib = $($)(m:WDBS) ;" + "$(nl) local mem = $($)(m:M) ;" + "$(nl) if $($)(mem)" + "$(nl) {" + "$(nl) local mod = [ SPLIT_BY_CHARACTERS $($)(mem) : \"()\" ] ;" + "$(nl) result += $($)(lib)/INC=($($)(mod:B))/LIB ;" + "$(nl) }" + "$(nl) }" + "$(nl) return $($)(result) ;" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)rule mainmod-link-opt ( opt-file : libs * : objs * )" + "$(nl){" + "$(nl) local main-members = [ on $($)(opt-file[1]) get-main-members $($)(libs) ] ;" + "$(nl) LIBMODULES on $($)(opt-file[1]) = [ on $($)(opt-file[1]) get-libmods $($)(main-members[1]) ] ;" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)actions mainmod-link-opt bind OBJS LIBMODULES" + "$(nl){" + "$(nl) SET FILE /VER=1 $(@)($($)(<:W):E= $($)(LIBMODULES:J=,-$($)(.nl))-$($)(.nl) )" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)local rule make" + "$(nl){" + "$(nl) if $($)(OPT-FILE)" + "$(nl) {" + "$(nl) DEPENDS all : $($)(OPT-FILE) ;" + "$(nl) DEPENDS $($)(OPT-FILE) : $($)(LIBS) $($)(OBJS) ;" + "$(nl) mainmod-link-opt $($)(OPT-FILE) : $($)(LIBS) : $($)(OBJS) ;" + "$(nl) }" + "$(nl)}" + "$(nl)make all ;" + ; + + write-jam-file-contents $(jam-file) : [ on $(jam-file) return $(.contents) ] ; + + } +} + + +rule link ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OBJS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-LIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-SHAREDLIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OPTS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LIBRARIES) ] ; + + + for local s in $(sources) + { + local r = [ on $(s) return $(TARGET-CXX-REPO) ] ; + + if ! 
$(r) in [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPOS on $(targets[1]) += $(r) ; + } + } + + local locate = [ on $(targets[1]) return $(LOCATE) ] ; + LINK-MAINMOD-OPT on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.opt) ; + LINK-MAINMOD-JAM on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.jam) ; + #on $(targets[1]) TEMPORARY $(LINK-MAINMOD-JAM) ; + + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-OPT) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-JAM) ] ; + on $(targets[1]) DEPENDS $(LINK-MAINMOD-OPT) : $(LINK-MAINMOD-JAM) ; + + on $(targets[1]) mainmod-link-opt.generate $(LINK-MAINMOD-JAM) + : $(LINK-MAINMOD-OPT) : $(LINK-OBJS) : $(LINK-LIBS) $(LIBRARIES) : $(LINK-SHAREDLIBS) ; + + +} + +actions link bind LINK-OBJS LINK-MAINMOD-JAM LINK-MAINMOD-OPT LINK-LIBS LIBRARIES LINK-SHAREDLIBS LINK-OPTS CXX-REPOS +{ + CXX_REPOS = "" +"$(CXX-REPOS:WJ=,)" + IF (CXX_REPOS .EQS. "") THEN CXX_REPOS = "NL:" + DEF /NOLOG REPOS 'CXX_REPOS' + SET FILE /VER=1 @($(<:WS=$INPUT.opt):E= $(LINK-OBJS:WJ=,-$(.nl))-$(.nl) ,$(LINK-LIBS:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LIBRARIES:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LINK-SHAREDLIBS:WJ=/SHARE,-$(.nl))/SHARE-$(.nl) ) + MC $(.B2) -f $(LINK-MAINMOD-JAM:W) + $(.LD) $(OPTIONS) /REPO=(REPOS:) /EXE=$(<:W) $(LINK-MAINMOD-OPT:W)/OPT, $(<:WS=$INPUT.opt)/OPT ,$(LINK-OPTS:WJ=/OPT,)/OPT +} + +# Slight mods for dlls +rule link.dll ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OBJS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-LIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-SHAREDLIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OPTS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LIBRARIES) ] ; + + for local s in $(sources) + { + local r = [ on $(s) return $(TARGET-CXX-REPO) ] ; + + if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPOS on $(targets[1]) += $(r) ; + } + } + + + local locate = [ on $(targets[1]) return $(LOCATE) ] ; + LINK-MAINMOD-OPT on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.opt) ; + LINK-MAINMOD-JAM on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.jam) ; + #on $(targets[1]) TEMPORARY $(LINK-MAINMOD-JAM) ; + + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-OPT) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-JAM) ] ; + on $(targets[1]) DEPENDS $(LINK-MAINMOD-OPT) : $(LINK-MAINMOD-JAM) ; + + on $(targets[1]) mainmod-link-opt.generate $(LINK-MAINMOD-JAM) + : $(LINK-MAINMOD-OPT) : $(LINK-OBJS) : $(LINK-LIBS) $(LIBRARIES) : $(LINK-SHAREDLIBS) ; + +} + +actions link.dll bind LINK-OBJS LINK-MAINMOD-JAM LINK-MAINMOD-OPT LINK-LIB LINK-LIBS LIBRARIES LINK-SHAREDLIBS LINK-OPTS CXX-REPOS +{ + CXX_REPOS = "" +"$(CXX-REPOS:WJ=,)" + IF (CXX_REPOS .EQS. 
"") THEN CXX_REPOS = "NL:" + DEF /NOLOG REPOS 'CXX_REPOS' + SET FILE /VER=1 @($(<:WS=$INPUT.opt):E= $(LINK-OBJS:WJ=,-$(.nl))-$(.nl) ,$(LINK-LIBS:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LIBRARIES:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LINK-SHAREDLIBS:WJ=/SHARE,-$(.nl))/SHARE-$(.nl) ) + MC $(.B2) -f $(LINK-MAINMOD-JAM:W) + $(.LD) $(OPTIONS) /REPO=(REPOS:) /SHARE=$(<:W) $(LINK-MAINMOD-OPT:W)/OPT, $(<:WS=$INPUT.opt)/OPT ,$(LINK-OPTS:WJ=/OPT,)/OPT +} + + + +flags vmsdecc.archive AROPTIONS ; + + +local rule vms-join-wildcard-name ( path * : name ) +{ + local files ; + + if $(name) + { + for local d in $(path) + { + files += $(d)$(name) ; + } + + files ?= $(name) ; + + } + + return $(files) ; +} + + +rule archive ( targets + : sources * : properties * ) +{ + local clean.a = $(targets[1])(clean) ; + TEMPORARY $(clean.a) ; + NOCARE $(clean.a) ; + LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; + DEPENDS $(clean.a) : $(sources) ; + DEPENDS $(targets) : $(clean.a) ; + common.RmTemps $(clean.a) : $(targets) ; + + + #CXX-REPOS on $(targets[1]) = null ; ## reset + + for local s in $(sources) + { + local r = [ on $(s) return $(TARGET-CXX-REPO) ] ; + + if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPOS on $(targets[1]) += $(r) ; + } + } + + if [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPO-OBJS on $(targets[1]) = [ on $(targets[1]) return [ vms-join-wildcard-name $(CXX-REPOS:W) : *$(.OBJ) ] ] ; + + #DEPENDS $(targets) : [ on $(targets[1]) return $(CXX-REPO-OBJS) ] ; + } +} + +# Declare action for creating static libraries +actions piecemeal archive +{ + HAVE_REPO_OBJS = "F" + IF ("" +"$(CXX-REPO-OBJS[1])" .NES. "") + THEN + IF ( "" +F$SEARCH("$(CXX-REPO-OBJS[1])") .NES. "") + THEN + HAVE_REPO_OBJS = "T" + ENDIF + ENDIF + $(.AR) /CREATE /REPL $(AROPTIONS) $(<:W) $(>:WJ=,) + IF (HAVE_REPO_OBJS) + THEN + $(.AR) /REPL $(AROPTIONS) $(<:W) $(CXX-REPO-OBJS:J=,) + PIPE DEL /NOLOG /NOCONF $(CXX-REPO-OBJS:J=;*,);* 2>NL: >NL: + ENDIF +} + diff --git a/src/boost/tools/build/src/tools/whale.jam b/src/boost/tools/build/src/tools/whale.jam new file mode 100644 index 000000000..3e6a86d79 --- /dev/null +++ b/src/boost/tools/build/src/tools/whale.jam @@ -0,0 +1,116 @@ +# Copyright (C) Vladimir Prus 2002-2005. + +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# This module implements support for Whale/Dolphin/WD parser/lexer tools. +# See http://www.cs.queensu.ca/home/okhotin/whale/ for details. +# +# There are three interesting target types: +# - WHL (the parser sources), that are converted to CPP and H +# - DLP (the lexer sources), that are converted to CPP and H +# - WD (combined parser/lexer sources), that are converted to WHL + DLP + +import type ; +import generators ; +import path ; +import "class" : new ; +import errors ; + +rule init ( path # path the Whale/Dolphin/WD binaries + ) +{ + if $(.configured) && $(.path) != $(path) + { + errors.user-error "Attempt to reconfigure Whale support" : + "Previously configured with path \"$(.path:E=)\"" : + "Now configuring with path \"$(path:E=)\"" ; + + } + .configured = true ; + .path = $(path) ; + + .whale = [ path.join $(path) whale ] ; + .dolphin = [ path.join $(path) dolphin ] ; + .wd = [ path.join $(path) wd ] ; +} + + +# Declare the types. +type.register WHL : whl ; +type.register DLP : dlp ; +type.register WHL_LR0 : lr0 ; +type.register WD : wd ; + +# Declare standard generators. 
+generators.register-standard whale.whale : WHL : CPP H H(%_symbols) ; +generators.register-standard whale.dolphin : DLP : CPP H ; +generators.register-standard whale.wd : WD : WHL(%_parser) DLP(%_lexer) ; + +# The conversions defines above a ambiguious when we generated CPP from WD. +# We can either go via WHL type, or via DLP type. +# The following custom generator handles this by running both conversions. + +class wd-to-cpp : generator +{ + rule __init__ ( * : * : * ) + { + generator.__init__ $(1) : $(2) : $(3) ; + } + + rule run ( project name ? : property-set : source * ) + { + if ! $(source[2]) + { + local new-sources ; + if ! [ $(source).type ] in WHL DLP + { + local r1 = [ generators.construct $(project) $(name) + : WHL : $(property-set) : $(source) ] ; + local r2 = [ generators.construct $(project) $(name) + : DLP : $(property-set) : $(source) ] ; + + new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ; + } + else + { + new-sources = $(source) ; + } + + local result ; + for local i in $(new-sources) + { + local t = [ generators.construct $(project) $(name) : CPP + : $(property-set) : $(i) ] ; + result += $(t[2-]) ; + } + return $(result) ; + } + } + +} + + +generators.override whale.wd-to-cpp : whale.whale ; +generators.override whale.wd-to-cpp : whale.dolphin ; + + +generators.register [ new wd-to-cpp whale.wd-to-cpp : : CPP ] ; + + +actions whale +{ + $(.whale) -d $(<[1]:D) $(>) +} + +actions dolphin +{ + $(.dolphin) -d $(<[1]:D) $(>) +} + +actions wd +{ + $(.wd) -d $(<[1]:D) -g $(>) +} + diff --git a/src/boost/tools/build/src/tools/xlcpp.jam b/src/boost/tools/build/src/tools/xlcpp.jam new file mode 100644 index 000000000..03082e294 --- /dev/null +++ b/src/boost/tools/build/src/tools/xlcpp.jam @@ -0,0 +1,168 @@ +# Copyright Vladimir Prus 2004. +# Copyright Toon Knapen 2004. +# Copyright Catherine Morton 2015. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt +# or copy at https://www.bfgroup.xyz/b2/LICENSE.txt) + +# +# B2 V2 toolset for the IBM XL C++ compiler +# + +import toolset : flags ; +import feature ; +import common ; +import generators ; +import os ; + +feature.extend toolset : xlcpp ; +toolset.inherit xlcpp : unix ; +generators.override xlcpp.prebuilt : builtin.prebuilt ; +generators.override xlcpp.searched-lib-generator : searched-lib-generator ; + +# Configure the xlcpp toolset +rule init ( version ? 
: command * : options * ) +{ + local condition = [ + common.check-init-parameters xlcpp : version $(version) ] ; + + command = [ common.get-invocation-command xlcpp : xlC + : $(command) : "/usr/xlcpp/bin/xlC" ] ; + + common.handle-options xlcpp : $(condition) : $(command) : $(options) ; +} + +# Declare generators +generators.register-c-compiler xlcpp.compile.c : C : OBJ : xlcpp ; +generators.register-c-compiler xlcpp.compile.c++ : CPP : OBJ : xlcpp ; + +# Allow C++ style comments in C files +flags xlcpp CFLAGS : -qnoxlcompatmacros ; + +# Declare flags +flags xlcpp CFLAGS off : -qNOOPTimize ; +flags xlcpp CFLAGS speed : ; +flags xlcpp CFLAGS space : -O2 -qcompact ; + +# Discretionary inlining (not recommended) +flags xlcpp CFLAGS off : -qnoinline ; +flags xlcpp CFLAGS on : -qinline ; +#flags xlcpp CFLAGS full : -qinline ; +flags xlcpp CFLAGS full : ; + +# Exception handling +flags xlcpp C++FLAGS off : -qnoeh ; +flags xlcpp C++FLAGS on : -qeh ; + +# Run-time Type Identification +flags xlcpp C++FLAGS off : -qnortti ; +flags xlcpp C++FLAGS on : -qrtti ; + +# Enable 64-bit memory addressing model +flags xlcpp CFLAGS 64 : -q64 ; +flags xlcpp LINKFLAGS 64 : -q64 ; +flags xlcpp ARFLAGS aix/64 : -X 64 ; + +# Use absolute path when generating debug information +flags xlcpp CFLAGS on : -g -qfullpath ; +flags xlcpp LINKFLAGS on : -g -qfullpath ; +flags xlcpp LINKFLAGS off : -s ; + +if [ os.name ] = AIX +{ + flags xlcpp.compile C++FLAGS : -qfuncsect ; + + # The -bnoipath strips the prepending (relative) path of libraries from + # the loader section in the target library or executable. Hence, during + # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded + # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without + # this option, the prepending (relative) path + library name is + # hard-coded in the loader section, causing *only* this path to be + # searched during load-time. Note that the AIX linker does not have an + # -soname equivalent, this is as close as it gets. + # + # The above options are definitely for AIX 5.x, and most likely also for + # AIX 4.x and AIX 6.x. 
For details about the AIX linker see: + # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf + # + flags xlcpp.link LINKFLAGS shared : -bnoipath ; + + # Run-time linking + flags xlcpp.link EXE-LINKFLAGS shared : -brtl ; +} +else +{ + # Linux PPC + flags xlcpp.compile CFLAGS shared : -qpic=large ; + flags xlcpp FINDLIBS : rt ; + + flags xlcpp.compile OPTIONS hidden : -qvisibility=hidden ; + flags xlcpp.compile OPTIONS protected : -qvisibility=protected ; + flags xlcpp.compile OPTIONS global : -qvisibility=default ; +} + +# Profiling +flags xlcpp CFLAGS on : -pg ; +flags xlcpp LINKFLAGS on : -pg ; + +# Declare flags and actions for compilation +flags xlcpp.compile.c++ OPTIONS 98 : -std=c++03 ; +flags xlcpp.compile.c++ OPTIONS 03 : -std=c++03 ; +flags xlcpp.compile.c++ OPTIONS 0x : -std=c++11 ; +flags xlcpp.compile.c++ OPTIONS 11 : -std=c++11 ; +flags xlcpp.compile.c++ OPTIONS 1y : -std=c++1y ; +flags xlcpp.compile.c++ OPTIONS 14 : -std=c++1y ; +flags xlcpp.compile.c++ OPTIONS 1z : -std=c++1y ; +flags xlcpp.compile.c++ OPTIONS 17 : -std=c++1y ; +flags xlcpp.compile.c++ OPTIONS 2a : -std=c++1y ; +flags xlcpp.compile.c++ OPTIONS 20 : -std=c++1y ; +flags xlcpp.compile.c++ OPTIONS latest : -std=c++1y ; + +flags xlcpp.compile OPTIONS ; +flags xlcpp.compile.c++ OPTIONS ; +flags xlcpp DEFINES ; +flags xlcpp UNDEFS ; +flags xlcpp HDRS ; +flags xlcpp STDHDRS ; +flags xlcpp.link OPTIONS ; +flags xlcpp ARFLAGS ; + +flags xlcpp LIBPATH ; +flags xlcpp NEEDLIBS ; +flags xlcpp FINDLIBS ; +flags xlcpp FINDLIBS ; + +# Select the compiler name according to the threading model. +flags xlcpp VA_C_COMPILER single : xlc ; +flags xlcpp VA_C_COMPILER multi : xlc ; +flags xlcpp VA_CXX_COMPILER single : xlC ; +flags xlcpp VA_CXX_COMPILER multi : xlC ; + +SPACE = " " ; + +flags xlcpp.link.dll HAVE_SONAME linux : "" ; + +actions xlcpp.link bind NEEDLIBS +{ + $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) +} + +actions xlcpp.link.dll bind NEEDLIBS +{ + xlC -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) +} + +actions xlcpp.compile.c +{ + $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" +} + +actions xlcpp.compile.c++ +{ + $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" +} + +actions updated together piecemeal xlcpp.archive +{ + ar $(ARFLAGS) ru "$(<)" "$(>)" +} diff --git a/src/boost/tools/build/src/tools/xlf.jam b/src/boost/tools/build/src/tools/xlf.jam new file mode 100644 index 000000000..52b9cf281 --- /dev/null +++ b/src/boost/tools/build/src/tools/xlf.jam @@ -0,0 +1,39 @@ +# Copyright (C) 2004 Toon Knapen +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# +# toolset configuration for the IBM Fortran compiler (xlf) +# + +import toolset : flags ; +import feature ; +import fortran ; + +rule init ( version ? 
: command * : options * ) +{ +} + +# Declare flags and action for compilation +flags xlf OPTIONS off : -O0 ; +flags xlf OPTIONS speed : -O3 ; +flags xlf OPTIONS space : -Os ; + +flags xlf OPTIONS on : -g ; +flags xlf OPTIONS on : -pg ; + +flags xlf DEFINES ; +flags xlf INCLUDES ; + +rule compile-fortran +{ +} + +actions compile-fortran +{ + xlf $(OPTIONS) -I$(INCLUDES) -c -o "$(<)" "$(>)" +} + +generators.register-fortran-compiler xlf.compile-fortran : FORTRAN : OBJ ; diff --git a/src/boost/tools/build/src/tools/xsltproc-config.jam b/src/boost/tools/build/src/tools/xsltproc-config.jam new file mode 100644 index 000000000..43f0cf14e --- /dev/null +++ b/src/boost/tools/build/src/tools/xsltproc-config.jam @@ -0,0 +1,36 @@ +#~ Copyright 2005 Rene Rivera. +#~ Distributed under the Boost Software License, Version 1.0. +#~ (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Automatic configuration for the xsltproc toolset. To use, just import this +# module. + +import os ; +import toolset : using ; + + +local rule locate-executable ( name ) +{ + local path = [ modules.peek : PATH ] ; + local exe ; + if [ os.name ] = NT + { + exe = [ GLOB $(path) "C:\\Boost\\bin" : $(name)\.exe ] ; + } + else + { + exe = [ GLOB $(path) : $(name) ] ; + } + return $(exe[1]) ; +} + + +local xsltproc-exe = [ locate-executable xsltproc ] ; +if $(xsltproc-exe) +{ + if --debug-configuration in [ modules.peek : ARGV ] + { + ECHO notice\: using xsltproc ":" $(xsltproc-exe) ; + } + using xsltproc : $(xsltproc-exe) ; +} diff --git a/src/boost/tools/build/src/tools/xsltproc.jam b/src/boost/tools/build/src/tools/xsltproc.jam new file mode 100644 index 000000000..6baf0491a --- /dev/null +++ b/src/boost/tools/build/src/tools/xsltproc.jam @@ -0,0 +1,232 @@ +# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and +# distribute this software is granted provided this copyright notice appears in +# all copies. This software is provided "as is" without express or implied +# warranty, and with no claim as to its suitability for any purpose. + +# This module defines rules to apply an XSLT stylesheet to an XML file using the +# xsltproc driver, part of libxslt. + +import "class" : new ; +import common ; +import feature ; +import generators ; +import modules ; +import os ; +import path ; +import regex ; +import sequence ; +import toolset ; +import virtual-target ; + +feature.feature "xsl:param" : : free ; +feature.feature "xsl:path" : : free ; +feature.feature catalog : : free ; + + +# Initialize xsltproc support. The parameters are: +# xsltproc: The xsltproc executable +# +rule init ( xsltproc ? ) +{ + if $(xsltproc) + { + modify-config ; + .xsltproc = $(xsltproc) ; + check-xsltproc ; + } +} + + +rule freeze-config ( ) +{ + if ! $(.config-frozen) + { + .config-frozen = true ; + .xsltproc ?= [ modules.peek : XSLTPROC ] ; + .xsltproc ?= xsltproc ; + check-xsltproc ; + .is-cygwin = [ .is-cygwin $(.xsltproc) ] ; + } +} + + +rule modify-config ( ) +{ + if $(.config-frozen) + { + import errors ; + errors.user-error + "xsltproc: Cannot change xsltproc command after it has been used." ; + } +} + + +rule check-xsltproc ( ) +{ + if $(.xsltproc) + { + local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] + ; + if $(status[2]) != 0 + { + import errors ; + errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ; + } + } +} + +rule name ( ) +{ + freeze-config ; + return $(.xsltproc) ; +} + +# Returns a non-empty string if a cygwin xsltproc binary was specified. 
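Configuration follows the pattern used by the other modules in this patch: `init` records the executable (at most once, enforced by `modify-config`), `freeze-config` later falls back to the `XSLTPROC` module variable or to plain `xsltproc` on `PATH`, and `check-xsltproc` validates the command by running it with `-V`. An explicit configuration is therefore a single line; the path here is hypothetical:

----
# user-config.jam (illustrative)
using xsltproc : "/usr/local/bin/xsltproc" ;
----

With no argument (`using xsltproc ;`) the lazy fallback in `freeze-config` applies; the `xsltproc-config` module above achieves the same result automatically by searching `PATH` and passing the executable it finds.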
+# +rule is-cygwin ( ) +{ + freeze-config ; + return $(.is-cygwin) ; +} + + +rule .is-cygwin ( xsltproc ) +{ + if [ os.on-windows ] + { + local file = [ path.make [ modules.binding $(__name__) ] ] ; + local dir = [ path.native [ path.join [ path.parent $(file) ] xsltproc ] + ] ; + if [ os.name ] = CYGWIN + { + dir = $(dir:W) ; + } + local command = + "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ; + local status = [ SHELL $(command) : no-output : exit-status ] ; + if $(status[2]) != "0" + { + return true ; + } + } +} + +class xsltproc-action : action +{ + rule adjust-properties ( property-set ) + { + local s = [ $(self.targets[1]).creating-subvariant ] ; + if $(s) + { + return [ $(property-set).add-raw + [ $(s).implicit-includes "xsl:path" : XML ] ] ; + } + else + { + return $(property-set) ; + } + } +} + +class xsltproc-generator : generator +{ + rule action-class ( ) + { + return xsltproc-action ; + } +} + +rule register-generator ( id : source-types + : target-types + : requirements * ) +{ + if ! $(id) in $(.known-rules) + { + .known-rules += $(id) ; + flags $(id) ; + } + generators.register [ new xsltproc-generator $(id) : + $(source-types) : $(target-types) : $(requirements) ] ; +} + +IMPORT xsltproc : register-generator : : generators.register-xslt ; + +rule flags ( rulename ) +{ + toolset.uses-features $(rulename) : : unchecked ; + toolset.flags $(rulename) XSL-PATH : : unchecked ; + toolset.flags $(rulename) FLAGS : : unchecked ; +} + +rule compute-xslt-flags ( target : properties * ) +{ + local flags ; + # Translate into command line flags. + for local param in [ feature.get-values : $(properties) ] + { + local namevalue = [ regex.split $(param) "=" ] ; + flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ; + } + + return $(flags) ; +} + + +local rule .xsltproc ( target : source stylesheet : properties * : dirname ? : + action ) +{ + freeze-config ; + STYLESHEET on $(target) = $(stylesheet) ; + FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ; + NAME on $(target) = $(.xsltproc) ; + + for local catalog in [ feature.get-values : $(properties) ] + { + CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : + $(catalog:T) ] ; + } + + if [ os.on-windows ] && ! 
[ is-cygwin ] + { + action = $(action).windows ; + } + + $(action) $(target) : $(source) ; +} + + +rule xslt ( target : source stylesheet : properties * ) +{ + return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : + xslt-xsltproc ] ; +} + + +rule xslt-dir ( target : source stylesheet : properties * : dirname ) +{ + return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : + $(dirname) : xslt-xsltproc-dir ] ; +} + +_ = " " ; + +actions xslt-xsltproc.windows +{ + $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:W)" --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)" +} + + +actions xslt-xsltproc bind STYLESHEET +{ + $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:T)" --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)" +} + + +actions xslt-xsltproc-dir.windows bind STYLESHEET +{ + $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:W)" --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)" +} + + +actions xslt-xsltproc-dir bind STYLESHEET +{ + $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:T)" --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)" +} diff --git a/src/boost/tools/build/src/tools/xsltproc/included.xsl b/src/boost/tools/build/src/tools/xsltproc/included.xsl new file mode 100644 index 000000000..ef86394a9 --- /dev/null +++ b/src/boost/tools/build/src/tools/xsltproc/included.xsl @@ -0,0 +1,11 @@ + + + + diff --git a/src/boost/tools/build/src/tools/xsltproc/test.xml b/src/boost/tools/build/src/tools/xsltproc/test.xml new file mode 100644 index 000000000..57c8ba187 --- /dev/null +++ b/src/boost/tools/build/src/tools/xsltproc/test.xml @@ -0,0 +1,2 @@ + + diff --git a/src/boost/tools/build/src/tools/xsltproc/test.xsl b/src/boost/tools/build/src/tools/xsltproc/test.xsl new file mode 100644 index 000000000..a142c91dd --- /dev/null +++ b/src/boost/tools/build/src/tools/xsltproc/test.xsl @@ -0,0 +1,12 @@ + + + + + diff --git a/src/boost/tools/build/src/tools/zlib.jam b/src/boost/tools/build/src/tools/zlib.jam new file mode 100644 index 000000000..84f7c82a1 --- /dev/null +++ b/src/boost/tools/build/src/tools/zlib.jam @@ -0,0 +1,235 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the zlib library +# +# After 'using zlib', the following targets are available: +# +# /zlib//zlib -- The zlib library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import os ; +import property ; +import property-set ; + +header = zlib.h ; +names = z zlib zll zdll ; + +sources = adler32.c compress.c + crc32.c deflate.c gzclose.c gzio.c gzlib.c gzread.c gzwrite.c + infback.c inffast.c inflate.c inftrees.c trees.c uncompr.c zutil.c ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the zlib library. +# +# zlib can be configured either to use pre-existing binaries +# or to build the library from source. +# +# Options for configuring a prebuilt zlib:: +# +# +# The directory containing the zlib binaries. +# +# Overrides the default library name. +# +# The directory containing the zlib headers. +# +# If none of these options is specified, then the environmental +# variables ZLIB_LIBRARY_PATH, ZLIB_NAME, and ZLIB_INCLUDE will +# be used instead. 
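What the option descriptions above do not show is the consumer side: as noted in the header of this file, a successful `using zlib` (whether prebuilt or built from source) exposes the library as the `/zlib//zlib` target, which other projects simply list among their sources. A minimal, illustrative sketch:

----
# jamfile.jam (illustrative)
exe app : app.cpp /zlib//zlib ;
----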
+# +# Options for building zlib from source:: +# +# +# The zlib source directory. Defaults to the environmental variable +# ZLIB_SOURCE. +# +# A rule which computes the actual name of the compiled +# libraries based on the build properties. Ignored +# when using precompiled binaries. +# +# The base name to use for the compiled library. Ignored +# when using precompiled binaries. +# +# Examples:: +# +# # Find zlib in the default system location +# using zlib ; +# # Build zlib from source +# using zlib : 1.2.7 : /home/steven/zlib-1.2.7 ; +# # Find zlib in /usr/local +# using zlib : 1.2.7 +# : /usr/local/include /usr/local/lib ; +# # Build zlib from source for msvc and find +# # prebuilt binaries for gcc. +# using zlib : 1.2.7 : C:/Devel/src/zlib-1.2.7 : msvc ; +# using zlib : 1.2.7 : : gcc ; +# +rule init ( + version ? + # The zlib version (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the zlib target + + : is-default ? + # Default configurations are only used when zlib + # has not yet been configured. This option is + # deprecated. A configuration will be treated + # as a default when none of , , + # , and are present. + ) +{ + local caller = [ project.current ] ; + + if ! $(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project zlib ; + } + + local library-path = [ feature.get-values : $(options) ] ; + local include-path = [ feature.get-values : $(options) ] ; + local source-path = [ feature.get-values : $(options) ] ; + local library-name = [ feature.get-values : $(options) ] ; + local tag = [ feature.get-values : $(options) ] ; + local build-name = [ feature.get-values : $(options) ] ; + + if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name) + { + is-default = true ; + } + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + # Ignore environmental ZLIB_SOURCE if this initialization + # requested to search for a specific pre-built library. 
+ if $(library-path) || $(include-path) || $(library-name) + { + if $(source-path) || $(tag) || $(build-name) + { + errors.user-error "incompatible options for zlib:" + [ property.select : $(options) ] "and" + [ property.select : $(options) ] ; + } + } + else + { + source-path ?= [ os.environ ZLIB_SOURCE ] ; + if $(source-path) + { + source-path = [ path.root [ path.make $(source-path) ] + [ path.pwd ] ] ; + } + } + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [zlib] zlib is already configured" ; + } + } + else + { + errors.user-error "zlib is already configured" ; + } + return ; + } + else if $(source-path) + { + build-name ?= z ; + library-id = [ CALC $(library-id) + 1 ] ; + tag = [ MATCH ^@?(.*)$ : $(tag) ] ; + if $(tag) + { + tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ; + } + sources = [ path.glob $(source-path) : $(sources) ] ; + if $(.debug) + { + ECHO "notice: [zlib] Building zlib from source as $(build-name)" ; + if $(condition) + { + ECHO "notice: [zlib] Condition" [ $(condition).raw ] ; + } + if $(sources) + { + ECHO "notice: [zlib] found zlib source in $(source-path)" ; + } + else + { + ECHO "warning: [zlib] could not find zlib source in $(source-path)" ; + } + } + local target ; + if $(sources) + { + target = [ targets.create-typed-target LIB : $(.project) + : $(build-name).$(library-id) + : $(sources) + : $(requirements) + @$(tag) + $(source-path) + msvc:_CRT_SECURE_NO_DEPRECATE + msvc:_SCL_SECURE_NO_DEPRECATE + shared:ZLIB_DLL + : + : $(source-path) ] ; + } + + local mt = [ new ac-library zlib : $(.project) : $(condition) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + if $(target) + { + $(mt).set-target $(target) ; + } + targets.main-target-alternative $(mt) ; + } + else + { + if $(.debug) + { + ECHO "notice: [zlib] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [zlib] Condition" [ $(condition).raw ] ; + } + } + + local mt = [ new ac-library zlib : $(.project) : $(condition) : + $(include-path) : $(library-path) : $(library-name) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + targets.main-target-alternative $(mt) ; + } + .configured.$(condition) = true ; +} diff --git a/src/boost/tools/build/src/tools/zstd.jam b/src/boost/tools/build/src/tools/zstd.jam new file mode 100644 index 000000000..e73100fe1 --- /dev/null +++ b/src/boost/tools/build/src/tools/zstd.jam @@ -0,0 +1,100 @@ +# Copyright (c) 2010 Vladimir Prus. +# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE.txt or +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Supports the zstd library +# +# After 'using zstd', the following targets are available: +# +# /zstd//zstd -- The zstd library + +import project ; +import ac ; +import errors ; +import feature ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import indirect ; +import property ; +import property-set ; + +header = zstd.h ; +# libzstd only needed for VisualC++ builds +# *_static variants for prebuilt Windows static libraries +names = zstd zstd_static libzstd libzstd_static ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +rule init ( + version ? + # (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the target + ) +{ + local caller = [ project.current ] ; + + if ! 
+    if ! $(.initialized)
+    {
+        .initialized = true ;
+
+        project.initialize $(__name__) ;
+        .project = [ project.current ] ;
+        project zstd ;
+    }
+
+    local library-path = [ feature.get-values <search> : $(options) ] ;
+    local include-path = [ feature.get-values <include> : $(options) ] ;
+    local library-name = [ feature.get-values <name> : $(options) ] ;
+
+    condition = [ property-set.create $(requirements) ] ;
+    condition = [ property-set.create [ $(condition).base ] ] ;
+
+    if $(.configured.$(condition))
+    {
+        if ! $(options)
+        {
+            if $(.debug)
+            {
+                ECHO "notice: [zstd] zstd is already configured" ;
+            }
+        }
+        else
+        {
+            errors.user-error "zstd is already configured" ;
+        }
+        return ;
+    }
+    else
+    {
+        if $(.debug)
+        {
+            ECHO "notice: [zstd] Using pre-installed library" ;
+            if $(condition)
+            {
+                ECHO "notice: [zstd] Condition" [ $(condition).raw ] ;
+            }
+        }
+
+        local mt = [ new ac-library zstd : $(.project) : $(condition) :
+            $(include-path) : $(library-path) : $(library-name) ] ;
+        $(mt).set-header $(header) ;
+        $(mt).set-default-names $(names) ;
+        targets.main-target-alternative $(mt) ;
+    }
+    .configured.$(condition) = true ;
+}
diff --git a/src/boost/tools/build/src/util/__init__.py b/src/boost/tools/build/src/util/__init__.py
new file mode 100644
index 000000000..7c847cb57
--- /dev/null
+++ b/src/boost/tools/build/src/util/__init__.py
@@ -0,0 +1,321 @@
+
+import bjam
+import re
+import types
+
+from itertools import groupby
+
+
+def safe_isinstance(value, types=None, class_names=None):
+    """To prevent circular imports, this extends isinstance()
+    by checking also if `value` has a particular class name (or inherits from a
+    particular class name). This check is safe in that an AttributeError is not
+    raised in case `value` doesn't have a __class__ attribute.
+    """
+    # inspect is being imported here because I seriously doubt
+    # that this function will be used outside of the type
+    # checking below.
+    import inspect
+    result = False
+    if types is not None:
+        result = result or isinstance(value, types)
+    if class_names is not None and not result:
+        # this doesn't work with inheritance, but normally
+        # either the class will already be imported within the module,
+        # or the class doesn't have any subclasses. For example: PropertySet
+        if isinstance(class_names, basestring):
+            class_names = [class_names]
+        # this is the part that makes it "safe".
+        try:
+            base_names = [class_.__name__ for class_ in inspect.getmro(value.__class__)]
+            for name in class_names:
+                if name in base_names:
+                    return True
+        except AttributeError:
+            pass
+    return result
+
+
+def is_iterable_typed(values, type_):
+    return is_iterable(values) and all(isinstance(v, type_) for v in values)
+
+
+def is_iterable(value):
+    """Returns whether value is iterable and not a string."""
+    return not isinstance(value, basestring) and hasattr(value, '__iter__')
+
+
+def is_iterable_or_none(value):
+    return is_iterable(value) or value is None
+
+
+def is_single_value(value):
+    # some functions may specify a bjam signature
+    # that is a string type, but still allow a
+    # PropertySet to be passed in
+    return safe_isinstance(value, (basestring, type(None)), 'PropertySet')
+
+
+if __debug__:
+
+    from textwrap import dedent
+    message = dedent(
+        """The parameter "{}" was passed in a wrong type for the "{}()" function.
+ Actual: + \ttype: {} + \tvalue: {} + Expected: + \t{} + """ + ) + + bjam_types = { + '*': is_iterable_or_none, + '+': is_iterable_or_none, + '?': is_single_value, + '': is_single_value, + } + + bjam_to_python = { + '*': 'iterable', + '+': 'iterable', + '?': 'single value', + '': 'single value', + } + + + def get_next_var(field): + it = iter(field) + var = it.next() + type_ = None + yield_var = False + while type_ not in bjam_types: + try: + # the first value has already + # been consumed outside of the loop + type_ = it.next() + except StopIteration: + # if there are no more values, then + # var still needs to be returned + yield_var = True + break + if type_ not in bjam_types: + # type_ is not a type and is + # another variable in the same field. + yield var, '' + # type_ is the next var + var = type_ + else: + # otherwise, type_ is a type for var + yield var, type_ + try: + # the next value should be a var + var = it.next() + except StopIteration: + # if not, then we're done with + # this field + break + if yield_var: + yield var, '' + + +# Decorator the specifies bjam-side prototype for a Python function +def bjam_signature(s): + if __debug__: + from inspect import getcallargs + def decorator(fn): + function_name = fn.__module__ + '.' + fn.__name__ + def wrapper(*args, **kwargs): + callargs = getcallargs(fn, *args, **kwargs) + for field in s: + for var, type_ in get_next_var(field): + try: + value = callargs[var] + except KeyError: + raise Exception( + 'Bjam Signature specifies a variable named "{}"\n' + 'but is not found within the python function signature\n' + 'for function {}()'.format(var, function_name) + ) + if not bjam_types[type_](value): + raise TypeError( + message.format(var, function_name, type(type_), repr(value), + bjam_to_python[type_]) + ) + return fn(*args, **kwargs) + wrapper.__name__ = fn.__name__ + wrapper.bjam_signature = s + return wrapper + return decorator + else: + def decorator(f): + f.bjam_signature = s + return f + + return decorator + +def metatarget(f): + + f.bjam_signature = (["name"], ["sources", "*"], ["requirements", "*"], + ["default_build", "*"], ["usage_requirements", "*"]) + return f + +class cached(object): + + def __init__(self, function): + self.function = function + self.cache = {} + + def __call__(self, *args): + try: + return self.cache[args] + except KeyError: + v = self.function(*args) + self.cache[args] = v + return v + + def __get__(self, instance, type): + return types.MethodType(self, instance, type) + +def unquote(s): + if s and s[0] == '"' and s[-1] == '"': + return s[1:-1] + else: + return s + +_extract_jamfile_and_rule = re.compile("(Jamfile<.*>)%(.*)") + +def qualify_jam_action(action_name, context_module): + + if action_name.startswith("###"): + # Callable exported from Python. 
Don't touch + return action_name + elif _extract_jamfile_and_rule.match(action_name): + # Rule is already in indirect format + return action_name + else: + ix = action_name.find('.') + if ix != -1 and action_name[:ix] == context_module: + return context_module + '%' + action_name[ix+1:] + + return context_module + '%' + action_name + + +def set_jam_action(name, *args): + + m = _extract_jamfile_and_rule.match(name) + if m: + args = ("set-update-action-in-module", m.group(1), m.group(2)) + args + else: + args = ("set-update-action", name) + args + + return bjam.call(*args) + + +def call_jam_function(name, *args): + + m = _extract_jamfile_and_rule.match(name) + if m: + args = ("call-in-module", m.group(1), m.group(2)) + args + return bjam.call(*args) + else: + return bjam.call(*((name,) + args)) + +__value_id = 0 +__python_to_jam = {} +__jam_to_python = {} + +def value_to_jam(value, methods=False): + """Makes a token to refer to a Python value inside Jam language code. + + The token is merely a string that can be passed around in Jam code and + eventually passed back. For example, we might want to pass PropertySet + instance to a tag function and it might eventually call back + to virtual_target.add_suffix_and_prefix, passing the same instance. + + For values that are classes, we'll also make class methods callable + from Jam. + + Note that this is necessary to make a bit more of existing Jamfiles work. + This trick should not be used to much, or else the performance benefits of + Python port will be eaten. + """ + + global __value_id + + r = __python_to_jam.get(value, None) + if r: + return r + + exported_name = '###_' + str(__value_id) + __value_id = __value_id + 1 + __python_to_jam[value] = exported_name + __jam_to_python[exported_name] = value + + if methods and type(value) == types.InstanceType: + for field_name in dir(value): + field = getattr(value, field_name) + if callable(field) and not field_name.startswith("__"): + bjam.import_rule("", exported_name + "." + field_name, field) + + return exported_name + +def record_jam_to_value_mapping(jam_value, python_value): + __jam_to_python[jam_value] = python_value + +def jam_to_value_maybe(jam_value): + + if type(jam_value) == type(""): + return __jam_to_python.get(jam_value, jam_value) + else: + return jam_value + +def stem(filename): + i = filename.find('.') + if i != -1: + return filename[0:i] + else: + return filename + + +def abbreviate_dashed(s): + """Abbreviates each part of string that is delimited by a '-'.""" + r = [] + for part in s.split('-'): + r.append(abbreviate(part)) + return '-'.join(r) + + +def abbreviate(s): + """Apply a set of standard transformations to string to produce an + abbreviation no more than 4 characters long. 
+ """ + if not s: + return '' + # check the cache + if s in abbreviate.abbreviations: + return abbreviate.abbreviations[s] + # anything less than 4 characters doesn't need + # an abbreviation + if len(s) < 4: + # update cache + abbreviate.abbreviations[s] = s + return s + # save the first character in case it's a vowel + s1 = s[0] + s2 = s[1:] + if s.endswith('ing'): + # strip off the 'ing' + s2 = s2[:-3] + # reduce all doubled characters to one + s2 = ''.join(c for c, _ in groupby(s2)) + # remove all vowels + s2 = s2.translate(None, "AEIOUaeiou") + # shorten remaining consonants to 4 characters + # and add the first char back to the front + s2 = s1 + s2[:4] + # update cache + abbreviate.abbreviations[s] = s2 + return s2 +# maps key to its abbreviated form +abbreviate.abbreviations = {} diff --git a/src/boost/tools/build/src/util/assert.jam b/src/boost/tools/build/src/util/assert.jam new file mode 100644 index 000000000..9d906d879 --- /dev/null +++ b/src/boost/tools/build/src/util/assert.jam @@ -0,0 +1,346 @@ +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or copy at +# https://www.bfgroup.xyz/b2/LICENSE.txt) + +import errors ; +import modules ; + + +################################################################################ +# +# Private implementation details. +# +################################################################################ + +# Rule added as a replacement for the regular Jam = operator but which does not +# ignore trailing empty string elements. +# +local rule exact-equal-test ( lhs * : rhs * ) +{ + local lhs_extended = $(lhs) xxx ; + local rhs_extended = $(rhs) xxx ; + if $(lhs_extended) = $(rhs_extended) + { + return true ; + } +} + + +# Two lists are considered set-equal if they contain the same elements, ignoring +# duplicates and ordering. +# +local rule set-equal-test ( set1 * : set2 * ) +{ + if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) ) + { + return true ; + } +} + + +################################################################################ +# +# Public interface. +# +################################################################################ + +# Assert the equality of A and B, ignoring trailing empty string elements. +# +rule equal ( a * : b * ) +{ + if $(a) != $(b) + { + errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "==" \"$(b)\" + (ignoring trailing empty strings) ; + } +} + + +# Assert that the result of calling RULE-NAME on the given arguments has a false +# logical value (is either an empty list or all empty strings). +# +rule false ( rule-name args * : * ) +{ + local result ; + module [ CALLER_MODULE ] + { + modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) + : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) + : $(16) : $(17) : $(18) : $(19) ] ; + } + + if $(result) + { + errors.error-skip-frames 3 assertion "failure:" Expected false result from + "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) : + $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : + $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] "]" : "Got:" "[" + \"$(result)\" "]" ; + } +} + + +# Assert that ELEMENT is present in LIST. +# +rule "in" ( element : list * ) +{ + if ! 
$(element) in $(list) + { + errors.error-skip-frames 3 assertion "failure:" Expected \"$(element)\" in + "[" \"$(list)\" "]" ; + } +} + + +# Assert the inequality of A and B, ignoring trailing empty string elements. +# +rule not-equal ( a * : b * ) +{ + if $(a) = $(b) + { + errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "!=" \"$(b)\" + (ignoring trailing empty strings) ; + } +} + + +# Assert that ELEMENT is not present in LIST. +# +rule not-in ( element : list * ) +{ + if $(element) in $(list) + { + errors.error-skip-frames 3 assertion "failure:" Did not expect + \"$(element)\" in "[" \"$(list)\" "]" ; + } +} + + +# Assert the inequality of A and B as sets. +# +rule not-set-equal ( a * : b * ) +{ + if [ set-equal-test $(a) : $(b) ] + { + errors.error-skip-frames 3 assertion "failure:" Expected "[" \"$(a)\" "]" + and "[" \"$(b)\" "]" to not be equal as sets ; + } +} + + +# Assert that A and B are not exactly equal, not ignoring trailing empty string +# elements. +# +rule not-exact-equal ( a * : b * ) +{ + if [ exact-equal-test $(a) : $(b) ] + { + errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "!=" \"$(b)\" ; + } +} + + +# Assert that EXPECTED is the result of calling RULE-NAME with the given +# arguments. +# +rule result ( expected * : rule-name args * : * ) +{ + local result ; + module [ CALLER_MODULE ] + { + modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) + : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : + $(16) : $(17) : $(18) : $(19) ] ; + } + + if ! [ exact-equal-test $(result) : $(expected) ] + { + errors.error-skip-frames 3 assertion "failure:" "[" $(rule-name) [ + errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) + : $(18) : $(19) ] "]" : "Expected:" "[" \"$(expected)\" "]" : "Got:" "[" + \"$(result)\" "]" ; + } +} + + +# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored) +# to the result of calling RULE-NAME with the given arguments. Note that rules +# called this way may accept at most 18 parameters. +# +rule result-set-equal ( expected * : rule-name args * : * ) +{ + local result ; + module [ CALLER_MODULE ] + { + modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) + : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : + $(16) : $(17) : $(18) : $(19) ] ; + } + + if ! [ set-equal-test $(result) : $(expected) ] + { + errors.error-skip-frames 3 assertion "failure:" "[" $(rule-name) [ + errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : + $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) + : $(18) : $(19) ] "]" : "Expected:" "[" \"$(expected)\" "]" : "Got:" "[" + \"$(result)\" "]" ; + } +} + + +# Assert the equality of A and B as sets. +# +rule set-equal ( a * : b * ) +{ + if ! [ set-equal-test $(a) : $(b) ] + { + errors.error-skip-frames 3 assertion "failure:" Expected "[" \"$(a)\" "]" + and "[" \"$(b)\" "]" to be equal as sets ; + } +} + + +# Assert that the result of calling RULE-NAME on the given arguments has a true +# logical value (is neither an empty list nor all empty strings). +# +rule true ( rule-name args * : * ) +{ + local result ; + module [ CALLER_MODULE ] + { + modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) + : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) + : $(16) : $(17) : $(18) : $(19) ] ; + } + + if ! 
$(result) + { + errors.error-skip-frames 3 assertion "failure:" Expected true result from + "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) : + $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : + $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] "]" ; + } +} + + +# Assert the exact equality of A and B, not ignoring trailing empty string +# elements. +# +rule exact-equal ( a * : b * ) +{ + if ! [ exact-equal-test $(a) : $(b) ] + { + errors.error-skip-frames 3 assertion "failure:" \"$(a)\" "==" \"$(b)\" ; + } +} + + +# Assert that the given variable is not an empty list. +# +rule variable-not-empty ( name ) +{ + local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ; + if ! $(value)-is-not-empty + { + errors.error-skip-frames 3 assertion "failure:" Expected variable + \"$(name)\" not to be an empty list ; + } +} + + +rule __test__ ( ) +{ + # Helper rule used to avoid test duplication related to different list + # equality test rules. + # + local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? ) + { + local not-equality-assert = not-$(equality-assert) ; + + # When the given equality test is expected to ignore trailing empty + # strings some of the test results should be inverted. + local not-equality-assert-i = not-$(equality-assert) ; + if $(ignore-trailing-empty-strings) + { + not-equality-assert-i = $(equality-assert) ; + } + + $(equality-assert) : ; + $(equality-assert) "" "" : "" "" ; + $(not-equality-assert-i) : "" "" ; + $(equality-assert) x : x ; + $(not-equality-assert) : x ; + $(not-equality-assert) "" : x ; + $(not-equality-assert) "" "" : x ; + $(not-equality-assert-i) x : x "" ; + $(equality-assert) x "" : x "" ; + $(not-equality-assert) x : "" x ; + $(equality-assert) "" x : "" x ; + + $(equality-assert) 1 2 3 : 1 2 3 ; + $(not-equality-assert) 1 2 3 : 3 2 1 ; + $(not-equality-assert) 1 2 3 : 1 5 3 ; + $(not-equality-assert) 1 2 3 : 1 "" 3 ; + $(not-equality-assert) 1 2 3 : 1 1 2 3 ; + $(not-equality-assert) 1 2 3 : 1 2 2 3 ; + $(not-equality-assert) 1 2 3 : 5 6 7 ; + + # Extra variables used here just to make sure Boost Jam or Boost Build + # do not handle lists with empty strings differently depending on + # whether they are literals or stored in variables. + + local empty = ; + local empty-strings = "" "" ; + local x-empty-strings = x "" "" ; + local empty-strings-x = "" "" x ; + + $(equality-assert) : $(empty) ; + $(not-equality-assert-i) "" : $(empty) ; + $(not-equality-assert-i) "" "" : $(empty) ; + $(not-equality-assert-i) : $(empty-strings) ; + $(not-equality-assert-i) "" : $(empty-strings) ; + $(equality-assert) "" "" : $(empty-strings) ; + $(equality-assert) $(empty) : $(empty) ; + $(equality-assert) $(empty-strings) : $(empty-strings) ; + $(not-equality-assert-i) $(empty) : $(empty-strings) ; + $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ; + $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ; + $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ; + $(not-equality-assert-i) x : $(x-empty-strings) ; + $(not-equality-assert) x : $(empty-strings-x) ; + $(not-equality-assert-i) x : $(x-empty-strings) ; + $(not-equality-assert-i) x "" : $(x-empty-strings) ; + $(equality-assert) x "" "" : $(x-empty-strings) ; + $(not-equality-assert) x : $(empty-strings-x) ; + $(not-equality-assert) "" x : $(empty-strings-x) ; + $(equality-assert) "" "" x : $(empty-strings-x) ; + } + + + # --------------- + # Equality tests. 
+ # --------------- + + run-equality-test equal : ignore-trailing-empty-strings ; + run-equality-test exact-equal ; + + + # ------------------------- + # assert.set-equal() tests. + # ------------------------- + + set-equal : ; + not-set-equal "" "" : ; + set-equal "" "" : "" ; + set-equal "" "" : "" "" ; + set-equal a b c : a b c ; + set-equal a b c : b c a ; + set-equal a b c a : a b c ; + set-equal a b c : a b c a ; + not-set-equal a b c : a b c d ; + not-set-equal a b c d : a b c ; +} diff --git a/src/boost/tools/build/src/util/container.jam b/src/boost/tools/build/src/util/container.jam new file mode 100644 index 000000000..489db7d75 --- /dev/null +++ b/src/boost/tools/build/src/util/container.jam @@ -0,0 +1,339 @@ +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003 Rene Rivera +# Copyright 2002, 2003, 2004 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Various container classes. + +# Base for container objects. This lets us construct recursive structures. That +# is containers with containers in them, specifically so we can tell literal +# values from node values. +# +class node +{ + rule __init__ ( + value ? # Optional value to set node to initially. + ) + { + self.value = $(value) ; + } + + # Set the value of this node, passing nothing will clear it. + # + rule set ( value * ) + { + self.value = $(value) ; + } + + # Get the value of this node. + # + rule get ( ) + { + return $(self.value) ; + } +} + + +# A simple vector. Interface mimics the C++ std::vector and std::list, with the +# exception that indices are one (1) based to follow Jam standard. +# +# TODO: Possibly add assertion checks. +# +class vector : node +{ + import numbers ; + import utility ; + import sequence ; + + rule __init__ ( + values * # Initial contents of vector. + ) + { + node.__init__ ; + self.value = $(values) ; + } + + # Get the value of the first element. + # + rule front ( ) + { + return $(self.value[1]) ; + } + + # Get the value of the last element. + # + rule back ( ) + { + return $(self.value[-1]) ; + } + + # Get the value of the element at the given index, one based. Access to + # elements of recursive structures is supported directly. Specifying + # additional index values recursively accesses the elements as containers. + # For example: [ $(v).at 1 : 2 ] would retrieve the second element of our + # first element, assuming the first element is a container. + # + rule at ( + index # The element index, one based. + : * # Additional indices to access recursively. + ) + { + local r = $(self.value[$(index)]) ; + if $(2) + { + r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; + } + return $(r) ; + } + + # Get the value contained in the given element. This has the same + # functionality and interface as "at" but in addition gets the value of the + # referenced element, assuming it is a "node". + # + rule get-at ( + index # The element index, one based. + : * # Additional indices to access recursively. + ) + { + local r = $(self.value[$(index)]) ; + if $(2) + { + r = [ $(r).at $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ; + } + return [ $(r).get ] ; + } + + # Insert the given value into the front of the vector pushing the rest of + # the elements back. + # + rule push-front ( + value # Value to become first element. + ) + { + self.value = $(value) $(self.value) ; + } + + # Remove the front element from the vector. Does not return the value. 
No + # effect if vector is empty. + # + rule pop-front ( ) + { + self.value = $(self.value[2-]) ; + } + + # Add the given value at the end of the vector. + # + rule push-back ( + value # Value to become back element. + ) + { + self.value += $(value) ; + } + + # Remove the back element from the vector. Does not return the value. No + # effect if vector is empty. + # + rule pop-back ( ) + { + self.value = $(self.value[1--2]) ; + } + + # Insert the given value at the given index, one based. The values at and to + # the right of the index are pushed back to make room for the new value. + # If the index is passed the end of the vector the element is added to the + # end. + # + rule insert ( + index # The index to insert at, one based. + : value # The value to insert. + ) + { + local left = $(self.value[1-$(index)]) ; + local right = $(self.value[$(index)-]) ; + if $(right)-is-not-empty + { + left = $(left[1--2]) ; + } + self.value = $(left) $(value) $(right) ; + } + + # Remove one or more elements from the vector. The range is inclusive, and + # not specifying an end is equivalent to the [start, start] range. + # + rule erase ( + start # Index of first element to remove. + end ? # Optional, index of last element to remove. + ) + { + end ?= $(start) ; + local left = $(self.value[1-$(start)]) ; + left = $(left[1--2]) ; + local right = $(self.value[$(end)-]) ; + right = $(right[2-]) ; + self.value = $(left) $(right) ; + } + + # Remove all elements from the vector. + # + rule clear ( ) + { + self.value = ; + } + + # The number of elements in the vector. + # + rule size ( ) + { + return [ sequence.length $(self.value) ] ; + } + + # Returns "true" if there are NO elements in the vector, empty otherwise. + # + rule empty ( ) + { + if ! $(self.value)-is-not-empty + { + return true ; + } + } + + # Returns the textual representation of content. + # + rule str ( ) + { + return "[" [ sequence.transform utility.str : $(self.value) ] "]" ; + } + + # Sorts the vector inplace, calling 'utility.less' for comparisons. + # + rule sort ( ) + { + self.value = [ sequence.insertion-sort $(self.value) : utility.less ] ; + } + + # Returns true if content is equal to the content of other vector. Uses + # 'utility.equal' for comparison. + # + rule equal ( another ) + { + local mismatch ; + local size = [ size ] ; + if $(size) = [ $(another).size ] + { + for local i in [ numbers.range 1 $(size) ] + { + if ! [ utility.equal [ at $(i) ] [ $(another).at $(i) ] ] + { + mismatch = true ; + } + } + } + else + { + mismatch = true ; + } + + if ! 
$(mismatch) + { + return true ; + } + } +} + + +rule __test__ ( ) +{ + import assert ; + import "class" : new ; + + local v1 = [ new vector ] ; + assert.true $(v1).equal $(v1) ; + assert.true $(v1).empty ; + assert.result 0 : $(v1).size ; + assert.result "[" "]" : $(v1).str ; + $(v1).push-back b ; + $(v1).push-front a ; + assert.result "[" a b "]" : $(v1).str ; + assert.result a : $(v1).front ; + assert.result b : $(v1).back ; + $(v1).insert 2 : d ; + $(v1).insert 2 : c ; + $(v1).insert 4 : f ; + $(v1).insert 4 : e ; + $(v1).pop-back ; + assert.result 5 : $(v1).size ; + assert.result d : $(v1).at 3 ; + $(v1).pop-front ; + assert.result c : $(v1).front ; + assert.false $(v1).empty ; + $(v1).erase 3 4 ; + assert.result 2 : $(v1).size ; + + local v2 = [ new vector q w e r t y ] ; + assert.result 6 : $(v2).size ; + $(v1).push-back $(v2) ; + assert.result 3 : $(v1).size ; + local v2-alias = [ $(v1).back ] ; + assert.result e : $(v2-alias).at 3 ; + $(v1).clear ; + assert.true $(v1).empty ; + assert.false $(v2-alias).empty ; + $(v2).pop-back ; + assert.result t : $(v2-alias).back ; + + local v3 = [ new vector ] ; + $(v3).push-back [ new vector 1 2 3 4 5 ] ; + $(v3).push-back [ new vector a b c ] ; + assert.result "[" "[" 1 2 3 4 5 "]" "[" a b c "]" "]" : $(v3).str ; + $(v3).push-back [ new vector [ new vector x y z ] [ new vector 7 8 9 ] ] ; + assert.result 1 : $(v3).at 1 : 1 ; + assert.result b : $(v3).at 2 : 2 ; + assert.result a b c : $(v3).get-at 2 ; + assert.result 7 8 9 : $(v3).get-at 3 : 2 ; + + local v4 = [ new vector 4 3 6 ] ; + $(v4).sort ; + assert.result 3 4 6 : $(v4).get ; + assert.false $(v4).equal $(v3) ; + + local v5 = [ new vector 3 4 6 ] ; + assert.true $(v4).equal $(v5) ; + # Check that vectors of different sizes are considered non-equal. + $(v5).pop-back ; + assert.false $(v4).equal $(v5) ; + + local v6 = [ new vector [ new vector 1 2 3 ] ] ; + assert.true $(v6).equal [ new vector [ new vector 1 2 3 ] ] ; + + local v7 = [ new vector 111 222 333 ] ; + assert.true $(v7).equal $(v7) ; + $(v7).insert 4 : 444 ; + assert.result 111 222 333 444 : $(v7).get ; + $(v7).insert 999 : xxx ; + assert.result 111 222 333 444 xxx : $(v7).get ; + + local v8 = [ new vector "" "" "" ] ; + assert.true $(v8).equal $(v8) ; + assert.false $(v8).empty ; + assert.result 3 : $(v8).size ; + assert.result "" : $(v8).at 1 ; + assert.result "" : $(v8).at 2 ; + assert.result "" : $(v8).at 3 ; + assert.result : $(v8).at 4 ; + $(v8).insert 2 : 222 ; + assert.result 4 : $(v8).size ; + assert.result "" 222 "" "" : $(v8).get ; + $(v8).insert 999 : "" ; + assert.result 5 : $(v8).size ; + assert.result "" 222 "" "" "" : $(v8).get ; + $(v8).insert 999 : xxx ; + assert.result 6 : $(v8).size ; + assert.result "" 222 "" "" "" xxx : $(v8).get ; + + # Regression test for a bug causing vector.equal to compare only the first + # and the last element in the given vectors. + local v9 = [ new vector 111 xxx 222 ] ; + local v10 = [ new vector 111 yyy 222 ] ; + assert.false $(v9).equal $(v10) ; +} diff --git a/src/boost/tools/build/src/util/doc.jam b/src/boost/tools/build/src/util/doc.jam new file mode 100644 index 000000000..35c968e84 --- /dev/null +++ b/src/boost/tools/build/src/util/doc.jam @@ -0,0 +1,1076 @@ +# Copyright 2002, 2005 Dave Abrahams +# Copyright 2002, 2003, 2006 Rene Rivera +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE.txt or https://www.bfgroup.xyz/b2/LICENSE.txt) + +# Documentation system, handles --help requests. 
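+# (Illustrative note, not part of the upstream header: help requests of the
+# form `b2 --help` or `b2 --help <module-name>` are the kind of invocation this
+# module services; the exact option spellings are listed further below in this
+# file.)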
+# It defines rules that attach documentation to modules, rules, and variables. +# Collects and generates documentation for the various parts of the build +# system. The documentation is collected from comments integrated into the code. + +import modules ; +import print ; +import set ; +import container ; +import "class" ; +import sequence ; +import path ; + + +# The type of output to generate. +# "console" is formatted text echoed to the console (the default); +# "text" is formatted text appended to the output file; +# "html" is HTML output to the file. +# +help-output = console ; + + +# The file to output documentation to when generating "text" or "html" help. +# This is without extension as the extension is determined by the type of +# output. +# +help-output-file = help ; + +# Whether to include local rules in help output. +# +.option.show-locals ?= ; + +# When showing documentation for a module, whether to also generate +# automatically the detailed docs for each item in the module. +# +.option.detailed ?= ; + +# Generate debug output as the help is generated and modules are parsed. +# +.option.debug ?= ; + +# These are all the options available for enabling or disabling to control the +# help system in various ways. Options can be enabled or disabled with +# '--help-enable-